From cd10b05162285a9ca4e546dceb431a6a7e500c6f Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 24 Jan 2025 09:25:55 +0100 Subject: [PATCH 001/383] Turn _source meta fieldmapper's mode attribute into a no-op. (#119072) Closes #118596 --- docs/changelog/119072.yaml | 12 + .../upgrades/SourceModeRollingUpgradeIT.java | 108 ++++ rest-api-spec/build.gradle | 11 + .../test/index/91_metrics_no_subobjects.yml | 8 +- .../test/index/92_metrics_auto_subobjects.yml | 8 +- .../test/range/20_synthetic_source.yml | 4 +- .../search/aggregations/bucket/NestedIT.java | 35 +- .../elasticsearch/index/IndexVersions.java | 1 + .../index/mapper/SourceFieldMapper.java | 46 +- .../mapper/DocumentParserContextTests.java | 8 +- .../mapper/DynamicFieldsBuilderTests.java | 2 +- .../index/mapper/SourceFieldMapperTests.java | 44 +- .../query/SearchExecutionContextTests.java | 2 +- .../index/shard/ShardGetServiceTests.java | 48 +- .../test/rest/ESRestTestCase.java | 54 +- .../test/rest/yaml/section/DoSection.java | 3 - .../esql/qa/rest/FieldExtractorTestCase.java | 28 +- .../xpack/logsdb/LogsdbWithBasicRestIT.java | 24 +- .../logsdb/LogsIndexModeCustomSettingsIT.java | 80 ++- .../xpack/logsdb/LogsIndexModeRestTestIT.java | 6 - .../xpack/logsdb/LogsdbRestIT.java | 20 +- .../LogsdbIndexModeSettingsProviderTests.java | 2 +- .../test/40_source_mode_setting.yml | 477 +++--------- 23 files changed, 401 insertions(+), 630 deletions(-) create mode 100644 docs/changelog/119072.yaml create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java diff --git a/docs/changelog/119072.yaml b/docs/changelog/119072.yaml new file mode 100644 index 0000000000000..5b08a214898e2 --- /dev/null +++ b/docs/changelog/119072.yaml @@ -0,0 +1,12 @@ +pr: 119072 +summary: Turn `_source` meta fieldmapper's mode attribute into a no-op +area: Mapping +type: breaking +issues: + - 118596 +breaking: + title: Turn `_source` meta fieldmapper's mode attribute into a no-op + area: Mapping + details: The `mode` mapping attribute of the `_source` metadata field mapper has been turned into a no-op. Instead, the `index.mapping.source.mode` index setting should be used to configure the source mode. + impact: Configuring the `mode` attribute for the `_source` meta field mapper will have no effect on indices created with Elasticsearch 9.0.0 or later. Note that `_source.mode` configured on indices before upgrading to 9.0.0 or later will remain effective after upgrading. + notable: false diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java new file mode 100644 index 0000000000000..f6a8b86f27bec --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.index.mapper.SourceFieldMapper; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class SourceModeRollingUpgradeIT extends AbstractRollingUpgradeTestCase { + + public SourceModeRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testConfigureStoredSourceBeforeIndexCreationLegacy() throws IOException { + assumeTrue("testing deprecation warnings and deprecation migrations", getOldClusterTestVersion().before("9.0.0")); + String templateName = "logs@custom"; + if (isOldCluster()) { + var storedSourceMapping = """ + { + "template": { + "settings": { + "index": { + "mode": "logsdb" + } + }, + "mappings": { + "_source": { + "mode": "stored" + } + } + } + }"""; + var putComponentTemplateRequest = new Request("PUT", "/_component_template/" + templateName); + putComponentTemplateRequest.setOptions(expectWarnings(SourceFieldMapper.DEPRECATION_WARNING)); + putComponentTemplateRequest.setJsonEntity(storedSourceMapping); + assertOK(client().performRequest(putComponentTemplateRequest)); + + var request = new Request("GET", "/_migration/deprecations"); + var nodeSettings = (Map) ((List) entityAsMap(client().performRequest(request)).get("node_settings")).getFirst(); + assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); + assertThat( + (String) nodeSettings.get("details"), + containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]") + ); + } else if (isUpgradedCluster()) { + var request = new Request("GET", "/_migration/deprecations"); + var nodeSettings = (Map) ((List) entityAsMap(client().performRequest(request)).get("node_settings")).getFirst(); + assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); + assertThat( + (String) nodeSettings.get("details"), + containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]") + ); + } + } + + public void testConfigureStoredSourceWhenIndexIsCreatedLegacy() throws IOException { + assumeTrue("testing deprecation warnings and deprecation migrations", getOldClusterTestVersion().before("9.0.0")); + String templateName = "logs@custom"; + if (isOldCluster()) { + var storedSourceMapping = """ + { + "template": { + "mappings": { + "_source": { + "mode": "stored" + } + } + } + }"""; + var putComponentTemplateRequest = new Request("PUT", "/_component_template/" + templateName); + putComponentTemplateRequest.setOptions(expectWarnings(SourceFieldMapper.DEPRECATION_WARNING)); + putComponentTemplateRequest.setJsonEntity(storedSourceMapping); + assertOK(client().performRequest(putComponentTemplateRequest)); + + var request = new Request("GET", "/_migration/deprecations"); + var nodeSettings = (Map) ((List) entityAsMap(client().performRequest(request)).get("node_settings")).getFirst(); + assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); + assertThat( + (String) nodeSettings.get("details"), + containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]") + ); + } else if (isUpgradedCluster()) { + var request = new Request("GET", "/_migration/deprecations"); + var 
nodeSettings = (Map) ((List) entityAsMap(client().performRequest(request)).get("node_settings")).getFirst(); + assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); + assertThat( + (String) nodeSettings.get("details"), + containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]") + ); + } + } +} diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index e4b46b98cedda..68da320923898 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -87,4 +87,15 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("search.highlight/30_max_analyzed_offset/Plain highlighter with max_analyzed_offset < 0 should FAIL", "semantics of test has changed") task.skipTest("indices.create/10_basic/Create lookup index", "default auto_expand_replicas was removed") task.skipTest("indices.create/10_basic/Create lookup index with one shard", "default auto_expand_replicas was removed") + task.skipTest("range/20_synthetic_source/Double range", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("range/20_synthetic_source/Float range", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("range/20_synthetic_source/Integer range", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("range/20_synthetic_source/IP range", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("range/20_synthetic_source/Long range", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("range/20_synthetic_source/Date range Rounding Fixes", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("index/92_metrics_auto_subobjects/Metrics object indexing with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("index/92_metrics_auto_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("index/91_metrics_no_subobjects/Metrics object indexing with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("index/91_metrics_no_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("logsdb/10_settings/routing path allowed in logs mode with routing on sort fields", "Unknown feature routing.logsb_route_on_sort_fields") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/91_metrics_no_subobjects.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/91_metrics_no_subobjects.yml index 5881ec83ebe85..eb1771ab7f3e3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/91_metrics_no_subobjects.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/91_metrics_no_subobjects.yml @@ -142,9 +142,9 @@ body: index_patterns: test-* template: + settings: + index.mapping.source.mode: synthetic mappings: - _source: - mode: synthetic dynamic_templates: - no_subobjects: match: metrics @@ -212,9 +212,9 @@ body: index_patterns: test-* template: + settings: + index.mapping.source.mode: synthetic mappings: - _source: - mode: synthetic subobjects: false properties: host.name: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml index 7b8f785a2cb93..9d1a9793b1f55 
100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml @@ -139,9 +139,9 @@ body: index_patterns: test-* template: + settings: + index.mapping.source.mode: synthetic mappings: - _source: - mode: synthetic dynamic_templates: - no_subobjects: match: metrics @@ -208,9 +208,9 @@ body: index_patterns: test-* template: + settings: + index.mapping.source.mode: synthetic mappings: - _source: - mode: synthetic subobjects: auto properties: host.name: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml index de20f82f8ba2f..22cda05b074c4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml @@ -7,9 +7,9 @@ setup: indices.create: index: synthetic_source_test body: + settings: + index.mapping.source.mode: synthetic mappings: - "_source": - "mode": "synthetic" "properties": "integer_range": "type" : "integer_range" diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index 5e7cffcc8ef0d..325e9000db33d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; @@ -857,24 +858,22 @@ public void testExtractInnerHitBuildersWithDuplicatePath() throws Exception { public void testSyntheticSource() throws Exception { assertAcked( - prepareCreate("synthetic").setMapping( - jsonBuilder().startObject() - .startObject("_source") - .field("mode", "synthetic") - .endObject() - .startObject("properties") - .startObject("nested") - .field("type", "nested") - .startObject("properties") - .startObject("number") - .field("type", "long") - .field("ignore_malformed", true) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ) + prepareCreate("synthetic").setSettings(Settings.builder().put("index.mapping.source.mode", "synthetic").build()) + .setMapping( + jsonBuilder().startObject() + .startObject("properties") + .startObject("nested") + .field("type", "nested") + .startObject("properties") + .startObject("number") + .field("type", "long") + .field("ignore_malformed", true) + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + ) ); ensureGreen("synthetic"); diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 69ebcd4ba3fe6..e801a07f11670 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -142,6 +142,7 @@ private static Version parseUnchecked(String version) { 
public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY = def(9_004_00_0, Version.LUCENE_10_0_0); public static final IndexVersion INFERENCE_METADATA_FIELDS = def(9_005_00_0, Version.LUCENE_10_0_0); public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME = def(9_006_00_0, Version.LUCENE_10_0_0); + public static final IndexVersion SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP = def(9_007_00_0, Version.LUCENE_10_0_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 6a06d8ba4df28..a6cb5561f2e7b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -73,6 +73,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, + false, false ); @@ -81,6 +82,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, + false, false ); @@ -89,6 +91,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, + false, false ); @@ -97,6 +100,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, + false, false ); @@ -149,13 +153,21 @@ public static class Builder extends MetadataFieldMapper.Builder { private boolean serializeMode; private final boolean supportsNonDefaultParameterValues; - - public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams, boolean serializeMode) { + private final boolean sourceModeIsNoop; + + public Builder( + IndexMode indexMode, + final Settings settings, + boolean sourceModeIsNoop, + boolean supportsCheckForNonDefaultParams, + boolean serializeMode + ) { super(Defaults.NAME); this.settings = settings; this.indexMode = indexMode; this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); + this.sourceModeIsNoop = sourceModeIsNoop; this.serializeMode = serializeMode; this.mode = new Parameter<>( "mode", @@ -220,7 +232,7 @@ public SourceFieldMapper build() { if (sourceMode == Mode.SYNTHETIC && (includes.getValue().isEmpty() == false || excludes.getValue().isEmpty() == false)) { throw new IllegalArgumentException("filtering the stored _source is incompatible with synthetic source"); } - if (mode.isConfigured()) { + if (mode.isConfigured() && sourceModeIsNoop == false) { serializeMode = true; } final SourceFieldMapper sourceFieldMapper; @@ -235,7 +247,8 @@ public SourceFieldMapper build() { enabled.get(), includes.getValue().toArray(Strings.EMPTY_ARRAY), excludes.getValue().toArray(Strings.EMPTY_ARRAY), - serializeMode + serializeMode, + sourceModeIsNoop ); } if (indexMode != null) { @@ -252,7 +265,7 @@ private Mode resolveSourceMode() { } // If `_source.mode` is not set we need to apply a default according to index mode. - if (mode.get() == null) { + if (mode.get() == null || sourceModeIsNoop) { if (indexMode == null || indexMode == IndexMode.STANDARD) { // Special case to avoid serializing mode. 
return null; @@ -288,12 +301,20 @@ private static SourceFieldMapper resolveStaticInstance(final Mode sourceMode) { if (onOrAfterDeprecateModeVersion(c.indexVersionCreated())) { return resolveStaticInstance(settingSourceMode); } else { - return new SourceFieldMapper(settingSourceMode, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, true); + return new SourceFieldMapper( + settingSourceMode, + Explicit.IMPLICIT_TRUE, + Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, + true, + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP) + ); } }, c -> new Builder( c.getIndexSettings().getMode(), c.getSettings(), + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP), c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), onOrAfterDeprecateModeVersion(c.indexVersionCreated()) == false ) @@ -339,6 +360,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { // nullable for bwc reasons - TODO: fold this into serializeMode private final @Nullable Mode mode; private final boolean serializeMode; + private final boolean sourceModeIsNoop; private final Explicit enabled; /** indicates whether the source will always exist and be complete, for use by features like the update API */ @@ -348,7 +370,14 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { private final String[] excludes; private final SourceFilter sourceFilter; - private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes, boolean serializeMode) { + private SourceFieldMapper( + Mode mode, + Explicit enabled, + String[] includes, + String[] excludes, + boolean serializeMode, + boolean sourceModeIsNoop + ) { super(new SourceFieldType((enabled.explicit() && enabled.value()) || (enabled.explicit() == false && mode != Mode.DISABLED))); this.mode = mode; this.enabled = enabled; @@ -357,6 +386,7 @@ private SourceFieldMapper(Mode mode, Explicit enabled, String[] include this.excludes = excludes; this.complete = stored() && sourceFilter == null; this.serializeMode = serializeMode; + this.sourceModeIsNoop = sourceModeIsNoop; } private static SourceFilter buildSourceFilter(String[] includes, String[] excludes) { @@ -488,7 +518,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(null, Settings.EMPTY, false, serializeMode).init(this); + return new Builder(null, Settings.EMPTY, sourceModeIsNoop, false, serializeMode).init(this); } public boolean isSynthetic() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java index be36ab9d6eac1..5da7d6100bf4b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java @@ -101,9 +101,6 @@ public void testCreateDynamicMapperBuilderContext() throws IOException { var mapping = XContentBuilder.builder(XContentType.JSON.xContent()) .startObject() .startObject("_doc") - .startObject("_source") - .field("mode", "synthetic") - .endObject() .startObject(DataStreamTimestampFieldMapper.NAME) .field("enabled", "true") .endObject() @@ -120,6 +117,11 @@ public void testCreateDynamicMapperBuilderContext() throws IOException { .endObject() .endObject(); var documentMapper = new MapperServiceTestCase() { + + @Override + protected Settings getIndexSettings() { + return 
Settings.builder().put("index.mapping.source.mode", "synthetic").build(); + } }.createDocumentMapper(mapping); var parserContext = new TestDocumentParserContext(documentMapper.mappers(), null); parserContext.path().add("foo"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index d4d0e67ff4141..58e173d1ee45f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -69,7 +69,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new PassThroughObjectMapper.Builder("labels").setPriority(0).setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index bc560d94b8f52..8ad37908b2e9c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -64,18 +64,10 @@ protected void registerParameters(ParameterChecker checker) throws IOException { checker.registerUpdateCheck( topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "stored").endObject()), topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "synthetic").endObject()), - dm -> { - assertTrue(dm.metadataMapper(SourceFieldMapper.class).isSynthetic()); - } + dm -> {} ); checker.registerConflictCheck("includes", b -> b.array("includes", "foo*")); checker.registerConflictCheck("excludes", b -> b.array("excludes", "foo*")); - checker.registerConflictCheck( - "mode", - topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "synthetic").endObject()), - topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "stored").endObject()), - d -> {} - ); } public void testNoFormat() throws Exception { @@ -219,23 +211,45 @@ public void testSyntheticUpdates() throws Exception { """); SourceFieldMapper mapper = mapperService.documentMapper().sourceMapper(); assertTrue(mapper.enabled()); - assertTrue(mapper.isSynthetic()); + assertFalse("mode is a noop parameter", mapper.isSynthetic()); merge(mapperService, """ { "_doc" : { "_source" : { "mode" : "synthetic" } } } """); mapper = mapperService.documentMapper().sourceMapper(); assertTrue(mapper.enabled()); - assertTrue(mapper.isSynthetic()); + assertFalse("mode is a noop parameter", mapper.isSynthetic()); ParsedDocument doc = mapperService.documentMapper().parse(source("{}")); assertNull(doc.rootDoc().get(SourceFieldMapper.NAME)); - Exception e = expectThrows(IllegalArgumentException.class, () -> merge(mapperService, """ - { "_doc" : { "_source" : { "mode" : "stored" } } } - """)); + merge(mapperService, """ + { "_doc" : { 
"_source" : { "mode" : "disabled" } } } + """); + + mapper = mapperService.documentMapper().sourceMapper(); + assertTrue("mode is a noop parameter", mapper.enabled()); + assertFalse("mode is a noop parameter", mapper.isSynthetic()); + } + + public void testSyntheticUpdatesLegacy() throws Exception { + var mappings = XContentBuilder.builder(XContentType.JSON.xContent()).startObject().startObject("_doc").startObject("_source"); + mappings.field("mode", "synthetic").endObject().endObject().endObject(); + var version = IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP); + MapperService mapperService = createMapperService(version, mappings); + SourceFieldMapper mapper = mapperService.documentMapper().sourceMapper(); + assertTrue(mapper.enabled()); + assertTrue(mapper.isSynthetic()); - assertThat(e.getMessage(), containsString("Cannot update parameter [mode] from [synthetic] to [stored]")); + merge(mapperService, """ + { "_doc" : { "_source" : { "mode" : "synthetic" } } } + """); + mapper = mapperService.documentMapper().sourceMapper(); + assertTrue(mapper.enabled()); + assertTrue(mapper.isSynthetic()); + + ParsedDocument doc = mapperService.documentMapper().parse(source("{}")); + assertNull(doc.rootDoc().get(SourceFieldMapper.NAME)); merge(mapperService, """ { "_doc" : { "_source" : { "mode" : "disabled" } } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index dc70c44a89128..0c31ab703862f 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -384,7 +384,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java index a49d895f38f67..532e30804947c 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java @@ -92,7 +92,7 @@ public void testGetFromTranslogWithStringSourceMappingOptionsAndStoredFields() t """; boolean noSource = randomBoolean(); String sourceOptions = noSource ? "\"enabled\": false" : randomBoolean() ? "\"excludes\": [\"fo*\"]" : "\"includes\": [\"ba*\"]"; - runGetFromTranslogWithOptions(docToIndex, sourceOptions, noSource ? "" : "{\"bar\":\"bar\"}", "\"text\"", "foo", false); + runGetFromTranslogWithOptions(docToIndex, sourceOptions, null, noSource ? 
"" : "{\"bar\":\"bar\"}", "\"text\"", "foo", false); } public void testGetFromTranslogWithLongSourceMappingOptionsAndStoredFields() throws IOException { @@ -101,7 +101,7 @@ public void testGetFromTranslogWithLongSourceMappingOptionsAndStoredFields() thr """; boolean noSource = randomBoolean(); String sourceOptions = noSource ? "\"enabled\": false" : randomBoolean() ? "\"excludes\": [\"fo*\"]" : "\"includes\": [\"ba*\"]"; - runGetFromTranslogWithOptions(docToIndex, sourceOptions, noSource ? "" : "{\"bar\":42}", "\"long\"", 7L, false); + runGetFromTranslogWithOptions(docToIndex, sourceOptions, null, noSource ? "" : "{\"bar\":42}", "\"long\"", 7L, false); } public void testGetFromTranslogWithSyntheticSource() throws IOException { @@ -110,10 +110,8 @@ public void testGetFromTranslogWithSyntheticSource() throws IOException { """; String expectedFetchedSource = """ {"bar":42,"foo":7}"""; - String sourceOptions = """ - "mode": "synthetic" - """; - runGetFromTranslogWithOptions(docToIndex, sourceOptions, expectedFetchedSource, "\"long\"", 7L, true); + var settings = Settings.builder().put("index.mapping.source.mode", "synthetic").build(); + runGetFromTranslogWithOptions(docToIndex, "", settings, expectedFetchedSource, "\"long\"", 7L, true); } public void testGetFromTranslogWithDenseVector() throws IOException { @@ -127,12 +125,13 @@ public void testGetFromTranslogWithDenseVector() throws IOException { "foo": "foo" } """, Arrays.toString(vector)); - runGetFromTranslogWithOptions(docToIndex, "\"enabled\": true", docToIndex, "\"text\"", "foo", "\"dense_vector\"", false); + runGetFromTranslogWithOptions(docToIndex, "\"enabled\": true", null, docToIndex, "\"text\"", "foo", "\"dense_vector\"", false); } private void runGetFromTranslogWithOptions( String docToIndex, String sourceOptions, + Settings settings, String expectedResult, String fieldType, Object expectedFooVal, @@ -141,6 +140,7 @@ private void runGetFromTranslogWithOptions( runGetFromTranslogWithOptions( docToIndex, sourceOptions, + settings, expectedResult, fieldType, expectedFooVal, @@ -152,28 +152,30 @@ private void runGetFromTranslogWithOptions( private void runGetFromTranslogWithOptions( String docToIndex, String sourceOptions, + Settings additionalSettings, String expectedResult, String fieldTypeFoo, Object expectedFooVal, String fieldTypeBar, boolean sourceOnlyFetchCreatesInMemoryReader ) throws IOException { - IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping(Strings.format(""" - { - "properties": { - "foo": { - "type": %s, - "store": true - }, - "bar": { "type": %s } - }, - "_source": { %s } - } - }""", fieldTypeFoo, fieldTypeBar, sourceOptions)) - .settings(indexSettings(IndexVersion.current(), 1, 1)) - .primaryTerm(0, 1) - .build(); + + var indexSettingsBuilder = indexSettings(IndexVersion.current(), 1, 1); + if (additionalSettings != null) { + indexSettingsBuilder.put(additionalSettings); + } + IndexMetadata metadata = IndexMetadata.builder("test").putMapping(Strings.format(""" + { + "properties": { + "foo": { + "type": %s, + "store": true + }, + "bar": { "type": %s } + }, + "_source": { %s } + } + }""", fieldTypeFoo, fieldTypeBar, sourceOptions)).settings(indexSettingsBuilder).primaryTerm(0, 1).build(); IndexShard primary = newShard(new ShardId(metadata.getIndex(), 0), true, "n1", metadata, EngineTestCase.randomReaderWrapper()); recoverShardFromStore(primary); LongSupplier translogInMemorySegmentCount = ((InternalEngine) primary.getEngine()).translogInMemorySegmentsCount::get; diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 5f1907b07a3ab..2647e21d34bc5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -70,7 +70,6 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.rest.RestStatus; @@ -1865,10 +1864,7 @@ public static CreateIndexResponse createIndex(RestClient client, String name, Se if (settings != null && settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) { expectSoftDeletesWarning(request, name); - } else if (isSyntheticSourceConfiguredInMapping(mapping) - && SourceFieldMapper.onOrAfterDeprecateModeVersion(minimumIndexVersion())) { - request.setOptions(expectVersionSpecificWarnings(v -> v.current(SourceFieldMapper.DEPRECATION_WARNING))); - } + } final Response response = client.performRequest(request); try (var parser = responseAsParser(response)) { return TestResponseParsers.parseCreateIndexResponse(parser); @@ -1912,52 +1908,16 @@ protected static void expectSoftDeletesWarning(Request request, String indexName })); } - @SuppressWarnings("unchecked") - protected static boolean isSyntheticSourceConfiguredInMapping(String mapping) { - if (mapping == null) { - return false; - } - var mappings = XContentHelper.convertToMap( - JsonXContent.jsonXContent, - mapping.trim().startsWith("{") ? 
mapping : '{' + mapping + '}', - false - ); - if (mappings.containsKey("_doc")) { - mappings = (Map) mappings.get("_doc"); - } - Map sourceMapper = (Map) mappings.get(SourceFieldMapper.NAME); - if (sourceMapper == null) { - return false; - } - return sourceMapper.get("mode") != null; - } - - @SuppressWarnings("unchecked") - protected static boolean isSyntheticSourceConfiguredInTemplate(String template) { - if (template == null) { - return false; - } - var values = XContentHelper.convertToMap(JsonXContent.jsonXContent, template, false); - for (Object value : values.values()) { - Map mappings = (Map) ((Map) value).get("mappings"); - if (mappings == null) { - continue; - } - Map sourceMapper = (Map) mappings.get(SourceFieldMapper.NAME); - if (sourceMapper == null) { - continue; - } - Object mode = sourceMapper.get("mode"); - if (mode != null) { - return true; - } - } - return false; + protected static Map getIndexSettings(String index) throws IOException { + return getIndexSettings(index, false); } - protected static Map getIndexSettings(String index) throws IOException { + protected static Map getIndexSettings(String index, boolean includeDefaults) throws IOException { Request request = new Request("GET", "/" + index + "/_settings"); request.addParameter("flat_settings", "true"); + if (includeDefaults) { + request.addParameter("include_defaults", "true"); + } Response response = client().performRequest(request); try (InputStream is = response.getEntity().getContent()) { return XContentHelper.convertToMap( diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 5a212e5b1ec58..79ceec5fdf04d 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.core.Tuple; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; @@ -485,8 +484,6 @@ public void checkWarningHeaders(final List warningHeaders, String testPa } } - unexpected.removeIf(s -> s.endsWith(SourceFieldMapper.DEPRECATION_WARNING + "\"")); - if (unexpected.isEmpty() == false || unmatched.isEmpty() == false || missing.isEmpty() == false diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java index a320cbfa459d1..5e1755adbe637 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java @@ -17,8 +17,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.BlockLoader; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.logging.LogManager; import 
org.elasticsearch.logging.Logger; import org.elasticsearch.test.ESTestCase; @@ -1356,7 +1356,7 @@ private CheckedConsumer empNoInObject(String empNo private enum SourceMode { DEFAULT { @Override - void sourceMapping(XContentBuilder builder) {} + void sourceMapping(Settings.Builder builder) {} @Override boolean stored() { @@ -1365,8 +1365,8 @@ boolean stored() { }, STORED { @Override - void sourceMapping(XContentBuilder builder) throws IOException { - builder.startObject(SourceFieldMapper.NAME).field("mode", "stored").endObject(); + void sourceMapping(Settings.Builder builder) throws IOException { + builder.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "stored"); } @Override @@ -1389,8 +1389,8 @@ boolean stored() { */ SYNTHETIC { @Override - void sourceMapping(XContentBuilder builder) throws IOException { - builder.startObject(SourceFieldMapper.NAME).field("mode", "synthetic").endObject(); + void sourceMapping(Settings.Builder builder) throws IOException { + builder.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "synthetic"); } @Override @@ -1399,7 +1399,7 @@ boolean stored() { } }; - abstract void sourceMapping(XContentBuilder builder) throws IOException; + abstract void sourceMapping(Settings.Builder builder) throws IOException; abstract boolean stored(); } @@ -1589,8 +1589,10 @@ void createIndex(String name, String fieldName) throws IOException { } logger.info("source_mode: {}", sourceMode); + Settings.Builder settings = Settings.builder(); + sourceMode.sourceMapping(settings); + FieldExtractorTestCase.createIndex(name, index -> { - sourceMode.sourceMapping(index); index.startObject("properties"); { index.startObject(fieldName); @@ -1692,6 +1694,16 @@ private static void createIndex(String name, CheckedConsumer mapping) + throws IOException { + XContentBuilder index = JsonXContent.contentBuilder().prettyPrint().startObject(); + mapping.accept(index); + index.endObject(); + String configStr = Strings.toString(index); + logger.info("index: {} {}", name, configStr); + ESRestTestCase.createIndex(name, setting, configStr); + } + /** * Yaml adds newlines and some indentation which we don't want to match. 
*/ diff --git a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java index 4a9d13bc642d7..78d59c0af0d06 100644 --- a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java +++ b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java @@ -61,14 +61,7 @@ public void testFeatureUsageWithLogsdbIndex() throws IOException { var settings = Settings.builder().put("index.mode", "time_series").put("index.routing_path", "field1").build(); createIndex("test-index", settings, mapping); } else { - String mapping = """ - { - "_source": { - "mode": "synthetic" - } - } - """; - createIndex("test-index", Settings.EMPTY, mapping); + createIndex("test-index", Settings.builder().put("index.mapping.source.mode", "synthetic").build()); } var response = getAsMap("/_license/feature_usage"); @SuppressWarnings("unchecked") @@ -85,21 +78,6 @@ public void testLogsdbIndexGetsStoredSource() throws IOException { assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); } - public void testLogsdbOverrideSyntheticSourceModeInMapping() throws IOException { - final String index = "test-index"; - String mapping = """ - { - "_source": { - "mode": "synthetic" - } - } - """; - createIndex(index, Settings.builder().put("index.mode", "logsdb").build(), mapping); - var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); - assertEquals("logsdb", settings.get("index.mode")); - assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); - } - public void testLogsdbOverrideSyntheticSourceSetting() throws IOException { final String index = "test-index"; createIndex( diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java index b5a3ff482c3cf..b4abdfd09ffc9 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.junit.Before; @@ -24,6 +23,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; @SuppressWarnings("unchecked") public class LogsIndexModeCustomSettingsIT extends LogsIndexModeRestTestIT { @@ -102,12 +102,12 @@ public void testConfigureStoredSourceBeforeIndexCreation() throws IOException { "template": { "settings": { "index": { - "mode": "logsdb" - } - }, - "mappings": { - "_source": { - "mode": "stored" + "mode": "logsdb", + "mapping": { + "source": { + "mode": "stored" + } + } } } } @@ -115,21 +115,10 @@ public void testConfigureStoredSourceBeforeIndexCreation() throws IOException { assertOK(putComponentTemplate(client, 
"logs@custom", storedSourceMapping)); Request request = new Request("PUT", "_data_stream/logs-custom-dev"); - if (SourceFieldMapper.onOrAfterDeprecateModeVersion(minimumIndexVersion())) { - request.setOptions(expectVersionSpecificWarnings(v -> v.current(SourceFieldMapper.DEPRECATION_WARNING))); - } assertOK(client.performRequest(request)); - var mapping = getMapping(client, getDataStreamBackingIndex(client, "logs-custom-dev", 0)); - String sourceMode = (String) subObject("_source").apply(mapping).get("mode"); - assertThat(sourceMode, equalTo("stored")); - - request = new Request("GET", "/_migration/deprecations"); - var nodeSettings = (Map) ((List) entityAsMap(client.performRequest(request)).get("node_settings")).getFirst(); - assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); - assertThat( - (String) nodeSettings.get("details"), - containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [logs@custom]") - ); + var indexName = getDataStreamBackingIndex(client, "logs-custom-dev", 0); + var settings = (Map) ((Map) ((Map) getIndexSettings(indexName)).get(indexName)).get("settings"); + assertThat(settings, hasEntry("index.mapping.source.mode", "stored")); } public void testConfigureDisabledSourceBeforeIndexCreation() { @@ -163,12 +152,12 @@ public void testConfigureDisabledSourceModeBeforeIndexCreation() { "template": { "settings": { "index": { - "mode": "logsdb" - } - }, - "mappings": { - "_source": { - "mode": "disabled" + "mode": "logsdb", + "mapping": { + "source": { + "mode": "disabled" + } + } } } } @@ -186,9 +175,13 @@ public void testConfigureStoredSourceWhenIndexIsCreated() throws IOException { var storedSourceMapping = """ { "template": { - "mappings": { - "_source": { - "mode": "stored" + "settings": { + "index": { + "mapping": { + "source": { + "mode": "stored" + } + } } } } @@ -196,22 +189,11 @@ public void testConfigureStoredSourceWhenIndexIsCreated() throws IOException { assertOK(putComponentTemplate(client, "logs@custom", storedSourceMapping)); Request request = new Request("PUT", "_data_stream/logs-custom-dev"); - if (SourceFieldMapper.onOrAfterDeprecateModeVersion(minimumIndexVersion())) { - request.setOptions(expectVersionSpecificWarnings(v -> v.current(SourceFieldMapper.DEPRECATION_WARNING))); - } assertOK(client.performRequest(request)); - var mapping = getMapping(client, getDataStreamBackingIndex(client, "logs-custom-dev", 0)); - String sourceMode = (String) subObject("_source").apply(mapping).get("mode"); - assertThat(sourceMode, equalTo("stored")); - - request = new Request("GET", "/_migration/deprecations"); - var nodeSettings = (Map) ((List) entityAsMap(client.performRequest(request)).get("node_settings")).getFirst(); - assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); - assertThat( - (String) nodeSettings.get("details"), - containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [logs@custom]") - ); + var indexName = getDataStreamBackingIndex(client, "logs-custom-dev", 0); + var settings = (Map) ((Map) ((Map) getIndexSettings(indexName)).get(indexName)).get("settings"); + assertThat(settings, hasEntry("index.mapping.source.mode", "stored")); } public void testConfigureDisabledSourceWhenIndexIsCreated() throws IOException { @@ -235,9 +217,13 @@ public void testConfigureDisabledSourceModeWhenIndexIsCreated() throws IOExcepti var disabledModeMapping = """ { "template": { - "mappings": { - "_source": { - "mode": "disabled" + "settings": 
{ + "index": { + "mapping": { + "source": { + "mode": "disabled" + } + } } } } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java index 0990592cef5e3..cc7f5bdb33871 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.test.rest.ESRestTestCase; import java.io.IOException; @@ -36,11 +35,6 @@ protected static Response putComponentTemplate(final RestClient client, final St throws IOException { final Request request = new Request("PUT", "/_component_template/" + componentTemplate); request.setJsonEntity(contends); - if (isSyntheticSourceConfiguredInTemplate(contends) && SourceFieldMapper.onOrAfterDeprecateModeVersion(minimumIndexVersion())) { - request.setOptions( - expectVersionSpecificWarnings((VersionSensitiveWarningsHandler v) -> v.current(SourceFieldMapper.DEPRECATION_WARNING)) - ); - } return client.performRequest(request); } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java index 177858b84ad43..675b1baad7c2c 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java @@ -59,14 +59,7 @@ public void testFeatureUsageWithLogsdbIndex() throws IOException { var settings = Settings.builder().put("index.mode", "time_series").put("index.routing_path", "field1").build(); createIndex("test-index", settings, mapping); } else { - String mapping = """ - { - "_source": { - "mode": "synthetic" - } - } - """; - createIndex("test-index", Settings.EMPTY, mapping); + createIndex("test-index", Settings.builder().put("index.mapping.source.mode", "synthetic").build()); } var response = getAsMap("/_license/feature_usage"); @SuppressWarnings("unchecked") @@ -77,8 +70,15 @@ public void testFeatureUsageWithLogsdbIndex() throws IOException { assertThat(feature.get("name"), equalTo("synthetic-source")); assertThat(feature.get("license_level"), equalTo("enterprise")); - var settings = (Map) ((Map) getIndexSettings("test-index").get("test-index")).get("settings"); - assertNull(settings.get("index.mapping.source.mode")); // Default, no downgrading. 
+ var indexResponse = (Map) getIndexSettings("test-index", true).get("test-index"); + logger.info("indexResponse: {}", indexResponse); + var sourceMode = ((Map) indexResponse.get("settings")).get("index.mapping.source.mode"); + if (sourceMode != null) { + assertThat(sourceMode, equalTo("synthetic")); + } else { + var defaultSourceMode = ((Map) indexResponse.get("defaults")).get("index.mapping.source.mode"); + assertThat(defaultSourceMode, equalTo("SYNTHETIC")); + } } } diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java index 77319a881f1e5..5220b5eba4567 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java @@ -436,7 +436,7 @@ public void testNewIndexHasSyntheticSourceUsage() throws IOException { """; boolean result = provider.getMappingHints(indexName, null, settings, List.of(new CompressedXContent(mapping))) .hasSyntheticSourceUsage(); - assertTrue(result); + assertFalse("_source.mode is a noop", result); assertThat(newMapperServiceCounter.get(), equalTo(1)); assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml index 792df4dbf639e..08d724fa2cb71 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml @@ -7,15 +7,14 @@ create an index with disabled source mode and standard index mode without settin settings: index: mode: standard - mappings: - _source: - mode: disabled + mapping: + source: + mode: disabled - do: - indices.get_mapping: + indices.get_settings: index: test_disabled_standard - - - match: { test_disabled_standard.mappings._source.mode: disabled } + - match: { test_disabled_standard.settings.index.mapping.source.mode: disabled } --- create an index with stored source mode and standard index mode without setting: @@ -26,15 +25,15 @@ create an index with stored source mode and standard index mode without setting: settings: index: mode: standard - mappings: - _source: - mode: stored + mapping: + source: + mode: stored - do: - indices.get_mapping: + indices.get_settings: index: test_stored_standard - - match: { test_stored_standard.mappings._source.mode: stored } + - match: { test_stored_standard.settings.index.mapping.source.mode: stored } --- create an index with synthetic source mode and standard index mode without setting: @@ -45,15 +44,15 @@ create an index with synthetic source mode and standard index mode without setti settings: index: mode: standard - mappings: - _source: - mode: synthetic + mapping: + source: + mode: "synthetic" - do: - indices.get_mapping: + indices.get_settings: index: test_synthetic_standard - - match: { test_synthetic_standard.mappings._source.mode: synthetic } + - match: { test_synthetic_standard.settings.index.mapping.source.mode: synthetic } --- create an index with disabled source mode and logsdb index mode without setting: @@ -65,9 +64,9 @@ create an index with disabled source mode and logsdb index mode without setting: settings: index: mode: 
logsdb - mappings: - _source: - mode: disabled + mapping: + source: + mode: disabled - match: { error.type: "mapper_parsing_exception" } - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode" } @@ -81,20 +80,15 @@ create an index with stored source mode and logsdb index mode without setting: settings: index: mode: logsdb - mappings: - _source: - mode: stored + mapping: + source: + mode: stored - do: indices.get_settings: index: "test_stored_logsdb" - match: { test_stored_logsdb.settings.index.mode: logsdb } - - - do: - indices.get_mapping: - index: test_stored_logsdb - - - match: { test_stored_logsdb.mappings._source.mode: stored } + - match: { test_stored_logsdb.settings.index.mapping.source.mode: stored } --- create an index with synthetic source mode and logsdb index mode without setting: @@ -105,15 +99,15 @@ create an index with synthetic source mode and logsdb index mode without setting settings: index: mode: logsdb - mappings: - _source: - mode: synthetic + mapping: + source: + mode: synthetic - do: - indices.get_mapping: + indices.get_settings: index: test_synthetic_logsdb - - match: { test_synthetic_logsdb.mappings._source.mode: synthetic } + - match: { test_synthetic_logsdb.settings.index.mapping.source.mode: synthetic } --- create an index with disabled source mode and time series index mode without setting: @@ -125,13 +119,14 @@ create an index with disabled source mode and time series index mode without set settings: index: mode: time_series + mapping: + source: + mode: disabled routing_path: [ keyword ] time_series: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z mappings: - _source: - mode: disabled properties: keyword: type: keyword @@ -149,13 +144,14 @@ create an index with stored source mode and time series index mode without setti settings: index: mode: time_series + mapping: + source: + mode: stored routing_path: [ keyword ] time_series: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z mappings: - _source: - mode: stored properties: keyword: type: keyword @@ -167,11 +163,10 @@ create an index with stored source mode and time series index mode without setti - match: { test_stored_time_series.settings.index.mode: time_series } - do: - indices.get_mapping: + indices.get_settings: index: test_stored_time_series - - match: { test_stored_time_series.mappings._source.mode: stored } - + - match: { test_stored_time_series.settings.index.mapping.source.mode: stored } --- create an index with synthetic source mode and time series index mode without setting: @@ -182,13 +177,14 @@ create an index with synthetic source mode and time series index mode without se settings: index: mode: time_series + mapping: + source: + mode: synthetic routing_path: [ keyword ] time_series: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z mappings: - _source: - mode: synthetic properties: keyword: type: keyword @@ -198,174 +194,56 @@ create an index with synthetic source mode and time series index mode without se indices.get_settings: index: "test_synthetic_time_series" - match: { test_synthetic_time_series.settings.index.mode: time_series } - - - do: - indices.get_mapping: - index: test_synthetic_time_series - - - match: { test_synthetic_time_series.mappings._source.mode: synthetic } + - match: { test_synthetic_time_series.settings.index.mapping.source.mode: synthetic } --- create an index with stored source mode: - do: indices.create: index: test_stored_default - body: - mappings: - _source: - 
mode: stored - - - do: - indices.get_mapping: - index: test_stored_default - - - match: { test_stored_default.mappings._source.mode: stored } - ---- -override stored to synthetic source mode: - - do: - indices.create: - index: test_stored_override body: settings: index: - mapping.source.mode: synthetic - mappings: - _source: - mode: stored + mapping: + source: + mode: stored - do: - indices.get_mapping: - index: test_stored_override - - - match: { test_stored_override.mappings._source.mode: synthetic } - ---- -override stored to disabled source mode: - - do: - indices.create: - index: test_stored_disabled - body: - settings: - index: - mapping.source.mode: disabled - mappings: - _source: - mode: stored - - - do: - indices.get_mapping: - index: test_stored_disabled + indices.get_settings: + index: test_stored_default - - match: { test_stored_disabled.mappings._source.mode: disabled } + - match: { test_stored_default.settings.index.mapping.source.mode: stored } --- create an index with disabled source mode: - do: indices.create: index: test_disabled_default - body: - mappings: - _source: - mode: disabled - - - do: - indices.get_mapping: - index: test_disabled_default - - - match: { test_disabled_default.mappings._source.mode: disabled } - ---- -override disabled to synthetic source mode: - - do: - indices.create: - index: test_disabled_synthetic body: settings: index: - mapping.source.mode: synthetic - mappings: - _source: - mode: disabled - - - do: - indices.get_mapping: - index: test_disabled_synthetic - - - match: { test_disabled_synthetic.mappings._source.mode: synthetic } - ---- -override disabled to stored source mode: - - do: - indices.create: - index: test_disabled_stored - body: - settings: - index: - mapping.source.mode: stored - mappings: - _source: - mode: disabled + mapping.source.mode: disabled - do: - indices.get_mapping: - index: test_disabled_stored + indices.get_settings: + index: test_disabled_default - - match: { test_disabled_stored.mappings._source.mode: stored } + - match: { test_disabled_default.settings.index.mapping.source.mode: disabled } --- create an index with synthetic source mode: - do: indices.create: index: test_synthetic_default - body: - mappings: - _source: - mode: synthetic - - - do: - indices.get_mapping: - index: test_synthetic_default - - - match: { test_synthetic_default.mappings._source.mode: synthetic } - ---- -override synthetic to stored source mode: - - do: - indices.create: - index: test_synthetic_stored - body: - settings: - index: - mapping.source.mode: stored - mappings: - _source: - mode: synthetic - - - do: - indices.get_mapping: - index: test_synthetic_stored - - - match: { test_synthetic_stored.mappings._source.mode: stored } - ---- -override synthetic to disabled source mode: - - do: - indices.create: - index: test_synthetic_disabled body: settings: index: - mapping.source.mode: disabled - mappings: - _source: - mode: synthetic + mapping.source.mode: synthetic - do: - indices.get_mapping: - index: test_synthetic_disabled - - - match: { test_synthetic_disabled.mappings._source.mode: disabled } + indices.get_settings: + index: test_synthetic_default + - match: { test_synthetic_default.settings.index.mapping.source.mode: synthetic } --- create an index with unspecified source mode: @@ -374,10 +252,10 @@ create an index with unspecified source mode: index: test_unset_default - do: - indices.get_mapping: + indices.get_settings: index: test_unset_default - - match: { test_unset_default.mappings._source.mode: null } + - match: { 
test_unset_default.settings.index.mapping.source.mode: null } --- override unspecified to stored source mode: @@ -436,15 +314,14 @@ create an index with standard index mode: settings: index: mode: standard - mappings: - _source: - mode: stored - + mapping: + source: + mode: stored - do: - indices.get_mapping: + indices.get_settings: index: test_standard_index_mode - - match: { test_standard_index_mode.mappings._source.mode: stored } + - match: { test_standard_index_mode.settings.index.mapping.source.mode: stored } --- create an index with time_series index mode and synthetic source: @@ -590,237 +467,25 @@ modify final setting after index creation: mapping.source.mode: synthetic --- -modify source mapping from stored to disabled after index creation: - - do: - indices.create: - index: test_modify_source_mode_stored_disabled - body: - settings: - index: - mapping.source.mode: stored +use no-op _source.mode attr: + - requires: + test_runner_features: [ "warnings" ] - do: - indices.put_mapping: - index: test_modify_source_mode_stored_disabled - body: - _source: - mode: disabled - - is_true: acknowledged - - - do: - indices.get_mapping: - index: test_modify_source_mode_stored_disabled - - match: { test_modify_source_mode_stored_disabled.mappings._source.mode: stored } - ---- -modify source mapping from stored to synthetic after index creation: - - do: + warnings: + - "Configuring source mode in mappings is deprecated and will be removed in future versions. Use [index.mapping.source.mode] index setting instead." indices.create: - index: test_modify_source_mode_stored_synthetic - body: - settings: - index: - mapping.source.mode: stored - - - do: - indices.put_mapping: - index: test_modify_source_mode_stored_synthetic - body: - _source: - mode: synthetic - - is_true: acknowledged - - - do: - indices.get_mapping: - index: test_modify_source_mode_stored_synthetic - - match: { test_modify_source_mode_stored_synthetic.mappings._source.mode: stored } - ---- -modify source mapping from disabled to stored after index creation: - - do: - indices.create: - index: test_modify_source_mode_disabled_stored - body: - settings: - index: - mapping.source.mode: disabled - - - do: - indices.put_mapping: - index: test_modify_source_mode_disabled_stored - body: - _source: - mode: stored - - is_true: acknowledged - - - do: - indices.get_mapping: - index: test_modify_source_mode_disabled_stored - - match: { test_modify_source_mode_disabled_stored.mappings._source.mode: disabled } - ---- -modify source mapping from disabled to synthetic after index creation: - - do: - indices.create: - index: test_modify_source_mode_disabled_synthetic - body: - settings: - index: - mapping.source.mode: disabled - - - do: - indices.put_mapping: - index: test_modify_source_mode_disabled_synthetic - body: - _source: - mode: synthetic - - is_true: acknowledged - - - do: - indices.get_mapping: - index: test_modify_source_mode_disabled_synthetic - - match: { test_modify_source_mode_disabled_synthetic.mappings._source.mode: disabled } - ---- -modify source mapping from synthetic to stored after index creation: - - do: - indices.create: - index: test_modify_source_mode_synthetic_stored - body: - settings: - index: - mapping.source.mode: synthetic - - - do: - indices.put_mapping: - index: test_modify_source_mode_synthetic_stored - body: - _source: - mode: stored - - is_true: acknowledged - - - do: - indices.get_mapping: - index: test_modify_source_mode_synthetic_stored - - match: { test_modify_source_mode_synthetic_stored.mappings._source.mode: 
synthetic } - ---- -modify source mapping from synthetic to disabled after index creation: - - do: - indices.create: - index: test_modify_source_mode_synthetic_disabled + index: test body: settings: index: mapping.source.mode: synthetic - - - do: - indices.put_mapping: - index: test_modify_source_mode_synthetic_disabled - body: - _source: - mode: disabled - - is_true: acknowledged - - - do: - indices.get_mapping: - index: test_modify_source_mode_synthetic_disabled - - match: { test_modify_source_mode_synthetic_disabled.mappings._source.mode: synthetic } - ---- -modify logsdb index source mode to disabled after index creation: - - do: - indices.create: - index: test_modify_logsdb_disabled_after_creation - body: - settings: - index: - mode: logsdb - - - do: - catch: bad_request - indices.put_mapping: - index: test_modify_logsdb_disabled_after_creation - body: - _source: - mode: disabled - - match: { error.type: "mapper_parsing_exception" } - - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode" } - ---- -modify logsdb index source mode to stored after index creation: - - do: - indices.create: - index: test_modify_logsdb_stored_after_creation - body: - settings: - index: - mode: logsdb - - - do: - catch: bad_request - indices.put_mapping: - index: test_modify_logsdb_stored_after_creation - body: - _source: - mode: stored - - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "Mapper for [_source] conflicts with existing mapper:\n\tCannot update parameter [mode] from [synthetic] to [stored]" } - ---- -modify time_series index source mode to disabled after index creation: - - do: - indices.create: - index: test_modify_time_series_disabled_after_creation - body: - settings: - index: - mode: time_series - routing_path: [ keyword ] - time_series: - start_time: 2021-04-28T00:00:00Z - end_time: 2021-04-29T00:00:00Z mappings: - properties: - keyword: - type: keyword - time_series_dimension: true - - - do: - catch: bad_request - indices.put_mapping: - index: test_modify_time_series_disabled_after_creation - body: - _source: - mode: disabled - - match: { error.type: "mapper_parsing_exception" } - - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [time_series] index mode" } + _source: + mode: synthetic ---- -modify time_series index source mode to stored after index creation: - do: - indices.create: - index: test_modify_time_series_stored_after_creation - body: - settings: - index: - mode: time_series - routing_path: [ keyword ] - time_series: - start_time: 2021-04-28T00:00:00Z - end_time: 2021-04-29T00:00:00Z - mappings: - properties: - keyword: - type: keyword - time_series_dimension: true + indices.get_settings: + index: test - - do: - catch: bad_request - indices.put_mapping: - index: test_modify_time_series_stored_after_creation - body: - _source: - mode: stored - - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "Mapper for [_source] conflicts with existing mapper:\n\tCannot update parameter [mode] from [synthetic] to [stored]" } + - match: { test.settings.index.mapping.source.mode: synthetic } From a82453afe2bf1240e4509a7252cef1d4cb782fcd Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Fri, 24 Jan 2025 09:47:20 +0100 Subject: [PATCH 002/383] Add missing capability (#120771) --- .../resources/rest-api-spec/test/esql/191_lookup_join_text.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml index 1b532ab80eeb6..7d1b3a90c6081 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml @@ -6,7 +6,7 @@ setup: - method: POST path: /_query parameters: [] - capabilities: [lookup_join_text] + capabilities: [lookup_join_text, join_lookup_v11] reason: "uses LOOKUP JOIN" - do: indices.create: @@ -31,7 +31,6 @@ setup: settings: index: mode: lookup - number_of_shards: 1 mappings: properties: color: From efc18fec01a5c2f12c410490b53db3103386757a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 24 Jan 2025 20:05:47 +1100 Subject: [PATCH 003/383] Mute org.elasticsearch.action.search.SearchProgressActionListenerIT testSearchProgressWithHits #120671 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 93f798c33eba3..596001b5aac1a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -243,6 +243,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/120767 - class: org.elasticsearch.search.fieldcaps.FieldCapabilitiesIT issue: https://github.com/elastic/elasticsearch/issues/120772 +- class: org.elasticsearch.action.search.SearchProgressActionListenerIT + method: testSearchProgressWithHits + issue: https://github.com/elastic/elasticsearch/issues/120671 # Examples: # From 8b9ceae2925621359af9c80aa56d5cf75ab147bc Mon Sep 17 00:00:00 2001 From: Max Jakob Date: Fri, 24 Jan 2025 10:25:44 +0100 Subject: [PATCH 004/383] [Inference API] EIS: adapt paths (#120734) The final EIS task type definitions change the names of the routes. --- .../elastic/ElasticInferenceServiceSparseEmbeddingsModel.java | 2 +- .../completion/ElasticInferenceServiceCompletionModel.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java index 112be95dac1fd..4c1cac4d7a77b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java @@ -115,7 +115,7 @@ private URI createUri() throws ElasticsearchStatusException { try { // TODO, consider transforming the base URL into a URI for better error handling.
return new URI( - elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/sparse-text-embeddings/" + modelIdUriPath + elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/embed/text/sparse/" + modelIdUriPath ); } catch (URISyntaxException e) { throw new ElasticsearchStatusException( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionModel.java index 84039cd7cc33c..b26f80efb1930 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionModel.java @@ -106,7 +106,7 @@ public URI uri() { private URI createUri() throws ElasticsearchStatusException { try { // TODO, consider transforming the base URL into a URI for better error handling. - return new URI(elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/chat/completions"); + return new URI(elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/chat"); } catch (URISyntaxException e) { throw new ElasticsearchStatusException( "Failed to create URI for service [" From be7635e33fbb3cc0e067c7cd5af0dc4cd4f49ffa Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 24 Jan 2025 09:25:51 +0000 Subject: [PATCH 005/383] Add version string to v9 transport handshake (#120744) --- .../elasticsearch/transport/TcpTransport.java | 3 +- .../transport/TransportHandshaker.java | 106 +++++++++++--- .../transport/TransportHandshakerTests.java | 135 +++++++++++++++++- 3 files changed, 216 insertions(+), 28 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index faafdf7d71e33..5eb51d3cadcc6 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; @@ -175,7 +176,7 @@ public TcpTransport( channel, requestId, TransportHandshaker.HANDSHAKE_ACTION_NAME, - new TransportHandshaker.HandshakeRequest(version), + new TransportHandshaker.HandshakeRequest(version, Build.current().version()), TransportRequestOptions.EMPTY, v, null, diff --git a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java index 4eb16f327a5e7..a5973e4001444 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java @@ -9,6 +9,7 @@ package org.elasticsearch.transport; +import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; @@ -19,10 +20,12 @@ import org.elasticsearch.common.io.stream.StreamOutput; import 
org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.threadpool.ThreadPool; import java.io.EOFException; import java.io.IOException; +import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -206,7 +209,7 @@ void handleHandshake(TransportChannel channel, long requestId, StreamInput strea assert ignoreDeserializationErrors : exception; throw exception; } - channel.sendResponse(new HandshakeResponse(this.version)); + channel.sendResponse(new HandshakeResponse(this.version, Build.current().version())); } TransportResponseHandler removeHandlerForHandshake(long requestId) { @@ -245,7 +248,7 @@ public Executor executor() { @Override public void handleResponse(HandshakeResponse response) { if (isDone.compareAndSet(false, true)) { - TransportVersion responseVersion = response.responseVersion; + TransportVersion responseVersion = response.transportVersion; if (TransportVersion.isCompatible(responseVersion) == false) { listener.onFailure( new IllegalStateException( @@ -257,7 +260,7 @@ public void handleResponse(HandshakeResponse response) { ) ); } else { - listener.onResponse(TransportVersion.min(TransportHandshaker.this.version, response.getResponseVersion())); + listener.onResponse(TransportVersion.min(TransportHandshaker.this.version, response.getTransportVersion())); } } } @@ -278,12 +281,23 @@ void handleLocalException(TransportException e) { static final class HandshakeRequest extends TransportRequest { - private final TransportVersion version; + /** + * The {@link TransportVersion#current()} of the requesting node. + */ + final TransportVersion transportVersion; - HandshakeRequest(TransportVersion version) { - this.version = version; + /** + * The {@link Build#version()} of the requesting node, as a {@link String}, for better reporting of handshake failures due to + * an incompatible version. 
+ */ + final String releaseVersion; + + HandshakeRequest(TransportVersion transportVersion, String releaseVersion) { + this.transportVersion = Objects.requireNonNull(transportVersion); + this.releaseVersion = Objects.requireNonNull(releaseVersion); } + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // remainingMessage == null is invalid in v9 HandshakeRequest(StreamInput streamInput) throws IOException { super(streamInput); BytesReference remainingMessage; @@ -293,10 +307,16 @@ static final class HandshakeRequest extends TransportRequest { remainingMessage = null; } if (remainingMessage == null) { - version = null; + transportVersion = null; + releaseVersion = null; } else { try (StreamInput messageStreamInput = remainingMessage.streamInput()) { - this.version = TransportVersion.readVersion(messageStreamInput); + this.transportVersion = TransportVersion.readVersion(messageStreamInput); + if (streamInput.getTransportVersion().onOrAfter(V9_HANDSHAKE_VERSION)) { + this.releaseVersion = messageStreamInput.readString(); + } else { + this.releaseVersion = this.transportVersion.toReleaseVersion(); + } } } } @@ -304,42 +324,84 @@ static final class HandshakeRequest extends TransportRequest { @Override public void writeTo(StreamOutput streamOutput) throws IOException { super.writeTo(streamOutput); - assert version != null; - try (BytesStreamOutput messageStreamOutput = new BytesStreamOutput(4)) { - TransportVersion.writeVersion(version, messageStreamOutput); + assert transportVersion != null; + try (BytesStreamOutput messageStreamOutput = new BytesStreamOutput(1024)) { + TransportVersion.writeVersion(transportVersion, messageStreamOutput); + if (streamOutput.getTransportVersion().onOrAfter(V9_HANDSHAKE_VERSION)) { + messageStreamOutput.writeString(releaseVersion); + } // else we just send the transport version and rely on a best-effort mapping to release versions BytesReference reference = messageStreamOutput.bytes(); streamOutput.writeBytesReference(reference); } } } + /** + * A response to a low-level transport handshake, carrying information about the version of the responding node. + */ static final class HandshakeResponse extends TransportResponse { - private final TransportVersion responseVersion; + /** + * The {@link TransportVersion#current()} of the responding node. + */ + private final TransportVersion transportVersion; - HandshakeResponse(TransportVersion responseVersion) { - this.responseVersion = responseVersion; + /** + * The {@link Build#version()} of the responding node, as a {@link String}, for better reporting of handshake failures due to + * an incompatible version. 
+ */ + private final String releaseVersion; + + HandshakeResponse(TransportVersion transportVersion, String releaseVersion) { + this.transportVersion = Objects.requireNonNull(transportVersion); + this.releaseVersion = Objects.requireNonNull(releaseVersion); } - private HandshakeResponse(StreamInput in) throws IOException { + HandshakeResponse(StreamInput in) throws IOException { super(in); - responseVersion = TransportVersion.readVersion(in); + transportVersion = TransportVersion.readVersion(in); + if (in.getTransportVersion().onOrAfter(V9_HANDSHAKE_VERSION)) { + releaseVersion = in.readString(); + } else { + releaseVersion = transportVersion.toReleaseVersion(); + } } @Override public void writeTo(StreamOutput out) throws IOException { - assert responseVersion != null; - TransportVersion.writeVersion(responseVersion, out); + TransportVersion.writeVersion(transportVersion, out); + if (out.getTransportVersion().onOrAfter(V9_HANDSHAKE_VERSION)) { + out.writeString(releaseVersion); + } // else we just send the transport version and rely on a best-effort mapping to release versions + } + + /** + * @return the {@link TransportVersion#current()} of the responding node. + */ + TransportVersion getTransportVersion() { + return transportVersion; } - TransportVersion getResponseVersion() { - return responseVersion; + /** + * @return the {@link Build#version()} of the responding node, as a {@link String}, for better reporting of handshake failures due + * to an incompatible version. + */ + String getReleaseVersion() { + return releaseVersion; } } @FunctionalInterface interface HandshakeRequestSender { - - void sendRequest(DiscoveryNode node, TcpChannel channel, long requestId, TransportVersion version) throws IOException; + /** + * @param node The (expected) remote node, for error reporting and passing to + * {@link TransportMessageListener#onRequestSent}. + * @param channel The TCP channel to use to send the handshake request. + * @param requestId The transport request ID, for matching up the response. + * @param handshakeTransportVersion The {@link TransportVersion} to use for the handshake request, which will be + * {@link TransportHandshaker#V8_HANDSHAKE_VERSION} in production. 
+ */ + void sendRequest(DiscoveryNode node, TcpChannel channel, long requestId, TransportVersion handshakeTransportVersion) + throws IOException; } } diff --git a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java index af430b2d18c51..d260d66157651 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -38,6 +39,7 @@ public class TransportHandshakerTests extends ESTestCase { private TestThreadPool threadPool; private TransportHandshaker.HandshakeRequestSender requestSender; + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) private static final TransportVersion HANDSHAKE_REQUEST_VERSION = TransportHandshaker.V8_HANDSHAKE_VERSION; @Override @@ -71,10 +73,15 @@ public void testHandshakeRequestAndResponse() throws IOException { assertFalse(versionFuture.isDone()); - TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest(TransportVersion.current()); + TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest( + TransportVersion.current(), + randomIdentifier() + ); BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(); + bytesStreamOutput.setTransportVersion(HANDSHAKE_REQUEST_VERSION); handshakeRequest.writeTo(bytesStreamOutput); StreamInput input = bytesStreamOutput.bytes().streamInput(); + input.setTransportVersion(HANDSHAKE_REQUEST_VERSION); final PlainActionFuture responseFuture = new PlainActionFuture<>(); final TestTransportChannel channel = new TestTransportChannel(responseFuture); handshaker.handleHandshake(channel, reqId, input); @@ -95,7 +102,7 @@ public void testHandshakeResponseFromOlderNode() throws Exception { assertFalse(versionFuture.isDone()); final var remoteVersion = TransportVersionUtils.randomCompatibleVersion(random()); - handler.handleResponse(new TransportHandshaker.HandshakeResponse(remoteVersion)); + handler.handleResponse(new TransportHandshaker.HandshakeResponse(remoteVersion, randomIdentifier())); assertTrue(versionFuture.isDone()); assertEquals(remoteVersion, versionFuture.result()); @@ -110,7 +117,10 @@ public void testHandshakeResponseFromNewerNode() throws Exception { assertFalse(versionFuture.isDone()); handler.handleResponse( - new TransportHandshaker.HandshakeResponse(TransportVersion.fromId(TransportVersion.current().id() + between(0, 10))) + new TransportHandshaker.HandshakeResponse( + TransportVersion.fromId(TransportVersion.current().id() + between(0, 10)), + randomIdentifier() + ) ); assertTrue(versionFuture.isDone()); @@ -123,8 +133,12 @@ public void testHandshakeRequestFutureVersionsCompatibility() throws IOException verify(requestSender).sendRequest(node, channel, reqId, HANDSHAKE_REQUEST_VERSION); - TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest(TransportVersion.current()); + TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest( + TransportVersion.current(), + randomIdentifier() + ); 
BytesStreamOutput currentHandshakeBytes = new BytesStreamOutput(); + currentHandshakeBytes.setTransportVersion(HANDSHAKE_REQUEST_VERSION); handshakeRequest.writeTo(currentHandshakeBytes); BytesStreamOutput lengthCheckingHandshake = new BytesStreamOutput(); @@ -149,7 +163,118 @@ public void testHandshakeRequestFutureVersionsCompatibility() throws IOException TransportHandshaker.HandshakeResponse response = (TransportHandshaker.HandshakeResponse) responseFuture.actionGet(); - assertEquals(TransportVersion.current(), response.getResponseVersion()); + assertEquals(TransportVersion.current(), response.getTransportVersion()); + } + + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // v7 handshakes are not supported in v9 + public void testReadV7HandshakeRequest() throws IOException { + final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + + final var requestPayloadStreamOutput = new BytesStreamOutput(); + requestPayloadStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); + requestPayloadStreamOutput.writeVInt(transportVersion.id()); + + final var requestBytesStreamOutput = new BytesStreamOutput(); + requestBytesStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); + TaskId.EMPTY_TASK_ID.writeTo(requestBytesStreamOutput); + requestBytesStreamOutput.writeBytesReference(requestPayloadStreamOutput.bytes()); + + final var requestBytesStream = requestBytesStreamOutput.bytes().streamInput(); + requestBytesStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); + final var handshakeRequest = new TransportHandshaker.HandshakeRequest(requestBytesStream); + + assertEquals(transportVersion, handshakeRequest.transportVersion); + assertEquals(transportVersion.toReleaseVersion(), handshakeRequest.releaseVersion); + } + + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // v7 handshakes are not supported in v9 + public void testReadV7HandshakeResponse() throws IOException { + final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + + final var responseBytesStreamOutput = new BytesStreamOutput(); + responseBytesStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); + responseBytesStreamOutput.writeVInt(transportVersion.id()); + + final var responseBytesStream = responseBytesStreamOutput.bytes().streamInput(); + responseBytesStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); + final var handshakeResponse = new TransportHandshaker.HandshakeResponse(responseBytesStream); + + assertEquals(transportVersion, handshakeResponse.getTransportVersion()); + assertEquals(transportVersion.toReleaseVersion(), handshakeResponse.getReleaseVersion()); + } + + public void testReadV8HandshakeRequest() throws IOException { + final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + + final var requestPayloadStreamOutput = new BytesStreamOutput(); + requestPayloadStreamOutput.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + requestPayloadStreamOutput.writeVInt(transportVersion.id()); + + final var requestBytesStreamOutput = new BytesStreamOutput(); + requestBytesStreamOutput.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + TaskId.EMPTY_TASK_ID.writeTo(requestBytesStreamOutput); + requestBytesStreamOutput.writeBytesReference(requestPayloadStreamOutput.bytes()); + + final var requestBytesStream = requestBytesStreamOutput.bytes().streamInput(); + 
requestBytesStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + final var handshakeRequest = new TransportHandshaker.HandshakeRequest(requestBytesStream); + + assertEquals(transportVersion, handshakeRequest.transportVersion); + assertEquals(transportVersion.toReleaseVersion(), handshakeRequest.releaseVersion); + } + + public void testReadV8HandshakeResponse() throws IOException { + final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + + final var responseBytesStreamOutput = new BytesStreamOutput(); + responseBytesStreamOutput.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + responseBytesStreamOutput.writeVInt(transportVersion.id()); + + final var responseBytesStream = responseBytesStreamOutput.bytes().streamInput(); + responseBytesStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + final var handshakeResponse = new TransportHandshaker.HandshakeResponse(responseBytesStream); + + assertEquals(transportVersion, handshakeResponse.getTransportVersion()); + assertEquals(transportVersion.toReleaseVersion(), handshakeResponse.getReleaseVersion()); + } + + public void testReadV9HandshakeRequest() throws IOException { + final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + final var releaseVersion = randomIdentifier(); + + final var requestPayloadStreamOutput = new BytesStreamOutput(); + requestPayloadStreamOutput.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); + requestPayloadStreamOutput.writeVInt(transportVersion.id()); + requestPayloadStreamOutput.writeString(releaseVersion); + + final var requestBytesStreamOutput = new BytesStreamOutput(); + requestBytesStreamOutput.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); + TaskId.EMPTY_TASK_ID.writeTo(requestBytesStreamOutput); + requestBytesStreamOutput.writeBytesReference(requestPayloadStreamOutput.bytes()); + + final var requestBytesStream = requestBytesStreamOutput.bytes().streamInput(); + requestBytesStream.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); + final var handshakeRequest = new TransportHandshaker.HandshakeRequest(requestBytesStream); + + assertEquals(transportVersion, handshakeRequest.transportVersion); + assertEquals(releaseVersion, handshakeRequest.releaseVersion); + } + + public void testReadV9HandshakeResponse() throws IOException { + final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + final var releaseVersion = randomIdentifier(); + + final var responseBytesStreamOutput = new BytesStreamOutput(); + responseBytesStreamOutput.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); + responseBytesStreamOutput.writeVInt(transportVersion.id()); + responseBytesStreamOutput.writeString(releaseVersion); + + final var responseBytesStream = responseBytesStreamOutput.bytes().streamInput(); + responseBytesStream.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); + final var handshakeResponse = new TransportHandshaker.HandshakeResponse(responseBytesStream); + + assertEquals(transportVersion, handshakeResponse.getTransportVersion()); + assertEquals(releaseVersion, handshakeResponse.getReleaseVersion()); } public void testHandshakeError() throws IOException { From 9ffe3c88354da86a806d1ee4cd64d4939362e389 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Fri, 24 Jan 2025 11:37:27 +0100 Subject: [PATCH 006/383] Update grammar to rely on indexPattern instead of identifier in join target (#120494) This replaces identifier 
with indexPattern in joinTarget grammar. This change is needed to make index selection consistent between FROM and [LOOKUP] JOIN commands: * Both should use the same quotes " (currently join relies on `) * Both should allow specifying indices with - without having to quote them (not possible with join at the moment) * Both should conform to allowed index names (there are number of differences today, for example it is possible to specify test? or +test in join even though it is not a valid index name.) --- docs/changelog/120494.yaml | 5 + .../xpack/esql/EsqlSecurityIT.java | 18 +- .../esql/qa/mixed/MixedClusterEsqlSpecIT.java | 4 +- .../xpack/esql/ccq/MultiClusterSpecIT.java | 4 +- .../rest/RequestIndexFilteringTestCase.java | 2 +- .../src/main/resources/lookup-join.csv-spec | 167 +- .../esql/src/main/antlr/EsqlBaseLexer.g4 | 4 + .../esql/src/main/antlr/EsqlBaseParser.g4 | 2 +- .../xpack/esql/action/EsqlCapabilities.java | 4 +- .../xpack/esql/parser/EsqlBaseLexer.interp | 5 +- .../xpack/esql/parser/EsqlBaseLexer.java | 1728 +++++++++-------- .../xpack/esql/parser/EsqlBaseParser.interp | 2 +- .../xpack/esql/parser/EsqlBaseParser.java | 16 +- .../xpack/esql/parser/LogicalPlanBuilder.java | 7 +- .../elasticsearch/xpack/esql/CsvTests.java | 2 +- .../xpack/esql/IdentifierGenerator.java | 116 ++ .../xpack/esql/analysis/AnalyzerTests.java | 8 +- .../xpack/esql/analysis/ParsingTests.java | 8 +- .../xpack/esql/analysis/VerifierTests.java | 2 +- .../optimizer/LogicalPlanOptimizerTests.java | 16 +- .../optimizer/PhysicalPlanOptimizerTests.java | 10 +- .../esql/parser/StatementParserTests.java | 33 + .../session/IndexResolverFieldNamesTests.java | 24 +- .../test/esql/190_lookup_join.yml | 38 +- .../test/esql/191_lookup_join_text.yml | 14 +- 25 files changed, 1228 insertions(+), 1011 deletions(-) create mode 100644 docs/changelog/120494.yaml create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java diff --git a/docs/changelog/120494.yaml b/docs/changelog/120494.yaml new file mode 100644 index 0000000000000..34ba7f65e591f --- /dev/null +++ b/docs/changelog/120494.yaml @@ -0,0 +1,5 @@ +pr: 120494 +summary: Update grammar to rely on `indexPattern` instead of identifier in join target +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index 7d96c400cb659..5adac8fdd70d0 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -538,12 +538,12 @@ record Listen(long timestamp, String songId, double duration) { public void testLookupJoinIndexAllowed() throws Exception { assumeTrue( "Requires LOOKUP JOIN capability", - EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V11.capabilityName())) + EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName())) ); Response resp = runESQLCommand( "metadata1_read2", - "ROW x = 40.0 | EVAL value = x | LOOKUP JOIN `lookup-user2` ON value | KEEP x, org" + "ROW x = 40.0 | EVAL value = x | LOOKUP JOIN lookup-user2 ON value | KEEP x, org" ); assertOK(resp); Map respMap = entityAsMap(resp); @@ -554,7 +554,7 @@ public void testLookupJoinIndexAllowed() throws Exception { 
assertThat(respMap.get("values"), equalTo(List.of(List.of(40.0, "sales")))); // Alias, should find the index and the row - resp = runESQLCommand("alias_user1", "ROW x = 31.0 | EVAL value = x | LOOKUP JOIN `lookup-first-alias` ON value | KEEP x, org"); + resp = runESQLCommand("alias_user1", "ROW x = 31.0 | EVAL value = x | LOOKUP JOIN lookup-first-alias ON value | KEEP x, org"); assertOK(resp); respMap = entityAsMap(resp); assertThat( @@ -564,7 +564,7 @@ public void testLookupJoinIndexAllowed() throws Exception { assertThat(respMap.get("values"), equalTo(List.of(List.of(31.0, "sales")))); // Alias, for a row that's filtered out - resp = runESQLCommand("alias_user1", "ROW x = 123.0 | EVAL value = x | LOOKUP JOIN `lookup-first-alias` ON value | KEEP x, org"); + resp = runESQLCommand("alias_user1", "ROW x = 123.0 | EVAL value = x | LOOKUP JOIN lookup-first-alias ON value | KEEP x, org"); assertOK(resp); respMap = entityAsMap(resp); assertThat( @@ -577,12 +577,12 @@ public void testLookupJoinIndexAllowed() throws Exception { public void testLookupJoinIndexForbidden() throws Exception { assumeTrue( "Requires LOOKUP JOIN capability", - EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V11.capabilityName())) + EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName())) ); var resp = expectThrows( ResponseException.class, - () -> runESQLCommand("metadata1_read2", "FROM lookup-user2 | EVAL value = 10.0 | LOOKUP JOIN `lookup-user1` ON value | KEEP x") + () -> runESQLCommand("metadata1_read2", "FROM lookup-user2 | EVAL value = 10.0 | LOOKUP JOIN lookup-user1 ON value | KEEP x") ); assertThat(resp.getMessage(), containsString("Unknown index [lookup-user1]")); assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); @@ -591,7 +591,7 @@ public void testLookupJoinIndexForbidden() throws Exception { ResponseException.class, () -> runESQLCommand( "metadata1_read2", - "FROM lookup-user2 | EVAL value = 10.0 | LOOKUP JOIN `lookup-first-alias` ON value | KEEP x" + "FROM lookup-user2 | EVAL value = 10.0 | LOOKUP JOIN lookup-first-alias ON value | KEEP x" ) ); assertThat(resp.getMessage(), containsString("Unknown index [lookup-first-alias]")); @@ -599,14 +599,14 @@ public void testLookupJoinIndexForbidden() throws Exception { resp = expectThrows( ResponseException.class, - () -> runESQLCommand("metadata1_read2", "ROW x = 10.0 | EVAL value = x | LOOKUP JOIN `lookup-user1` ON value | KEEP x") + () -> runESQLCommand("metadata1_read2", "ROW x = 10.0 | EVAL value = x | LOOKUP JOIN lookup-user1 ON value | KEEP x") ); assertThat(resp.getMessage(), containsString("Unknown index [lookup-user1]")); assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); resp = expectThrows( ResponseException.class, - () -> runESQLCommand("alias_user1", "ROW x = 10.0 | EVAL value = x | LOOKUP JOIN `lookup-user1` ON value | KEEP x") + () -> runESQLCommand("alias_user1", "ROW x = 10.0 | EVAL value = x | LOOKUP JOIN lookup-user1 ON value | KEEP x") ); assertThat(resp.getMessage(), containsString("Unknown index [lookup-user1]")); assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java 
b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index 3b5377c2768fb..790b12346bb14 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -20,7 +20,7 @@ import java.util.List; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V11; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V12; public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase { @ClassRule @@ -82,7 +82,7 @@ protected boolean supportsInferenceTestService() { @Override protected boolean supportsIndexModeLookup() throws IOException { - return hasCapabilities(List.of(JOIN_LOOKUP_V11.capabilityName())); + return hasCapabilities(List.of(JOIN_LOOKUP_V12.capabilityName())); } @Override diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index f8b921f239923..4d06db94801bf 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -48,7 +48,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V11; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V12; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; @@ -124,7 +124,7 @@ protected void shouldSkipTest(String testName) throws IOException { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); - assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V11.capabilityName())); + assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V12.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java index ba057cbe276ba..94f6a3c65418d 100644 --- 
a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java @@ -211,7 +211,7 @@ public void testIndicesDontExist() throws IOException { assertThat(e.getMessage(), containsString("index_not_found_exception")); assertThat(e.getMessage(), anyOf(containsString("no such index [foo]"), containsString("no such index [remote_cluster:foo]"))); - if (EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()) { + if (EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()) { e = expectThrows( ResponseException.class, () -> runEsql(timestampFilter("gte", "2020-01-01").query(from("test1") + " | LOOKUP JOIN foo ON id1")) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index 7b2395030a536..43d397c3d3764 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -8,7 +8,7 @@ ############################################### basicOnTheDataNode -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | EVAL language_code = languages @@ -25,7 +25,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; basicRow -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW language_code = 1 | LOOKUP JOIN languages_lookup ON language_code @@ -36,7 +36,7 @@ language_code:integer | language_name:keyword ; basicOnTheCoordinator -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | SORT emp_no @@ -53,7 +53,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; subsequentEvalOnTheDataNode -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | EVAL language_code = languages @@ -71,7 +71,7 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x ; subsequentEvalOnTheCoordinator -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | SORT emp_no @@ -89,7 +89,7 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x ; sortEvalBeforeLookup -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | SORT emp_no @@ -106,7 +106,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; repeatedIndexOnFrom -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM languages_lookup | LOOKUP JOIN languages_lookup ON language_code @@ -121,7 +121,7 @@ language_code:integer | language_name:keyword ; nonUniqueLeftKeyOnTheDataNode -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | WHERE emp_no <= 10030 @@ -149,7 +149,7 @@ emp_no:integer | language_code:integer | language_name:keyword ########################################################################### nonUniqueRightKeyOnTheDataNode -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | EVAL language_code = emp_no % 10 @@ -173,7 +173,7 @@ emp_no:integer | language_code:integer | language_name:keyword | country:text ; nonUniqueRightKeyOnTheCoordinator -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | SORT emp_no @@ -199,7 +199,7 @@ emp_no:integer | 
language_code:integer | language_name:keyword | country:text nonUniqueRightKeyOnTheCoordinatorCorrectOrdering // Same as above, but don't ignore the order completely. At least the emp_no col must remain correctly ordered. -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | SORT emp_no @@ -223,7 +223,7 @@ emp_no:integer | language_code:integer ; nonUniqueRightKeyFromRow -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW language_code = 2 | LOOKUP JOIN languages_lookup_non_unique_key ON language_code @@ -238,7 +238,7 @@ language_code:integer | country:text | language_name:keyword ; keepFieldNotInLookupOnTheDataNode -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | EVAL language_code = emp_no % 10 @@ -256,7 +256,7 @@ emp_no:integer ; dropAllFieldsUsedInLookupOnTheCoordinator -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | WHERE emp_no == 10001 @@ -278,7 +278,7 @@ emp_no:integer ########################################################################### nullJoinKeyOnTheDataNode -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | WHERE emp_no < 10004 @@ -300,7 +300,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; mvJoinKeyOnTheLookupIndex -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | WHERE 10003 < emp_no AND emp_no < 10008 @@ -319,7 +319,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; mvJoinKeyOnFrom -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 required_capability: join_lookup_skip_mv FROM employees @@ -339,7 +339,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; mvJoinKeyFromRow -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 required_capability: join_lookup_skip_mv ROW language_code = [4, 5, 6, 7] @@ -353,7 +353,7 @@ language_code:integer | language_name:keyword | country:text ; mvJoinKeyFromRowExpanded -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW language_code = [4, 5, 6, 7, 8] | MV_EXPAND language_code @@ -376,7 +376,7 @@ language_code:integer | language_name:keyword | country:text ############################################### filterOnLeftSide -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | EVAL language_code = languages @@ -393,7 +393,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnRightSide -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -409,7 +409,7 @@ FROM sample_data ; filterOnRightSideAfterStats -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -422,7 +422,7 @@ count:long | type:keyword ; filterOnJoinKey -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | EVAL language_code = languages @@ -437,7 +437,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnJoinKeyAndRightSide -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | WHERE emp_no < 10006 @@ -454,7 +454,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnRightSideOnTheCoordinator -required_capability: join_lookup_v11 +required_capability: 
join_lookup_v12 FROM employees | SORT emp_no @@ -470,7 +470,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnJoinKeyOnTheCoordinator -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | SORT emp_no @@ -486,7 +486,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnJoinKeyAndRightSideOnTheCoordinator -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | SORT emp_no @@ -503,7 +503,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnTheDataNodeThenFilterOnTheCoordinator -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | EVAL language_code = languages @@ -524,7 +524,7 @@ emp_no:integer | language_code:integer | language_name:keyword ########################################################################### joinOnNestedField -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM employees | WHERE 10000 < emp_no AND emp_no < 10006 @@ -544,7 +544,7 @@ emp_no:integer | language.id:integer | language.name:text joinOnNestedFieldRow -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW language.code = "EN" | LOOKUP JOIN languages_nested_fields ON language.code @@ -557,7 +557,7 @@ language.id:integer | language.code:keyword | language.name.keyword:keyword joinOnNestedNestedFieldRow -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW language.name.keyword = "English" | LOOKUP JOIN languages_nested_fields ON language.name.keyword @@ -569,7 +569,7 @@ language.id:integer | language.name:text | language.name.keyword:keyword ; joinOnNestedNestedFieldRowExplicitKeyword -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 required_capability: lookup_join_text ROW language.name.keyword = "English" @@ -582,7 +582,7 @@ language.id:integer | language.name:text | language.name.keyword:keyword ; joinOnNestedNestedFieldRowExplicitKeywords -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 required_capability: lookup_join_text ROW language.name.keyword = ["English", "French"] @@ -601,7 +601,7 @@ language.id:integer | language.name:text | language.name.keyword:keyword | langu ############################################### lookupIPFromRow -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", client_ip = "172.21.0.5", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -612,7 +612,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromKeepRow -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", client_ip = "172.21.0.5", right = "right" | KEEP left, client_ip, right @@ -624,7 +624,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowing -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -635,7 +635,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowingKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -648,7 +648,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowingKeepReordered -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 
ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -661,7 +661,7 @@ right | Development | 172.21.0.5 ; lookupIPFromIndex -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -680,7 +680,7 @@ ignoreOrder:true ; lookupIPFromIndexKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -700,7 +700,7 @@ ignoreOrder:true ; lookupIPFromIndexKeepKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | KEEP client_ip, event_duration, @timestamp, message @@ -722,7 +722,7 @@ timestamp:date | client_ip:keyword | event_duration:long | msg:keyword ; lookupIPFromIndexStats -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -738,7 +738,7 @@ count:long | env:keyword ; lookupIPFromIndexStatsKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -755,7 +755,7 @@ count:long | env:keyword ; statsAndLookupIPFromIndex -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -776,7 +776,7 @@ count:long | client_ip:keyword | env:keyword ############################################### lookupMessageFromRow -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -787,7 +787,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromKeepRow -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", message = "Connected to 10.1.0.1", right = "right" | KEEP left, message, right @@ -799,7 +799,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromRowWithShadowing -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -810,7 +810,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromRowWithShadowingKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -822,7 +822,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromIndex -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -840,7 +840,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -859,7 +859,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeepKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | KEEP client_ip, event_duration, @timestamp, message @@ -879,7 +879,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeepReordered -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -898,7 +898,7 @@ Success | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 ; 
lookupMessageFromIndexStats -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -913,7 +913,7 @@ count:long | type:keyword ; lookupMessageFromIndexStatsKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -929,7 +929,7 @@ count:long | type:keyword ; statsAndLookupMessageFromIndex -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | STATS count = count(message) BY message @@ -947,7 +947,7 @@ count:long | type:keyword | message:keyword ; lookupMessageFromIndexTwice -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -969,7 +969,7 @@ ignoreOrder:true ; lookupMessageFromIndexTwiceKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -992,7 +992,7 @@ ignoreOrder:true ; lookupMessageFromIndexTwiceFullyShadowing -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -1016,7 +1016,7 @@ ignoreOrder:true ############################################### lookupIPAndMessageFromRow -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -1028,7 +1028,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowKeepBefore -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | KEEP left, client_ip, message, right @@ -1041,7 +1041,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowKeepBetween -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -1054,7 +1054,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowKeepAfter -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -1067,7 +1067,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowing -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", type = "type", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -1079,7 +1079,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -1093,7 +1093,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeepKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", client_ip = "172.21.0.5", 
message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -1108,7 +1108,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeepKeepKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -1124,7 +1124,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeepReordered -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -1138,7 +1138,7 @@ right | Development | Success | 172.21.0.5 ; lookupIPAndMessageFromIndex -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1158,7 +1158,7 @@ ignoreOrder:true ; lookupIPAndMessageFromIndexKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1179,7 +1179,7 @@ ignoreOrder:true ; lookupIPAndMessageFromIndexStats -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1197,7 +1197,7 @@ count:long | env:keyword | type:keyword ; lookupIPAndMessageFromIndexStatsKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1216,7 +1216,7 @@ count:long | env:keyword | type:keyword ; statsAndLookupIPAndMessageFromIndex -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1235,7 +1235,7 @@ count:long | client_ip:keyword | message:keyword | env:keyword | type:keyw ; lookupIPAndMessageFromIndexChainedEvalKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1257,7 +1257,7 @@ ignoreOrder:true ; lookupIPAndMessageFromIndexChainedRenameKeep -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1280,7 +1280,7 @@ ignoreOrder:true lookupIndexInFromRepeatedRowBug // Test for https://github.com/elastic/elasticsearch/issues/118852 -required_capability: join_lookup_v11 +required_capability: join_lookup_v12 FROM languages_lookup_non_unique_key | WHERE language_code == 1 | LOOKUP JOIN languages_lookup ON language_code @@ -1294,3 +1294,20 @@ language_code:integer | language_name:keyword | country:text 1 | English | United States of America 1 | English | null ; + +lookupIndexQuoting +required_capability: join_lookup_v12 +FROM languages_lookup_non_unique_key +| WHERE language_code == 1 +| LOOKUP JOIN "languages_lookup" ON language_code +| LOOKUP JOIN """languages_lookup""" ON language_code +| KEEP language_code, language_name, country +| SORT language_code, language_name, country +; + +language_code:integer | language_name:keyword | country:text +1 | English | Canada +1 | English | United Kingdom +1 | English | United States of America +1 | English | null +; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 72fb491cdd982..12d990550f0f9 100644 --- 
a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -561,6 +561,10 @@ JOIN_AS : AS -> type(AS); JOIN_ON : ON -> type(ON), popMode, pushMode(EXPRESSION_MODE); USING : 'USING' -> popMode, pushMode(EXPRESSION_MODE); +JOIN_UNQUOTED_SOURCE: UNQUOTED_SOURCE -> type(UNQUOTED_SOURCE); +JOIN_QUOTED_SOURCE : QUOTED_STRING -> type(QUOTED_STRING); +JOIN_COLON : COLON -> type(COLON); + JOIN_UNQUOTED_IDENTIFER: UNQUOTED_IDENTIFIER -> type(UNQUOTED_IDENTIFIER); JOIN_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER); diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index e12904a25b131..e72c0fdafd73c 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -328,7 +328,7 @@ joinCommand ; joinTarget - : index=identifier (AS alias=identifier)? + : index=indexPattern (AS alias=identifier)? ; joinCondition diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index e4c591f8f6b19..12a25c9ce2453 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -691,7 +691,7 @@ public enum Cap { /** * LOOKUP JOIN */ - JOIN_LOOKUP_V11(Build.current().isSnapshot()), + JOIN_LOOKUP_V12(Build.current().isSnapshot()), /** * LOOKUP JOIN with TEXT fields on the right (right side of the join) (#119473) @@ -701,7 +701,7 @@ public enum Cap { /** * LOOKUP JOIN without MV matching (https://github.com/elastic/elasticsearch/issues/118780) */ - JOIN_LOOKUP_SKIP_MV(JOIN_LOOKUP_V11.isEnabled()), + JOIN_LOOKUP_SKIP_MV(JOIN_LOOKUP_V12.isEnabled()), /** * Fix for https://github.com/elastic/elasticsearch/issues/117054 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 256bb094b45b7..92274ebe15513 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -460,6 +460,9 @@ JOIN_JOIN JOIN_AS JOIN_ON USING +JOIN_UNQUOTED_SOURCE +JOIN_QUOTED_SOURCE +JOIN_COLON JOIN_UNQUOTED_IDENTIFER JOIN_QUOTED_IDENTIFIER JOIN_LINE_COMMENT @@ -504,4 +507,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 130, 1611, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 
7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 2, 214, 7, 214, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 
1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 658, 8, 24, 11, 24, 12, 24, 659, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 668, 8, 25, 10, 25, 12, 25, 671, 9, 25, 1, 25, 3, 25, 674, 8, 25, 1, 25, 3, 25, 677, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 686, 8, 26, 10, 26, 12, 26, 689, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 697, 8, 27, 11, 27, 12, 27, 698, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 718, 8, 33, 1, 33, 4, 33, 721, 8, 33, 11, 33, 12, 33, 722, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 732, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 739, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 744, 8, 39, 10, 39, 12, 39, 747, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 755, 8, 39, 10, 39, 12, 39, 758, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 765, 8, 39, 1, 39, 3, 39, 768, 8, 39, 3, 39, 770, 8, 39, 1, 40, 4, 40, 773, 8, 40, 11, 40, 12, 40, 774, 1, 41, 4, 41, 778, 8, 41, 11, 41, 12, 41, 779, 1, 41, 1, 41, 5, 41, 784, 8, 41, 10, 41, 12, 41, 787, 9, 41, 1, 41, 1, 41, 4, 41, 791, 8, 41, 11, 41, 12, 41, 792, 1, 41, 4, 41, 796, 8, 41, 11, 41, 12, 41, 797, 1, 41, 1, 41, 5, 41, 802, 8, 41, 10, 41, 12, 41, 805, 9, 41, 3, 41, 807, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 813, 8, 41, 11, 41, 12, 41, 814, 1, 41, 1, 41, 3, 41, 819, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 3, 81, 953, 8, 81, 1, 81, 5, 81, 956, 8, 81, 10, 81, 12, 81, 959, 9, 81, 1, 81, 1, 81, 4, 81, 963, 8, 81, 11, 81, 12, 81, 964, 3, 81, 967, 8, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 5, 84, 981, 8, 84, 10, 84, 12, 84, 984, 9, 84, 1, 84, 1, 84, 3, 84, 988, 8, 84, 1, 84, 4, 84, 991, 8, 84, 11, 84, 12, 84, 992, 3, 84, 995, 8, 84, 1, 85, 1, 85, 4, 85, 999, 8, 85, 11, 85, 12, 85, 1000, 1, 85, 1, 85, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 3, 102, 
1078, 8, 102, 1, 103, 4, 103, 1081, 8, 103, 11, 103, 12, 103, 1082, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 3, 114, 1132, 8, 114, 1, 115, 1, 115, 3, 115, 1136, 8, 115, 1, 115, 5, 115, 1139, 8, 115, 10, 115, 12, 115, 1142, 9, 115, 1, 115, 1, 115, 3, 115, 1146, 8, 115, 1, 115, 4, 115, 1149, 8, 115, 11, 115, 12, 115, 1150, 3, 115, 1153, 8, 115, 1, 116, 1, 116, 4, 116, 1157, 8, 116, 11, 116, 12, 116, 1158, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 136, 4, 136, 1244, 8, 136, 11, 136, 12, 136, 1245, 1, 136, 1, 136, 3, 136, 1250, 8, 136, 1, 136, 4, 136, 1253, 8, 136, 11, 136, 12, 136, 1254, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 4, 169, 1400, 8, 169, 11, 169, 12, 169, 1401, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 
193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 213, 1, 213, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 2, 687, 756, 0, 215, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 69, 174, 70, 176, 0, 178, 71, 180, 72, 182, 73, 184, 74, 186, 0, 188, 75, 190, 76, 192, 77, 194, 78, 196, 0, 198, 0, 200, 79, 202, 80, 204, 81, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 82, 220, 0, 222, 83, 224, 0, 226, 0, 228, 84, 230, 85, 232, 86, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 0, 246, 0, 248, 87, 250, 88, 252, 89, 254, 90, 256, 0, 258, 0, 260, 0, 262, 0, 264, 0, 266, 0, 268, 91, 270, 0, 272, 92, 274, 93, 276, 94, 278, 0, 280, 0, 282, 95, 284, 96, 286, 0, 288, 97, 290, 0, 292, 98, 294, 99, 296, 100, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 0, 314, 0, 316, 101, 318, 102, 320, 103, 322, 0, 324, 0, 326, 0, 328, 0, 330, 0, 332, 0, 334, 104, 336, 105, 338, 106, 340, 0, 342, 107, 344, 108, 346, 109, 348, 110, 350, 0, 352, 0, 354, 111, 356, 112, 358, 113, 360, 114, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 0, 374, 0, 376, 115, 378, 116, 380, 117, 382, 0, 384, 0, 386, 0, 388, 0, 390, 118, 392, 119, 394, 120, 396, 0, 398, 0, 400, 0, 402, 0, 404, 121, 406, 0, 408, 0, 410, 122, 412, 123, 414, 124, 416, 0, 418, 0, 420, 0, 422, 125, 424, 126, 426, 127, 428, 0, 430, 0, 432, 128, 434, 129, 436, 130, 438, 0, 440, 0, 442, 0, 444, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 
45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1638, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 182, 1, 0, 0, 0, 1, 184, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 1, 192, 1, 0, 0, 0, 1, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 2, 202, 1, 0, 0, 0, 2, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 3, 230, 1, 0, 0, 0, 3, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 240, 1, 0, 0, 0, 4, 242, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 4, 252, 1, 0, 0, 0, 4, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 5, 274, 1, 0, 0, 0, 5, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 282, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 6, 294, 1, 0, 0, 0, 6, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 7, 318, 1, 0, 0, 0, 7, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 8, 336, 1, 0, 0, 0, 8, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 9, 346, 1, 0, 0, 0, 9, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 10, 358, 1, 0, 0, 0, 10, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 
1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 11, 378, 1, 0, 0, 0, 11, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 12, 392, 1, 0, 0, 0, 12, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 13, 412, 1, 0, 0, 0, 13, 414, 1, 0, 0, 0, 14, 416, 1, 0, 0, 0, 14, 418, 1, 0, 0, 0, 14, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 14, 424, 1, 0, 0, 0, 14, 426, 1, 0, 0, 0, 15, 428, 1, 0, 0, 0, 15, 430, 1, 0, 0, 0, 15, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 15, 442, 1, 0, 0, 0, 15, 444, 1, 0, 0, 0, 16, 446, 1, 0, 0, 0, 18, 456, 1, 0, 0, 0, 20, 463, 1, 0, 0, 0, 22, 472, 1, 0, 0, 0, 24, 479, 1, 0, 0, 0, 26, 489, 1, 0, 0, 0, 28, 496, 1, 0, 0, 0, 30, 503, 1, 0, 0, 0, 32, 510, 1, 0, 0, 0, 34, 518, 1, 0, 0, 0, 36, 530, 1, 0, 0, 0, 38, 539, 1, 0, 0, 0, 40, 545, 1, 0, 0, 0, 42, 552, 1, 0, 0, 0, 44, 559, 1, 0, 0, 0, 46, 567, 1, 0, 0, 0, 48, 575, 1, 0, 0, 0, 50, 590, 1, 0, 0, 0, 52, 602, 1, 0, 0, 0, 54, 613, 1, 0, 0, 0, 56, 621, 1, 0, 0, 0, 58, 629, 1, 0, 0, 0, 60, 637, 1, 0, 0, 0, 62, 646, 1, 0, 0, 0, 64, 657, 1, 0, 0, 0, 66, 663, 1, 0, 0, 0, 68, 680, 1, 0, 0, 0, 70, 696, 1, 0, 0, 0, 72, 702, 1, 0, 0, 0, 74, 706, 1, 0, 0, 0, 76, 708, 1, 0, 0, 0, 78, 710, 1, 0, 0, 0, 80, 713, 1, 0, 0, 0, 82, 715, 1, 0, 0, 0, 84, 724, 1, 0, 0, 0, 86, 726, 1, 0, 0, 0, 88, 731, 1, 0, 0, 0, 90, 733, 1, 0, 0, 0, 92, 738, 1, 0, 0, 0, 94, 769, 1, 0, 0, 0, 96, 772, 1, 0, 0, 0, 98, 818, 1, 0, 0, 0, 100, 820, 1, 0, 0, 0, 102, 823, 1, 0, 0, 0, 104, 827, 1, 0, 0, 0, 106, 831, 1, 0, 0, 0, 108, 833, 1, 0, 0, 0, 110, 836, 1, 0, 0, 0, 112, 838, 1, 0, 0, 0, 114, 840, 1, 0, 0, 0, 116, 845, 1, 0, 0, 0, 118, 847, 1, 0, 0, 0, 120, 853, 1, 0, 0, 0, 122, 859, 1, 0, 0, 0, 124, 862, 1, 0, 0, 0, 126, 865, 1, 0, 0, 0, 128, 870, 1, 0, 0, 0, 130, 875, 1, 0, 0, 0, 132, 877, 1, 0, 0, 0, 134, 881, 1, 0, 0, 0, 136, 886, 1, 0, 0, 0, 138, 892, 1, 0, 0, 0, 140, 895, 1, 0, 0, 0, 142, 897, 1, 0, 0, 0, 144, 903, 1, 0, 0, 0, 146, 905, 1, 0, 0, 0, 148, 910, 1, 0, 0, 0, 150, 913, 1, 0, 0, 0, 152, 916, 1, 0, 0, 0, 154, 919, 1, 0, 0, 0, 156, 921, 1, 0, 0, 0, 158, 924, 1, 0, 0, 0, 160, 926, 1, 0, 0, 0, 162, 929, 1, 0, 0, 0, 164, 931, 1, 0, 0, 0, 166, 933, 1, 0, 0, 0, 168, 935, 1, 0, 0, 0, 170, 937, 1, 0, 0, 0, 172, 939, 1, 0, 0, 0, 174, 942, 1, 0, 0, 0, 176, 945, 1, 0, 0, 0, 178, 966, 1, 0, 0, 0, 180, 968, 1, 0, 0, 0, 182, 973, 1, 0, 0, 0, 184, 994, 1, 0, 0, 0, 186, 996, 1, 0, 0, 0, 188, 1004, 1, 0, 0, 0, 190, 1006, 1, 0, 0, 0, 192, 1010, 1, 0, 0, 0, 194, 1014, 1, 0, 0, 0, 196, 1018, 1, 0, 0, 0, 198, 1023, 1, 0, 0, 0, 200, 1028, 1, 0, 0, 0, 202, 1032, 1, 0, 0, 0, 204, 1036, 1, 0, 0, 0, 206, 1040, 1, 0, 0, 0, 208, 1045, 1, 0, 0, 0, 210, 1049, 1, 0, 0, 0, 212, 1053, 1, 0, 0, 0, 214, 1057, 1, 0, 0, 0, 216, 1061, 1, 0, 0, 0, 218, 1065, 1, 0, 0, 0, 220, 1077, 1, 0, 0, 0, 222, 1080, 1, 0, 0, 0, 224, 1084, 1, 0, 0, 0, 226, 1088, 1, 0, 0, 0, 228, 1092, 1, 0, 0, 0, 230, 1096, 1, 0, 0, 0, 232, 1100, 1, 0, 0, 0, 234, 1104, 1, 0, 0, 0, 236, 1109, 1, 0, 0, 0, 238, 1113, 1, 0, 0, 0, 240, 1117, 1, 0, 0, 0, 242, 1122, 1, 0, 0, 0, 244, 1131, 1, 0, 0, 0, 246, 1152, 1, 0, 0, 0, 248, 1156, 1, 0, 0, 0, 250, 1160, 1, 0, 0, 0, 252, 1164, 1, 0, 0, 0, 254, 1168, 1, 0, 0, 0, 256, 1172, 1, 0, 0, 0, 258, 1177, 1, 0, 0, 0, 260, 1181, 1, 0, 0, 0, 262, 1185, 1, 0, 0, 0, 264, 1189, 1, 0, 0, 0, 266, 1194, 1, 0, 0, 0, 268, 1199, 1, 0, 0, 0, 270, 1202, 1, 
0, 0, 0, 272, 1206, 1, 0, 0, 0, 274, 1210, 1, 0, 0, 0, 276, 1214, 1, 0, 0, 0, 278, 1218, 1, 0, 0, 0, 280, 1223, 1, 0, 0, 0, 282, 1228, 1, 0, 0, 0, 284, 1233, 1, 0, 0, 0, 286, 1240, 1, 0, 0, 0, 288, 1249, 1, 0, 0, 0, 290, 1256, 1, 0, 0, 0, 292, 1260, 1, 0, 0, 0, 294, 1264, 1, 0, 0, 0, 296, 1268, 1, 0, 0, 0, 298, 1272, 1, 0, 0, 0, 300, 1278, 1, 0, 0, 0, 302, 1282, 1, 0, 0, 0, 304, 1286, 1, 0, 0, 0, 306, 1290, 1, 0, 0, 0, 308, 1294, 1, 0, 0, 0, 310, 1298, 1, 0, 0, 0, 312, 1302, 1, 0, 0, 0, 314, 1307, 1, 0, 0, 0, 316, 1312, 1, 0, 0, 0, 318, 1316, 1, 0, 0, 0, 320, 1320, 1, 0, 0, 0, 322, 1324, 1, 0, 0, 0, 324, 1329, 1, 0, 0, 0, 326, 1333, 1, 0, 0, 0, 328, 1338, 1, 0, 0, 0, 330, 1343, 1, 0, 0, 0, 332, 1347, 1, 0, 0, 0, 334, 1351, 1, 0, 0, 0, 336, 1355, 1, 0, 0, 0, 338, 1359, 1, 0, 0, 0, 340, 1363, 1, 0, 0, 0, 342, 1368, 1, 0, 0, 0, 344, 1373, 1, 0, 0, 0, 346, 1377, 1, 0, 0, 0, 348, 1381, 1, 0, 0, 0, 350, 1385, 1, 0, 0, 0, 352, 1390, 1, 0, 0, 0, 354, 1399, 1, 0, 0, 0, 356, 1403, 1, 0, 0, 0, 358, 1407, 1, 0, 0, 0, 360, 1411, 1, 0, 0, 0, 362, 1415, 1, 0, 0, 0, 364, 1420, 1, 0, 0, 0, 366, 1424, 1, 0, 0, 0, 368, 1428, 1, 0, 0, 0, 370, 1432, 1, 0, 0, 0, 372, 1437, 1, 0, 0, 0, 374, 1441, 1, 0, 0, 0, 376, 1445, 1, 0, 0, 0, 378, 1449, 1, 0, 0, 0, 380, 1453, 1, 0, 0, 0, 382, 1457, 1, 0, 0, 0, 384, 1463, 1, 0, 0, 0, 386, 1467, 1, 0, 0, 0, 388, 1471, 1, 0, 0, 0, 390, 1475, 1, 0, 0, 0, 392, 1479, 1, 0, 0, 0, 394, 1483, 1, 0, 0, 0, 396, 1487, 1, 0, 0, 0, 398, 1492, 1, 0, 0, 0, 400, 1496, 1, 0, 0, 0, 402, 1500, 1, 0, 0, 0, 404, 1506, 1, 0, 0, 0, 406, 1515, 1, 0, 0, 0, 408, 1519, 1, 0, 0, 0, 410, 1523, 1, 0, 0, 0, 412, 1527, 1, 0, 0, 0, 414, 1531, 1, 0, 0, 0, 416, 1535, 1, 0, 0, 0, 418, 1540, 1, 0, 0, 0, 420, 1546, 1, 0, 0, 0, 422, 1552, 1, 0, 0, 0, 424, 1556, 1, 0, 0, 0, 426, 1560, 1, 0, 0, 0, 428, 1564, 1, 0, 0, 0, 430, 1570, 1, 0, 0, 0, 432, 1576, 1, 0, 0, 0, 434, 1580, 1, 0, 0, 0, 436, 1584, 1, 0, 0, 0, 438, 1588, 1, 0, 0, 0, 440, 1594, 1, 0, 0, 0, 442, 1600, 1, 0, 0, 0, 444, 1606, 1, 0, 0, 0, 446, 447, 7, 0, 0, 0, 447, 448, 7, 1, 0, 0, 448, 449, 7, 2, 0, 0, 449, 450, 7, 2, 0, 0, 450, 451, 7, 3, 0, 0, 451, 452, 7, 4, 0, 0, 452, 453, 7, 5, 0, 0, 453, 454, 1, 0, 0, 0, 454, 455, 6, 0, 0, 0, 455, 17, 1, 0, 0, 0, 456, 457, 7, 0, 0, 0, 457, 458, 7, 6, 0, 0, 458, 459, 7, 7, 0, 0, 459, 460, 7, 8, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 6, 1, 1, 0, 462, 19, 1, 0, 0, 0, 463, 464, 7, 3, 0, 0, 464, 465, 7, 9, 0, 0, 465, 466, 7, 6, 0, 0, 466, 467, 7, 1, 0, 0, 467, 468, 7, 4, 0, 0, 468, 469, 7, 10, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 6, 2, 2, 0, 471, 21, 1, 0, 0, 0, 472, 473, 7, 3, 0, 0, 473, 474, 7, 11, 0, 0, 474, 475, 7, 12, 0, 0, 475, 476, 7, 13, 0, 0, 476, 477, 1, 0, 0, 0, 477, 478, 6, 3, 0, 0, 478, 23, 1, 0, 0, 0, 479, 480, 7, 3, 0, 0, 480, 481, 7, 14, 0, 0, 481, 482, 7, 8, 0, 0, 482, 483, 7, 13, 0, 0, 483, 484, 7, 12, 0, 0, 484, 485, 7, 1, 0, 0, 485, 486, 7, 9, 0, 0, 486, 487, 1, 0, 0, 0, 487, 488, 6, 4, 3, 0, 488, 25, 1, 0, 0, 0, 489, 490, 7, 15, 0, 0, 490, 491, 7, 6, 0, 0, 491, 492, 7, 7, 0, 0, 492, 493, 7, 16, 0, 0, 493, 494, 1, 0, 0, 0, 494, 495, 6, 5, 4, 0, 495, 27, 1, 0, 0, 0, 496, 497, 7, 17, 0, 0, 497, 498, 7, 6, 0, 0, 498, 499, 7, 7, 0, 0, 499, 500, 7, 18, 0, 0, 500, 501, 1, 0, 0, 0, 501, 502, 6, 6, 0, 0, 502, 29, 1, 0, 0, 0, 503, 504, 7, 18, 0, 0, 504, 505, 7, 3, 0, 0, 505, 506, 7, 3, 0, 0, 506, 507, 7, 8, 0, 0, 507, 508, 1, 0, 0, 0, 508, 509, 6, 7, 1, 0, 509, 31, 1, 0, 0, 0, 510, 511, 7, 13, 0, 0, 511, 512, 7, 1, 0, 0, 512, 513, 7, 16, 0, 0, 513, 514, 7, 1, 0, 0, 514, 515, 7, 5, 0, 0, 515, 516, 1, 0, 0, 
0, 516, 517, 6, 8, 0, 0, 517, 33, 1, 0, 0, 0, 518, 519, 7, 16, 0, 0, 519, 520, 7, 11, 0, 0, 520, 521, 5, 95, 0, 0, 521, 522, 7, 3, 0, 0, 522, 523, 7, 14, 0, 0, 523, 524, 7, 8, 0, 0, 524, 525, 7, 12, 0, 0, 525, 526, 7, 9, 0, 0, 526, 527, 7, 0, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 9, 5, 0, 529, 35, 1, 0, 0, 0, 530, 531, 7, 6, 0, 0, 531, 532, 7, 3, 0, 0, 532, 533, 7, 9, 0, 0, 533, 534, 7, 12, 0, 0, 534, 535, 7, 16, 0, 0, 535, 536, 7, 3, 0, 0, 536, 537, 1, 0, 0, 0, 537, 538, 6, 10, 6, 0, 538, 37, 1, 0, 0, 0, 539, 540, 7, 6, 0, 0, 540, 541, 7, 7, 0, 0, 541, 542, 7, 19, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 6, 11, 0, 0, 544, 39, 1, 0, 0, 0, 545, 546, 7, 2, 0, 0, 546, 547, 7, 10, 0, 0, 547, 548, 7, 7, 0, 0, 548, 549, 7, 19, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 6, 12, 7, 0, 551, 41, 1, 0, 0, 0, 552, 553, 7, 2, 0, 0, 553, 554, 7, 7, 0, 0, 554, 555, 7, 6, 0, 0, 555, 556, 7, 5, 0, 0, 556, 557, 1, 0, 0, 0, 557, 558, 6, 13, 0, 0, 558, 43, 1, 0, 0, 0, 559, 560, 7, 2, 0, 0, 560, 561, 7, 5, 0, 0, 561, 562, 7, 12, 0, 0, 562, 563, 7, 5, 0, 0, 563, 564, 7, 2, 0, 0, 564, 565, 1, 0, 0, 0, 565, 566, 6, 14, 0, 0, 566, 45, 1, 0, 0, 0, 567, 568, 7, 19, 0, 0, 568, 569, 7, 10, 0, 0, 569, 570, 7, 3, 0, 0, 570, 571, 7, 6, 0, 0, 571, 572, 7, 3, 0, 0, 572, 573, 1, 0, 0, 0, 573, 574, 6, 15, 0, 0, 574, 47, 1, 0, 0, 0, 575, 576, 4, 16, 0, 0, 576, 577, 7, 1, 0, 0, 577, 578, 7, 9, 0, 0, 578, 579, 7, 13, 0, 0, 579, 580, 7, 1, 0, 0, 580, 581, 7, 9, 0, 0, 581, 582, 7, 3, 0, 0, 582, 583, 7, 2, 0, 0, 583, 584, 7, 5, 0, 0, 584, 585, 7, 12, 0, 0, 585, 586, 7, 5, 0, 0, 586, 587, 7, 2, 0, 0, 587, 588, 1, 0, 0, 0, 588, 589, 6, 16, 0, 0, 589, 49, 1, 0, 0, 0, 590, 591, 4, 17, 1, 0, 591, 592, 7, 13, 0, 0, 592, 593, 7, 7, 0, 0, 593, 594, 7, 7, 0, 0, 594, 595, 7, 18, 0, 0, 595, 596, 7, 20, 0, 0, 596, 597, 7, 8, 0, 0, 597, 598, 5, 95, 0, 0, 598, 599, 5, 128020, 0, 0, 599, 600, 1, 0, 0, 0, 600, 601, 6, 17, 8, 0, 601, 51, 1, 0, 0, 0, 602, 603, 4, 18, 2, 0, 603, 604, 7, 16, 0, 0, 604, 605, 7, 3, 0, 0, 605, 606, 7, 5, 0, 0, 606, 607, 7, 6, 0, 0, 607, 608, 7, 1, 0, 0, 608, 609, 7, 4, 0, 0, 609, 610, 7, 2, 0, 0, 610, 611, 1, 0, 0, 0, 611, 612, 6, 18, 9, 0, 612, 53, 1, 0, 0, 0, 613, 614, 4, 19, 3, 0, 614, 615, 7, 21, 0, 0, 615, 616, 7, 7, 0, 0, 616, 617, 7, 1, 0, 0, 617, 618, 7, 9, 0, 0, 618, 619, 1, 0, 0, 0, 619, 620, 6, 19, 10, 0, 620, 55, 1, 0, 0, 0, 621, 622, 4, 20, 4, 0, 622, 623, 7, 15, 0, 0, 623, 624, 7, 20, 0, 0, 624, 625, 7, 13, 0, 0, 625, 626, 7, 13, 0, 0, 626, 627, 1, 0, 0, 0, 627, 628, 6, 20, 10, 0, 628, 57, 1, 0, 0, 0, 629, 630, 4, 21, 5, 0, 630, 631, 7, 13, 0, 0, 631, 632, 7, 3, 0, 0, 632, 633, 7, 15, 0, 0, 633, 634, 7, 5, 0, 0, 634, 635, 1, 0, 0, 0, 635, 636, 6, 21, 10, 0, 636, 59, 1, 0, 0, 0, 637, 638, 4, 22, 6, 0, 638, 639, 7, 6, 0, 0, 639, 640, 7, 1, 0, 0, 640, 641, 7, 17, 0, 0, 641, 642, 7, 10, 0, 0, 642, 643, 7, 5, 0, 0, 643, 644, 1, 0, 0, 0, 644, 645, 6, 22, 10, 0, 645, 61, 1, 0, 0, 0, 646, 647, 4, 23, 7, 0, 647, 648, 7, 13, 0, 0, 648, 649, 7, 7, 0, 0, 649, 650, 7, 7, 0, 0, 650, 651, 7, 18, 0, 0, 651, 652, 7, 20, 0, 0, 652, 653, 7, 8, 0, 0, 653, 654, 1, 0, 0, 0, 654, 655, 6, 23, 10, 0, 655, 63, 1, 0, 0, 0, 656, 658, 8, 22, 0, 0, 657, 656, 1, 0, 0, 0, 658, 659, 1, 0, 0, 0, 659, 657, 1, 0, 0, 0, 659, 660, 1, 0, 0, 0, 660, 661, 1, 0, 0, 0, 661, 662, 6, 24, 0, 0, 662, 65, 1, 0, 0, 0, 663, 664, 5, 47, 0, 0, 664, 665, 5, 47, 0, 0, 665, 669, 1, 0, 0, 0, 666, 668, 8, 23, 0, 0, 667, 666, 1, 0, 0, 0, 668, 671, 1, 0, 0, 0, 669, 667, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 673, 1, 0, 0, 0, 671, 669, 1, 0, 0, 0, 672, 674, 5, 13, 0, 0, 
673, 672, 1, 0, 0, 0, 673, 674, 1, 0, 0, 0, 674, 676, 1, 0, 0, 0, 675, 677, 5, 10, 0, 0, 676, 675, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 677, 678, 1, 0, 0, 0, 678, 679, 6, 25, 11, 0, 679, 67, 1, 0, 0, 0, 680, 681, 5, 47, 0, 0, 681, 682, 5, 42, 0, 0, 682, 687, 1, 0, 0, 0, 683, 686, 3, 68, 26, 0, 684, 686, 9, 0, 0, 0, 685, 683, 1, 0, 0, 0, 685, 684, 1, 0, 0, 0, 686, 689, 1, 0, 0, 0, 687, 688, 1, 0, 0, 0, 687, 685, 1, 0, 0, 0, 688, 690, 1, 0, 0, 0, 689, 687, 1, 0, 0, 0, 690, 691, 5, 42, 0, 0, 691, 692, 5, 47, 0, 0, 692, 693, 1, 0, 0, 0, 693, 694, 6, 26, 11, 0, 694, 69, 1, 0, 0, 0, 695, 697, 7, 24, 0, 0, 696, 695, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 698, 696, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 6, 27, 11, 0, 701, 71, 1, 0, 0, 0, 702, 703, 5, 124, 0, 0, 703, 704, 1, 0, 0, 0, 704, 705, 6, 28, 12, 0, 705, 73, 1, 0, 0, 0, 706, 707, 7, 25, 0, 0, 707, 75, 1, 0, 0, 0, 708, 709, 7, 26, 0, 0, 709, 77, 1, 0, 0, 0, 710, 711, 5, 92, 0, 0, 711, 712, 7, 27, 0, 0, 712, 79, 1, 0, 0, 0, 713, 714, 8, 28, 0, 0, 714, 81, 1, 0, 0, 0, 715, 717, 7, 3, 0, 0, 716, 718, 7, 29, 0, 0, 717, 716, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 720, 1, 0, 0, 0, 719, 721, 3, 74, 29, 0, 720, 719, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 722, 720, 1, 0, 0, 0, 722, 723, 1, 0, 0, 0, 723, 83, 1, 0, 0, 0, 724, 725, 5, 64, 0, 0, 725, 85, 1, 0, 0, 0, 726, 727, 5, 96, 0, 0, 727, 87, 1, 0, 0, 0, 728, 732, 8, 30, 0, 0, 729, 730, 5, 96, 0, 0, 730, 732, 5, 96, 0, 0, 731, 728, 1, 0, 0, 0, 731, 729, 1, 0, 0, 0, 732, 89, 1, 0, 0, 0, 733, 734, 5, 95, 0, 0, 734, 91, 1, 0, 0, 0, 735, 739, 3, 76, 30, 0, 736, 739, 3, 74, 29, 0, 737, 739, 3, 90, 37, 0, 738, 735, 1, 0, 0, 0, 738, 736, 1, 0, 0, 0, 738, 737, 1, 0, 0, 0, 739, 93, 1, 0, 0, 0, 740, 745, 5, 34, 0, 0, 741, 744, 3, 78, 31, 0, 742, 744, 3, 80, 32, 0, 743, 741, 1, 0, 0, 0, 743, 742, 1, 0, 0, 0, 744, 747, 1, 0, 0, 0, 745, 743, 1, 0, 0, 0, 745, 746, 1, 0, 0, 0, 746, 748, 1, 0, 0, 0, 747, 745, 1, 0, 0, 0, 748, 770, 5, 34, 0, 0, 749, 750, 5, 34, 0, 0, 750, 751, 5, 34, 0, 0, 751, 752, 5, 34, 0, 0, 752, 756, 1, 0, 0, 0, 753, 755, 8, 23, 0, 0, 754, 753, 1, 0, 0, 0, 755, 758, 1, 0, 0, 0, 756, 757, 1, 0, 0, 0, 756, 754, 1, 0, 0, 0, 757, 759, 1, 0, 0, 0, 758, 756, 1, 0, 0, 0, 759, 760, 5, 34, 0, 0, 760, 761, 5, 34, 0, 0, 761, 762, 5, 34, 0, 0, 762, 764, 1, 0, 0, 0, 763, 765, 5, 34, 0, 0, 764, 763, 1, 0, 0, 0, 764, 765, 1, 0, 0, 0, 765, 767, 1, 0, 0, 0, 766, 768, 5, 34, 0, 0, 767, 766, 1, 0, 0, 0, 767, 768, 1, 0, 0, 0, 768, 770, 1, 0, 0, 0, 769, 740, 1, 0, 0, 0, 769, 749, 1, 0, 0, 0, 770, 95, 1, 0, 0, 0, 771, 773, 3, 74, 29, 0, 772, 771, 1, 0, 0, 0, 773, 774, 1, 0, 0, 0, 774, 772, 1, 0, 0, 0, 774, 775, 1, 0, 0, 0, 775, 97, 1, 0, 0, 0, 776, 778, 3, 74, 29, 0, 777, 776, 1, 0, 0, 0, 778, 779, 1, 0, 0, 0, 779, 777, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 785, 3, 116, 50, 0, 782, 784, 3, 74, 29, 0, 783, 782, 1, 0, 0, 0, 784, 787, 1, 0, 0, 0, 785, 783, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 819, 1, 0, 0, 0, 787, 785, 1, 0, 0, 0, 788, 790, 3, 116, 50, 0, 789, 791, 3, 74, 29, 0, 790, 789, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 790, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 819, 1, 0, 0, 0, 794, 796, 3, 74, 29, 0, 795, 794, 1, 0, 0, 0, 796, 797, 1, 0, 0, 0, 797, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 806, 1, 0, 0, 0, 799, 803, 3, 116, 50, 0, 800, 802, 3, 74, 29, 0, 801, 800, 1, 0, 0, 0, 802, 805, 1, 0, 0, 0, 803, 801, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 807, 1, 0, 0, 0, 805, 803, 1, 0, 0, 0, 806, 799, 1, 0, 0, 0, 806, 807, 1, 0, 0, 0, 807, 808, 1, 0, 0, 0, 808, 809, 3, 82, 33, 
0, 809, 819, 1, 0, 0, 0, 810, 812, 3, 116, 50, 0, 811, 813, 3, 74, 29, 0, 812, 811, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 812, 1, 0, 0, 0, 814, 815, 1, 0, 0, 0, 815, 816, 1, 0, 0, 0, 816, 817, 3, 82, 33, 0, 817, 819, 1, 0, 0, 0, 818, 777, 1, 0, 0, 0, 818, 788, 1, 0, 0, 0, 818, 795, 1, 0, 0, 0, 818, 810, 1, 0, 0, 0, 819, 99, 1, 0, 0, 0, 820, 821, 7, 31, 0, 0, 821, 822, 7, 32, 0, 0, 822, 101, 1, 0, 0, 0, 823, 824, 7, 12, 0, 0, 824, 825, 7, 9, 0, 0, 825, 826, 7, 0, 0, 0, 826, 103, 1, 0, 0, 0, 827, 828, 7, 12, 0, 0, 828, 829, 7, 2, 0, 0, 829, 830, 7, 4, 0, 0, 830, 105, 1, 0, 0, 0, 831, 832, 5, 61, 0, 0, 832, 107, 1, 0, 0, 0, 833, 834, 5, 58, 0, 0, 834, 835, 5, 58, 0, 0, 835, 109, 1, 0, 0, 0, 836, 837, 5, 58, 0, 0, 837, 111, 1, 0, 0, 0, 838, 839, 5, 44, 0, 0, 839, 113, 1, 0, 0, 0, 840, 841, 7, 0, 0, 0, 841, 842, 7, 3, 0, 0, 842, 843, 7, 2, 0, 0, 843, 844, 7, 4, 0, 0, 844, 115, 1, 0, 0, 0, 845, 846, 5, 46, 0, 0, 846, 117, 1, 0, 0, 0, 847, 848, 7, 15, 0, 0, 848, 849, 7, 12, 0, 0, 849, 850, 7, 13, 0, 0, 850, 851, 7, 2, 0, 0, 851, 852, 7, 3, 0, 0, 852, 119, 1, 0, 0, 0, 853, 854, 7, 15, 0, 0, 854, 855, 7, 1, 0, 0, 855, 856, 7, 6, 0, 0, 856, 857, 7, 2, 0, 0, 857, 858, 7, 5, 0, 0, 858, 121, 1, 0, 0, 0, 859, 860, 7, 1, 0, 0, 860, 861, 7, 9, 0, 0, 861, 123, 1, 0, 0, 0, 862, 863, 7, 1, 0, 0, 863, 864, 7, 2, 0, 0, 864, 125, 1, 0, 0, 0, 865, 866, 7, 13, 0, 0, 866, 867, 7, 12, 0, 0, 867, 868, 7, 2, 0, 0, 868, 869, 7, 5, 0, 0, 869, 127, 1, 0, 0, 0, 870, 871, 7, 13, 0, 0, 871, 872, 7, 1, 0, 0, 872, 873, 7, 18, 0, 0, 873, 874, 7, 3, 0, 0, 874, 129, 1, 0, 0, 0, 875, 876, 5, 40, 0, 0, 876, 131, 1, 0, 0, 0, 877, 878, 7, 9, 0, 0, 878, 879, 7, 7, 0, 0, 879, 880, 7, 5, 0, 0, 880, 133, 1, 0, 0, 0, 881, 882, 7, 9, 0, 0, 882, 883, 7, 20, 0, 0, 883, 884, 7, 13, 0, 0, 884, 885, 7, 13, 0, 0, 885, 135, 1, 0, 0, 0, 886, 887, 7, 9, 0, 0, 887, 888, 7, 20, 0, 0, 888, 889, 7, 13, 0, 0, 889, 890, 7, 13, 0, 0, 890, 891, 7, 2, 0, 0, 891, 137, 1, 0, 0, 0, 892, 893, 7, 7, 0, 0, 893, 894, 7, 6, 0, 0, 894, 139, 1, 0, 0, 0, 895, 896, 5, 63, 0, 0, 896, 141, 1, 0, 0, 0, 897, 898, 7, 6, 0, 0, 898, 899, 7, 13, 0, 0, 899, 900, 7, 1, 0, 0, 900, 901, 7, 18, 0, 0, 901, 902, 7, 3, 0, 0, 902, 143, 1, 0, 0, 0, 903, 904, 5, 41, 0, 0, 904, 145, 1, 0, 0, 0, 905, 906, 7, 5, 0, 0, 906, 907, 7, 6, 0, 0, 907, 908, 7, 20, 0, 0, 908, 909, 7, 3, 0, 0, 909, 147, 1, 0, 0, 0, 910, 911, 5, 61, 0, 0, 911, 912, 5, 61, 0, 0, 912, 149, 1, 0, 0, 0, 913, 914, 5, 61, 0, 0, 914, 915, 5, 126, 0, 0, 915, 151, 1, 0, 0, 0, 916, 917, 5, 33, 0, 0, 917, 918, 5, 61, 0, 0, 918, 153, 1, 0, 0, 0, 919, 920, 5, 60, 0, 0, 920, 155, 1, 0, 0, 0, 921, 922, 5, 60, 0, 0, 922, 923, 5, 61, 0, 0, 923, 157, 1, 0, 0, 0, 924, 925, 5, 62, 0, 0, 925, 159, 1, 0, 0, 0, 926, 927, 5, 62, 0, 0, 927, 928, 5, 61, 0, 0, 928, 161, 1, 0, 0, 0, 929, 930, 5, 43, 0, 0, 930, 163, 1, 0, 0, 0, 931, 932, 5, 45, 0, 0, 932, 165, 1, 0, 0, 0, 933, 934, 5, 42, 0, 0, 934, 167, 1, 0, 0, 0, 935, 936, 5, 47, 0, 0, 936, 169, 1, 0, 0, 0, 937, 938, 5, 37, 0, 0, 938, 171, 1, 0, 0, 0, 939, 940, 4, 78, 8, 0, 940, 941, 5, 123, 0, 0, 941, 173, 1, 0, 0, 0, 942, 943, 4, 79, 9, 0, 943, 944, 5, 125, 0, 0, 944, 175, 1, 0, 0, 0, 945, 946, 3, 46, 15, 0, 946, 947, 1, 0, 0, 0, 947, 948, 6, 80, 13, 0, 948, 177, 1, 0, 0, 0, 949, 952, 3, 140, 62, 0, 950, 953, 3, 76, 30, 0, 951, 953, 3, 90, 37, 0, 952, 950, 1, 0, 0, 0, 952, 951, 1, 0, 0, 0, 953, 957, 1, 0, 0, 0, 954, 956, 3, 92, 38, 0, 955, 954, 1, 0, 0, 0, 956, 959, 1, 0, 0, 0, 957, 955, 1, 0, 0, 0, 957, 958, 1, 0, 0, 0, 958, 967, 1, 0, 0, 0, 959, 957, 1, 0, 0, 0, 960, 962, 3, 140, 62, 0, 
961, 963, 3, 74, 29, 0, 962, 961, 1, 0, 0, 0, 963, 964, 1, 0, 0, 0, 964, 962, 1, 0, 0, 0, 964, 965, 1, 0, 0, 0, 965, 967, 1, 0, 0, 0, 966, 949, 1, 0, 0, 0, 966, 960, 1, 0, 0, 0, 967, 179, 1, 0, 0, 0, 968, 969, 5, 91, 0, 0, 969, 970, 1, 0, 0, 0, 970, 971, 6, 82, 0, 0, 971, 972, 6, 82, 0, 0, 972, 181, 1, 0, 0, 0, 973, 974, 5, 93, 0, 0, 974, 975, 1, 0, 0, 0, 975, 976, 6, 83, 12, 0, 976, 977, 6, 83, 12, 0, 977, 183, 1, 0, 0, 0, 978, 982, 3, 76, 30, 0, 979, 981, 3, 92, 38, 0, 980, 979, 1, 0, 0, 0, 981, 984, 1, 0, 0, 0, 982, 980, 1, 0, 0, 0, 982, 983, 1, 0, 0, 0, 983, 995, 1, 0, 0, 0, 984, 982, 1, 0, 0, 0, 985, 988, 3, 90, 37, 0, 986, 988, 3, 84, 34, 0, 987, 985, 1, 0, 0, 0, 987, 986, 1, 0, 0, 0, 988, 990, 1, 0, 0, 0, 989, 991, 3, 92, 38, 0, 990, 989, 1, 0, 0, 0, 991, 992, 1, 0, 0, 0, 992, 990, 1, 0, 0, 0, 992, 993, 1, 0, 0, 0, 993, 995, 1, 0, 0, 0, 994, 978, 1, 0, 0, 0, 994, 987, 1, 0, 0, 0, 995, 185, 1, 0, 0, 0, 996, 998, 3, 86, 35, 0, 997, 999, 3, 88, 36, 0, 998, 997, 1, 0, 0, 0, 999, 1000, 1, 0, 0, 0, 1000, 998, 1, 0, 0, 0, 1000, 1001, 1, 0, 0, 0, 1001, 1002, 1, 0, 0, 0, 1002, 1003, 3, 86, 35, 0, 1003, 187, 1, 0, 0, 0, 1004, 1005, 3, 186, 85, 0, 1005, 189, 1, 0, 0, 0, 1006, 1007, 3, 66, 25, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 87, 11, 0, 1009, 191, 1, 0, 0, 0, 1010, 1011, 3, 68, 26, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 88, 11, 0, 1013, 193, 1, 0, 0, 0, 1014, 1015, 3, 70, 27, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 89, 11, 0, 1017, 195, 1, 0, 0, 0, 1018, 1019, 3, 180, 82, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 90, 14, 0, 1021, 1022, 6, 90, 15, 0, 1022, 197, 1, 0, 0, 0, 1023, 1024, 3, 72, 28, 0, 1024, 1025, 1, 0, 0, 0, 1025, 1026, 6, 91, 16, 0, 1026, 1027, 6, 91, 12, 0, 1027, 199, 1, 0, 0, 0, 1028, 1029, 3, 70, 27, 0, 1029, 1030, 1, 0, 0, 0, 1030, 1031, 6, 92, 11, 0, 1031, 201, 1, 0, 0, 0, 1032, 1033, 3, 66, 25, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1035, 6, 93, 11, 0, 1035, 203, 1, 0, 0, 0, 1036, 1037, 3, 68, 26, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 94, 11, 0, 1039, 205, 1, 0, 0, 0, 1040, 1041, 3, 72, 28, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 95, 16, 0, 1043, 1044, 6, 95, 12, 0, 1044, 207, 1, 0, 0, 0, 1045, 1046, 3, 180, 82, 0, 1046, 1047, 1, 0, 0, 0, 1047, 1048, 6, 96, 14, 0, 1048, 209, 1, 0, 0, 0, 1049, 1050, 3, 182, 83, 0, 1050, 1051, 1, 0, 0, 0, 1051, 1052, 6, 97, 17, 0, 1052, 211, 1, 0, 0, 0, 1053, 1054, 3, 110, 47, 0, 1054, 1055, 1, 0, 0, 0, 1055, 1056, 6, 98, 18, 0, 1056, 213, 1, 0, 0, 0, 1057, 1058, 3, 112, 48, 0, 1058, 1059, 1, 0, 0, 0, 1059, 1060, 6, 99, 19, 0, 1060, 215, 1, 0, 0, 0, 1061, 1062, 3, 106, 45, 0, 1062, 1063, 1, 0, 0, 0, 1063, 1064, 6, 100, 20, 0, 1064, 217, 1, 0, 0, 0, 1065, 1066, 7, 16, 0, 0, 1066, 1067, 7, 3, 0, 0, 1067, 1068, 7, 5, 0, 0, 1068, 1069, 7, 12, 0, 0, 1069, 1070, 7, 0, 0, 0, 1070, 1071, 7, 12, 0, 0, 1071, 1072, 7, 5, 0, 0, 1072, 1073, 7, 12, 0, 0, 1073, 219, 1, 0, 0, 0, 1074, 1078, 8, 33, 0, 0, 1075, 1076, 5, 47, 0, 0, 1076, 1078, 8, 34, 0, 0, 1077, 1074, 1, 0, 0, 0, 1077, 1075, 1, 0, 0, 0, 1078, 221, 1, 0, 0, 0, 1079, 1081, 3, 220, 102, 0, 1080, 1079, 1, 0, 0, 0, 1081, 1082, 1, 0, 0, 0, 1082, 1080, 1, 0, 0, 0, 1082, 1083, 1, 0, 0, 0, 1083, 223, 1, 0, 0, 0, 1084, 1085, 3, 222, 103, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1087, 6, 104, 21, 0, 1087, 225, 1, 0, 0, 0, 1088, 1089, 3, 94, 39, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 105, 22, 0, 1091, 227, 1, 0, 0, 0, 1092, 1093, 3, 66, 25, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 106, 11, 0, 1095, 229, 1, 0, 0, 0, 1096, 1097, 3, 68, 26, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 107, 11, 0, 1099, 
231, 1, 0, 0, 0, 1100, 1101, 3, 70, 27, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 108, 11, 0, 1103, 233, 1, 0, 0, 0, 1104, 1105, 3, 72, 28, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 109, 16, 0, 1107, 1108, 6, 109, 12, 0, 1108, 235, 1, 0, 0, 0, 1109, 1110, 3, 116, 50, 0, 1110, 1111, 1, 0, 0, 0, 1111, 1112, 6, 110, 23, 0, 1112, 237, 1, 0, 0, 0, 1113, 1114, 3, 112, 48, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 111, 19, 0, 1116, 239, 1, 0, 0, 0, 1117, 1118, 4, 112, 10, 0, 1118, 1119, 3, 140, 62, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1121, 6, 112, 24, 0, 1121, 241, 1, 0, 0, 0, 1122, 1123, 4, 113, 11, 0, 1123, 1124, 3, 178, 81, 0, 1124, 1125, 1, 0, 0, 0, 1125, 1126, 6, 113, 25, 0, 1126, 243, 1, 0, 0, 0, 1127, 1132, 3, 76, 30, 0, 1128, 1132, 3, 74, 29, 0, 1129, 1132, 3, 90, 37, 0, 1130, 1132, 3, 166, 75, 0, 1131, 1127, 1, 0, 0, 0, 1131, 1128, 1, 0, 0, 0, 1131, 1129, 1, 0, 0, 0, 1131, 1130, 1, 0, 0, 0, 1132, 245, 1, 0, 0, 0, 1133, 1136, 3, 76, 30, 0, 1134, 1136, 3, 166, 75, 0, 1135, 1133, 1, 0, 0, 0, 1135, 1134, 1, 0, 0, 0, 1136, 1140, 1, 0, 0, 0, 1137, 1139, 3, 244, 114, 0, 1138, 1137, 1, 0, 0, 0, 1139, 1142, 1, 0, 0, 0, 1140, 1138, 1, 0, 0, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1153, 1, 0, 0, 0, 1142, 1140, 1, 0, 0, 0, 1143, 1146, 3, 90, 37, 0, 1144, 1146, 3, 84, 34, 0, 1145, 1143, 1, 0, 0, 0, 1145, 1144, 1, 0, 0, 0, 1146, 1148, 1, 0, 0, 0, 1147, 1149, 3, 244, 114, 0, 1148, 1147, 1, 0, 0, 0, 1149, 1150, 1, 0, 0, 0, 1150, 1148, 1, 0, 0, 0, 1150, 1151, 1, 0, 0, 0, 1151, 1153, 1, 0, 0, 0, 1152, 1135, 1, 0, 0, 0, 1152, 1145, 1, 0, 0, 0, 1153, 247, 1, 0, 0, 0, 1154, 1157, 3, 246, 115, 0, 1155, 1157, 3, 186, 85, 0, 1156, 1154, 1, 0, 0, 0, 1156, 1155, 1, 0, 0, 0, 1157, 1158, 1, 0, 0, 0, 1158, 1156, 1, 0, 0, 0, 1158, 1159, 1, 0, 0, 0, 1159, 249, 1, 0, 0, 0, 1160, 1161, 3, 66, 25, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1163, 6, 117, 11, 0, 1163, 251, 1, 0, 0, 0, 1164, 1165, 3, 68, 26, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1167, 6, 118, 11, 0, 1167, 253, 1, 0, 0, 0, 1168, 1169, 3, 70, 27, 0, 1169, 1170, 1, 0, 0, 0, 1170, 1171, 6, 119, 11, 0, 1171, 255, 1, 0, 0, 0, 1172, 1173, 3, 72, 28, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1175, 6, 120, 16, 0, 1175, 1176, 6, 120, 12, 0, 1176, 257, 1, 0, 0, 0, 1177, 1178, 3, 106, 45, 0, 1178, 1179, 1, 0, 0, 0, 1179, 1180, 6, 121, 20, 0, 1180, 259, 1, 0, 0, 0, 1181, 1182, 3, 112, 48, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1184, 6, 122, 19, 0, 1184, 261, 1, 0, 0, 0, 1185, 1186, 3, 116, 50, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 123, 23, 0, 1188, 263, 1, 0, 0, 0, 1189, 1190, 4, 124, 12, 0, 1190, 1191, 3, 140, 62, 0, 1191, 1192, 1, 0, 0, 0, 1192, 1193, 6, 124, 24, 0, 1193, 265, 1, 0, 0, 0, 1194, 1195, 4, 125, 13, 0, 1195, 1196, 3, 178, 81, 0, 1196, 1197, 1, 0, 0, 0, 1197, 1198, 6, 125, 25, 0, 1198, 267, 1, 0, 0, 0, 1199, 1200, 7, 12, 0, 0, 1200, 1201, 7, 2, 0, 0, 1201, 269, 1, 0, 0, 0, 1202, 1203, 3, 248, 116, 0, 1203, 1204, 1, 0, 0, 0, 1204, 1205, 6, 127, 26, 0, 1205, 271, 1, 0, 0, 0, 1206, 1207, 3, 66, 25, 0, 1207, 1208, 1, 0, 0, 0, 1208, 1209, 6, 128, 11, 0, 1209, 273, 1, 0, 0, 0, 1210, 1211, 3, 68, 26, 0, 1211, 1212, 1, 0, 0, 0, 1212, 1213, 6, 129, 11, 0, 1213, 275, 1, 0, 0, 0, 1214, 1215, 3, 70, 27, 0, 1215, 1216, 1, 0, 0, 0, 1216, 1217, 6, 130, 11, 0, 1217, 277, 1, 0, 0, 0, 1218, 1219, 3, 72, 28, 0, 1219, 1220, 1, 0, 0, 0, 1220, 1221, 6, 131, 16, 0, 1221, 1222, 6, 131, 12, 0, 1222, 279, 1, 0, 0, 0, 1223, 1224, 3, 180, 82, 0, 1224, 1225, 1, 0, 0, 0, 1225, 1226, 6, 132, 14, 0, 1226, 1227, 6, 132, 27, 0, 1227, 281, 1, 0, 0, 0, 1228, 1229, 7, 7, 0, 0, 1229, 1230, 7, 9, 0, 0, 1230, 1231, 1, 0, 0, 0, 
1231, 1232, 6, 133, 28, 0, 1232, 283, 1, 0, 0, 0, 1233, 1234, 7, 19, 0, 0, 1234, 1235, 7, 1, 0, 0, 1235, 1236, 7, 5, 0, 0, 1236, 1237, 7, 10, 0, 0, 1237, 1238, 1, 0, 0, 0, 1238, 1239, 6, 134, 28, 0, 1239, 285, 1, 0, 0, 0, 1240, 1241, 8, 35, 0, 0, 1241, 287, 1, 0, 0, 0, 1242, 1244, 3, 286, 135, 0, 1243, 1242, 1, 0, 0, 0, 1244, 1245, 1, 0, 0, 0, 1245, 1243, 1, 0, 0, 0, 1245, 1246, 1, 0, 0, 0, 1246, 1247, 1, 0, 0, 0, 1247, 1248, 3, 110, 47, 0, 1248, 1250, 1, 0, 0, 0, 1249, 1243, 1, 0, 0, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1252, 1, 0, 0, 0, 1251, 1253, 3, 286, 135, 0, 1252, 1251, 1, 0, 0, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1252, 1, 0, 0, 0, 1254, 1255, 1, 0, 0, 0, 1255, 289, 1, 0, 0, 0, 1256, 1257, 3, 288, 136, 0, 1257, 1258, 1, 0, 0, 0, 1258, 1259, 6, 137, 29, 0, 1259, 291, 1, 0, 0, 0, 1260, 1261, 3, 66, 25, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 138, 11, 0, 1263, 293, 1, 0, 0, 0, 1264, 1265, 3, 68, 26, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 139, 11, 0, 1267, 295, 1, 0, 0, 0, 1268, 1269, 3, 70, 27, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 140, 11, 0, 1271, 297, 1, 0, 0, 0, 1272, 1273, 3, 72, 28, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 141, 16, 0, 1275, 1276, 6, 141, 12, 0, 1276, 1277, 6, 141, 12, 0, 1277, 299, 1, 0, 0, 0, 1278, 1279, 3, 106, 45, 0, 1279, 1280, 1, 0, 0, 0, 1280, 1281, 6, 142, 20, 0, 1281, 301, 1, 0, 0, 0, 1282, 1283, 3, 112, 48, 0, 1283, 1284, 1, 0, 0, 0, 1284, 1285, 6, 143, 19, 0, 1285, 303, 1, 0, 0, 0, 1286, 1287, 3, 116, 50, 0, 1287, 1288, 1, 0, 0, 0, 1288, 1289, 6, 144, 23, 0, 1289, 305, 1, 0, 0, 0, 1290, 1291, 3, 284, 134, 0, 1291, 1292, 1, 0, 0, 0, 1292, 1293, 6, 145, 30, 0, 1293, 307, 1, 0, 0, 0, 1294, 1295, 3, 248, 116, 0, 1295, 1296, 1, 0, 0, 0, 1296, 1297, 6, 146, 26, 0, 1297, 309, 1, 0, 0, 0, 1298, 1299, 3, 188, 86, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 147, 31, 0, 1301, 311, 1, 0, 0, 0, 1302, 1303, 4, 148, 14, 0, 1303, 1304, 3, 140, 62, 0, 1304, 1305, 1, 0, 0, 0, 1305, 1306, 6, 148, 24, 0, 1306, 313, 1, 0, 0, 0, 1307, 1308, 4, 149, 15, 0, 1308, 1309, 3, 178, 81, 0, 1309, 1310, 1, 0, 0, 0, 1310, 1311, 6, 149, 25, 0, 1311, 315, 1, 0, 0, 0, 1312, 1313, 3, 66, 25, 0, 1313, 1314, 1, 0, 0, 0, 1314, 1315, 6, 150, 11, 0, 1315, 317, 1, 0, 0, 0, 1316, 1317, 3, 68, 26, 0, 1317, 1318, 1, 0, 0, 0, 1318, 1319, 6, 151, 11, 0, 1319, 319, 1, 0, 0, 0, 1320, 1321, 3, 70, 27, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1323, 6, 152, 11, 0, 1323, 321, 1, 0, 0, 0, 1324, 1325, 3, 72, 28, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 153, 16, 0, 1327, 1328, 6, 153, 12, 0, 1328, 323, 1, 0, 0, 0, 1329, 1330, 3, 116, 50, 0, 1330, 1331, 1, 0, 0, 0, 1331, 1332, 6, 154, 23, 0, 1332, 325, 1, 0, 0, 0, 1333, 1334, 4, 155, 16, 0, 1334, 1335, 3, 140, 62, 0, 1335, 1336, 1, 0, 0, 0, 1336, 1337, 6, 155, 24, 0, 1337, 327, 1, 0, 0, 0, 1338, 1339, 4, 156, 17, 0, 1339, 1340, 3, 178, 81, 0, 1340, 1341, 1, 0, 0, 0, 1341, 1342, 6, 156, 25, 0, 1342, 329, 1, 0, 0, 0, 1343, 1344, 3, 188, 86, 0, 1344, 1345, 1, 0, 0, 0, 1345, 1346, 6, 157, 31, 0, 1346, 331, 1, 0, 0, 0, 1347, 1348, 3, 184, 84, 0, 1348, 1349, 1, 0, 0, 0, 1349, 1350, 6, 158, 32, 0, 1350, 333, 1, 0, 0, 0, 1351, 1352, 3, 66, 25, 0, 1352, 1353, 1, 0, 0, 0, 1353, 1354, 6, 159, 11, 0, 1354, 335, 1, 0, 0, 0, 1355, 1356, 3, 68, 26, 0, 1356, 1357, 1, 0, 0, 0, 1357, 1358, 6, 160, 11, 0, 1358, 337, 1, 0, 0, 0, 1359, 1360, 3, 70, 27, 0, 1360, 1361, 1, 0, 0, 0, 1361, 1362, 6, 161, 11, 0, 1362, 339, 1, 0, 0, 0, 1363, 1364, 3, 72, 28, 0, 1364, 1365, 1, 0, 0, 0, 1365, 1366, 6, 162, 16, 0, 1366, 1367, 6, 162, 12, 0, 1367, 341, 1, 0, 0, 0, 1368, 1369, 7, 1, 0, 0, 1369, 
1370, 7, 9, 0, 0, 1370, 1371, 7, 15, 0, 0, 1371, 1372, 7, 7, 0, 0, 1372, 343, 1, 0, 0, 0, 1373, 1374, 3, 66, 25, 0, 1374, 1375, 1, 0, 0, 0, 1375, 1376, 6, 164, 11, 0, 1376, 345, 1, 0, 0, 0, 1377, 1378, 3, 68, 26, 0, 1378, 1379, 1, 0, 0, 0, 1379, 1380, 6, 165, 11, 0, 1380, 347, 1, 0, 0, 0, 1381, 1382, 3, 70, 27, 0, 1382, 1383, 1, 0, 0, 0, 1383, 1384, 6, 166, 11, 0, 1384, 349, 1, 0, 0, 0, 1385, 1386, 3, 182, 83, 0, 1386, 1387, 1, 0, 0, 0, 1387, 1388, 6, 167, 17, 0, 1388, 1389, 6, 167, 12, 0, 1389, 351, 1, 0, 0, 0, 1390, 1391, 3, 110, 47, 0, 1391, 1392, 1, 0, 0, 0, 1392, 1393, 6, 168, 18, 0, 1393, 353, 1, 0, 0, 0, 1394, 1400, 3, 84, 34, 0, 1395, 1400, 3, 74, 29, 0, 1396, 1400, 3, 116, 50, 0, 1397, 1400, 3, 76, 30, 0, 1398, 1400, 3, 90, 37, 0, 1399, 1394, 1, 0, 0, 0, 1399, 1395, 1, 0, 0, 0, 1399, 1396, 1, 0, 0, 0, 1399, 1397, 1, 0, 0, 0, 1399, 1398, 1, 0, 0, 0, 1400, 1401, 1, 0, 0, 0, 1401, 1399, 1, 0, 0, 0, 1401, 1402, 1, 0, 0, 0, 1402, 355, 1, 0, 0, 0, 1403, 1404, 3, 66, 25, 0, 1404, 1405, 1, 0, 0, 0, 1405, 1406, 6, 170, 11, 0, 1406, 357, 1, 0, 0, 0, 1407, 1408, 3, 68, 26, 0, 1408, 1409, 1, 0, 0, 0, 1409, 1410, 6, 171, 11, 0, 1410, 359, 1, 0, 0, 0, 1411, 1412, 3, 70, 27, 0, 1412, 1413, 1, 0, 0, 0, 1413, 1414, 6, 172, 11, 0, 1414, 361, 1, 0, 0, 0, 1415, 1416, 3, 72, 28, 0, 1416, 1417, 1, 0, 0, 0, 1417, 1418, 6, 173, 16, 0, 1418, 1419, 6, 173, 12, 0, 1419, 363, 1, 0, 0, 0, 1420, 1421, 3, 110, 47, 0, 1421, 1422, 1, 0, 0, 0, 1422, 1423, 6, 174, 18, 0, 1423, 365, 1, 0, 0, 0, 1424, 1425, 3, 112, 48, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 6, 175, 19, 0, 1427, 367, 1, 0, 0, 0, 1428, 1429, 3, 116, 50, 0, 1429, 1430, 1, 0, 0, 0, 1430, 1431, 6, 176, 23, 0, 1431, 369, 1, 0, 0, 0, 1432, 1433, 3, 282, 133, 0, 1433, 1434, 1, 0, 0, 0, 1434, 1435, 6, 177, 33, 0, 1435, 1436, 6, 177, 34, 0, 1436, 371, 1, 0, 0, 0, 1437, 1438, 3, 222, 103, 0, 1438, 1439, 1, 0, 0, 0, 1439, 1440, 6, 178, 21, 0, 1440, 373, 1, 0, 0, 0, 1441, 1442, 3, 94, 39, 0, 1442, 1443, 1, 0, 0, 0, 1443, 1444, 6, 179, 22, 0, 1444, 375, 1, 0, 0, 0, 1445, 1446, 3, 66, 25, 0, 1446, 1447, 1, 0, 0, 0, 1447, 1448, 6, 180, 11, 0, 1448, 377, 1, 0, 0, 0, 1449, 1450, 3, 68, 26, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 181, 11, 0, 1452, 379, 1, 0, 0, 0, 1453, 1454, 3, 70, 27, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 182, 11, 0, 1456, 381, 1, 0, 0, 0, 1457, 1458, 3, 72, 28, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 183, 16, 0, 1460, 1461, 6, 183, 12, 0, 1461, 1462, 6, 183, 12, 0, 1462, 383, 1, 0, 0, 0, 1463, 1464, 3, 112, 48, 0, 1464, 1465, 1, 0, 0, 0, 1465, 1466, 6, 184, 19, 0, 1466, 385, 1, 0, 0, 0, 1467, 1468, 3, 116, 50, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 6, 185, 23, 0, 1470, 387, 1, 0, 0, 0, 1471, 1472, 3, 248, 116, 0, 1472, 1473, 1, 0, 0, 0, 1473, 1474, 6, 186, 26, 0, 1474, 389, 1, 0, 0, 0, 1475, 1476, 3, 66, 25, 0, 1476, 1477, 1, 0, 0, 0, 1477, 1478, 6, 187, 11, 0, 1478, 391, 1, 0, 0, 0, 1479, 1480, 3, 68, 26, 0, 1480, 1481, 1, 0, 0, 0, 1481, 1482, 6, 188, 11, 0, 1482, 393, 1, 0, 0, 0, 1483, 1484, 3, 70, 27, 0, 1484, 1485, 1, 0, 0, 0, 1485, 1486, 6, 189, 11, 0, 1486, 395, 1, 0, 0, 0, 1487, 1488, 3, 72, 28, 0, 1488, 1489, 1, 0, 0, 0, 1489, 1490, 6, 190, 16, 0, 1490, 1491, 6, 190, 12, 0, 1491, 397, 1, 0, 0, 0, 1492, 1493, 3, 54, 19, 0, 1493, 1494, 1, 0, 0, 0, 1494, 1495, 6, 191, 35, 0, 1495, 399, 1, 0, 0, 0, 1496, 1497, 3, 268, 126, 0, 1497, 1498, 1, 0, 0, 0, 1498, 1499, 6, 192, 36, 0, 1499, 401, 1, 0, 0, 0, 1500, 1501, 3, 282, 133, 0, 1501, 1502, 1, 0, 0, 0, 1502, 1503, 6, 193, 33, 0, 1503, 1504, 6, 193, 12, 0, 1504, 1505, 6, 193, 0, 0, 1505, 
403, 1, 0, 0, 0, 1506, 1507, 7, 20, 0, 0, 1507, 1508, 7, 2, 0, 0, 1508, 1509, 7, 1, 0, 0, 1509, 1510, 7, 9, 0, 0, 1510, 1511, 7, 17, 0, 0, 1511, 1512, 1, 0, 0, 0, 1512, 1513, 6, 194, 12, 0, 1513, 1514, 6, 194, 0, 0, 1514, 405, 1, 0, 0, 0, 1515, 1516, 3, 184, 84, 0, 1516, 1517, 1, 0, 0, 0, 1517, 1518, 6, 195, 32, 0, 1518, 407, 1, 0, 0, 0, 1519, 1520, 3, 188, 86, 0, 1520, 1521, 1, 0, 0, 0, 1521, 1522, 6, 196, 31, 0, 1522, 409, 1, 0, 0, 0, 1523, 1524, 3, 66, 25, 0, 1524, 1525, 1, 0, 0, 0, 1525, 1526, 6, 197, 11, 0, 1526, 411, 1, 0, 0, 0, 1527, 1528, 3, 68, 26, 0, 1528, 1529, 1, 0, 0, 0, 1529, 1530, 6, 198, 11, 0, 1530, 413, 1, 0, 0, 0, 1531, 1532, 3, 70, 27, 0, 1532, 1533, 1, 0, 0, 0, 1533, 1534, 6, 199, 11, 0, 1534, 415, 1, 0, 0, 0, 1535, 1536, 3, 72, 28, 0, 1536, 1537, 1, 0, 0, 0, 1537, 1538, 6, 200, 16, 0, 1538, 1539, 6, 200, 12, 0, 1539, 417, 1, 0, 0, 0, 1540, 1541, 3, 222, 103, 0, 1541, 1542, 1, 0, 0, 0, 1542, 1543, 6, 201, 21, 0, 1543, 1544, 6, 201, 12, 0, 1544, 1545, 6, 201, 37, 0, 1545, 419, 1, 0, 0, 0, 1546, 1547, 3, 94, 39, 0, 1547, 1548, 1, 0, 0, 0, 1548, 1549, 6, 202, 22, 0, 1549, 1550, 6, 202, 12, 0, 1550, 1551, 6, 202, 37, 0, 1551, 421, 1, 0, 0, 0, 1552, 1553, 3, 66, 25, 0, 1553, 1554, 1, 0, 0, 0, 1554, 1555, 6, 203, 11, 0, 1555, 423, 1, 0, 0, 0, 1556, 1557, 3, 68, 26, 0, 1557, 1558, 1, 0, 0, 0, 1558, 1559, 6, 204, 11, 0, 1559, 425, 1, 0, 0, 0, 1560, 1561, 3, 70, 27, 0, 1561, 1562, 1, 0, 0, 0, 1562, 1563, 6, 205, 11, 0, 1563, 427, 1, 0, 0, 0, 1564, 1565, 3, 110, 47, 0, 1565, 1566, 1, 0, 0, 0, 1566, 1567, 6, 206, 18, 0, 1567, 1568, 6, 206, 12, 0, 1568, 1569, 6, 206, 9, 0, 1569, 429, 1, 0, 0, 0, 1570, 1571, 3, 112, 48, 0, 1571, 1572, 1, 0, 0, 0, 1572, 1573, 6, 207, 19, 0, 1573, 1574, 6, 207, 12, 0, 1574, 1575, 6, 207, 9, 0, 1575, 431, 1, 0, 0, 0, 1576, 1577, 3, 66, 25, 0, 1577, 1578, 1, 0, 0, 0, 1578, 1579, 6, 208, 11, 0, 1579, 433, 1, 0, 0, 0, 1580, 1581, 3, 68, 26, 0, 1581, 1582, 1, 0, 0, 0, 1582, 1583, 6, 209, 11, 0, 1583, 435, 1, 0, 0, 0, 1584, 1585, 3, 70, 27, 0, 1585, 1586, 1, 0, 0, 0, 1586, 1587, 6, 210, 11, 0, 1587, 437, 1, 0, 0, 0, 1588, 1589, 3, 188, 86, 0, 1589, 1590, 1, 0, 0, 0, 1590, 1591, 6, 211, 12, 0, 1591, 1592, 6, 211, 0, 0, 1592, 1593, 6, 211, 31, 0, 1593, 439, 1, 0, 0, 0, 1594, 1595, 3, 184, 84, 0, 1595, 1596, 1, 0, 0, 0, 1596, 1597, 6, 212, 12, 0, 1597, 1598, 6, 212, 0, 0, 1598, 1599, 6, 212, 32, 0, 1599, 441, 1, 0, 0, 0, 1600, 1601, 3, 100, 42, 0, 1601, 1602, 1, 0, 0, 0, 1602, 1603, 6, 213, 12, 0, 1603, 1604, 6, 213, 0, 0, 1604, 1605, 6, 213, 38, 0, 1605, 443, 1, 0, 0, 0, 1606, 1607, 3, 72, 28, 0, 1607, 1608, 1, 0, 0, 0, 1608, 1609, 6, 214, 16, 0, 1609, 1610, 6, 214, 12, 0, 1610, 445, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 659, 669, 673, 676, 685, 687, 698, 717, 722, 731, 738, 743, 745, 756, 764, 767, 769, 774, 779, 785, 792, 797, 803, 806, 814, 818, 952, 957, 964, 966, 982, 987, 992, 994, 1000, 1077, 1082, 1131, 1135, 1140, 1145, 1150, 1152, 1156, 1158, 1245, 1249, 1254, 1399, 1401, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 72, 0, 5, 0, 0, 7, 29, 0, 7, 73, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 83, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 71, 0, 7, 87, 0, 5, 10, 0, 5, 7, 0, 7, 97, 0, 7, 96, 0, 7, 75, 0, 7, 74, 0, 7, 95, 0, 5, 12, 0, 7, 20, 0, 7, 91, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file +[4, 0, 130, 1629, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 
1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 2, 214, 7, 214, 2, 215, 7, 215, 2, 216, 7, 216, 2, 217, 7, 217, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 
1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 664, 8, 24, 11, 24, 12, 24, 665, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 674, 8, 25, 10, 25, 12, 25, 677, 9, 25, 1, 25, 3, 25, 680, 8, 25, 1, 25, 3, 25, 683, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 692, 8, 26, 10, 26, 12, 26, 695, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 703, 8, 27, 11, 27, 12, 27, 704, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 724, 8, 33, 1, 33, 4, 33, 727, 8, 33, 11, 33, 12, 33, 728, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 738, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 745, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 750, 8, 39, 10, 39, 12, 39, 753, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 761, 8, 39, 10, 39, 12, 39, 764, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 771, 8, 39, 1, 39, 3, 39, 774, 8, 39, 3, 39, 776, 8, 39, 1, 40, 4, 40, 779, 8, 40, 11, 40, 12, 40, 780, 1, 41, 4, 41, 784, 8, 41, 11, 41, 12, 41, 785, 1, 41, 1, 41, 5, 41, 790, 8, 41, 10, 41, 12, 41, 793, 9, 41, 1, 41, 1, 41, 4, 41, 797, 8, 41, 11, 41, 12, 41, 798, 1, 41, 4, 41, 802, 8, 41, 11, 41, 12, 41, 803, 1, 41, 1, 41, 5, 41, 808, 8, 41, 10, 41, 12, 41, 811, 9, 41, 3, 41, 813, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 819, 8, 41, 11, 41, 12, 41, 820, 1, 41, 1, 41, 3, 41, 825, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 3, 81, 959, 8, 81, 1, 81, 5, 81, 962, 8, 81, 10, 81, 12, 81, 965, 9, 81, 1, 81, 1, 81, 4, 81, 969, 8, 81, 11, 81, 12, 81, 970, 3, 81, 973, 8, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 
82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 5, 84, 987, 8, 84, 10, 84, 12, 84, 990, 9, 84, 1, 84, 1, 84, 3, 84, 994, 8, 84, 1, 84, 4, 84, 997, 8, 84, 11, 84, 12, 84, 998, 3, 84, 1001, 8, 84, 1, 85, 1, 85, 4, 85, 1005, 8, 85, 11, 85, 12, 85, 1006, 1, 85, 1, 85, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 3, 102, 1084, 8, 102, 1, 103, 4, 103, 1087, 8, 103, 11, 103, 12, 103, 1088, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 3, 114, 1138, 8, 114, 1, 115, 1, 115, 3, 115, 1142, 8, 115, 1, 115, 5, 115, 1145, 8, 115, 10, 115, 12, 115, 1148, 9, 115, 1, 115, 1, 115, 3, 115, 1152, 8, 115, 1, 115, 4, 115, 1155, 8, 115, 11, 115, 12, 115, 1156, 3, 115, 1159, 8, 115, 1, 116, 1, 116, 4, 116, 1163, 8, 116, 11, 116, 12, 116, 1164, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 136, 4, 136, 1250, 8, 136, 11, 136, 12, 136, 1251, 1, 136, 1, 136, 3, 136, 1256, 8, 136, 1, 136, 4, 136, 1259, 8, 136, 11, 136, 12, 136, 1260, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 4, 169, 1406, 8, 169, 11, 169, 12, 169, 1407, 1, 170, 1, 170, 
1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 217, 1, 217, 1, 217, 1, 217, 1, 217, 2, 693, 762, 0, 218, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 69, 174, 70, 176, 0, 178, 71, 180, 72, 182, 73, 184, 74, 186, 0, 188, 75, 190, 76, 192, 77, 194, 78, 196, 0, 198, 0, 200, 79, 202, 80, 204, 81, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 82, 220, 0, 222, 83, 224, 0, 226, 0, 228, 84, 230, 85, 232, 86, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 0, 246, 0, 248, 87, 250, 88, 252, 89, 254, 90, 256, 0, 258, 0, 260, 0, 262, 0, 264, 0, 266, 0, 268, 91, 270, 0, 272, 92, 274, 93, 276, 94, 278, 0, 280, 0, 282, 95, 284, 96, 286, 0, 288, 97, 290, 0, 292, 98, 294, 99, 296, 100, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 0, 314, 0, 316, 101, 318, 102, 320, 103, 322, 0, 324, 0, 326, 0, 328, 0, 330, 0, 332, 0, 334, 104, 336, 105, 338, 106, 340, 0, 342, 107, 344, 108, 346, 109, 348, 110, 350, 0, 352, 0, 354, 111, 356, 112, 358, 113, 360, 114, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 0, 374, 0, 376, 115, 378, 116, 380, 117, 382, 0, 384, 0, 386, 0, 388, 0, 390, 118, 392, 119, 394, 120, 396, 0, 398, 0, 400, 0, 402, 0, 404, 121, 406, 0, 408, 0, 410, 0, 412, 0, 414, 0, 416, 122, 418, 123, 420, 124, 422, 0, 424, 0, 426, 0, 428, 125, 430, 126, 432, 127, 434, 0, 436, 
0, 438, 128, 440, 129, 442, 130, 444, 0, 446, 0, 448, 0, 450, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1656, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 182, 1, 0, 0, 0, 1, 184, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 1, 192, 1, 0, 0, 0, 1, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 2, 202, 1, 0, 0, 0, 2, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 3, 230, 1, 0, 0, 0, 3, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 240, 1, 0, 0, 0, 4, 242, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 4, 252, 1, 0, 0, 0, 4, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 5, 274, 1, 0, 0, 0, 5, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 
0, 6, 282, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 6, 294, 1, 0, 0, 0, 6, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 7, 318, 1, 0, 0, 0, 7, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 8, 336, 1, 0, 0, 0, 8, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 9, 346, 1, 0, 0, 0, 9, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 10, 358, 1, 0, 0, 0, 10, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 11, 378, 1, 0, 0, 0, 11, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 12, 392, 1, 0, 0, 0, 12, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 13, 412, 1, 0, 0, 0, 13, 414, 1, 0, 0, 0, 13, 416, 1, 0, 0, 0, 13, 418, 1, 0, 0, 0, 13, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 14, 424, 1, 0, 0, 0, 14, 426, 1, 0, 0, 0, 14, 428, 1, 0, 0, 0, 14, 430, 1, 0, 0, 0, 14, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 15, 442, 1, 0, 0, 0, 15, 444, 1, 0, 0, 0, 15, 446, 1, 0, 0, 0, 15, 448, 1, 0, 0, 0, 15, 450, 1, 0, 0, 0, 16, 452, 1, 0, 0, 0, 18, 462, 1, 0, 0, 0, 20, 469, 1, 0, 0, 0, 22, 478, 1, 0, 0, 0, 24, 485, 1, 0, 0, 0, 26, 495, 1, 0, 0, 0, 28, 502, 1, 0, 0, 0, 30, 509, 1, 0, 0, 0, 32, 516, 1, 0, 0, 0, 34, 524, 1, 0, 0, 0, 36, 536, 1, 0, 0, 0, 38, 545, 1, 0, 0, 0, 40, 551, 1, 0, 0, 0, 42, 558, 1, 0, 0, 0, 44, 565, 1, 0, 0, 0, 46, 573, 1, 0, 0, 0, 48, 581, 1, 0, 0, 0, 50, 596, 1, 0, 0, 0, 52, 608, 1, 0, 0, 0, 54, 619, 1, 0, 0, 0, 56, 627, 1, 0, 0, 0, 58, 635, 1, 0, 0, 0, 60, 643, 1, 0, 0, 0, 62, 652, 1, 0, 0, 0, 64, 663, 1, 0, 0, 0, 66, 669, 1, 0, 0, 0, 68, 686, 1, 0, 0, 0, 70, 702, 1, 0, 0, 0, 72, 708, 1, 0, 0, 0, 74, 712, 1, 0, 0, 0, 76, 714, 1, 0, 0, 0, 78, 716, 1, 0, 0, 0, 80, 719, 1, 0, 0, 0, 82, 721, 1, 0, 0, 0, 84, 730, 1, 0, 0, 0, 86, 732, 1, 0, 0, 0, 88, 737, 1, 0, 0, 0, 90, 739, 1, 0, 0, 0, 92, 744, 1, 0, 0, 0, 94, 775, 1, 0, 0, 0, 96, 778, 1, 0, 0, 0, 98, 824, 1, 0, 0, 0, 100, 826, 1, 0, 0, 0, 102, 829, 1, 0, 0, 0, 104, 833, 1, 0, 0, 0, 106, 837, 1, 0, 0, 0, 108, 839, 1, 0, 0, 0, 110, 842, 1, 0, 0, 0, 112, 844, 1, 0, 0, 0, 114, 846, 1, 0, 0, 0, 116, 851, 1, 0, 0, 0, 118, 853, 1, 0, 0, 0, 120, 859, 1, 0, 0, 0, 122, 865, 1, 0, 0, 0, 124, 868, 1, 0, 0, 0, 126, 871, 1, 0, 0, 0, 128, 876, 1, 0, 0, 0, 130, 881, 1, 0, 0, 0, 132, 883, 1, 0, 0, 0, 134, 887, 1, 0, 0, 0, 136, 892, 1, 0, 0, 0, 138, 898, 1, 0, 0, 0, 140, 901, 1, 0, 0, 0, 142, 903, 1, 0, 0, 0, 144, 909, 1, 0, 0, 0, 146, 911, 1, 0, 0, 0, 148, 916, 1, 0, 0, 0, 150, 919, 1, 0, 0, 0, 152, 922, 1, 0, 0, 0, 154, 925, 1, 0, 0, 0, 156, 927, 1, 0, 0, 0, 158, 930, 1, 0, 0, 0, 160, 932, 1, 0, 0, 0, 162, 935, 1, 0, 0, 0, 164, 937, 1, 0, 0, 0, 166, 939, 1, 0, 0, 0, 168, 941, 1, 0, 0, 0, 170, 943, 1, 0, 0, 0, 172, 945, 1, 0, 0, 0, 174, 948, 1, 0, 0, 0, 176, 951, 1, 0, 0, 0, 178, 972, 1, 0, 0, 0, 180, 974, 1, 0, 0, 0, 182, 979, 1, 0, 0, 0, 184, 1000, 1, 
0, 0, 0, 186, 1002, 1, 0, 0, 0, 188, 1010, 1, 0, 0, 0, 190, 1012, 1, 0, 0, 0, 192, 1016, 1, 0, 0, 0, 194, 1020, 1, 0, 0, 0, 196, 1024, 1, 0, 0, 0, 198, 1029, 1, 0, 0, 0, 200, 1034, 1, 0, 0, 0, 202, 1038, 1, 0, 0, 0, 204, 1042, 1, 0, 0, 0, 206, 1046, 1, 0, 0, 0, 208, 1051, 1, 0, 0, 0, 210, 1055, 1, 0, 0, 0, 212, 1059, 1, 0, 0, 0, 214, 1063, 1, 0, 0, 0, 216, 1067, 1, 0, 0, 0, 218, 1071, 1, 0, 0, 0, 220, 1083, 1, 0, 0, 0, 222, 1086, 1, 0, 0, 0, 224, 1090, 1, 0, 0, 0, 226, 1094, 1, 0, 0, 0, 228, 1098, 1, 0, 0, 0, 230, 1102, 1, 0, 0, 0, 232, 1106, 1, 0, 0, 0, 234, 1110, 1, 0, 0, 0, 236, 1115, 1, 0, 0, 0, 238, 1119, 1, 0, 0, 0, 240, 1123, 1, 0, 0, 0, 242, 1128, 1, 0, 0, 0, 244, 1137, 1, 0, 0, 0, 246, 1158, 1, 0, 0, 0, 248, 1162, 1, 0, 0, 0, 250, 1166, 1, 0, 0, 0, 252, 1170, 1, 0, 0, 0, 254, 1174, 1, 0, 0, 0, 256, 1178, 1, 0, 0, 0, 258, 1183, 1, 0, 0, 0, 260, 1187, 1, 0, 0, 0, 262, 1191, 1, 0, 0, 0, 264, 1195, 1, 0, 0, 0, 266, 1200, 1, 0, 0, 0, 268, 1205, 1, 0, 0, 0, 270, 1208, 1, 0, 0, 0, 272, 1212, 1, 0, 0, 0, 274, 1216, 1, 0, 0, 0, 276, 1220, 1, 0, 0, 0, 278, 1224, 1, 0, 0, 0, 280, 1229, 1, 0, 0, 0, 282, 1234, 1, 0, 0, 0, 284, 1239, 1, 0, 0, 0, 286, 1246, 1, 0, 0, 0, 288, 1255, 1, 0, 0, 0, 290, 1262, 1, 0, 0, 0, 292, 1266, 1, 0, 0, 0, 294, 1270, 1, 0, 0, 0, 296, 1274, 1, 0, 0, 0, 298, 1278, 1, 0, 0, 0, 300, 1284, 1, 0, 0, 0, 302, 1288, 1, 0, 0, 0, 304, 1292, 1, 0, 0, 0, 306, 1296, 1, 0, 0, 0, 308, 1300, 1, 0, 0, 0, 310, 1304, 1, 0, 0, 0, 312, 1308, 1, 0, 0, 0, 314, 1313, 1, 0, 0, 0, 316, 1318, 1, 0, 0, 0, 318, 1322, 1, 0, 0, 0, 320, 1326, 1, 0, 0, 0, 322, 1330, 1, 0, 0, 0, 324, 1335, 1, 0, 0, 0, 326, 1339, 1, 0, 0, 0, 328, 1344, 1, 0, 0, 0, 330, 1349, 1, 0, 0, 0, 332, 1353, 1, 0, 0, 0, 334, 1357, 1, 0, 0, 0, 336, 1361, 1, 0, 0, 0, 338, 1365, 1, 0, 0, 0, 340, 1369, 1, 0, 0, 0, 342, 1374, 1, 0, 0, 0, 344, 1379, 1, 0, 0, 0, 346, 1383, 1, 0, 0, 0, 348, 1387, 1, 0, 0, 0, 350, 1391, 1, 0, 0, 0, 352, 1396, 1, 0, 0, 0, 354, 1405, 1, 0, 0, 0, 356, 1409, 1, 0, 0, 0, 358, 1413, 1, 0, 0, 0, 360, 1417, 1, 0, 0, 0, 362, 1421, 1, 0, 0, 0, 364, 1426, 1, 0, 0, 0, 366, 1430, 1, 0, 0, 0, 368, 1434, 1, 0, 0, 0, 370, 1438, 1, 0, 0, 0, 372, 1443, 1, 0, 0, 0, 374, 1447, 1, 0, 0, 0, 376, 1451, 1, 0, 0, 0, 378, 1455, 1, 0, 0, 0, 380, 1459, 1, 0, 0, 0, 382, 1463, 1, 0, 0, 0, 384, 1469, 1, 0, 0, 0, 386, 1473, 1, 0, 0, 0, 388, 1477, 1, 0, 0, 0, 390, 1481, 1, 0, 0, 0, 392, 1485, 1, 0, 0, 0, 394, 1489, 1, 0, 0, 0, 396, 1493, 1, 0, 0, 0, 398, 1498, 1, 0, 0, 0, 400, 1502, 1, 0, 0, 0, 402, 1506, 1, 0, 0, 0, 404, 1512, 1, 0, 0, 0, 406, 1521, 1, 0, 0, 0, 408, 1525, 1, 0, 0, 0, 410, 1529, 1, 0, 0, 0, 412, 1533, 1, 0, 0, 0, 414, 1537, 1, 0, 0, 0, 416, 1541, 1, 0, 0, 0, 418, 1545, 1, 0, 0, 0, 420, 1549, 1, 0, 0, 0, 422, 1553, 1, 0, 0, 0, 424, 1558, 1, 0, 0, 0, 426, 1564, 1, 0, 0, 0, 428, 1570, 1, 0, 0, 0, 430, 1574, 1, 0, 0, 0, 432, 1578, 1, 0, 0, 0, 434, 1582, 1, 0, 0, 0, 436, 1588, 1, 0, 0, 0, 438, 1594, 1, 0, 0, 0, 440, 1598, 1, 0, 0, 0, 442, 1602, 1, 0, 0, 0, 444, 1606, 1, 0, 0, 0, 446, 1612, 1, 0, 0, 0, 448, 1618, 1, 0, 0, 0, 450, 1624, 1, 0, 0, 0, 452, 453, 7, 0, 0, 0, 453, 454, 7, 1, 0, 0, 454, 455, 7, 2, 0, 0, 455, 456, 7, 2, 0, 0, 456, 457, 7, 3, 0, 0, 457, 458, 7, 4, 0, 0, 458, 459, 7, 5, 0, 0, 459, 460, 1, 0, 0, 0, 460, 461, 6, 0, 0, 0, 461, 17, 1, 0, 0, 0, 462, 463, 7, 0, 0, 0, 463, 464, 7, 6, 0, 0, 464, 465, 7, 7, 0, 0, 465, 466, 7, 8, 0, 0, 466, 467, 1, 0, 0, 0, 467, 468, 6, 1, 1, 0, 468, 19, 1, 0, 0, 0, 469, 470, 7, 3, 0, 0, 470, 471, 7, 9, 0, 0, 471, 472, 7, 6, 0, 0, 472, 473, 7, 1, 0, 0, 473, 474, 7, 4, 0, 0, 474, 
475, 7, 10, 0, 0, 475, 476, 1, 0, 0, 0, 476, 477, 6, 2, 2, 0, 477, 21, 1, 0, 0, 0, 478, 479, 7, 3, 0, 0, 479, 480, 7, 11, 0, 0, 480, 481, 7, 12, 0, 0, 481, 482, 7, 13, 0, 0, 482, 483, 1, 0, 0, 0, 483, 484, 6, 3, 0, 0, 484, 23, 1, 0, 0, 0, 485, 486, 7, 3, 0, 0, 486, 487, 7, 14, 0, 0, 487, 488, 7, 8, 0, 0, 488, 489, 7, 13, 0, 0, 489, 490, 7, 12, 0, 0, 490, 491, 7, 1, 0, 0, 491, 492, 7, 9, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 4, 3, 0, 494, 25, 1, 0, 0, 0, 495, 496, 7, 15, 0, 0, 496, 497, 7, 6, 0, 0, 497, 498, 7, 7, 0, 0, 498, 499, 7, 16, 0, 0, 499, 500, 1, 0, 0, 0, 500, 501, 6, 5, 4, 0, 501, 27, 1, 0, 0, 0, 502, 503, 7, 17, 0, 0, 503, 504, 7, 6, 0, 0, 504, 505, 7, 7, 0, 0, 505, 506, 7, 18, 0, 0, 506, 507, 1, 0, 0, 0, 507, 508, 6, 6, 0, 0, 508, 29, 1, 0, 0, 0, 509, 510, 7, 18, 0, 0, 510, 511, 7, 3, 0, 0, 511, 512, 7, 3, 0, 0, 512, 513, 7, 8, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 6, 7, 1, 0, 515, 31, 1, 0, 0, 0, 516, 517, 7, 13, 0, 0, 517, 518, 7, 1, 0, 0, 518, 519, 7, 16, 0, 0, 519, 520, 7, 1, 0, 0, 520, 521, 7, 5, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 8, 0, 0, 523, 33, 1, 0, 0, 0, 524, 525, 7, 16, 0, 0, 525, 526, 7, 11, 0, 0, 526, 527, 5, 95, 0, 0, 527, 528, 7, 3, 0, 0, 528, 529, 7, 14, 0, 0, 529, 530, 7, 8, 0, 0, 530, 531, 7, 12, 0, 0, 531, 532, 7, 9, 0, 0, 532, 533, 7, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 535, 6, 9, 5, 0, 535, 35, 1, 0, 0, 0, 536, 537, 7, 6, 0, 0, 537, 538, 7, 3, 0, 0, 538, 539, 7, 9, 0, 0, 539, 540, 7, 12, 0, 0, 540, 541, 7, 16, 0, 0, 541, 542, 7, 3, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 6, 10, 6, 0, 544, 37, 1, 0, 0, 0, 545, 546, 7, 6, 0, 0, 546, 547, 7, 7, 0, 0, 547, 548, 7, 19, 0, 0, 548, 549, 1, 0, 0, 0, 549, 550, 6, 11, 0, 0, 550, 39, 1, 0, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 7, 10, 0, 0, 553, 554, 7, 7, 0, 0, 554, 555, 7, 19, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 6, 12, 7, 0, 557, 41, 1, 0, 0, 0, 558, 559, 7, 2, 0, 0, 559, 560, 7, 7, 0, 0, 560, 561, 7, 6, 0, 0, 561, 562, 7, 5, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 13, 0, 0, 564, 43, 1, 0, 0, 0, 565, 566, 7, 2, 0, 0, 566, 567, 7, 5, 0, 0, 567, 568, 7, 12, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 2, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 6, 14, 0, 0, 572, 45, 1, 0, 0, 0, 573, 574, 7, 19, 0, 0, 574, 575, 7, 10, 0, 0, 575, 576, 7, 3, 0, 0, 576, 577, 7, 6, 0, 0, 577, 578, 7, 3, 0, 0, 578, 579, 1, 0, 0, 0, 579, 580, 6, 15, 0, 0, 580, 47, 1, 0, 0, 0, 581, 582, 4, 16, 0, 0, 582, 583, 7, 1, 0, 0, 583, 584, 7, 9, 0, 0, 584, 585, 7, 13, 0, 0, 585, 586, 7, 1, 0, 0, 586, 587, 7, 9, 0, 0, 587, 588, 7, 3, 0, 0, 588, 589, 7, 2, 0, 0, 589, 590, 7, 5, 0, 0, 590, 591, 7, 12, 0, 0, 591, 592, 7, 5, 0, 0, 592, 593, 7, 2, 0, 0, 593, 594, 1, 0, 0, 0, 594, 595, 6, 16, 0, 0, 595, 49, 1, 0, 0, 0, 596, 597, 4, 17, 1, 0, 597, 598, 7, 13, 0, 0, 598, 599, 7, 7, 0, 0, 599, 600, 7, 7, 0, 0, 600, 601, 7, 18, 0, 0, 601, 602, 7, 20, 0, 0, 602, 603, 7, 8, 0, 0, 603, 604, 5, 95, 0, 0, 604, 605, 5, 128020, 0, 0, 605, 606, 1, 0, 0, 0, 606, 607, 6, 17, 8, 0, 607, 51, 1, 0, 0, 0, 608, 609, 4, 18, 2, 0, 609, 610, 7, 16, 0, 0, 610, 611, 7, 3, 0, 0, 611, 612, 7, 5, 0, 0, 612, 613, 7, 6, 0, 0, 613, 614, 7, 1, 0, 0, 614, 615, 7, 4, 0, 0, 615, 616, 7, 2, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 6, 18, 9, 0, 618, 53, 1, 0, 0, 0, 619, 620, 4, 19, 3, 0, 620, 621, 7, 21, 0, 0, 621, 622, 7, 7, 0, 0, 622, 623, 7, 1, 0, 0, 623, 624, 7, 9, 0, 0, 624, 625, 1, 0, 0, 0, 625, 626, 6, 19, 10, 0, 626, 55, 1, 0, 0, 0, 627, 628, 4, 20, 4, 0, 628, 629, 7, 15, 0, 0, 629, 630, 7, 20, 0, 0, 630, 631, 7, 13, 0, 0, 631, 632, 7, 13, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 6, 20, 
10, 0, 634, 57, 1, 0, 0, 0, 635, 636, 4, 21, 5, 0, 636, 637, 7, 13, 0, 0, 637, 638, 7, 3, 0, 0, 638, 639, 7, 15, 0, 0, 639, 640, 7, 5, 0, 0, 640, 641, 1, 0, 0, 0, 641, 642, 6, 21, 10, 0, 642, 59, 1, 0, 0, 0, 643, 644, 4, 22, 6, 0, 644, 645, 7, 6, 0, 0, 645, 646, 7, 1, 0, 0, 646, 647, 7, 17, 0, 0, 647, 648, 7, 10, 0, 0, 648, 649, 7, 5, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 6, 22, 10, 0, 651, 61, 1, 0, 0, 0, 652, 653, 4, 23, 7, 0, 653, 654, 7, 13, 0, 0, 654, 655, 7, 7, 0, 0, 655, 656, 7, 7, 0, 0, 656, 657, 7, 18, 0, 0, 657, 658, 7, 20, 0, 0, 658, 659, 7, 8, 0, 0, 659, 660, 1, 0, 0, 0, 660, 661, 6, 23, 10, 0, 661, 63, 1, 0, 0, 0, 662, 664, 8, 22, 0, 0, 663, 662, 1, 0, 0, 0, 664, 665, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 667, 1, 0, 0, 0, 667, 668, 6, 24, 0, 0, 668, 65, 1, 0, 0, 0, 669, 670, 5, 47, 0, 0, 670, 671, 5, 47, 0, 0, 671, 675, 1, 0, 0, 0, 672, 674, 8, 23, 0, 0, 673, 672, 1, 0, 0, 0, 674, 677, 1, 0, 0, 0, 675, 673, 1, 0, 0, 0, 675, 676, 1, 0, 0, 0, 676, 679, 1, 0, 0, 0, 677, 675, 1, 0, 0, 0, 678, 680, 5, 13, 0, 0, 679, 678, 1, 0, 0, 0, 679, 680, 1, 0, 0, 0, 680, 682, 1, 0, 0, 0, 681, 683, 5, 10, 0, 0, 682, 681, 1, 0, 0, 0, 682, 683, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 684, 685, 6, 25, 11, 0, 685, 67, 1, 0, 0, 0, 686, 687, 5, 47, 0, 0, 687, 688, 5, 42, 0, 0, 688, 693, 1, 0, 0, 0, 689, 692, 3, 68, 26, 0, 690, 692, 9, 0, 0, 0, 691, 689, 1, 0, 0, 0, 691, 690, 1, 0, 0, 0, 692, 695, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 693, 691, 1, 0, 0, 0, 694, 696, 1, 0, 0, 0, 695, 693, 1, 0, 0, 0, 696, 697, 5, 42, 0, 0, 697, 698, 5, 47, 0, 0, 698, 699, 1, 0, 0, 0, 699, 700, 6, 26, 11, 0, 700, 69, 1, 0, 0, 0, 701, 703, 7, 24, 0, 0, 702, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 702, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 707, 6, 27, 11, 0, 707, 71, 1, 0, 0, 0, 708, 709, 5, 124, 0, 0, 709, 710, 1, 0, 0, 0, 710, 711, 6, 28, 12, 0, 711, 73, 1, 0, 0, 0, 712, 713, 7, 25, 0, 0, 713, 75, 1, 0, 0, 0, 714, 715, 7, 26, 0, 0, 715, 77, 1, 0, 0, 0, 716, 717, 5, 92, 0, 0, 717, 718, 7, 27, 0, 0, 718, 79, 1, 0, 0, 0, 719, 720, 8, 28, 0, 0, 720, 81, 1, 0, 0, 0, 721, 723, 7, 3, 0, 0, 722, 724, 7, 29, 0, 0, 723, 722, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 726, 1, 0, 0, 0, 725, 727, 3, 74, 29, 0, 726, 725, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 726, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 83, 1, 0, 0, 0, 730, 731, 5, 64, 0, 0, 731, 85, 1, 0, 0, 0, 732, 733, 5, 96, 0, 0, 733, 87, 1, 0, 0, 0, 734, 738, 8, 30, 0, 0, 735, 736, 5, 96, 0, 0, 736, 738, 5, 96, 0, 0, 737, 734, 1, 0, 0, 0, 737, 735, 1, 0, 0, 0, 738, 89, 1, 0, 0, 0, 739, 740, 5, 95, 0, 0, 740, 91, 1, 0, 0, 0, 741, 745, 3, 76, 30, 0, 742, 745, 3, 74, 29, 0, 743, 745, 3, 90, 37, 0, 744, 741, 1, 0, 0, 0, 744, 742, 1, 0, 0, 0, 744, 743, 1, 0, 0, 0, 745, 93, 1, 0, 0, 0, 746, 751, 5, 34, 0, 0, 747, 750, 3, 78, 31, 0, 748, 750, 3, 80, 32, 0, 749, 747, 1, 0, 0, 0, 749, 748, 1, 0, 0, 0, 750, 753, 1, 0, 0, 0, 751, 749, 1, 0, 0, 0, 751, 752, 1, 0, 0, 0, 752, 754, 1, 0, 0, 0, 753, 751, 1, 0, 0, 0, 754, 776, 5, 34, 0, 0, 755, 756, 5, 34, 0, 0, 756, 757, 5, 34, 0, 0, 757, 758, 5, 34, 0, 0, 758, 762, 1, 0, 0, 0, 759, 761, 8, 23, 0, 0, 760, 759, 1, 0, 0, 0, 761, 764, 1, 0, 0, 0, 762, 763, 1, 0, 0, 0, 762, 760, 1, 0, 0, 0, 763, 765, 1, 0, 0, 0, 764, 762, 1, 0, 0, 0, 765, 766, 5, 34, 0, 0, 766, 767, 5, 34, 0, 0, 767, 768, 5, 34, 0, 0, 768, 770, 1, 0, 0, 0, 769, 771, 5, 34, 0, 0, 770, 769, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 773, 1, 0, 0, 0, 772, 774, 5, 34, 0, 0, 773, 772, 1, 0, 0, 0, 773, 774, 1, 0, 0, 0, 774, 776, 1, 0, 0, 0, 775, 746, 1, 0, 
0, 0, 775, 755, 1, 0, 0, 0, 776, 95, 1, 0, 0, 0, 777, 779, 3, 74, 29, 0, 778, 777, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 778, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 97, 1, 0, 0, 0, 782, 784, 3, 74, 29, 0, 783, 782, 1, 0, 0, 0, 784, 785, 1, 0, 0, 0, 785, 783, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 787, 1, 0, 0, 0, 787, 791, 3, 116, 50, 0, 788, 790, 3, 74, 29, 0, 789, 788, 1, 0, 0, 0, 790, 793, 1, 0, 0, 0, 791, 789, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 825, 1, 0, 0, 0, 793, 791, 1, 0, 0, 0, 794, 796, 3, 116, 50, 0, 795, 797, 3, 74, 29, 0, 796, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 796, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 825, 1, 0, 0, 0, 800, 802, 3, 74, 29, 0, 801, 800, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 801, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 812, 1, 0, 0, 0, 805, 809, 3, 116, 50, 0, 806, 808, 3, 74, 29, 0, 807, 806, 1, 0, 0, 0, 808, 811, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 813, 1, 0, 0, 0, 811, 809, 1, 0, 0, 0, 812, 805, 1, 0, 0, 0, 812, 813, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 815, 3, 82, 33, 0, 815, 825, 1, 0, 0, 0, 816, 818, 3, 116, 50, 0, 817, 819, 3, 74, 29, 0, 818, 817, 1, 0, 0, 0, 819, 820, 1, 0, 0, 0, 820, 818, 1, 0, 0, 0, 820, 821, 1, 0, 0, 0, 821, 822, 1, 0, 0, 0, 822, 823, 3, 82, 33, 0, 823, 825, 1, 0, 0, 0, 824, 783, 1, 0, 0, 0, 824, 794, 1, 0, 0, 0, 824, 801, 1, 0, 0, 0, 824, 816, 1, 0, 0, 0, 825, 99, 1, 0, 0, 0, 826, 827, 7, 31, 0, 0, 827, 828, 7, 32, 0, 0, 828, 101, 1, 0, 0, 0, 829, 830, 7, 12, 0, 0, 830, 831, 7, 9, 0, 0, 831, 832, 7, 0, 0, 0, 832, 103, 1, 0, 0, 0, 833, 834, 7, 12, 0, 0, 834, 835, 7, 2, 0, 0, 835, 836, 7, 4, 0, 0, 836, 105, 1, 0, 0, 0, 837, 838, 5, 61, 0, 0, 838, 107, 1, 0, 0, 0, 839, 840, 5, 58, 0, 0, 840, 841, 5, 58, 0, 0, 841, 109, 1, 0, 0, 0, 842, 843, 5, 58, 0, 0, 843, 111, 1, 0, 0, 0, 844, 845, 5, 44, 0, 0, 845, 113, 1, 0, 0, 0, 846, 847, 7, 0, 0, 0, 847, 848, 7, 3, 0, 0, 848, 849, 7, 2, 0, 0, 849, 850, 7, 4, 0, 0, 850, 115, 1, 0, 0, 0, 851, 852, 5, 46, 0, 0, 852, 117, 1, 0, 0, 0, 853, 854, 7, 15, 0, 0, 854, 855, 7, 12, 0, 0, 855, 856, 7, 13, 0, 0, 856, 857, 7, 2, 0, 0, 857, 858, 7, 3, 0, 0, 858, 119, 1, 0, 0, 0, 859, 860, 7, 15, 0, 0, 860, 861, 7, 1, 0, 0, 861, 862, 7, 6, 0, 0, 862, 863, 7, 2, 0, 0, 863, 864, 7, 5, 0, 0, 864, 121, 1, 0, 0, 0, 865, 866, 7, 1, 0, 0, 866, 867, 7, 9, 0, 0, 867, 123, 1, 0, 0, 0, 868, 869, 7, 1, 0, 0, 869, 870, 7, 2, 0, 0, 870, 125, 1, 0, 0, 0, 871, 872, 7, 13, 0, 0, 872, 873, 7, 12, 0, 0, 873, 874, 7, 2, 0, 0, 874, 875, 7, 5, 0, 0, 875, 127, 1, 0, 0, 0, 876, 877, 7, 13, 0, 0, 877, 878, 7, 1, 0, 0, 878, 879, 7, 18, 0, 0, 879, 880, 7, 3, 0, 0, 880, 129, 1, 0, 0, 0, 881, 882, 5, 40, 0, 0, 882, 131, 1, 0, 0, 0, 883, 884, 7, 9, 0, 0, 884, 885, 7, 7, 0, 0, 885, 886, 7, 5, 0, 0, 886, 133, 1, 0, 0, 0, 887, 888, 7, 9, 0, 0, 888, 889, 7, 20, 0, 0, 889, 890, 7, 13, 0, 0, 890, 891, 7, 13, 0, 0, 891, 135, 1, 0, 0, 0, 892, 893, 7, 9, 0, 0, 893, 894, 7, 20, 0, 0, 894, 895, 7, 13, 0, 0, 895, 896, 7, 13, 0, 0, 896, 897, 7, 2, 0, 0, 897, 137, 1, 0, 0, 0, 898, 899, 7, 7, 0, 0, 899, 900, 7, 6, 0, 0, 900, 139, 1, 0, 0, 0, 901, 902, 5, 63, 0, 0, 902, 141, 1, 0, 0, 0, 903, 904, 7, 6, 0, 0, 904, 905, 7, 13, 0, 0, 905, 906, 7, 1, 0, 0, 906, 907, 7, 18, 0, 0, 907, 908, 7, 3, 0, 0, 908, 143, 1, 0, 0, 0, 909, 910, 5, 41, 0, 0, 910, 145, 1, 0, 0, 0, 911, 912, 7, 5, 0, 0, 912, 913, 7, 6, 0, 0, 913, 914, 7, 20, 0, 0, 914, 915, 7, 3, 0, 0, 915, 147, 1, 0, 0, 0, 916, 917, 5, 61, 0, 0, 917, 918, 5, 61, 0, 0, 918, 149, 1, 0, 0, 0, 919, 920, 5, 61, 0, 0, 920, 921, 5, 126, 0, 0, 921, 151, 1, 0, 0, 0, 922, 923, 
5, 33, 0, 0, 923, 924, 5, 61, 0, 0, 924, 153, 1, 0, 0, 0, 925, 926, 5, 60, 0, 0, 926, 155, 1, 0, 0, 0, 927, 928, 5, 60, 0, 0, 928, 929, 5, 61, 0, 0, 929, 157, 1, 0, 0, 0, 930, 931, 5, 62, 0, 0, 931, 159, 1, 0, 0, 0, 932, 933, 5, 62, 0, 0, 933, 934, 5, 61, 0, 0, 934, 161, 1, 0, 0, 0, 935, 936, 5, 43, 0, 0, 936, 163, 1, 0, 0, 0, 937, 938, 5, 45, 0, 0, 938, 165, 1, 0, 0, 0, 939, 940, 5, 42, 0, 0, 940, 167, 1, 0, 0, 0, 941, 942, 5, 47, 0, 0, 942, 169, 1, 0, 0, 0, 943, 944, 5, 37, 0, 0, 944, 171, 1, 0, 0, 0, 945, 946, 4, 78, 8, 0, 946, 947, 5, 123, 0, 0, 947, 173, 1, 0, 0, 0, 948, 949, 4, 79, 9, 0, 949, 950, 5, 125, 0, 0, 950, 175, 1, 0, 0, 0, 951, 952, 3, 46, 15, 0, 952, 953, 1, 0, 0, 0, 953, 954, 6, 80, 13, 0, 954, 177, 1, 0, 0, 0, 955, 958, 3, 140, 62, 0, 956, 959, 3, 76, 30, 0, 957, 959, 3, 90, 37, 0, 958, 956, 1, 0, 0, 0, 958, 957, 1, 0, 0, 0, 959, 963, 1, 0, 0, 0, 960, 962, 3, 92, 38, 0, 961, 960, 1, 0, 0, 0, 962, 965, 1, 0, 0, 0, 963, 961, 1, 0, 0, 0, 963, 964, 1, 0, 0, 0, 964, 973, 1, 0, 0, 0, 965, 963, 1, 0, 0, 0, 966, 968, 3, 140, 62, 0, 967, 969, 3, 74, 29, 0, 968, 967, 1, 0, 0, 0, 969, 970, 1, 0, 0, 0, 970, 968, 1, 0, 0, 0, 970, 971, 1, 0, 0, 0, 971, 973, 1, 0, 0, 0, 972, 955, 1, 0, 0, 0, 972, 966, 1, 0, 0, 0, 973, 179, 1, 0, 0, 0, 974, 975, 5, 91, 0, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 82, 0, 0, 977, 978, 6, 82, 0, 0, 978, 181, 1, 0, 0, 0, 979, 980, 5, 93, 0, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 83, 12, 0, 982, 983, 6, 83, 12, 0, 983, 183, 1, 0, 0, 0, 984, 988, 3, 76, 30, 0, 985, 987, 3, 92, 38, 0, 986, 985, 1, 0, 0, 0, 987, 990, 1, 0, 0, 0, 988, 986, 1, 0, 0, 0, 988, 989, 1, 0, 0, 0, 989, 1001, 1, 0, 0, 0, 990, 988, 1, 0, 0, 0, 991, 994, 3, 90, 37, 0, 992, 994, 3, 84, 34, 0, 993, 991, 1, 0, 0, 0, 993, 992, 1, 0, 0, 0, 994, 996, 1, 0, 0, 0, 995, 997, 3, 92, 38, 0, 996, 995, 1, 0, 0, 0, 997, 998, 1, 0, 0, 0, 998, 996, 1, 0, 0, 0, 998, 999, 1, 0, 0, 0, 999, 1001, 1, 0, 0, 0, 1000, 984, 1, 0, 0, 0, 1000, 993, 1, 0, 0, 0, 1001, 185, 1, 0, 0, 0, 1002, 1004, 3, 86, 35, 0, 1003, 1005, 3, 88, 36, 0, 1004, 1003, 1, 0, 0, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1004, 1, 0, 0, 0, 1006, 1007, 1, 0, 0, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 3, 86, 35, 0, 1009, 187, 1, 0, 0, 0, 1010, 1011, 3, 186, 85, 0, 1011, 189, 1, 0, 0, 0, 1012, 1013, 3, 66, 25, 0, 1013, 1014, 1, 0, 0, 0, 1014, 1015, 6, 87, 11, 0, 1015, 191, 1, 0, 0, 0, 1016, 1017, 3, 68, 26, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1019, 6, 88, 11, 0, 1019, 193, 1, 0, 0, 0, 1020, 1021, 3, 70, 27, 0, 1021, 1022, 1, 0, 0, 0, 1022, 1023, 6, 89, 11, 0, 1023, 195, 1, 0, 0, 0, 1024, 1025, 3, 180, 82, 0, 1025, 1026, 1, 0, 0, 0, 1026, 1027, 6, 90, 14, 0, 1027, 1028, 6, 90, 15, 0, 1028, 197, 1, 0, 0, 0, 1029, 1030, 3, 72, 28, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 91, 16, 0, 1032, 1033, 6, 91, 12, 0, 1033, 199, 1, 0, 0, 0, 1034, 1035, 3, 70, 27, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 92, 11, 0, 1037, 201, 1, 0, 0, 0, 1038, 1039, 3, 66, 25, 0, 1039, 1040, 1, 0, 0, 0, 1040, 1041, 6, 93, 11, 0, 1041, 203, 1, 0, 0, 0, 1042, 1043, 3, 68, 26, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 94, 11, 0, 1045, 205, 1, 0, 0, 0, 1046, 1047, 3, 72, 28, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 95, 16, 0, 1049, 1050, 6, 95, 12, 0, 1050, 207, 1, 0, 0, 0, 1051, 1052, 3, 180, 82, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 96, 14, 0, 1054, 209, 1, 0, 0, 0, 1055, 1056, 3, 182, 83, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 97, 17, 0, 1058, 211, 1, 0, 0, 0, 1059, 1060, 3, 110, 47, 0, 1060, 1061, 1, 0, 0, 0, 1061, 1062, 6, 98, 18, 0, 1062, 213, 1, 0, 0, 0, 1063, 1064, 3, 112, 48, 0, 
1064, 1065, 1, 0, 0, 0, 1065, 1066, 6, 99, 19, 0, 1066, 215, 1, 0, 0, 0, 1067, 1068, 3, 106, 45, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1070, 6, 100, 20, 0, 1070, 217, 1, 0, 0, 0, 1071, 1072, 7, 16, 0, 0, 1072, 1073, 7, 3, 0, 0, 1073, 1074, 7, 5, 0, 0, 1074, 1075, 7, 12, 0, 0, 1075, 1076, 7, 0, 0, 0, 1076, 1077, 7, 12, 0, 0, 1077, 1078, 7, 5, 0, 0, 1078, 1079, 7, 12, 0, 0, 1079, 219, 1, 0, 0, 0, 1080, 1084, 8, 33, 0, 0, 1081, 1082, 5, 47, 0, 0, 1082, 1084, 8, 34, 0, 0, 1083, 1080, 1, 0, 0, 0, 1083, 1081, 1, 0, 0, 0, 1084, 221, 1, 0, 0, 0, 1085, 1087, 3, 220, 102, 0, 1086, 1085, 1, 0, 0, 0, 1087, 1088, 1, 0, 0, 0, 1088, 1086, 1, 0, 0, 0, 1088, 1089, 1, 0, 0, 0, 1089, 223, 1, 0, 0, 0, 1090, 1091, 3, 222, 103, 0, 1091, 1092, 1, 0, 0, 0, 1092, 1093, 6, 104, 21, 0, 1093, 225, 1, 0, 0, 0, 1094, 1095, 3, 94, 39, 0, 1095, 1096, 1, 0, 0, 0, 1096, 1097, 6, 105, 22, 0, 1097, 227, 1, 0, 0, 0, 1098, 1099, 3, 66, 25, 0, 1099, 1100, 1, 0, 0, 0, 1100, 1101, 6, 106, 11, 0, 1101, 229, 1, 0, 0, 0, 1102, 1103, 3, 68, 26, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 107, 11, 0, 1105, 231, 1, 0, 0, 0, 1106, 1107, 3, 70, 27, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 108, 11, 0, 1109, 233, 1, 0, 0, 0, 1110, 1111, 3, 72, 28, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 109, 16, 0, 1113, 1114, 6, 109, 12, 0, 1114, 235, 1, 0, 0, 0, 1115, 1116, 3, 116, 50, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1118, 6, 110, 23, 0, 1118, 237, 1, 0, 0, 0, 1119, 1120, 3, 112, 48, 0, 1120, 1121, 1, 0, 0, 0, 1121, 1122, 6, 111, 19, 0, 1122, 239, 1, 0, 0, 0, 1123, 1124, 4, 112, 10, 0, 1124, 1125, 3, 140, 62, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1127, 6, 112, 24, 0, 1127, 241, 1, 0, 0, 0, 1128, 1129, 4, 113, 11, 0, 1129, 1130, 3, 178, 81, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1132, 6, 113, 25, 0, 1132, 243, 1, 0, 0, 0, 1133, 1138, 3, 76, 30, 0, 1134, 1138, 3, 74, 29, 0, 1135, 1138, 3, 90, 37, 0, 1136, 1138, 3, 166, 75, 0, 1137, 1133, 1, 0, 0, 0, 1137, 1134, 1, 0, 0, 0, 1137, 1135, 1, 0, 0, 0, 1137, 1136, 1, 0, 0, 0, 1138, 245, 1, 0, 0, 0, 1139, 1142, 3, 76, 30, 0, 1140, 1142, 3, 166, 75, 0, 1141, 1139, 1, 0, 0, 0, 1141, 1140, 1, 0, 0, 0, 1142, 1146, 1, 0, 0, 0, 1143, 1145, 3, 244, 114, 0, 1144, 1143, 1, 0, 0, 0, 1145, 1148, 1, 0, 0, 0, 1146, 1144, 1, 0, 0, 0, 1146, 1147, 1, 0, 0, 0, 1147, 1159, 1, 0, 0, 0, 1148, 1146, 1, 0, 0, 0, 1149, 1152, 3, 90, 37, 0, 1150, 1152, 3, 84, 34, 0, 1151, 1149, 1, 0, 0, 0, 1151, 1150, 1, 0, 0, 0, 1152, 1154, 1, 0, 0, 0, 1153, 1155, 3, 244, 114, 0, 1154, 1153, 1, 0, 0, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1154, 1, 0, 0, 0, 1156, 1157, 1, 0, 0, 0, 1157, 1159, 1, 0, 0, 0, 1158, 1141, 1, 0, 0, 0, 1158, 1151, 1, 0, 0, 0, 1159, 247, 1, 0, 0, 0, 1160, 1163, 3, 246, 115, 0, 1161, 1163, 3, 186, 85, 0, 1162, 1160, 1, 0, 0, 0, 1162, 1161, 1, 0, 0, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1162, 1, 0, 0, 0, 1164, 1165, 1, 0, 0, 0, 1165, 249, 1, 0, 0, 0, 1166, 1167, 3, 66, 25, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1169, 6, 117, 11, 0, 1169, 251, 1, 0, 0, 0, 1170, 1171, 3, 68, 26, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1173, 6, 118, 11, 0, 1173, 253, 1, 0, 0, 0, 1174, 1175, 3, 70, 27, 0, 1175, 1176, 1, 0, 0, 0, 1176, 1177, 6, 119, 11, 0, 1177, 255, 1, 0, 0, 0, 1178, 1179, 3, 72, 28, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1181, 6, 120, 16, 0, 1181, 1182, 6, 120, 12, 0, 1182, 257, 1, 0, 0, 0, 1183, 1184, 3, 106, 45, 0, 1184, 1185, 1, 0, 0, 0, 1185, 1186, 6, 121, 20, 0, 1186, 259, 1, 0, 0, 0, 1187, 1188, 3, 112, 48, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1190, 6, 122, 19, 0, 1190, 261, 1, 0, 0, 0, 1191, 1192, 3, 116, 50, 0, 1192, 1193, 1, 0, 0, 0, 1193, 1194, 6, 123, 23, 0, 1194, 263, 
1, 0, 0, 0, 1195, 1196, 4, 124, 12, 0, 1196, 1197, 3, 140, 62, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 124, 24, 0, 1199, 265, 1, 0, 0, 0, 1200, 1201, 4, 125, 13, 0, 1201, 1202, 3, 178, 81, 0, 1202, 1203, 1, 0, 0, 0, 1203, 1204, 6, 125, 25, 0, 1204, 267, 1, 0, 0, 0, 1205, 1206, 7, 12, 0, 0, 1206, 1207, 7, 2, 0, 0, 1207, 269, 1, 0, 0, 0, 1208, 1209, 3, 248, 116, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 6, 127, 26, 0, 1211, 271, 1, 0, 0, 0, 1212, 1213, 3, 66, 25, 0, 1213, 1214, 1, 0, 0, 0, 1214, 1215, 6, 128, 11, 0, 1215, 273, 1, 0, 0, 0, 1216, 1217, 3, 68, 26, 0, 1217, 1218, 1, 0, 0, 0, 1218, 1219, 6, 129, 11, 0, 1219, 275, 1, 0, 0, 0, 1220, 1221, 3, 70, 27, 0, 1221, 1222, 1, 0, 0, 0, 1222, 1223, 6, 130, 11, 0, 1223, 277, 1, 0, 0, 0, 1224, 1225, 3, 72, 28, 0, 1225, 1226, 1, 0, 0, 0, 1226, 1227, 6, 131, 16, 0, 1227, 1228, 6, 131, 12, 0, 1228, 279, 1, 0, 0, 0, 1229, 1230, 3, 180, 82, 0, 1230, 1231, 1, 0, 0, 0, 1231, 1232, 6, 132, 14, 0, 1232, 1233, 6, 132, 27, 0, 1233, 281, 1, 0, 0, 0, 1234, 1235, 7, 7, 0, 0, 1235, 1236, 7, 9, 0, 0, 1236, 1237, 1, 0, 0, 0, 1237, 1238, 6, 133, 28, 0, 1238, 283, 1, 0, 0, 0, 1239, 1240, 7, 19, 0, 0, 1240, 1241, 7, 1, 0, 0, 1241, 1242, 7, 5, 0, 0, 1242, 1243, 7, 10, 0, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1245, 6, 134, 28, 0, 1245, 285, 1, 0, 0, 0, 1246, 1247, 8, 35, 0, 0, 1247, 287, 1, 0, 0, 0, 1248, 1250, 3, 286, 135, 0, 1249, 1248, 1, 0, 0, 0, 1250, 1251, 1, 0, 0, 0, 1251, 1249, 1, 0, 0, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 1, 0, 0, 0, 1253, 1254, 3, 110, 47, 0, 1254, 1256, 1, 0, 0, 0, 1255, 1249, 1, 0, 0, 0, 1255, 1256, 1, 0, 0, 0, 1256, 1258, 1, 0, 0, 0, 1257, 1259, 3, 286, 135, 0, 1258, 1257, 1, 0, 0, 0, 1259, 1260, 1, 0, 0, 0, 1260, 1258, 1, 0, 0, 0, 1260, 1261, 1, 0, 0, 0, 1261, 289, 1, 0, 0, 0, 1262, 1263, 3, 288, 136, 0, 1263, 1264, 1, 0, 0, 0, 1264, 1265, 6, 137, 29, 0, 1265, 291, 1, 0, 0, 0, 1266, 1267, 3, 66, 25, 0, 1267, 1268, 1, 0, 0, 0, 1268, 1269, 6, 138, 11, 0, 1269, 293, 1, 0, 0, 0, 1270, 1271, 3, 68, 26, 0, 1271, 1272, 1, 0, 0, 0, 1272, 1273, 6, 139, 11, 0, 1273, 295, 1, 0, 0, 0, 1274, 1275, 3, 70, 27, 0, 1275, 1276, 1, 0, 0, 0, 1276, 1277, 6, 140, 11, 0, 1277, 297, 1, 0, 0, 0, 1278, 1279, 3, 72, 28, 0, 1279, 1280, 1, 0, 0, 0, 1280, 1281, 6, 141, 16, 0, 1281, 1282, 6, 141, 12, 0, 1282, 1283, 6, 141, 12, 0, 1283, 299, 1, 0, 0, 0, 1284, 1285, 3, 106, 45, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 142, 20, 0, 1287, 301, 1, 0, 0, 0, 1288, 1289, 3, 112, 48, 0, 1289, 1290, 1, 0, 0, 0, 1290, 1291, 6, 143, 19, 0, 1291, 303, 1, 0, 0, 0, 1292, 1293, 3, 116, 50, 0, 1293, 1294, 1, 0, 0, 0, 1294, 1295, 6, 144, 23, 0, 1295, 305, 1, 0, 0, 0, 1296, 1297, 3, 284, 134, 0, 1297, 1298, 1, 0, 0, 0, 1298, 1299, 6, 145, 30, 0, 1299, 307, 1, 0, 0, 0, 1300, 1301, 3, 248, 116, 0, 1301, 1302, 1, 0, 0, 0, 1302, 1303, 6, 146, 26, 0, 1303, 309, 1, 0, 0, 0, 1304, 1305, 3, 188, 86, 0, 1305, 1306, 1, 0, 0, 0, 1306, 1307, 6, 147, 31, 0, 1307, 311, 1, 0, 0, 0, 1308, 1309, 4, 148, 14, 0, 1309, 1310, 3, 140, 62, 0, 1310, 1311, 1, 0, 0, 0, 1311, 1312, 6, 148, 24, 0, 1312, 313, 1, 0, 0, 0, 1313, 1314, 4, 149, 15, 0, 1314, 1315, 3, 178, 81, 0, 1315, 1316, 1, 0, 0, 0, 1316, 1317, 6, 149, 25, 0, 1317, 315, 1, 0, 0, 0, 1318, 1319, 3, 66, 25, 0, 1319, 1320, 1, 0, 0, 0, 1320, 1321, 6, 150, 11, 0, 1321, 317, 1, 0, 0, 0, 1322, 1323, 3, 68, 26, 0, 1323, 1324, 1, 0, 0, 0, 1324, 1325, 6, 151, 11, 0, 1325, 319, 1, 0, 0, 0, 1326, 1327, 3, 70, 27, 0, 1327, 1328, 1, 0, 0, 0, 1328, 1329, 6, 152, 11, 0, 1329, 321, 1, 0, 0, 0, 1330, 1331, 3, 72, 28, 0, 1331, 1332, 1, 0, 0, 0, 1332, 1333, 6, 153, 16, 0, 
1333, 1334, 6, 153, 12, 0, 1334, 323, 1, 0, 0, 0, 1335, 1336, 3, 116, 50, 0, 1336, 1337, 1, 0, 0, 0, 1337, 1338, 6, 154, 23, 0, 1338, 325, 1, 0, 0, 0, 1339, 1340, 4, 155, 16, 0, 1340, 1341, 3, 140, 62, 0, 1341, 1342, 1, 0, 0, 0, 1342, 1343, 6, 155, 24, 0, 1343, 327, 1, 0, 0, 0, 1344, 1345, 4, 156, 17, 0, 1345, 1346, 3, 178, 81, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 156, 25, 0, 1348, 329, 1, 0, 0, 0, 1349, 1350, 3, 188, 86, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 157, 31, 0, 1352, 331, 1, 0, 0, 0, 1353, 1354, 3, 184, 84, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 158, 32, 0, 1356, 333, 1, 0, 0, 0, 1357, 1358, 3, 66, 25, 0, 1358, 1359, 1, 0, 0, 0, 1359, 1360, 6, 159, 11, 0, 1360, 335, 1, 0, 0, 0, 1361, 1362, 3, 68, 26, 0, 1362, 1363, 1, 0, 0, 0, 1363, 1364, 6, 160, 11, 0, 1364, 337, 1, 0, 0, 0, 1365, 1366, 3, 70, 27, 0, 1366, 1367, 1, 0, 0, 0, 1367, 1368, 6, 161, 11, 0, 1368, 339, 1, 0, 0, 0, 1369, 1370, 3, 72, 28, 0, 1370, 1371, 1, 0, 0, 0, 1371, 1372, 6, 162, 16, 0, 1372, 1373, 6, 162, 12, 0, 1373, 341, 1, 0, 0, 0, 1374, 1375, 7, 1, 0, 0, 1375, 1376, 7, 9, 0, 0, 1376, 1377, 7, 15, 0, 0, 1377, 1378, 7, 7, 0, 0, 1378, 343, 1, 0, 0, 0, 1379, 1380, 3, 66, 25, 0, 1380, 1381, 1, 0, 0, 0, 1381, 1382, 6, 164, 11, 0, 1382, 345, 1, 0, 0, 0, 1383, 1384, 3, 68, 26, 0, 1384, 1385, 1, 0, 0, 0, 1385, 1386, 6, 165, 11, 0, 1386, 347, 1, 0, 0, 0, 1387, 1388, 3, 70, 27, 0, 1388, 1389, 1, 0, 0, 0, 1389, 1390, 6, 166, 11, 0, 1390, 349, 1, 0, 0, 0, 1391, 1392, 3, 182, 83, 0, 1392, 1393, 1, 0, 0, 0, 1393, 1394, 6, 167, 17, 0, 1394, 1395, 6, 167, 12, 0, 1395, 351, 1, 0, 0, 0, 1396, 1397, 3, 110, 47, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1399, 6, 168, 18, 0, 1399, 353, 1, 0, 0, 0, 1400, 1406, 3, 84, 34, 0, 1401, 1406, 3, 74, 29, 0, 1402, 1406, 3, 116, 50, 0, 1403, 1406, 3, 76, 30, 0, 1404, 1406, 3, 90, 37, 0, 1405, 1400, 1, 0, 0, 0, 1405, 1401, 1, 0, 0, 0, 1405, 1402, 1, 0, 0, 0, 1405, 1403, 1, 0, 0, 0, 1405, 1404, 1, 0, 0, 0, 1406, 1407, 1, 0, 0, 0, 1407, 1405, 1, 0, 0, 0, 1407, 1408, 1, 0, 0, 0, 1408, 355, 1, 0, 0, 0, 1409, 1410, 3, 66, 25, 0, 1410, 1411, 1, 0, 0, 0, 1411, 1412, 6, 170, 11, 0, 1412, 357, 1, 0, 0, 0, 1413, 1414, 3, 68, 26, 0, 1414, 1415, 1, 0, 0, 0, 1415, 1416, 6, 171, 11, 0, 1416, 359, 1, 0, 0, 0, 1417, 1418, 3, 70, 27, 0, 1418, 1419, 1, 0, 0, 0, 1419, 1420, 6, 172, 11, 0, 1420, 361, 1, 0, 0, 0, 1421, 1422, 3, 72, 28, 0, 1422, 1423, 1, 0, 0, 0, 1423, 1424, 6, 173, 16, 0, 1424, 1425, 6, 173, 12, 0, 1425, 363, 1, 0, 0, 0, 1426, 1427, 3, 110, 47, 0, 1427, 1428, 1, 0, 0, 0, 1428, 1429, 6, 174, 18, 0, 1429, 365, 1, 0, 0, 0, 1430, 1431, 3, 112, 48, 0, 1431, 1432, 1, 0, 0, 0, 1432, 1433, 6, 175, 19, 0, 1433, 367, 1, 0, 0, 0, 1434, 1435, 3, 116, 50, 0, 1435, 1436, 1, 0, 0, 0, 1436, 1437, 6, 176, 23, 0, 1437, 369, 1, 0, 0, 0, 1438, 1439, 3, 282, 133, 0, 1439, 1440, 1, 0, 0, 0, 1440, 1441, 6, 177, 33, 0, 1441, 1442, 6, 177, 34, 0, 1442, 371, 1, 0, 0, 0, 1443, 1444, 3, 222, 103, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 178, 21, 0, 1446, 373, 1, 0, 0, 0, 1447, 1448, 3, 94, 39, 0, 1448, 1449, 1, 0, 0, 0, 1449, 1450, 6, 179, 22, 0, 1450, 375, 1, 0, 0, 0, 1451, 1452, 3, 66, 25, 0, 1452, 1453, 1, 0, 0, 0, 1453, 1454, 6, 180, 11, 0, 1454, 377, 1, 0, 0, 0, 1455, 1456, 3, 68, 26, 0, 1456, 1457, 1, 0, 0, 0, 1457, 1458, 6, 181, 11, 0, 1458, 379, 1, 0, 0, 0, 1459, 1460, 3, 70, 27, 0, 1460, 1461, 1, 0, 0, 0, 1461, 1462, 6, 182, 11, 0, 1462, 381, 1, 0, 0, 0, 1463, 1464, 3, 72, 28, 0, 1464, 1465, 1, 0, 0, 0, 1465, 1466, 6, 183, 16, 0, 1466, 1467, 6, 183, 12, 0, 1467, 1468, 6, 183, 12, 0, 1468, 383, 1, 0, 0, 0, 1469, 
1470, 3, 112, 48, 0, 1470, 1471, 1, 0, 0, 0, 1471, 1472, 6, 184, 19, 0, 1472, 385, 1, 0, 0, 0, 1473, 1474, 3, 116, 50, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 185, 23, 0, 1476, 387, 1, 0, 0, 0, 1477, 1478, 3, 248, 116, 0, 1478, 1479, 1, 0, 0, 0, 1479, 1480, 6, 186, 26, 0, 1480, 389, 1, 0, 0, 0, 1481, 1482, 3, 66, 25, 0, 1482, 1483, 1, 0, 0, 0, 1483, 1484, 6, 187, 11, 0, 1484, 391, 1, 0, 0, 0, 1485, 1486, 3, 68, 26, 0, 1486, 1487, 1, 0, 0, 0, 1487, 1488, 6, 188, 11, 0, 1488, 393, 1, 0, 0, 0, 1489, 1490, 3, 70, 27, 0, 1490, 1491, 1, 0, 0, 0, 1491, 1492, 6, 189, 11, 0, 1492, 395, 1, 0, 0, 0, 1493, 1494, 3, 72, 28, 0, 1494, 1495, 1, 0, 0, 0, 1495, 1496, 6, 190, 16, 0, 1496, 1497, 6, 190, 12, 0, 1497, 397, 1, 0, 0, 0, 1498, 1499, 3, 54, 19, 0, 1499, 1500, 1, 0, 0, 0, 1500, 1501, 6, 191, 35, 0, 1501, 399, 1, 0, 0, 0, 1502, 1503, 3, 268, 126, 0, 1503, 1504, 1, 0, 0, 0, 1504, 1505, 6, 192, 36, 0, 1505, 401, 1, 0, 0, 0, 1506, 1507, 3, 282, 133, 0, 1507, 1508, 1, 0, 0, 0, 1508, 1509, 6, 193, 33, 0, 1509, 1510, 6, 193, 12, 0, 1510, 1511, 6, 193, 0, 0, 1511, 403, 1, 0, 0, 0, 1512, 1513, 7, 20, 0, 0, 1513, 1514, 7, 2, 0, 0, 1514, 1515, 7, 1, 0, 0, 1515, 1516, 7, 9, 0, 0, 1516, 1517, 7, 17, 0, 0, 1517, 1518, 1, 0, 0, 0, 1518, 1519, 6, 194, 12, 0, 1519, 1520, 6, 194, 0, 0, 1520, 405, 1, 0, 0, 0, 1521, 1522, 3, 222, 103, 0, 1522, 1523, 1, 0, 0, 0, 1523, 1524, 6, 195, 21, 0, 1524, 407, 1, 0, 0, 0, 1525, 1526, 3, 94, 39, 0, 1526, 1527, 1, 0, 0, 0, 1527, 1528, 6, 196, 22, 0, 1528, 409, 1, 0, 0, 0, 1529, 1530, 3, 110, 47, 0, 1530, 1531, 1, 0, 0, 0, 1531, 1532, 6, 197, 18, 0, 1532, 411, 1, 0, 0, 0, 1533, 1534, 3, 184, 84, 0, 1534, 1535, 1, 0, 0, 0, 1535, 1536, 6, 198, 32, 0, 1536, 413, 1, 0, 0, 0, 1537, 1538, 3, 188, 86, 0, 1538, 1539, 1, 0, 0, 0, 1539, 1540, 6, 199, 31, 0, 1540, 415, 1, 0, 0, 0, 1541, 1542, 3, 66, 25, 0, 1542, 1543, 1, 0, 0, 0, 1543, 1544, 6, 200, 11, 0, 1544, 417, 1, 0, 0, 0, 1545, 1546, 3, 68, 26, 0, 1546, 1547, 1, 0, 0, 0, 1547, 1548, 6, 201, 11, 0, 1548, 419, 1, 0, 0, 0, 1549, 1550, 3, 70, 27, 0, 1550, 1551, 1, 0, 0, 0, 1551, 1552, 6, 202, 11, 0, 1552, 421, 1, 0, 0, 0, 1553, 1554, 3, 72, 28, 0, 1554, 1555, 1, 0, 0, 0, 1555, 1556, 6, 203, 16, 0, 1556, 1557, 6, 203, 12, 0, 1557, 423, 1, 0, 0, 0, 1558, 1559, 3, 222, 103, 0, 1559, 1560, 1, 0, 0, 0, 1560, 1561, 6, 204, 21, 0, 1561, 1562, 6, 204, 12, 0, 1562, 1563, 6, 204, 37, 0, 1563, 425, 1, 0, 0, 0, 1564, 1565, 3, 94, 39, 0, 1565, 1566, 1, 0, 0, 0, 1566, 1567, 6, 205, 22, 0, 1567, 1568, 6, 205, 12, 0, 1568, 1569, 6, 205, 37, 0, 1569, 427, 1, 0, 0, 0, 1570, 1571, 3, 66, 25, 0, 1571, 1572, 1, 0, 0, 0, 1572, 1573, 6, 206, 11, 0, 1573, 429, 1, 0, 0, 0, 1574, 1575, 3, 68, 26, 0, 1575, 1576, 1, 0, 0, 0, 1576, 1577, 6, 207, 11, 0, 1577, 431, 1, 0, 0, 0, 1578, 1579, 3, 70, 27, 0, 1579, 1580, 1, 0, 0, 0, 1580, 1581, 6, 208, 11, 0, 1581, 433, 1, 0, 0, 0, 1582, 1583, 3, 110, 47, 0, 1583, 1584, 1, 0, 0, 0, 1584, 1585, 6, 209, 18, 0, 1585, 1586, 6, 209, 12, 0, 1586, 1587, 6, 209, 9, 0, 1587, 435, 1, 0, 0, 0, 1588, 1589, 3, 112, 48, 0, 1589, 1590, 1, 0, 0, 0, 1590, 1591, 6, 210, 19, 0, 1591, 1592, 6, 210, 12, 0, 1592, 1593, 6, 210, 9, 0, 1593, 437, 1, 0, 0, 0, 1594, 1595, 3, 66, 25, 0, 1595, 1596, 1, 0, 0, 0, 1596, 1597, 6, 211, 11, 0, 1597, 439, 1, 0, 0, 0, 1598, 1599, 3, 68, 26, 0, 1599, 1600, 1, 0, 0, 0, 1600, 1601, 6, 212, 11, 0, 1601, 441, 1, 0, 0, 0, 1602, 1603, 3, 70, 27, 0, 1603, 1604, 1, 0, 0, 0, 1604, 1605, 6, 213, 11, 0, 1605, 443, 1, 0, 0, 0, 1606, 1607, 3, 188, 86, 0, 1607, 1608, 1, 0, 0, 0, 1608, 1609, 6, 214, 12, 0, 1609, 1610, 6, 214, 0, 
0, 1610, 1611, 6, 214, 31, 0, 1611, 445, 1, 0, 0, 0, 1612, 1613, 3, 184, 84, 0, 1613, 1614, 1, 0, 0, 0, 1614, 1615, 6, 215, 12, 0, 1615, 1616, 6, 215, 0, 0, 1616, 1617, 6, 215, 32, 0, 1617, 447, 1, 0, 0, 0, 1618, 1619, 3, 100, 42, 0, 1619, 1620, 1, 0, 0, 0, 1620, 1621, 6, 216, 12, 0, 1621, 1622, 6, 216, 0, 0, 1622, 1623, 6, 216, 38, 0, 1623, 449, 1, 0, 0, 0, 1624, 1625, 3, 72, 28, 0, 1625, 1626, 1, 0, 0, 0, 1626, 1627, 6, 217, 16, 0, 1627, 1628, 6, 217, 12, 0, 1628, 451, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 665, 675, 679, 682, 691, 693, 704, 723, 728, 737, 744, 749, 751, 762, 770, 773, 775, 780, 785, 791, 798, 803, 809, 812, 820, 824, 958, 963, 970, 972, 988, 993, 998, 1000, 1006, 1083, 1088, 1137, 1141, 1146, 1151, 1156, 1158, 1162, 1164, 1251, 1255, 1260, 1405, 1407, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 72, 0, 5, 0, 0, 7, 29, 0, 7, 73, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 83, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 71, 0, 7, 87, 0, 5, 10, 0, 5, 7, 0, 7, 97, 0, 7, 96, 0, 7, 75, 0, 7, 74, 0, 7, 95, 0, 5, 12, 0, 7, 20, 0, 7, 91, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 737f0465e1ab6..eb8af91bef274 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -109,13 +109,13 @@ private static String[] makeRuleNames() { "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA", "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "JOIN_PIPE", "JOIN_JOIN", - "JOIN_AS", "JOIN_ON", "USING", "JOIN_UNQUOTED_IDENTIFER", "JOIN_QUOTED_IDENTIFIER", - "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_PIPE", - "METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", - "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", - "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER", - "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE" + "JOIN_AS", "JOIN_ON", "USING", "JOIN_UNQUOTED_SOURCE", "JOIN_QUOTED_SOURCE", + "JOIN_COLON", "JOIN_UNQUOTED_IDENTIFER", "JOIN_QUOTED_IDENTIFIER", "JOIN_LINE_COMMENT", + "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_PIPE", "METRICS_UNQUOTED_SOURCE", + "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", + "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", "CLOSING_METRICS_LINE_COMMENT", + "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", + "CLOSING_METRICS_UNQUOTED_IDENTIFIER", "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE" }; } public static final String[] ruleNames = makeRuleNames(); @@ -397,7 +397,7 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx } public static final String _serializedATN = - "\u0004\u0000\u0082\u064b\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\u0004\u0000\u0082\u065d\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ 
"\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ @@ -459,7 +459,8 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u00cb\u0002\u00cc\u0007\u00cc\u0002\u00cd\u0007\u00cd\u0002\u00ce\u0007"+ "\u00ce\u0002\u00cf\u0007\u00cf\u0002\u00d0\u0007\u00d0\u0002\u00d1\u0007"+ "\u00d1\u0002\u00d2\u0007\u00d2\u0002\u00d3\u0007\u00d3\u0002\u00d4\u0007"+ - "\u00d4\u0002\u00d5\u0007\u00d5\u0002\u00d6\u0007\u00d6\u0001\u0000\u0001"+ + "\u00d4\u0002\u00d5\u0007\u00d5\u0002\u00d6\u0007\u00d6\u0002\u00d7\u0007"+ + "\u00d7\u0002\u00d8\u0007\u00d8\u0002\u00d9\u0007\u00d9\u0001\u0000\u0001"+ "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ "\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001"+ @@ -492,28 +493,28 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001"+ "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0004\u0018\u0292\b\u0018\u000b"+ - "\u0018\f\u0018\u0293\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0001\u0019\u0005\u0019\u029c\b\u0019\n\u0019\f\u0019\u029f\t\u0019"+ - "\u0001\u0019\u0003\u0019\u02a2\b\u0019\u0001\u0019\u0003\u0019\u02a5\b"+ + "\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0004\u0018\u0298\b\u0018\u000b"+ + "\u0018\f\u0018\u0299\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0001\u0019\u0005\u0019\u02a2\b\u0019\n\u0019\f\u0019\u02a5\t\u0019"+ + "\u0001\u0019\u0003\u0019\u02a8\b\u0019\u0001\u0019\u0003\u0019\u02ab\b"+ "\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0005\u001a\u02ae\b\u001a\n\u001a\f\u001a\u02b1\t\u001a"+ + "\u001a\u0001\u001a\u0005\u001a\u02b4\b\u001a\n\u001a\f\u001a\u02b7\t\u001a"+ "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b"+ - "\u0004\u001b\u02b9\b\u001b\u000b\u001b\f\u001b\u02ba\u0001\u001b\u0001"+ + "\u0004\u001b\u02bf\b\u001b\u000b\u001b\f\u001b\u02c0\u0001\u001b\u0001"+ "\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001d\u0001"+ "\u001d\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ - " \u0001 \u0001!\u0001!\u0003!\u02ce\b!\u0001!\u0004!\u02d1\b!\u000b!\f"+ - "!\u02d2\u0001\"\u0001\"\u0001#\u0001#\u0001$\u0001$\u0001$\u0003$\u02dc"+ - "\b$\u0001%\u0001%\u0001&\u0001&\u0001&\u0003&\u02e3\b&\u0001\'\u0001\'"+ - "\u0001\'\u0005\'\u02e8\b\'\n\'\f\'\u02eb\t\'\u0001\'\u0001\'\u0001\'\u0001"+ - "\'\u0001\'\u0001\'\u0005\'\u02f3\b\'\n\'\f\'\u02f6\t\'\u0001\'\u0001\'"+ - "\u0001\'\u0001\'\u0001\'\u0003\'\u02fd\b\'\u0001\'\u0003\'\u0300\b\'\u0003"+ - "\'\u0302\b\'\u0001(\u0004(\u0305\b(\u000b(\f(\u0306\u0001)\u0004)\u030a"+ - "\b)\u000b)\f)\u030b\u0001)\u0001)\u0005)\u0310\b)\n)\f)\u0313\t)\u0001"+ - ")\u0001)\u0004)\u0317\b)\u000b)\f)\u0318\u0001)\u0004)\u031c\b)\u000b"+ - ")\f)\u031d\u0001)\u0001)\u0005)\u0322\b)\n)\f)\u0325\t)\u0003)\u0327\b"+ - ")\u0001)\u0001)\u0001)\u0001)\u0004)\u032d\b)\u000b)\f)\u032e\u0001)\u0001"+ - ")\u0003)\u0333\b)\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001"+ + " \u0001 \u0001!\u0001!\u0003!\u02d4\b!\u0001!\u0004!\u02d7\b!\u000b!\f"+ + 
"!\u02d8\u0001\"\u0001\"\u0001#\u0001#\u0001$\u0001$\u0001$\u0003$\u02e2"+ + "\b$\u0001%\u0001%\u0001&\u0001&\u0001&\u0003&\u02e9\b&\u0001\'\u0001\'"+ + "\u0001\'\u0005\'\u02ee\b\'\n\'\f\'\u02f1\t\'\u0001\'\u0001\'\u0001\'\u0001"+ + "\'\u0001\'\u0001\'\u0005\'\u02f9\b\'\n\'\f\'\u02fc\t\'\u0001\'\u0001\'"+ + "\u0001\'\u0001\'\u0001\'\u0003\'\u0303\b\'\u0001\'\u0003\'\u0306\b\'\u0003"+ + "\'\u0308\b\'\u0001(\u0004(\u030b\b(\u000b(\f(\u030c\u0001)\u0004)\u0310"+ + "\b)\u000b)\f)\u0311\u0001)\u0001)\u0005)\u0316\b)\n)\f)\u0319\t)\u0001"+ + ")\u0001)\u0004)\u031d\b)\u000b)\f)\u031e\u0001)\u0004)\u0322\b)\u000b"+ + ")\f)\u0323\u0001)\u0001)\u0005)\u0328\b)\n)\f)\u032b\t)\u0003)\u032d\b"+ + ")\u0001)\u0001)\u0001)\u0001)\u0004)\u0333\b)\u000b)\f)\u0334\u0001)\u0001"+ + ")\u0003)\u0339\b)\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001"+ ",\u0001,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001"+ "/\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u0001"+ "3\u00013\u00013\u00013\u00013\u00013\u00014\u00014\u00014\u00014\u0001"+ @@ -526,12 +527,12 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001"+ "H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001"+ "M\u0001M\u0001N\u0001N\u0001N\u0001O\u0001O\u0001O\u0001P\u0001P\u0001"+ - "P\u0001P\u0001Q\u0001Q\u0001Q\u0003Q\u03b9\bQ\u0001Q\u0005Q\u03bc\bQ\n"+ - "Q\fQ\u03bf\tQ\u0001Q\u0001Q\u0004Q\u03c3\bQ\u000bQ\fQ\u03c4\u0003Q\u03c7"+ + "P\u0001P\u0001Q\u0001Q\u0001Q\u0003Q\u03bf\bQ\u0001Q\u0005Q\u03c2\bQ\n"+ + "Q\fQ\u03c5\tQ\u0001Q\u0001Q\u0004Q\u03c9\bQ\u000bQ\fQ\u03ca\u0003Q\u03cd"+ "\bQ\u0001R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001"+ - "S\u0001T\u0001T\u0005T\u03d5\bT\nT\fT\u03d8\tT\u0001T\u0001T\u0003T\u03dc"+ - "\bT\u0001T\u0004T\u03df\bT\u000bT\fT\u03e0\u0003T\u03e3\bT\u0001U\u0001"+ - "U\u0004U\u03e7\bU\u000bU\fU\u03e8\u0001U\u0001U\u0001V\u0001V\u0001W\u0001"+ + "S\u0001T\u0001T\u0005T\u03db\bT\nT\fT\u03de\tT\u0001T\u0001T\u0003T\u03e2"+ + "\bT\u0001T\u0004T\u03e5\bT\u000bT\fT\u03e6\u0003T\u03e9\bT\u0001U\u0001"+ + "U\u0004U\u03ed\bU\u000bU\fU\u03ee\u0001U\u0001U\u0001V\u0001V\u0001W\u0001"+ "W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001"+ "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001"+ "[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001"+ @@ -539,15 +540,15 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001"+ "c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001"+ "e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0003"+ - "f\u0436\bf\u0001g\u0004g\u0439\bg\u000bg\fg\u043a\u0001h\u0001h\u0001"+ + "f\u043c\bf\u0001g\u0004g\u043f\bg\u000bg\fg\u0440\u0001h\u0001h\u0001"+ "h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001"+ "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ "m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001"+ "o\u0001p\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001"+ - "q\u0001r\u0001r\u0001r\u0001r\u0003r\u046c\br\u0001s\u0001s\u0003s\u0470"+ - "\bs\u0001s\u0005s\u0473\bs\ns\fs\u0476\ts\u0001s\u0001s\u0003s\u047a\b"+ - "s\u0001s\u0004s\u047d\bs\u000bs\fs\u047e\u0003s\u0481\bs\u0001t\u0001"+ - 
"t\u0004t\u0485\bt\u000bt\ft\u0486\u0001u\u0001u\u0001u\u0001u\u0001v\u0001"+ + "q\u0001r\u0001r\u0001r\u0001r\u0003r\u0472\br\u0001s\u0001s\u0003s\u0476"+ + "\bs\u0001s\u0005s\u0479\bs\ns\fs\u047c\ts\u0001s\u0001s\u0003s\u0480\b"+ + "s\u0001s\u0004s\u0483\bs\u000bs\fs\u0484\u0003s\u0487\bs\u0001t\u0001"+ + "t\u0004t\u048b\bt\u000bt\ft\u048c\u0001u\u0001u\u0001u\u0001u\u0001v\u0001"+ "v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001"+ "x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001"+ "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001"+ @@ -558,9 +559,9 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ "\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001"+ "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001"+ - "\u0087\u0001\u0087\u0001\u0088\u0004\u0088\u04dc\b\u0088\u000b\u0088\f"+ - "\u0088\u04dd\u0001\u0088\u0001\u0088\u0003\u0088\u04e2\b\u0088\u0001\u0088"+ - "\u0004\u0088\u04e5\b\u0088\u000b\u0088\f\u0088\u04e6\u0001\u0089\u0001"+ + "\u0087\u0001\u0087\u0001\u0088\u0004\u0088\u04e2\b\u0088\u000b\u0088\f"+ + "\u0088\u04e3\u0001\u0088\u0001\u0088\u0003\u0088\u04e8\b\u0088\u0001\u0088"+ + "\u0004\u0088\u04eb\b\u0088\u000b\u0088\f\u0088\u04ec\u0001\u0089\u0001"+ "\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001"+ "\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001"+ "\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001"+ @@ -584,8 +585,8 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ "\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001"+ - "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0004\u00a9\u0578\b\u00a9\u000b"+ - "\u00a9\f\u00a9\u0579\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ + "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0004\u00a9\u057e\b\u00a9\u000b"+ + "\u00a9\f\u00a9\u057f\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ "\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001"+ "\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001"+ "\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001"+ @@ -608,19 +609,21 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u00c4\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001"+ "\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7\u0001"+ "\u00c7\u0001\u00c7\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001"+ - "\u00c8\u0001\u00c9\u0001\u00c9\u0001\u00c9\u0001\u00c9\u0001\u00c9\u0001"+ - "\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001"+ - "\u00ca\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cc\u0001"+ - "\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001"+ - "\u00cd\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001"+ - "\u00ce\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001"+ - "\u00cf\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d1\u0001"+ - "\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001"+ - "\u00d2\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001"+ - 
"\u00d3\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0001"+ - "\u00d4\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001"+ - "\u00d5\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0002"+ - "\u02af\u02f4\u0000\u00d7\u0010\u0001\u0012\u0002\u0014\u0003\u0016\u0004"+ + "\u00c9\u0001\u00c9\u0001\u00c9\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001"+ + "\u00ca\u0001\u00ca\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001"+ + "\u00cb\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001"+ + "\u00cc\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001"+ + "\u00cd\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00cf\u0001"+ + "\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001"+ + "\u00d0\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001"+ + "\u00d1\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001"+ + "\u00d2\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001"+ + "\u00d4\u0001\u00d4\u0001\u00d4\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001"+ + "\u00d5\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001"+ + "\u00d6\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001"+ + "\u00d7\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001"+ + "\u00d8\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0002"+ + "\u02b5\u02fa\u0000\u00da\u0010\u0001\u0012\u0002\u0014\u0003\u0016\u0004"+ "\u0018\u0005\u001a\u0006\u001c\u0007\u001e\b \t\"\n$\u000b&\f(\r*\u000e"+ ",\u000f.\u00100\u00112\u00124\u00136\u00148\u0015:\u0016<\u0017>\u0018"+ "@\u0019B\u001aD\u001bF\u001cH\u001dJ\u0000L\u0000N\u0000P\u0000R\u0000"+ @@ -644,809 +647,818 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u0172\u0000\u0174\u0000\u0176\u0000\u0178s\u017at\u017cu\u017e\u0000"+ "\u0180\u0000\u0182\u0000\u0184\u0000\u0186v\u0188w\u018ax\u018c\u0000"+ "\u018e\u0000\u0190\u0000\u0192\u0000\u0194y\u0196\u0000\u0198\u0000\u019a"+ - "z\u019c{\u019e|\u01a0\u0000\u01a2\u0000\u01a4\u0000\u01a6}\u01a8~\u01aa"+ - "\u007f\u01ac\u0000\u01ae\u0000\u01b0\u0080\u01b2\u0081\u01b4\u0082\u01b6"+ - "\u0000\u01b8\u0000\u01ba\u0000\u01bc\u0000\u0010\u0000\u0001\u0002\u0003"+ - "\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f$\u0002\u0000DDdd"+ - "\u0002\u0000IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002"+ - "\u0000TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002\u0000PPpp\u0002\u0000"+ - "NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002"+ - "\u0000XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002\u0000GGgg\u0002\u0000"+ - "KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0002\u0000JJjj\u0006\u0000\t\n\r"+ - "\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002"+ - "\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002"+ - "\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t"+ - "\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,"+ - "//::<<>?\\\\||\u0666\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001"+ - "\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001"+ - "\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001"+ - "\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001"+ - "\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000"+ - "\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000"+ - 
"\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,"+ - "\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000"+ - "\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000"+ - "\u00006\u0001\u0000\u0000\u0000\u00008\u0001\u0000\u0000\u0000\u0000:"+ - "\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000"+ - "\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000\u0000"+ - "\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001\u0000\u0000\u0000\u0001H"+ - "\u0001\u0000\u0000\u0000\u0001^\u0001\u0000\u0000\u0000\u0001`\u0001\u0000"+ - "\u0000\u0000\u0001b\u0001\u0000\u0000\u0000\u0001d\u0001\u0000\u0000\u0000"+ - "\u0001f\u0001\u0000\u0000\u0000\u0001h\u0001\u0000\u0000\u0000\u0001j"+ - "\u0001\u0000\u0000\u0000\u0001l\u0001\u0000\u0000\u0000\u0001n\u0001\u0000"+ - "\u0000\u0000\u0001p\u0001\u0000\u0000\u0000\u0001r\u0001\u0000\u0000\u0000"+ - "\u0001t\u0001\u0000\u0000\u0000\u0001v\u0001\u0000\u0000\u0000\u0001x"+ - "\u0001\u0000\u0000\u0000\u0001z\u0001\u0000\u0000\u0000\u0001|\u0001\u0000"+ - "\u0000\u0000\u0001~\u0001\u0000\u0000\u0000\u0001\u0080\u0001\u0000\u0000"+ - "\u0000\u0001\u0082\u0001\u0000\u0000\u0000\u0001\u0084\u0001\u0000\u0000"+ - "\u0000\u0001\u0086\u0001\u0000\u0000\u0000\u0001\u0088\u0001\u0000\u0000"+ - "\u0000\u0001\u008a\u0001\u0000\u0000\u0000\u0001\u008c\u0001\u0000\u0000"+ - "\u0000\u0001\u008e\u0001\u0000\u0000\u0000\u0001\u0090\u0001\u0000\u0000"+ - "\u0000\u0001\u0092\u0001\u0000\u0000\u0000\u0001\u0094\u0001\u0000\u0000"+ - "\u0000\u0001\u0096\u0001\u0000\u0000\u0000\u0001\u0098\u0001\u0000\u0000"+ - "\u0000\u0001\u009a\u0001\u0000\u0000\u0000\u0001\u009c\u0001\u0000\u0000"+ - "\u0000\u0001\u009e\u0001\u0000\u0000\u0000\u0001\u00a0\u0001\u0000\u0000"+ - "\u0000\u0001\u00a2\u0001\u0000\u0000\u0000\u0001\u00a4\u0001\u0000\u0000"+ - "\u0000\u0001\u00a6\u0001\u0000\u0000\u0000\u0001\u00a8\u0001\u0000\u0000"+ - "\u0000\u0001\u00aa\u0001\u0000\u0000\u0000\u0001\u00ac\u0001\u0000\u0000"+ - "\u0000\u0001\u00ae\u0001\u0000\u0000\u0000\u0001\u00b0\u0001\u0000\u0000"+ - "\u0000\u0001\u00b2\u0001\u0000\u0000\u0000\u0001\u00b4\u0001\u0000\u0000"+ - "\u0000\u0001\u00b6\u0001\u0000\u0000\u0000\u0001\u00b8\u0001\u0000\u0000"+ - "\u0000\u0001\u00bc\u0001\u0000\u0000\u0000\u0001\u00be\u0001\u0000\u0000"+ - "\u0000\u0001\u00c0\u0001\u0000\u0000\u0000\u0001\u00c2\u0001\u0000\u0000"+ - "\u0000\u0002\u00c4\u0001\u0000\u0000\u0000\u0002\u00c6\u0001\u0000\u0000"+ - "\u0000\u0002\u00c8\u0001\u0000\u0000\u0000\u0002\u00ca\u0001\u0000\u0000"+ - "\u0000\u0002\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001\u0000\u0000"+ - "\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003\u00d2\u0001\u0000\u0000"+ - "\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003\u00d6\u0001\u0000\u0000"+ - "\u0000\u0003\u00d8\u0001\u0000\u0000\u0000\u0003\u00da\u0001\u0000\u0000"+ - "\u0000\u0003\u00de\u0001\u0000\u0000\u0000\u0003\u00e0\u0001\u0000\u0000"+ - "\u0000\u0003\u00e2\u0001\u0000\u0000\u0000\u0003\u00e4\u0001\u0000\u0000"+ - "\u0000\u0003\u00e6\u0001\u0000\u0000\u0000\u0003\u00e8\u0001\u0000\u0000"+ - "\u0000\u0004\u00ea\u0001\u0000\u0000\u0000\u0004\u00ec\u0001\u0000\u0000"+ - "\u0000\u0004\u00ee\u0001\u0000\u0000\u0000\u0004\u00f0\u0001\u0000\u0000"+ - "\u0000\u0004\u00f2\u0001\u0000\u0000\u0000\u0004\u00f8\u0001\u0000\u0000"+ - "\u0000\u0004\u00fa\u0001\u0000\u0000\u0000\u0004\u00fc\u0001\u0000\u0000"+ - "\u0000\u0004\u00fe\u0001\u0000\u0000\u0000\u0005\u0100\u0001\u0000\u0000"+ - 
"\u0000\u0005\u0102\u0001\u0000\u0000\u0000\u0005\u0104\u0001\u0000\u0000"+ - "\u0000\u0005\u0106\u0001\u0000\u0000\u0000\u0005\u0108\u0001\u0000\u0000"+ - "\u0000\u0005\u010a\u0001\u0000\u0000\u0000\u0005\u010c\u0001\u0000\u0000"+ - "\u0000\u0005\u010e\u0001\u0000\u0000\u0000\u0005\u0110\u0001\u0000\u0000"+ - "\u0000\u0005\u0112\u0001\u0000\u0000\u0000\u0005\u0114\u0001\u0000\u0000"+ - "\u0000\u0006\u0116\u0001\u0000\u0000\u0000\u0006\u0118\u0001\u0000\u0000"+ - "\u0000\u0006\u011a\u0001\u0000\u0000\u0000\u0006\u011c\u0001\u0000\u0000"+ - "\u0000\u0006\u0120\u0001\u0000\u0000\u0000\u0006\u0122\u0001\u0000\u0000"+ - "\u0000\u0006\u0124\u0001\u0000\u0000\u0000\u0006\u0126\u0001\u0000\u0000"+ - "\u0000\u0006\u0128\u0001\u0000\u0000\u0000\u0007\u012a\u0001\u0000\u0000"+ - "\u0000\u0007\u012c\u0001\u0000\u0000\u0000\u0007\u012e\u0001\u0000\u0000"+ - "\u0000\u0007\u0130\u0001\u0000\u0000\u0000\u0007\u0132\u0001\u0000\u0000"+ - "\u0000\u0007\u0134\u0001\u0000\u0000\u0000\u0007\u0136\u0001\u0000\u0000"+ - "\u0000\u0007\u0138\u0001\u0000\u0000\u0000\u0007\u013a\u0001\u0000\u0000"+ - "\u0000\u0007\u013c\u0001\u0000\u0000\u0000\u0007\u013e\u0001\u0000\u0000"+ - "\u0000\u0007\u0140\u0001\u0000\u0000\u0000\b\u0142\u0001\u0000\u0000\u0000"+ - "\b\u0144\u0001\u0000\u0000\u0000\b\u0146\u0001\u0000\u0000\u0000\b\u0148"+ - "\u0001\u0000\u0000\u0000\b\u014a\u0001\u0000\u0000\u0000\b\u014c\u0001"+ - "\u0000\u0000\u0000\b\u014e\u0001\u0000\u0000\u0000\b\u0150\u0001\u0000"+ - "\u0000\u0000\b\u0152\u0001\u0000\u0000\u0000\t\u0154\u0001\u0000\u0000"+ - "\u0000\t\u0156\u0001\u0000\u0000\u0000\t\u0158\u0001\u0000\u0000\u0000"+ - "\t\u015a\u0001\u0000\u0000\u0000\t\u015c\u0001\u0000\u0000\u0000\n\u015e"+ - "\u0001\u0000\u0000\u0000\n\u0160\u0001\u0000\u0000\u0000\n\u0162\u0001"+ - "\u0000\u0000\u0000\n\u0164\u0001\u0000\u0000\u0000\n\u0166\u0001\u0000"+ - "\u0000\u0000\n\u0168\u0001\u0000\u0000\u0000\u000b\u016a\u0001\u0000\u0000"+ - "\u0000\u000b\u016c\u0001\u0000\u0000\u0000\u000b\u016e\u0001\u0000\u0000"+ - "\u0000\u000b\u0170\u0001\u0000\u0000\u0000\u000b\u0172\u0001\u0000\u0000"+ - "\u0000\u000b\u0174\u0001\u0000\u0000\u0000\u000b\u0176\u0001\u0000\u0000"+ - "\u0000\u000b\u0178\u0001\u0000\u0000\u0000\u000b\u017a\u0001\u0000\u0000"+ - "\u0000\u000b\u017c\u0001\u0000\u0000\u0000\f\u017e\u0001\u0000\u0000\u0000"+ - "\f\u0180\u0001\u0000\u0000\u0000\f\u0182\u0001\u0000\u0000\u0000\f\u0184"+ - "\u0001\u0000\u0000\u0000\f\u0186\u0001\u0000\u0000\u0000\f\u0188\u0001"+ - "\u0000\u0000\u0000\f\u018a\u0001\u0000\u0000\u0000\r\u018c\u0001\u0000"+ - "\u0000\u0000\r\u018e\u0001\u0000\u0000\u0000\r\u0190\u0001\u0000\u0000"+ - "\u0000\r\u0192\u0001\u0000\u0000\u0000\r\u0194\u0001\u0000\u0000\u0000"+ - "\r\u0196\u0001\u0000\u0000\u0000\r\u0198\u0001\u0000\u0000\u0000\r\u019a"+ - "\u0001\u0000\u0000\u0000\r\u019c\u0001\u0000\u0000\u0000\r\u019e\u0001"+ - "\u0000\u0000\u0000\u000e\u01a0\u0001\u0000\u0000\u0000\u000e\u01a2\u0001"+ - "\u0000\u0000\u0000\u000e\u01a4\u0001\u0000\u0000\u0000\u000e\u01a6\u0001"+ - "\u0000\u0000\u0000\u000e\u01a8\u0001\u0000\u0000\u0000\u000e\u01aa\u0001"+ - "\u0000\u0000\u0000\u000f\u01ac\u0001\u0000\u0000\u0000\u000f\u01ae\u0001"+ - "\u0000\u0000\u0000\u000f\u01b0\u0001\u0000\u0000\u0000\u000f\u01b2\u0001"+ - "\u0000\u0000\u0000\u000f\u01b4\u0001\u0000\u0000\u0000\u000f\u01b6\u0001"+ - "\u0000\u0000\u0000\u000f\u01b8\u0001\u0000\u0000\u0000\u000f\u01ba\u0001"+ - "\u0000\u0000\u0000\u000f\u01bc\u0001\u0000\u0000\u0000\u0010\u01be\u0001"+ - 
"\u0000\u0000\u0000\u0012\u01c8\u0001\u0000\u0000\u0000\u0014\u01cf\u0001"+ - "\u0000\u0000\u0000\u0016\u01d8\u0001\u0000\u0000\u0000\u0018\u01df\u0001"+ - "\u0000\u0000\u0000\u001a\u01e9\u0001\u0000\u0000\u0000\u001c\u01f0\u0001"+ - "\u0000\u0000\u0000\u001e\u01f7\u0001\u0000\u0000\u0000 \u01fe\u0001\u0000"+ - "\u0000\u0000\"\u0206\u0001\u0000\u0000\u0000$\u0212\u0001\u0000\u0000"+ - "\u0000&\u021b\u0001\u0000\u0000\u0000(\u0221\u0001\u0000\u0000\u0000*"+ - "\u0228\u0001\u0000\u0000\u0000,\u022f\u0001\u0000\u0000\u0000.\u0237\u0001"+ - "\u0000\u0000\u00000\u023f\u0001\u0000\u0000\u00002\u024e\u0001\u0000\u0000"+ - "\u00004\u025a\u0001\u0000\u0000\u00006\u0265\u0001\u0000\u0000\u00008"+ - "\u026d\u0001\u0000\u0000\u0000:\u0275\u0001\u0000\u0000\u0000<\u027d\u0001"+ - "\u0000\u0000\u0000>\u0286\u0001\u0000\u0000\u0000@\u0291\u0001\u0000\u0000"+ - "\u0000B\u0297\u0001\u0000\u0000\u0000D\u02a8\u0001\u0000\u0000\u0000F"+ - "\u02b8\u0001\u0000\u0000\u0000H\u02be\u0001\u0000\u0000\u0000J\u02c2\u0001"+ - "\u0000\u0000\u0000L\u02c4\u0001\u0000\u0000\u0000N\u02c6\u0001\u0000\u0000"+ - "\u0000P\u02c9\u0001\u0000\u0000\u0000R\u02cb\u0001\u0000\u0000\u0000T"+ - "\u02d4\u0001\u0000\u0000\u0000V\u02d6\u0001\u0000\u0000\u0000X\u02db\u0001"+ - "\u0000\u0000\u0000Z\u02dd\u0001\u0000\u0000\u0000\\\u02e2\u0001\u0000"+ - "\u0000\u0000^\u0301\u0001\u0000\u0000\u0000`\u0304\u0001\u0000\u0000\u0000"+ - "b\u0332\u0001\u0000\u0000\u0000d\u0334\u0001\u0000\u0000\u0000f\u0337"+ - "\u0001\u0000\u0000\u0000h\u033b\u0001\u0000\u0000\u0000j\u033f\u0001\u0000"+ - "\u0000\u0000l\u0341\u0001\u0000\u0000\u0000n\u0344\u0001\u0000\u0000\u0000"+ - "p\u0346\u0001\u0000\u0000\u0000r\u0348\u0001\u0000\u0000\u0000t\u034d"+ - "\u0001\u0000\u0000\u0000v\u034f\u0001\u0000\u0000\u0000x\u0355\u0001\u0000"+ - "\u0000\u0000z\u035b\u0001\u0000\u0000\u0000|\u035e\u0001\u0000\u0000\u0000"+ - "~\u0361\u0001\u0000\u0000\u0000\u0080\u0366\u0001\u0000\u0000\u0000\u0082"+ - "\u036b\u0001\u0000\u0000\u0000\u0084\u036d\u0001\u0000\u0000\u0000\u0086"+ - "\u0371\u0001\u0000\u0000\u0000\u0088\u0376\u0001\u0000\u0000\u0000\u008a"+ - "\u037c\u0001\u0000\u0000\u0000\u008c\u037f\u0001\u0000\u0000\u0000\u008e"+ - "\u0381\u0001\u0000\u0000\u0000\u0090\u0387\u0001\u0000\u0000\u0000\u0092"+ - "\u0389\u0001\u0000\u0000\u0000\u0094\u038e\u0001\u0000\u0000\u0000\u0096"+ - "\u0391\u0001\u0000\u0000\u0000\u0098\u0394\u0001\u0000\u0000\u0000\u009a"+ - "\u0397\u0001\u0000\u0000\u0000\u009c\u0399\u0001\u0000\u0000\u0000\u009e"+ - "\u039c\u0001\u0000\u0000\u0000\u00a0\u039e\u0001\u0000\u0000\u0000\u00a2"+ - "\u03a1\u0001\u0000\u0000\u0000\u00a4\u03a3\u0001\u0000\u0000\u0000\u00a6"+ - "\u03a5\u0001\u0000\u0000\u0000\u00a8\u03a7\u0001\u0000\u0000\u0000\u00aa"+ - "\u03a9\u0001\u0000\u0000\u0000\u00ac\u03ab\u0001\u0000\u0000\u0000\u00ae"+ - "\u03ae\u0001\u0000\u0000\u0000\u00b0\u03b1\u0001\u0000\u0000\u0000\u00b2"+ - "\u03c6\u0001\u0000\u0000\u0000\u00b4\u03c8\u0001\u0000\u0000\u0000\u00b6"+ - "\u03cd\u0001\u0000\u0000\u0000\u00b8\u03e2\u0001\u0000\u0000\u0000\u00ba"+ - "\u03e4\u0001\u0000\u0000\u0000\u00bc\u03ec\u0001\u0000\u0000\u0000\u00be"+ - "\u03ee\u0001\u0000\u0000\u0000\u00c0\u03f2\u0001\u0000\u0000\u0000\u00c2"+ - "\u03f6\u0001\u0000\u0000\u0000\u00c4\u03fa\u0001\u0000\u0000\u0000\u00c6"+ - "\u03ff\u0001\u0000\u0000\u0000\u00c8\u0404\u0001\u0000\u0000\u0000\u00ca"+ - "\u0408\u0001\u0000\u0000\u0000\u00cc\u040c\u0001\u0000\u0000\u0000\u00ce"+ - "\u0410\u0001\u0000\u0000\u0000\u00d0\u0415\u0001\u0000\u0000\u0000\u00d2"+ - 
"\u0419\u0001\u0000\u0000\u0000\u00d4\u041d\u0001\u0000\u0000\u0000\u00d6"+ - "\u0421\u0001\u0000\u0000\u0000\u00d8\u0425\u0001\u0000\u0000\u0000\u00da"+ - "\u0429\u0001\u0000\u0000\u0000\u00dc\u0435\u0001\u0000\u0000\u0000\u00de"+ - "\u0438\u0001\u0000\u0000\u0000\u00e0\u043c\u0001\u0000\u0000\u0000\u00e2"+ - "\u0440\u0001\u0000\u0000\u0000\u00e4\u0444\u0001\u0000\u0000\u0000\u00e6"+ - "\u0448\u0001\u0000\u0000\u0000\u00e8\u044c\u0001\u0000\u0000\u0000\u00ea"+ - "\u0450\u0001\u0000\u0000\u0000\u00ec\u0455\u0001\u0000\u0000\u0000\u00ee"+ - "\u0459\u0001\u0000\u0000\u0000\u00f0\u045d\u0001\u0000\u0000\u0000\u00f2"+ - "\u0462\u0001\u0000\u0000\u0000\u00f4\u046b\u0001\u0000\u0000\u0000\u00f6"+ - "\u0480\u0001\u0000\u0000\u0000\u00f8\u0484\u0001\u0000\u0000\u0000\u00fa"+ - "\u0488\u0001\u0000\u0000\u0000\u00fc\u048c\u0001\u0000\u0000\u0000\u00fe"+ - "\u0490\u0001\u0000\u0000\u0000\u0100\u0494\u0001\u0000\u0000\u0000\u0102"+ - "\u0499\u0001\u0000\u0000\u0000\u0104\u049d\u0001\u0000\u0000\u0000\u0106"+ - "\u04a1\u0001\u0000\u0000\u0000\u0108\u04a5\u0001\u0000\u0000\u0000\u010a"+ - "\u04aa\u0001\u0000\u0000\u0000\u010c\u04af\u0001\u0000\u0000\u0000\u010e"+ - "\u04b2\u0001\u0000\u0000\u0000\u0110\u04b6\u0001\u0000\u0000\u0000\u0112"+ - "\u04ba\u0001\u0000\u0000\u0000\u0114\u04be\u0001\u0000\u0000\u0000\u0116"+ - "\u04c2\u0001\u0000\u0000\u0000\u0118\u04c7\u0001\u0000\u0000\u0000\u011a"+ - "\u04cc\u0001\u0000\u0000\u0000\u011c\u04d1\u0001\u0000\u0000\u0000\u011e"+ - "\u04d8\u0001\u0000\u0000\u0000\u0120\u04e1\u0001\u0000\u0000\u0000\u0122"+ - "\u04e8\u0001\u0000\u0000\u0000\u0124\u04ec\u0001\u0000\u0000\u0000\u0126"+ - "\u04f0\u0001\u0000\u0000\u0000\u0128\u04f4\u0001\u0000\u0000\u0000\u012a"+ - "\u04f8\u0001\u0000\u0000\u0000\u012c\u04fe\u0001\u0000\u0000\u0000\u012e"+ - "\u0502\u0001\u0000\u0000\u0000\u0130\u0506\u0001\u0000\u0000\u0000\u0132"+ - "\u050a\u0001\u0000\u0000\u0000\u0134\u050e\u0001\u0000\u0000\u0000\u0136"+ - "\u0512\u0001\u0000\u0000\u0000\u0138\u0516\u0001\u0000\u0000\u0000\u013a"+ - "\u051b\u0001\u0000\u0000\u0000\u013c\u0520\u0001\u0000\u0000\u0000\u013e"+ - "\u0524\u0001\u0000\u0000\u0000\u0140\u0528\u0001\u0000\u0000\u0000\u0142"+ - "\u052c\u0001\u0000\u0000\u0000\u0144\u0531\u0001\u0000\u0000\u0000\u0146"+ - "\u0535\u0001\u0000\u0000\u0000\u0148\u053a\u0001\u0000\u0000\u0000\u014a"+ - "\u053f\u0001\u0000\u0000\u0000\u014c\u0543\u0001\u0000\u0000\u0000\u014e"+ - "\u0547\u0001\u0000\u0000\u0000\u0150\u054b\u0001\u0000\u0000\u0000\u0152"+ - "\u054f\u0001\u0000\u0000\u0000\u0154\u0553\u0001\u0000\u0000\u0000\u0156"+ - "\u0558\u0001\u0000\u0000\u0000\u0158\u055d\u0001\u0000\u0000\u0000\u015a"+ - "\u0561\u0001\u0000\u0000\u0000\u015c\u0565\u0001\u0000\u0000\u0000\u015e"+ - "\u0569\u0001\u0000\u0000\u0000\u0160\u056e\u0001\u0000\u0000\u0000\u0162"+ - "\u0577\u0001\u0000\u0000\u0000\u0164\u057b\u0001\u0000\u0000\u0000\u0166"+ - "\u057f\u0001\u0000\u0000\u0000\u0168\u0583\u0001\u0000\u0000\u0000\u016a"+ - "\u0587\u0001\u0000\u0000\u0000\u016c\u058c\u0001\u0000\u0000\u0000\u016e"+ - "\u0590\u0001\u0000\u0000\u0000\u0170\u0594\u0001\u0000\u0000\u0000\u0172"+ - "\u0598\u0001\u0000\u0000\u0000\u0174\u059d\u0001\u0000\u0000\u0000\u0176"+ - "\u05a1\u0001\u0000\u0000\u0000\u0178\u05a5\u0001\u0000\u0000\u0000\u017a"+ - "\u05a9\u0001\u0000\u0000\u0000\u017c\u05ad\u0001\u0000\u0000\u0000\u017e"+ - "\u05b1\u0001\u0000\u0000\u0000\u0180\u05b7\u0001\u0000\u0000\u0000\u0182"+ - "\u05bb\u0001\u0000\u0000\u0000\u0184\u05bf\u0001\u0000\u0000\u0000\u0186"+ - 
"\u05c3\u0001\u0000\u0000\u0000\u0188\u05c7\u0001\u0000\u0000\u0000\u018a"+ - "\u05cb\u0001\u0000\u0000\u0000\u018c\u05cf\u0001\u0000\u0000\u0000\u018e"+ - "\u05d4\u0001\u0000\u0000\u0000\u0190\u05d8\u0001\u0000\u0000\u0000\u0192"+ - "\u05dc\u0001\u0000\u0000\u0000\u0194\u05e2\u0001\u0000\u0000\u0000\u0196"+ - "\u05eb\u0001\u0000\u0000\u0000\u0198\u05ef\u0001\u0000\u0000\u0000\u019a"+ - "\u05f3\u0001\u0000\u0000\u0000\u019c\u05f7\u0001\u0000\u0000\u0000\u019e"+ - "\u05fb\u0001\u0000\u0000\u0000\u01a0\u05ff\u0001\u0000\u0000\u0000\u01a2"+ - "\u0604\u0001\u0000\u0000\u0000\u01a4\u060a\u0001\u0000\u0000\u0000\u01a6"+ - "\u0610\u0001\u0000\u0000\u0000\u01a8\u0614\u0001\u0000\u0000\u0000\u01aa"+ - "\u0618\u0001\u0000\u0000\u0000\u01ac\u061c\u0001\u0000\u0000\u0000\u01ae"+ - "\u0622\u0001\u0000\u0000\u0000\u01b0\u0628\u0001\u0000\u0000\u0000\u01b2"+ - "\u062c\u0001\u0000\u0000\u0000\u01b4\u0630\u0001\u0000\u0000\u0000\u01b6"+ - "\u0634\u0001\u0000\u0000\u0000\u01b8\u063a\u0001\u0000\u0000\u0000\u01ba"+ - "\u0640\u0001\u0000\u0000\u0000\u01bc\u0646\u0001\u0000\u0000\u0000\u01be"+ - "\u01bf\u0007\u0000\u0000\u0000\u01bf\u01c0\u0007\u0001\u0000\u0000\u01c0"+ - "\u01c1\u0007\u0002\u0000\u0000\u01c1\u01c2\u0007\u0002\u0000\u0000\u01c2"+ - "\u01c3\u0007\u0003\u0000\u0000\u01c3\u01c4\u0007\u0004\u0000\u0000\u01c4"+ - "\u01c5\u0007\u0005\u0000\u0000\u01c5\u01c6\u0001\u0000\u0000\u0000\u01c6"+ - "\u01c7\u0006\u0000\u0000\u0000\u01c7\u0011\u0001\u0000\u0000\u0000\u01c8"+ - "\u01c9\u0007\u0000\u0000\u0000\u01c9\u01ca\u0007\u0006\u0000\u0000\u01ca"+ - "\u01cb\u0007\u0007\u0000\u0000\u01cb\u01cc\u0007\b\u0000\u0000\u01cc\u01cd"+ - "\u0001\u0000\u0000\u0000\u01cd\u01ce\u0006\u0001\u0001\u0000\u01ce\u0013"+ - "\u0001\u0000\u0000\u0000\u01cf\u01d0\u0007\u0003\u0000\u0000\u01d0\u01d1"+ - "\u0007\t\u0000\u0000\u01d1\u01d2\u0007\u0006\u0000\u0000\u01d2\u01d3\u0007"+ - "\u0001\u0000\u0000\u01d3\u01d4\u0007\u0004\u0000\u0000\u01d4\u01d5\u0007"+ - "\n\u0000\u0000\u01d5\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0006\u0002"+ - "\u0002\u0000\u01d7\u0015\u0001\u0000\u0000\u0000\u01d8\u01d9\u0007\u0003"+ - "\u0000\u0000\u01d9\u01da\u0007\u000b\u0000\u0000\u01da\u01db\u0007\f\u0000"+ - "\u0000\u01db\u01dc\u0007\r\u0000\u0000\u01dc\u01dd\u0001\u0000\u0000\u0000"+ - "\u01dd\u01de\u0006\u0003\u0000\u0000\u01de\u0017\u0001\u0000\u0000\u0000"+ - "\u01df\u01e0\u0007\u0003\u0000\u0000\u01e0\u01e1\u0007\u000e\u0000\u0000"+ - "\u01e1\u01e2\u0007\b\u0000\u0000\u01e2\u01e3\u0007\r\u0000\u0000\u01e3"+ - "\u01e4\u0007\f\u0000\u0000\u01e4\u01e5\u0007\u0001\u0000\u0000\u01e5\u01e6"+ - "\u0007\t\u0000\u0000\u01e6\u01e7\u0001\u0000\u0000\u0000\u01e7\u01e8\u0006"+ - "\u0004\u0003\u0000\u01e8\u0019\u0001\u0000\u0000\u0000\u01e9\u01ea\u0007"+ - "\u000f\u0000\u0000\u01ea\u01eb\u0007\u0006\u0000\u0000\u01eb\u01ec\u0007"+ - "\u0007\u0000\u0000\u01ec\u01ed\u0007\u0010\u0000\u0000\u01ed\u01ee\u0001"+ - "\u0000\u0000\u0000\u01ee\u01ef\u0006\u0005\u0004\u0000\u01ef\u001b\u0001"+ - "\u0000\u0000\u0000\u01f0\u01f1\u0007\u0011\u0000\u0000\u01f1\u01f2\u0007"+ - "\u0006\u0000\u0000\u01f2\u01f3\u0007\u0007\u0000\u0000\u01f3\u01f4\u0007"+ - "\u0012\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5\u01f6\u0006"+ - "\u0006\u0000\u0000\u01f6\u001d\u0001\u0000\u0000\u0000\u01f7\u01f8\u0007"+ - "\u0012\u0000\u0000\u01f8\u01f9\u0007\u0003\u0000\u0000\u01f9\u01fa\u0007"+ - "\u0003\u0000\u0000\u01fa\u01fb\u0007\b\u0000\u0000\u01fb\u01fc\u0001\u0000"+ - "\u0000\u0000\u01fc\u01fd\u0006\u0007\u0001\u0000\u01fd\u001f\u0001\u0000"+ - 
"\u0000\u0000\u01fe\u01ff\u0007\r\u0000\u0000\u01ff\u0200\u0007\u0001\u0000"+ - "\u0000\u0200\u0201\u0007\u0010\u0000\u0000\u0201\u0202\u0007\u0001\u0000"+ - "\u0000\u0202\u0203\u0007\u0005\u0000\u0000\u0203\u0204\u0001\u0000\u0000"+ - "\u0000\u0204\u0205\u0006\b\u0000\u0000\u0205!\u0001\u0000\u0000\u0000"+ - "\u0206\u0207\u0007\u0010\u0000\u0000\u0207\u0208\u0007\u000b\u0000\u0000"+ - "\u0208\u0209\u0005_\u0000\u0000\u0209\u020a\u0007\u0003\u0000\u0000\u020a"+ - "\u020b\u0007\u000e\u0000\u0000\u020b\u020c\u0007\b\u0000\u0000\u020c\u020d"+ - "\u0007\f\u0000\u0000\u020d\u020e\u0007\t\u0000\u0000\u020e\u020f\u0007"+ - "\u0000\u0000\u0000\u020f\u0210\u0001\u0000\u0000\u0000\u0210\u0211\u0006"+ - "\t\u0005\u0000\u0211#\u0001\u0000\u0000\u0000\u0212\u0213\u0007\u0006"+ - "\u0000\u0000\u0213\u0214\u0007\u0003\u0000\u0000\u0214\u0215\u0007\t\u0000"+ - "\u0000\u0215\u0216\u0007\f\u0000\u0000\u0216\u0217\u0007\u0010\u0000\u0000"+ - "\u0217\u0218\u0007\u0003\u0000\u0000\u0218\u0219\u0001\u0000\u0000\u0000"+ - "\u0219\u021a\u0006\n\u0006\u0000\u021a%\u0001\u0000\u0000\u0000\u021b"+ - "\u021c\u0007\u0006\u0000\u0000\u021c\u021d\u0007\u0007\u0000\u0000\u021d"+ - "\u021e\u0007\u0013\u0000\u0000\u021e\u021f\u0001\u0000\u0000\u0000\u021f"+ - "\u0220\u0006\u000b\u0000\u0000\u0220\'\u0001\u0000\u0000\u0000\u0221\u0222"+ - "\u0007\u0002\u0000\u0000\u0222\u0223\u0007\n\u0000\u0000\u0223\u0224\u0007"+ - "\u0007\u0000\u0000\u0224\u0225\u0007\u0013\u0000\u0000\u0225\u0226\u0001"+ - "\u0000\u0000\u0000\u0226\u0227\u0006\f\u0007\u0000\u0227)\u0001\u0000"+ - "\u0000\u0000\u0228\u0229\u0007\u0002\u0000\u0000\u0229\u022a\u0007\u0007"+ - "\u0000\u0000\u022a\u022b\u0007\u0006\u0000\u0000\u022b\u022c\u0007\u0005"+ - "\u0000\u0000\u022c\u022d\u0001\u0000\u0000\u0000\u022d\u022e\u0006\r\u0000"+ - "\u0000\u022e+\u0001\u0000\u0000\u0000\u022f\u0230\u0007\u0002\u0000\u0000"+ - "\u0230\u0231\u0007\u0005\u0000\u0000\u0231\u0232\u0007\f\u0000\u0000\u0232"+ - "\u0233\u0007\u0005\u0000\u0000\u0233\u0234\u0007\u0002\u0000\u0000\u0234"+ - "\u0235\u0001\u0000\u0000\u0000\u0235\u0236\u0006\u000e\u0000\u0000\u0236"+ - "-\u0001\u0000\u0000\u0000\u0237\u0238\u0007\u0013\u0000\u0000\u0238\u0239"+ - "\u0007\n\u0000\u0000\u0239\u023a\u0007\u0003\u0000\u0000\u023a\u023b\u0007"+ - "\u0006\u0000\u0000\u023b\u023c\u0007\u0003\u0000\u0000\u023c\u023d\u0001"+ - "\u0000\u0000\u0000\u023d\u023e\u0006\u000f\u0000\u0000\u023e/\u0001\u0000"+ - "\u0000\u0000\u023f\u0240\u0004\u0010\u0000\u0000\u0240\u0241\u0007\u0001"+ - "\u0000\u0000\u0241\u0242\u0007\t\u0000\u0000\u0242\u0243\u0007\r\u0000"+ - "\u0000\u0243\u0244\u0007\u0001\u0000\u0000\u0244\u0245\u0007\t\u0000\u0000"+ - "\u0245\u0246\u0007\u0003\u0000\u0000\u0246\u0247\u0007\u0002\u0000\u0000"+ - "\u0247\u0248\u0007\u0005\u0000\u0000\u0248\u0249\u0007\f\u0000\u0000\u0249"+ - "\u024a\u0007\u0005\u0000\u0000\u024a\u024b\u0007\u0002\u0000\u0000\u024b"+ - "\u024c\u0001\u0000\u0000\u0000\u024c\u024d\u0006\u0010\u0000\u0000\u024d"+ - "1\u0001\u0000\u0000\u0000\u024e\u024f\u0004\u0011\u0001\u0000\u024f\u0250"+ - "\u0007\r\u0000\u0000\u0250\u0251\u0007\u0007\u0000\u0000\u0251\u0252\u0007"+ - "\u0007\u0000\u0000\u0252\u0253\u0007\u0012\u0000\u0000\u0253\u0254\u0007"+ - "\u0014\u0000\u0000\u0254\u0255\u0007\b\u0000\u0000\u0255\u0256\u0005_"+ - "\u0000\u0000\u0256\u0257\u0005\u8001\uf414\u0000\u0000\u0257\u0258\u0001"+ - "\u0000\u0000\u0000\u0258\u0259\u0006\u0011\b\u0000\u02593\u0001\u0000"+ - "\u0000\u0000\u025a\u025b\u0004\u0012\u0002\u0000\u025b\u025c\u0007\u0010"+ - 
"\u0000\u0000\u025c\u025d\u0007\u0003\u0000\u0000\u025d\u025e\u0007\u0005"+ - "\u0000\u0000\u025e\u025f\u0007\u0006\u0000\u0000\u025f\u0260\u0007\u0001"+ - "\u0000\u0000\u0260\u0261\u0007\u0004\u0000\u0000\u0261\u0262\u0007\u0002"+ - "\u0000\u0000\u0262\u0263\u0001\u0000\u0000\u0000\u0263\u0264\u0006\u0012"+ - "\t\u0000\u02645\u0001\u0000\u0000\u0000\u0265\u0266\u0004\u0013\u0003"+ - "\u0000\u0266\u0267\u0007\u0015\u0000\u0000\u0267\u0268\u0007\u0007\u0000"+ - "\u0000\u0268\u0269\u0007\u0001\u0000\u0000\u0269\u026a\u0007\t\u0000\u0000"+ - "\u026a\u026b\u0001\u0000\u0000\u0000\u026b\u026c\u0006\u0013\n\u0000\u026c"+ - "7\u0001\u0000\u0000\u0000\u026d\u026e\u0004\u0014\u0004\u0000\u026e\u026f"+ - "\u0007\u000f\u0000\u0000\u026f\u0270\u0007\u0014\u0000\u0000\u0270\u0271"+ - "\u0007\r\u0000\u0000\u0271\u0272\u0007\r\u0000\u0000\u0272\u0273\u0001"+ - "\u0000\u0000\u0000\u0273\u0274\u0006\u0014\n\u0000\u02749\u0001\u0000"+ - "\u0000\u0000\u0275\u0276\u0004\u0015\u0005\u0000\u0276\u0277\u0007\r\u0000"+ - "\u0000\u0277\u0278\u0007\u0003\u0000\u0000\u0278\u0279\u0007\u000f\u0000"+ - "\u0000\u0279\u027a\u0007\u0005\u0000\u0000\u027a\u027b\u0001\u0000\u0000"+ - "\u0000\u027b\u027c\u0006\u0015\n\u0000\u027c;\u0001\u0000\u0000\u0000"+ - "\u027d\u027e\u0004\u0016\u0006\u0000\u027e\u027f\u0007\u0006\u0000\u0000"+ - "\u027f\u0280\u0007\u0001\u0000\u0000\u0280\u0281\u0007\u0011\u0000\u0000"+ - "\u0281\u0282\u0007\n\u0000\u0000\u0282\u0283\u0007\u0005\u0000\u0000\u0283"+ - "\u0284\u0001\u0000\u0000\u0000\u0284\u0285\u0006\u0016\n\u0000\u0285="+ - "\u0001\u0000\u0000\u0000\u0286\u0287\u0004\u0017\u0007\u0000\u0287\u0288"+ - "\u0007\r\u0000\u0000\u0288\u0289\u0007\u0007\u0000\u0000\u0289\u028a\u0007"+ - "\u0007\u0000\u0000\u028a\u028b\u0007\u0012\u0000\u0000\u028b\u028c\u0007"+ - "\u0014\u0000\u0000\u028c\u028d\u0007\b\u0000\u0000\u028d\u028e\u0001\u0000"+ - "\u0000\u0000\u028e\u028f\u0006\u0017\n\u0000\u028f?\u0001\u0000\u0000"+ - "\u0000\u0290\u0292\b\u0016\u0000\u0000\u0291\u0290\u0001\u0000\u0000\u0000"+ - "\u0292\u0293\u0001\u0000\u0000\u0000\u0293\u0291\u0001\u0000\u0000\u0000"+ - "\u0293\u0294\u0001\u0000\u0000\u0000\u0294\u0295\u0001\u0000\u0000\u0000"+ - "\u0295\u0296\u0006\u0018\u0000\u0000\u0296A\u0001\u0000\u0000\u0000\u0297"+ - "\u0298\u0005/\u0000\u0000\u0298\u0299\u0005/\u0000\u0000\u0299\u029d\u0001"+ - "\u0000\u0000\u0000\u029a\u029c\b\u0017\u0000\u0000\u029b\u029a\u0001\u0000"+ - "\u0000\u0000\u029c\u029f\u0001\u0000\u0000\u0000\u029d\u029b\u0001\u0000"+ - "\u0000\u0000\u029d\u029e\u0001\u0000\u0000\u0000\u029e\u02a1\u0001\u0000"+ - "\u0000\u0000\u029f\u029d\u0001\u0000\u0000\u0000\u02a0\u02a2\u0005\r\u0000"+ - "\u0000\u02a1\u02a0\u0001\u0000\u0000\u0000\u02a1\u02a2\u0001\u0000\u0000"+ - "\u0000\u02a2\u02a4\u0001\u0000\u0000\u0000\u02a3\u02a5\u0005\n\u0000\u0000"+ - "\u02a4\u02a3\u0001\u0000\u0000\u0000\u02a4\u02a5\u0001\u0000\u0000\u0000"+ - "\u02a5\u02a6\u0001\u0000\u0000\u0000\u02a6\u02a7\u0006\u0019\u000b\u0000"+ - "\u02a7C\u0001\u0000\u0000\u0000\u02a8\u02a9\u0005/\u0000\u0000\u02a9\u02aa"+ - "\u0005*\u0000\u0000\u02aa\u02af\u0001\u0000\u0000\u0000\u02ab\u02ae\u0003"+ - "D\u001a\u0000\u02ac\u02ae\t\u0000\u0000\u0000\u02ad\u02ab\u0001\u0000"+ - "\u0000\u0000\u02ad\u02ac\u0001\u0000\u0000\u0000\u02ae\u02b1\u0001\u0000"+ - "\u0000\u0000\u02af\u02b0\u0001\u0000\u0000\u0000\u02af\u02ad\u0001\u0000"+ - "\u0000\u0000\u02b0\u02b2\u0001\u0000\u0000\u0000\u02b1\u02af\u0001\u0000"+ - "\u0000\u0000\u02b2\u02b3\u0005*\u0000\u0000\u02b3\u02b4\u0005/\u0000\u0000"+ - 
"\u02b4\u02b5\u0001\u0000\u0000\u0000\u02b5\u02b6\u0006\u001a\u000b\u0000"+ - "\u02b6E\u0001\u0000\u0000\u0000\u02b7\u02b9\u0007\u0018\u0000\u0000\u02b8"+ - "\u02b7\u0001\u0000\u0000\u0000\u02b9\u02ba\u0001\u0000\u0000\u0000\u02ba"+ - "\u02b8\u0001\u0000\u0000\u0000\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb"+ - "\u02bc\u0001\u0000\u0000\u0000\u02bc\u02bd\u0006\u001b\u000b\u0000\u02bd"+ - "G\u0001\u0000\u0000\u0000\u02be\u02bf\u0005|\u0000\u0000\u02bf\u02c0\u0001"+ - "\u0000\u0000\u0000\u02c0\u02c1\u0006\u001c\f\u0000\u02c1I\u0001\u0000"+ - "\u0000\u0000\u02c2\u02c3\u0007\u0019\u0000\u0000\u02c3K\u0001\u0000\u0000"+ - "\u0000\u02c4\u02c5\u0007\u001a\u0000\u0000\u02c5M\u0001\u0000\u0000\u0000"+ - "\u02c6\u02c7\u0005\\\u0000\u0000\u02c7\u02c8\u0007\u001b\u0000\u0000\u02c8"+ - "O\u0001\u0000\u0000\u0000\u02c9\u02ca\b\u001c\u0000\u0000\u02caQ\u0001"+ - "\u0000\u0000\u0000\u02cb\u02cd\u0007\u0003\u0000\u0000\u02cc\u02ce\u0007"+ - "\u001d\u0000\u0000\u02cd\u02cc\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001"+ - "\u0000\u0000\u0000\u02ce\u02d0\u0001\u0000\u0000\u0000\u02cf\u02d1\u0003"+ - "J\u001d\u0000\u02d0\u02cf\u0001\u0000\u0000\u0000\u02d1\u02d2\u0001\u0000"+ - "\u0000\u0000\u02d2\u02d0\u0001\u0000\u0000\u0000\u02d2\u02d3\u0001\u0000"+ - "\u0000\u0000\u02d3S\u0001\u0000\u0000\u0000\u02d4\u02d5\u0005@\u0000\u0000"+ - "\u02d5U\u0001\u0000\u0000\u0000\u02d6\u02d7\u0005`\u0000\u0000\u02d7W"+ - "\u0001\u0000\u0000\u0000\u02d8\u02dc\b\u001e\u0000\u0000\u02d9\u02da\u0005"+ - "`\u0000\u0000\u02da\u02dc\u0005`\u0000\u0000\u02db\u02d8\u0001\u0000\u0000"+ - "\u0000\u02db\u02d9\u0001\u0000\u0000\u0000\u02dcY\u0001\u0000\u0000\u0000"+ - "\u02dd\u02de\u0005_\u0000\u0000\u02de[\u0001\u0000\u0000\u0000\u02df\u02e3"+ - "\u0003L\u001e\u0000\u02e0\u02e3\u0003J\u001d\u0000\u02e1\u02e3\u0003Z"+ - "%\u0000\u02e2\u02df\u0001\u0000\u0000\u0000\u02e2\u02e0\u0001\u0000\u0000"+ - "\u0000\u02e2\u02e1\u0001\u0000\u0000\u0000\u02e3]\u0001\u0000\u0000\u0000"+ - "\u02e4\u02e9\u0005\"\u0000\u0000\u02e5\u02e8\u0003N\u001f\u0000\u02e6"+ - "\u02e8\u0003P \u0000\u02e7\u02e5\u0001\u0000\u0000\u0000\u02e7\u02e6\u0001"+ - "\u0000\u0000\u0000\u02e8\u02eb\u0001\u0000\u0000\u0000\u02e9\u02e7\u0001"+ - "\u0000\u0000\u0000\u02e9\u02ea\u0001\u0000\u0000\u0000\u02ea\u02ec\u0001"+ - "\u0000\u0000\u0000\u02eb\u02e9\u0001\u0000\u0000\u0000\u02ec\u0302\u0005"+ - "\"\u0000\u0000\u02ed\u02ee\u0005\"\u0000\u0000\u02ee\u02ef\u0005\"\u0000"+ - "\u0000\u02ef\u02f0\u0005\"\u0000\u0000\u02f0\u02f4\u0001\u0000\u0000\u0000"+ - "\u02f1\u02f3\b\u0017\u0000\u0000\u02f2\u02f1\u0001\u0000\u0000\u0000\u02f3"+ - "\u02f6\u0001\u0000\u0000\u0000\u02f4\u02f5\u0001\u0000\u0000\u0000\u02f4"+ - "\u02f2\u0001\u0000\u0000\u0000\u02f5\u02f7\u0001\u0000\u0000\u0000\u02f6"+ - "\u02f4\u0001\u0000\u0000\u0000\u02f7\u02f8\u0005\"\u0000\u0000\u02f8\u02f9"+ - "\u0005\"\u0000\u0000\u02f9\u02fa\u0005\"\u0000\u0000\u02fa\u02fc\u0001"+ - "\u0000\u0000\u0000\u02fb\u02fd\u0005\"\u0000\u0000\u02fc\u02fb\u0001\u0000"+ - "\u0000\u0000\u02fc\u02fd\u0001\u0000\u0000\u0000\u02fd\u02ff\u0001\u0000"+ - "\u0000\u0000\u02fe\u0300\u0005\"\u0000\u0000\u02ff\u02fe\u0001\u0000\u0000"+ - "\u0000\u02ff\u0300\u0001\u0000\u0000\u0000\u0300\u0302\u0001\u0000\u0000"+ - "\u0000\u0301\u02e4\u0001\u0000\u0000\u0000\u0301\u02ed\u0001\u0000\u0000"+ - "\u0000\u0302_\u0001\u0000\u0000\u0000\u0303\u0305\u0003J\u001d\u0000\u0304"+ - "\u0303\u0001\u0000\u0000\u0000\u0305\u0306\u0001\u0000\u0000\u0000\u0306"+ - "\u0304\u0001\u0000\u0000\u0000\u0306\u0307\u0001\u0000\u0000\u0000\u0307"+ - 
"a\u0001\u0000\u0000\u0000\u0308\u030a\u0003J\u001d\u0000\u0309\u0308\u0001"+ - "\u0000\u0000\u0000\u030a\u030b\u0001\u0000\u0000\u0000\u030b\u0309\u0001"+ - "\u0000\u0000\u0000\u030b\u030c\u0001\u0000\u0000\u0000\u030c\u030d\u0001"+ - "\u0000\u0000\u0000\u030d\u0311\u0003t2\u0000\u030e\u0310\u0003J\u001d"+ - "\u0000\u030f\u030e\u0001\u0000\u0000\u0000\u0310\u0313\u0001\u0000\u0000"+ - "\u0000\u0311\u030f\u0001\u0000\u0000\u0000\u0311\u0312\u0001\u0000\u0000"+ - "\u0000\u0312\u0333\u0001\u0000\u0000\u0000\u0313\u0311\u0001\u0000\u0000"+ - "\u0000\u0314\u0316\u0003t2\u0000\u0315\u0317\u0003J\u001d\u0000\u0316"+ - "\u0315\u0001\u0000\u0000\u0000\u0317\u0318\u0001\u0000\u0000\u0000\u0318"+ - "\u0316\u0001\u0000\u0000\u0000\u0318\u0319\u0001\u0000\u0000\u0000\u0319"+ - "\u0333\u0001\u0000\u0000\u0000\u031a\u031c\u0003J\u001d\u0000\u031b\u031a"+ - "\u0001\u0000\u0000\u0000\u031c\u031d\u0001\u0000\u0000\u0000\u031d\u031b"+ - "\u0001\u0000\u0000\u0000\u031d\u031e\u0001\u0000\u0000\u0000\u031e\u0326"+ - "\u0001\u0000\u0000\u0000\u031f\u0323\u0003t2\u0000\u0320\u0322\u0003J"+ - "\u001d\u0000\u0321\u0320\u0001\u0000\u0000\u0000\u0322\u0325\u0001\u0000"+ - "\u0000\u0000\u0323\u0321\u0001\u0000\u0000\u0000\u0323\u0324\u0001\u0000"+ - "\u0000\u0000\u0324\u0327\u0001\u0000\u0000\u0000\u0325\u0323\u0001\u0000"+ - "\u0000\u0000\u0326\u031f\u0001\u0000\u0000\u0000\u0326\u0327\u0001\u0000"+ - "\u0000\u0000\u0327\u0328\u0001\u0000\u0000\u0000\u0328\u0329\u0003R!\u0000"+ - "\u0329\u0333\u0001\u0000\u0000\u0000\u032a\u032c\u0003t2\u0000\u032b\u032d"+ - "\u0003J\u001d\u0000\u032c\u032b\u0001\u0000\u0000\u0000\u032d\u032e\u0001"+ - "\u0000\u0000\u0000\u032e\u032c\u0001\u0000\u0000\u0000\u032e\u032f\u0001"+ - "\u0000\u0000\u0000\u032f\u0330\u0001\u0000\u0000\u0000\u0330\u0331\u0003"+ - "R!\u0000\u0331\u0333\u0001\u0000\u0000\u0000\u0332\u0309\u0001\u0000\u0000"+ - "\u0000\u0332\u0314\u0001\u0000\u0000\u0000\u0332\u031b\u0001\u0000\u0000"+ - "\u0000\u0332\u032a\u0001\u0000\u0000\u0000\u0333c\u0001\u0000\u0000\u0000"+ - "\u0334\u0335\u0007\u001f\u0000\u0000\u0335\u0336\u0007 \u0000\u0000\u0336"+ - "e\u0001\u0000\u0000\u0000\u0337\u0338\u0007\f\u0000\u0000\u0338\u0339"+ - "\u0007\t\u0000\u0000\u0339\u033a\u0007\u0000\u0000\u0000\u033ag\u0001"+ - "\u0000\u0000\u0000\u033b\u033c\u0007\f\u0000\u0000\u033c\u033d\u0007\u0002"+ - "\u0000\u0000\u033d\u033e\u0007\u0004\u0000\u0000\u033ei\u0001\u0000\u0000"+ - "\u0000\u033f\u0340\u0005=\u0000\u0000\u0340k\u0001\u0000\u0000\u0000\u0341"+ - "\u0342\u0005:\u0000\u0000\u0342\u0343\u0005:\u0000\u0000\u0343m\u0001"+ - "\u0000\u0000\u0000\u0344\u0345\u0005:\u0000\u0000\u0345o\u0001\u0000\u0000"+ - "\u0000\u0346\u0347\u0005,\u0000\u0000\u0347q\u0001\u0000\u0000\u0000\u0348"+ - "\u0349\u0007\u0000\u0000\u0000\u0349\u034a\u0007\u0003\u0000\u0000\u034a"+ - "\u034b\u0007\u0002\u0000\u0000\u034b\u034c\u0007\u0004\u0000\u0000\u034c"+ - "s\u0001\u0000\u0000\u0000\u034d\u034e\u0005.\u0000\u0000\u034eu\u0001"+ - "\u0000\u0000\u0000\u034f\u0350\u0007\u000f\u0000\u0000\u0350\u0351\u0007"+ - "\f\u0000\u0000\u0351\u0352\u0007\r\u0000\u0000\u0352\u0353\u0007\u0002"+ - "\u0000\u0000\u0353\u0354\u0007\u0003\u0000\u0000\u0354w\u0001\u0000\u0000"+ - "\u0000\u0355\u0356\u0007\u000f\u0000\u0000\u0356\u0357\u0007\u0001\u0000"+ - "\u0000\u0357\u0358\u0007\u0006\u0000\u0000\u0358\u0359\u0007\u0002\u0000"+ - "\u0000\u0359\u035a\u0007\u0005\u0000\u0000\u035ay\u0001\u0000\u0000\u0000"+ - "\u035b\u035c\u0007\u0001\u0000\u0000\u035c\u035d\u0007\t\u0000\u0000\u035d"+ - 
"{\u0001\u0000\u0000\u0000\u035e\u035f\u0007\u0001\u0000\u0000\u035f\u0360"+ - "\u0007\u0002\u0000\u0000\u0360}\u0001\u0000\u0000\u0000\u0361\u0362\u0007"+ - "\r\u0000\u0000\u0362\u0363\u0007\f\u0000\u0000\u0363\u0364\u0007\u0002"+ - "\u0000\u0000\u0364\u0365\u0007\u0005\u0000\u0000\u0365\u007f\u0001\u0000"+ - "\u0000\u0000\u0366\u0367\u0007\r\u0000\u0000\u0367\u0368\u0007\u0001\u0000"+ - "\u0000\u0368\u0369\u0007\u0012\u0000\u0000\u0369\u036a\u0007\u0003\u0000"+ - "\u0000\u036a\u0081\u0001\u0000\u0000\u0000\u036b\u036c\u0005(\u0000\u0000"+ - "\u036c\u0083\u0001\u0000\u0000\u0000\u036d\u036e\u0007\t\u0000\u0000\u036e"+ - "\u036f\u0007\u0007\u0000\u0000\u036f\u0370\u0007\u0005\u0000\u0000\u0370"+ - "\u0085\u0001\u0000\u0000\u0000\u0371\u0372\u0007\t\u0000\u0000\u0372\u0373"+ - "\u0007\u0014\u0000\u0000\u0373\u0374\u0007\r\u0000\u0000\u0374\u0375\u0007"+ - "\r\u0000\u0000\u0375\u0087\u0001\u0000\u0000\u0000\u0376\u0377\u0007\t"+ - "\u0000\u0000\u0377\u0378\u0007\u0014\u0000\u0000\u0378\u0379\u0007\r\u0000"+ - "\u0000\u0379\u037a\u0007\r\u0000\u0000\u037a\u037b\u0007\u0002\u0000\u0000"+ - "\u037b\u0089\u0001\u0000\u0000\u0000\u037c\u037d\u0007\u0007\u0000\u0000"+ - "\u037d\u037e\u0007\u0006\u0000\u0000\u037e\u008b\u0001\u0000\u0000\u0000"+ - "\u037f\u0380\u0005?\u0000\u0000\u0380\u008d\u0001\u0000\u0000\u0000\u0381"+ - "\u0382\u0007\u0006\u0000\u0000\u0382\u0383\u0007\r\u0000\u0000\u0383\u0384"+ - "\u0007\u0001\u0000\u0000\u0384\u0385\u0007\u0012\u0000\u0000\u0385\u0386"+ - "\u0007\u0003\u0000\u0000\u0386\u008f\u0001\u0000\u0000\u0000\u0387\u0388"+ - "\u0005)\u0000\u0000\u0388\u0091\u0001\u0000\u0000\u0000\u0389\u038a\u0007"+ - "\u0005\u0000\u0000\u038a\u038b\u0007\u0006\u0000\u0000\u038b\u038c\u0007"+ - "\u0014\u0000\u0000\u038c\u038d\u0007\u0003\u0000\u0000\u038d\u0093\u0001"+ - "\u0000\u0000\u0000\u038e\u038f\u0005=\u0000\u0000\u038f\u0390\u0005=\u0000"+ - "\u0000\u0390\u0095\u0001\u0000\u0000\u0000\u0391\u0392\u0005=\u0000\u0000"+ - "\u0392\u0393\u0005~\u0000\u0000\u0393\u0097\u0001\u0000\u0000\u0000\u0394"+ - "\u0395\u0005!\u0000\u0000\u0395\u0396\u0005=\u0000\u0000\u0396\u0099\u0001"+ - "\u0000\u0000\u0000\u0397\u0398\u0005<\u0000\u0000\u0398\u009b\u0001\u0000"+ - "\u0000\u0000\u0399\u039a\u0005<\u0000\u0000\u039a\u039b\u0005=\u0000\u0000"+ - "\u039b\u009d\u0001\u0000\u0000\u0000\u039c\u039d\u0005>\u0000\u0000\u039d"+ - "\u009f\u0001\u0000\u0000\u0000\u039e\u039f\u0005>\u0000\u0000\u039f\u03a0"+ - "\u0005=\u0000\u0000\u03a0\u00a1\u0001\u0000\u0000\u0000\u03a1\u03a2\u0005"+ - "+\u0000\u0000\u03a2\u00a3\u0001\u0000\u0000\u0000\u03a3\u03a4\u0005-\u0000"+ - "\u0000\u03a4\u00a5\u0001\u0000\u0000\u0000\u03a5\u03a6\u0005*\u0000\u0000"+ - "\u03a6\u00a7\u0001\u0000\u0000\u0000\u03a7\u03a8\u0005/\u0000\u0000\u03a8"+ - "\u00a9\u0001\u0000\u0000\u0000\u03a9\u03aa\u0005%\u0000\u0000\u03aa\u00ab"+ - "\u0001\u0000\u0000\u0000\u03ab\u03ac\u0004N\b\u0000\u03ac\u03ad\u0005"+ - "{\u0000\u0000\u03ad\u00ad\u0001\u0000\u0000\u0000\u03ae\u03af\u0004O\t"+ - "\u0000\u03af\u03b0\u0005}\u0000\u0000\u03b0\u00af\u0001\u0000\u0000\u0000"+ - "\u03b1\u03b2\u0003.\u000f\u0000\u03b2\u03b3\u0001\u0000\u0000\u0000\u03b3"+ - "\u03b4\u0006P\r\u0000\u03b4\u00b1\u0001\u0000\u0000\u0000\u03b5\u03b8"+ - "\u0003\u008c>\u0000\u03b6\u03b9\u0003L\u001e\u0000\u03b7\u03b9\u0003Z"+ - "%\u0000\u03b8\u03b6\u0001\u0000\u0000\u0000\u03b8\u03b7\u0001\u0000\u0000"+ - "\u0000\u03b9\u03bd\u0001\u0000\u0000\u0000\u03ba\u03bc\u0003\\&\u0000"+ - "\u03bb\u03ba\u0001\u0000\u0000\u0000\u03bc\u03bf\u0001\u0000\u0000\u0000"+ - 
"\u03bd\u03bb\u0001\u0000\u0000\u0000\u03bd\u03be\u0001\u0000\u0000\u0000"+ - "\u03be\u03c7\u0001\u0000\u0000\u0000\u03bf\u03bd\u0001\u0000\u0000\u0000"+ - "\u03c0\u03c2\u0003\u008c>\u0000\u03c1\u03c3\u0003J\u001d\u0000\u03c2\u03c1"+ - "\u0001\u0000\u0000\u0000\u03c3\u03c4\u0001\u0000\u0000\u0000\u03c4\u03c2"+ - "\u0001\u0000\u0000\u0000\u03c4\u03c5\u0001\u0000\u0000\u0000\u03c5\u03c7"+ - "\u0001\u0000\u0000\u0000\u03c6\u03b5\u0001\u0000\u0000\u0000\u03c6\u03c0"+ - "\u0001\u0000\u0000\u0000\u03c7\u00b3\u0001\u0000\u0000\u0000\u03c8\u03c9"+ - "\u0005[\u0000\u0000\u03c9\u03ca\u0001\u0000\u0000\u0000\u03ca\u03cb\u0006"+ - "R\u0000\u0000\u03cb\u03cc\u0006R\u0000\u0000\u03cc\u00b5\u0001\u0000\u0000"+ - "\u0000\u03cd\u03ce\u0005]\u0000\u0000\u03ce\u03cf\u0001\u0000\u0000\u0000"+ - "\u03cf\u03d0\u0006S\f\u0000\u03d0\u03d1\u0006S\f\u0000\u03d1\u00b7\u0001"+ - "\u0000\u0000\u0000\u03d2\u03d6\u0003L\u001e\u0000\u03d3\u03d5\u0003\\"+ - "&\u0000\u03d4\u03d3\u0001\u0000\u0000\u0000\u03d5\u03d8\u0001\u0000\u0000"+ - "\u0000\u03d6\u03d4\u0001\u0000\u0000\u0000\u03d6\u03d7\u0001\u0000\u0000"+ - "\u0000\u03d7\u03e3\u0001\u0000\u0000\u0000\u03d8\u03d6\u0001\u0000\u0000"+ - "\u0000\u03d9\u03dc\u0003Z%\u0000\u03da\u03dc\u0003T\"\u0000\u03db\u03d9"+ - "\u0001\u0000\u0000\u0000\u03db\u03da\u0001\u0000\u0000\u0000\u03dc\u03de"+ - "\u0001\u0000\u0000\u0000\u03dd\u03df\u0003\\&\u0000\u03de\u03dd\u0001"+ - "\u0000\u0000\u0000\u03df\u03e0\u0001\u0000\u0000\u0000\u03e0\u03de\u0001"+ - "\u0000\u0000\u0000\u03e0\u03e1\u0001\u0000\u0000\u0000\u03e1\u03e3\u0001"+ - "\u0000\u0000\u0000\u03e2\u03d2\u0001\u0000\u0000\u0000\u03e2\u03db\u0001"+ - "\u0000\u0000\u0000\u03e3\u00b9\u0001\u0000\u0000\u0000\u03e4\u03e6\u0003"+ - "V#\u0000\u03e5\u03e7\u0003X$\u0000\u03e6\u03e5\u0001\u0000\u0000\u0000"+ - "\u03e7\u03e8\u0001\u0000\u0000\u0000\u03e8\u03e6\u0001\u0000\u0000\u0000"+ - "\u03e8\u03e9\u0001\u0000\u0000\u0000\u03e9\u03ea\u0001\u0000\u0000\u0000"+ - "\u03ea\u03eb\u0003V#\u0000\u03eb\u00bb\u0001\u0000\u0000\u0000\u03ec\u03ed"+ - "\u0003\u00baU\u0000\u03ed\u00bd\u0001\u0000\u0000\u0000\u03ee\u03ef\u0003"+ - "B\u0019\u0000\u03ef\u03f0\u0001\u0000\u0000\u0000\u03f0\u03f1\u0006W\u000b"+ - "\u0000\u03f1\u00bf\u0001\u0000\u0000\u0000\u03f2\u03f3\u0003D\u001a\u0000"+ - "\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006X\u000b\u0000\u03f5"+ - "\u00c1\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003F\u001b\u0000\u03f7\u03f8"+ - "\u0001\u0000\u0000\u0000\u03f8\u03f9\u0006Y\u000b\u0000\u03f9\u00c3\u0001"+ - "\u0000\u0000\u0000\u03fa\u03fb\u0003\u00b4R\u0000\u03fb\u03fc\u0001\u0000"+ - "\u0000\u0000\u03fc\u03fd\u0006Z\u000e\u0000\u03fd\u03fe\u0006Z\u000f\u0000"+ - "\u03fe\u00c5\u0001\u0000\u0000\u0000\u03ff\u0400\u0003H\u001c\u0000\u0400"+ - "\u0401\u0001\u0000\u0000\u0000\u0401\u0402\u0006[\u0010\u0000\u0402\u0403"+ - "\u0006[\f\u0000\u0403\u00c7\u0001\u0000\u0000\u0000\u0404\u0405\u0003"+ - "F\u001b\u0000\u0405\u0406\u0001\u0000\u0000\u0000\u0406\u0407\u0006\\"+ - "\u000b\u0000\u0407\u00c9\u0001\u0000\u0000\u0000\u0408\u0409\u0003B\u0019"+ - "\u0000\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u040b\u0006]\u000b\u0000"+ - "\u040b\u00cb\u0001\u0000\u0000\u0000\u040c\u040d\u0003D\u001a\u0000\u040d"+ - "\u040e\u0001\u0000\u0000\u0000\u040e\u040f\u0006^\u000b\u0000\u040f\u00cd"+ - "\u0001\u0000\u0000\u0000\u0410\u0411\u0003H\u001c\u0000\u0411\u0412\u0001"+ - "\u0000\u0000\u0000\u0412\u0413\u0006_\u0010\u0000\u0413\u0414\u0006_\f"+ - "\u0000\u0414\u00cf\u0001\u0000\u0000\u0000\u0415\u0416\u0003\u00b4R\u0000"+ - 
"\u0416\u0417\u0001\u0000\u0000\u0000\u0417\u0418\u0006`\u000e\u0000\u0418"+ - "\u00d1\u0001\u0000\u0000\u0000\u0419\u041a\u0003\u00b6S\u0000\u041a\u041b"+ - "\u0001\u0000\u0000\u0000\u041b\u041c\u0006a\u0011\u0000\u041c\u00d3\u0001"+ - "\u0000\u0000\u0000\u041d\u041e\u0003n/\u0000\u041e\u041f\u0001\u0000\u0000"+ - "\u0000\u041f\u0420\u0006b\u0012\u0000\u0420\u00d5\u0001\u0000\u0000\u0000"+ - "\u0421\u0422\u0003p0\u0000\u0422\u0423\u0001\u0000\u0000\u0000\u0423\u0424"+ - "\u0006c\u0013\u0000\u0424\u00d7\u0001\u0000\u0000\u0000\u0425\u0426\u0003"+ - "j-\u0000\u0426\u0427\u0001\u0000\u0000\u0000\u0427\u0428\u0006d\u0014"+ - "\u0000\u0428\u00d9\u0001\u0000\u0000\u0000\u0429\u042a\u0007\u0010\u0000"+ - "\u0000\u042a\u042b\u0007\u0003\u0000\u0000\u042b\u042c\u0007\u0005\u0000"+ - "\u0000\u042c\u042d\u0007\f\u0000\u0000\u042d\u042e\u0007\u0000\u0000\u0000"+ - "\u042e\u042f\u0007\f\u0000\u0000\u042f\u0430\u0007\u0005\u0000\u0000\u0430"+ - "\u0431\u0007\f\u0000\u0000\u0431\u00db\u0001\u0000\u0000\u0000\u0432\u0436"+ - "\b!\u0000\u0000\u0433\u0434\u0005/\u0000\u0000\u0434\u0436\b\"\u0000\u0000"+ - "\u0435\u0432\u0001\u0000\u0000\u0000\u0435\u0433\u0001\u0000\u0000\u0000"+ - "\u0436\u00dd\u0001\u0000\u0000\u0000\u0437\u0439\u0003\u00dcf\u0000\u0438"+ - "\u0437\u0001\u0000\u0000\u0000\u0439\u043a\u0001\u0000\u0000\u0000\u043a"+ - "\u0438\u0001\u0000\u0000\u0000\u043a\u043b\u0001\u0000\u0000\u0000\u043b"+ - "\u00df\u0001\u0000\u0000\u0000\u043c\u043d\u0003\u00deg\u0000\u043d\u043e"+ - "\u0001\u0000\u0000\u0000\u043e\u043f\u0006h\u0015\u0000\u043f\u00e1\u0001"+ - "\u0000\u0000\u0000\u0440\u0441\u0003^\'\u0000\u0441\u0442\u0001\u0000"+ - "\u0000\u0000\u0442\u0443\u0006i\u0016\u0000\u0443\u00e3\u0001\u0000\u0000"+ - "\u0000\u0444\u0445\u0003B\u0019\u0000\u0445\u0446\u0001\u0000\u0000\u0000"+ - "\u0446\u0447\u0006j\u000b\u0000\u0447\u00e5\u0001\u0000\u0000\u0000\u0448"+ - "\u0449\u0003D\u001a\u0000\u0449\u044a\u0001\u0000\u0000\u0000\u044a\u044b"+ - "\u0006k\u000b\u0000\u044b\u00e7\u0001\u0000\u0000\u0000\u044c\u044d\u0003"+ - "F\u001b\u0000\u044d\u044e\u0001\u0000\u0000\u0000\u044e\u044f\u0006l\u000b"+ - "\u0000\u044f\u00e9\u0001\u0000\u0000\u0000\u0450\u0451\u0003H\u001c\u0000"+ - "\u0451\u0452\u0001\u0000\u0000\u0000\u0452\u0453\u0006m\u0010\u0000\u0453"+ - "\u0454\u0006m\f\u0000\u0454\u00eb\u0001\u0000\u0000\u0000\u0455\u0456"+ - "\u0003t2\u0000\u0456\u0457\u0001\u0000\u0000\u0000\u0457\u0458\u0006n"+ - "\u0017\u0000\u0458\u00ed\u0001\u0000\u0000\u0000\u0459\u045a\u0003p0\u0000"+ - "\u045a\u045b\u0001\u0000\u0000\u0000\u045b\u045c\u0006o\u0013\u0000\u045c"+ - "\u00ef\u0001\u0000\u0000\u0000\u045d\u045e\u0004p\n\u0000\u045e\u045f"+ - "\u0003\u008c>\u0000\u045f\u0460\u0001\u0000\u0000\u0000\u0460\u0461\u0006"+ - "p\u0018\u0000\u0461\u00f1\u0001\u0000\u0000\u0000\u0462\u0463\u0004q\u000b"+ - "\u0000\u0463\u0464\u0003\u00b2Q\u0000\u0464\u0465\u0001\u0000\u0000\u0000"+ - "\u0465\u0466\u0006q\u0019\u0000\u0466\u00f3\u0001\u0000\u0000\u0000\u0467"+ - "\u046c\u0003L\u001e\u0000\u0468\u046c\u0003J\u001d\u0000\u0469\u046c\u0003"+ - "Z%\u0000\u046a\u046c\u0003\u00a6K\u0000\u046b\u0467\u0001\u0000\u0000"+ - "\u0000\u046b\u0468\u0001\u0000\u0000\u0000\u046b\u0469\u0001\u0000\u0000"+ - "\u0000\u046b\u046a\u0001\u0000\u0000\u0000\u046c\u00f5\u0001\u0000\u0000"+ - "\u0000\u046d\u0470\u0003L\u001e\u0000\u046e\u0470\u0003\u00a6K\u0000\u046f"+ - "\u046d\u0001\u0000\u0000\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470"+ - "\u0474\u0001\u0000\u0000\u0000\u0471\u0473\u0003\u00f4r\u0000\u0472\u0471"+ - 
"\u0001\u0000\u0000\u0000\u0473\u0476\u0001\u0000\u0000\u0000\u0474\u0472"+ - "\u0001\u0000\u0000\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475\u0481"+ - "\u0001\u0000\u0000\u0000\u0476\u0474\u0001\u0000\u0000\u0000\u0477\u047a"+ - "\u0003Z%\u0000\u0478\u047a\u0003T\"\u0000\u0479\u0477\u0001\u0000\u0000"+ - "\u0000\u0479\u0478\u0001\u0000\u0000\u0000\u047a\u047c\u0001\u0000\u0000"+ - "\u0000\u047b\u047d\u0003\u00f4r\u0000\u047c\u047b\u0001\u0000\u0000\u0000"+ - "\u047d\u047e\u0001\u0000\u0000\u0000\u047e\u047c\u0001\u0000\u0000\u0000"+ - "\u047e\u047f\u0001\u0000\u0000\u0000\u047f\u0481\u0001\u0000\u0000\u0000"+ - "\u0480\u046f\u0001\u0000\u0000\u0000\u0480\u0479\u0001\u0000\u0000\u0000"+ - "\u0481\u00f7\u0001\u0000\u0000\u0000\u0482\u0485\u0003\u00f6s\u0000\u0483"+ - "\u0485\u0003\u00baU\u0000\u0484\u0482\u0001\u0000\u0000\u0000\u0484\u0483"+ - "\u0001\u0000\u0000\u0000\u0485\u0486\u0001\u0000\u0000\u0000\u0486\u0484"+ - "\u0001\u0000\u0000\u0000\u0486\u0487\u0001\u0000\u0000\u0000\u0487\u00f9"+ - "\u0001\u0000\u0000\u0000\u0488\u0489\u0003B\u0019\u0000\u0489\u048a\u0001"+ - "\u0000\u0000\u0000\u048a\u048b\u0006u\u000b\u0000\u048b\u00fb\u0001\u0000"+ - "\u0000\u0000\u048c\u048d\u0003D\u001a\u0000\u048d\u048e\u0001\u0000\u0000"+ - "\u0000\u048e\u048f\u0006v\u000b\u0000\u048f\u00fd\u0001\u0000\u0000\u0000"+ - "\u0490\u0491\u0003F\u001b\u0000\u0491\u0492\u0001\u0000\u0000\u0000\u0492"+ - "\u0493\u0006w\u000b\u0000\u0493\u00ff\u0001\u0000\u0000\u0000\u0494\u0495"+ - "\u0003H\u001c\u0000\u0495\u0496\u0001\u0000\u0000\u0000\u0496\u0497\u0006"+ - "x\u0010\u0000\u0497\u0498\u0006x\f\u0000\u0498\u0101\u0001\u0000\u0000"+ - "\u0000\u0499\u049a\u0003j-\u0000\u049a\u049b\u0001\u0000\u0000\u0000\u049b"+ - "\u049c\u0006y\u0014\u0000\u049c\u0103\u0001\u0000\u0000\u0000\u049d\u049e"+ - "\u0003p0\u0000\u049e\u049f\u0001\u0000\u0000\u0000\u049f\u04a0\u0006z"+ - "\u0013\u0000\u04a0\u0105\u0001\u0000\u0000\u0000\u04a1\u04a2\u0003t2\u0000"+ - "\u04a2\u04a3\u0001\u0000\u0000\u0000\u04a3\u04a4\u0006{\u0017\u0000\u04a4"+ - "\u0107\u0001\u0000\u0000\u0000\u04a5\u04a6\u0004|\f\u0000\u04a6\u04a7"+ - "\u0003\u008c>\u0000\u04a7\u04a8\u0001\u0000\u0000\u0000\u04a8\u04a9\u0006"+ - "|\u0018\u0000\u04a9\u0109\u0001\u0000\u0000\u0000\u04aa\u04ab\u0004}\r"+ - "\u0000\u04ab\u04ac\u0003\u00b2Q\u0000\u04ac\u04ad\u0001\u0000\u0000\u0000"+ - "\u04ad\u04ae\u0006}\u0019\u0000\u04ae\u010b\u0001\u0000\u0000\u0000\u04af"+ - "\u04b0\u0007\f\u0000\u0000\u04b0\u04b1\u0007\u0002\u0000\u0000\u04b1\u010d"+ - "\u0001\u0000\u0000\u0000\u04b2\u04b3\u0003\u00f8t\u0000\u04b3\u04b4\u0001"+ - "\u0000\u0000\u0000\u04b4\u04b5\u0006\u007f\u001a\u0000\u04b5\u010f\u0001"+ - "\u0000\u0000\u0000\u04b6\u04b7\u0003B\u0019\u0000\u04b7\u04b8\u0001\u0000"+ - "\u0000\u0000\u04b8\u04b9\u0006\u0080\u000b\u0000\u04b9\u0111\u0001\u0000"+ - "\u0000\u0000\u04ba\u04bb\u0003D\u001a\u0000\u04bb\u04bc\u0001\u0000\u0000"+ - "\u0000\u04bc\u04bd\u0006\u0081\u000b\u0000\u04bd\u0113\u0001\u0000\u0000"+ - "\u0000\u04be\u04bf\u0003F\u001b\u0000\u04bf\u04c0\u0001\u0000\u0000\u0000"+ - "\u04c0\u04c1\u0006\u0082\u000b\u0000\u04c1\u0115\u0001\u0000\u0000\u0000"+ - "\u04c2\u04c3\u0003H\u001c\u0000\u04c3\u04c4\u0001\u0000\u0000\u0000\u04c4"+ - "\u04c5\u0006\u0083\u0010\u0000\u04c5\u04c6\u0006\u0083\f\u0000\u04c6\u0117"+ - "\u0001\u0000\u0000\u0000\u04c7\u04c8\u0003\u00b4R\u0000\u04c8\u04c9\u0001"+ - "\u0000\u0000\u0000\u04c9\u04ca\u0006\u0084\u000e\u0000\u04ca\u04cb\u0006"+ - "\u0084\u001b\u0000\u04cb\u0119\u0001\u0000\u0000\u0000\u04cc\u04cd\u0007"+ - 
"\u0007\u0000\u0000\u04cd\u04ce\u0007\t\u0000\u0000\u04ce\u04cf\u0001\u0000"+ - "\u0000\u0000\u04cf\u04d0\u0006\u0085\u001c\u0000\u04d0\u011b\u0001\u0000"+ - "\u0000\u0000\u04d1\u04d2\u0007\u0013\u0000\u0000\u04d2\u04d3\u0007\u0001"+ - "\u0000\u0000\u04d3\u04d4\u0007\u0005\u0000\u0000\u04d4\u04d5\u0007\n\u0000"+ - "\u0000\u04d5\u04d6\u0001\u0000\u0000\u0000\u04d6\u04d7\u0006\u0086\u001c"+ - "\u0000\u04d7\u011d\u0001\u0000\u0000\u0000\u04d8\u04d9\b#\u0000\u0000"+ - "\u04d9\u011f\u0001\u0000\u0000\u0000\u04da\u04dc\u0003\u011e\u0087\u0000"+ - "\u04db\u04da\u0001\u0000\u0000\u0000\u04dc\u04dd\u0001\u0000\u0000\u0000"+ - "\u04dd\u04db\u0001\u0000\u0000\u0000\u04dd\u04de\u0001\u0000\u0000\u0000"+ - "\u04de\u04df\u0001\u0000\u0000\u0000\u04df\u04e0\u0003n/\u0000\u04e0\u04e2"+ - "\u0001\u0000\u0000\u0000\u04e1\u04db\u0001\u0000\u0000\u0000\u04e1\u04e2"+ - "\u0001\u0000\u0000\u0000\u04e2\u04e4\u0001\u0000\u0000\u0000\u04e3\u04e5"+ - "\u0003\u011e\u0087\u0000\u04e4\u04e3\u0001\u0000\u0000\u0000\u04e5\u04e6"+ - "\u0001\u0000\u0000\u0000\u04e6\u04e4\u0001\u0000\u0000\u0000\u04e6\u04e7"+ - "\u0001\u0000\u0000\u0000\u04e7\u0121\u0001\u0000\u0000\u0000\u04e8\u04e9"+ - "\u0003\u0120\u0088\u0000\u04e9\u04ea\u0001\u0000\u0000\u0000\u04ea\u04eb"+ - "\u0006\u0089\u001d\u0000\u04eb\u0123\u0001\u0000\u0000\u0000\u04ec\u04ed"+ - "\u0003B\u0019\u0000\u04ed\u04ee\u0001\u0000\u0000\u0000\u04ee\u04ef\u0006"+ - "\u008a\u000b\u0000\u04ef\u0125\u0001\u0000\u0000\u0000\u04f0\u04f1\u0003"+ - "D\u001a\u0000\u04f1\u04f2\u0001\u0000\u0000\u0000\u04f2\u04f3\u0006\u008b"+ - "\u000b\u0000\u04f3\u0127\u0001\u0000\u0000\u0000\u04f4\u04f5\u0003F\u001b"+ - "\u0000\u04f5\u04f6\u0001\u0000\u0000\u0000\u04f6\u04f7\u0006\u008c\u000b"+ - "\u0000\u04f7\u0129\u0001\u0000\u0000\u0000\u04f8\u04f9\u0003H\u001c\u0000"+ - "\u04f9\u04fa\u0001\u0000\u0000\u0000\u04fa\u04fb\u0006\u008d\u0010\u0000"+ - "\u04fb\u04fc\u0006\u008d\f\u0000\u04fc\u04fd\u0006\u008d\f\u0000\u04fd"+ - "\u012b\u0001\u0000\u0000\u0000\u04fe\u04ff\u0003j-\u0000\u04ff\u0500\u0001"+ - "\u0000\u0000\u0000\u0500\u0501\u0006\u008e\u0014\u0000\u0501\u012d\u0001"+ - "\u0000\u0000\u0000\u0502\u0503\u0003p0\u0000\u0503\u0504\u0001\u0000\u0000"+ - "\u0000\u0504\u0505\u0006\u008f\u0013\u0000\u0505\u012f\u0001\u0000\u0000"+ - "\u0000\u0506\u0507\u0003t2\u0000\u0507\u0508\u0001\u0000\u0000\u0000\u0508"+ - "\u0509\u0006\u0090\u0017\u0000\u0509\u0131\u0001\u0000\u0000\u0000\u050a"+ - "\u050b\u0003\u011c\u0086\u0000\u050b\u050c\u0001\u0000\u0000\u0000\u050c"+ - "\u050d\u0006\u0091\u001e\u0000\u050d\u0133\u0001\u0000\u0000\u0000\u050e"+ - "\u050f\u0003\u00f8t\u0000\u050f\u0510\u0001\u0000\u0000\u0000\u0510\u0511"+ - "\u0006\u0092\u001a\u0000\u0511\u0135\u0001\u0000\u0000\u0000\u0512\u0513"+ - "\u0003\u00bcV\u0000\u0513\u0514\u0001\u0000\u0000\u0000\u0514\u0515\u0006"+ - "\u0093\u001f\u0000\u0515\u0137\u0001\u0000\u0000\u0000\u0516\u0517\u0004"+ - "\u0094\u000e\u0000\u0517\u0518\u0003\u008c>\u0000\u0518\u0519\u0001\u0000"+ - "\u0000\u0000\u0519\u051a\u0006\u0094\u0018\u0000\u051a\u0139\u0001\u0000"+ - "\u0000\u0000\u051b\u051c\u0004\u0095\u000f\u0000\u051c\u051d\u0003\u00b2"+ - "Q\u0000\u051d\u051e\u0001\u0000\u0000\u0000\u051e\u051f\u0006\u0095\u0019"+ - "\u0000\u051f\u013b\u0001\u0000\u0000\u0000\u0520\u0521\u0003B\u0019\u0000"+ - "\u0521\u0522\u0001\u0000\u0000\u0000\u0522\u0523\u0006\u0096\u000b\u0000"+ - "\u0523\u013d\u0001\u0000\u0000\u0000\u0524\u0525\u0003D\u001a\u0000\u0525"+ - "\u0526\u0001\u0000\u0000\u0000\u0526\u0527\u0006\u0097\u000b\u0000\u0527"+ - 
"\u013f\u0001\u0000\u0000\u0000\u0528\u0529\u0003F\u001b\u0000\u0529\u052a"+ - "\u0001\u0000\u0000\u0000\u052a\u052b\u0006\u0098\u000b\u0000\u052b\u0141"+ - "\u0001\u0000\u0000\u0000\u052c\u052d\u0003H\u001c\u0000\u052d\u052e\u0001"+ - "\u0000\u0000\u0000\u052e\u052f\u0006\u0099\u0010\u0000\u052f\u0530\u0006"+ - "\u0099\f\u0000\u0530\u0143\u0001\u0000\u0000\u0000\u0531\u0532\u0003t"+ - "2\u0000\u0532\u0533\u0001\u0000\u0000\u0000\u0533\u0534\u0006\u009a\u0017"+ - "\u0000\u0534\u0145\u0001\u0000\u0000\u0000\u0535\u0536\u0004\u009b\u0010"+ - "\u0000\u0536\u0537\u0003\u008c>\u0000\u0537\u0538\u0001\u0000\u0000\u0000"+ - "\u0538\u0539\u0006\u009b\u0018\u0000\u0539\u0147\u0001\u0000\u0000\u0000"+ - "\u053a\u053b\u0004\u009c\u0011\u0000\u053b\u053c\u0003\u00b2Q\u0000\u053c"+ - "\u053d\u0001\u0000\u0000\u0000\u053d\u053e\u0006\u009c\u0019\u0000\u053e"+ - "\u0149\u0001\u0000\u0000\u0000\u053f\u0540\u0003\u00bcV\u0000\u0540\u0541"+ - "\u0001\u0000\u0000\u0000\u0541\u0542\u0006\u009d\u001f\u0000\u0542\u014b"+ - "\u0001\u0000\u0000\u0000\u0543\u0544\u0003\u00b8T\u0000\u0544\u0545\u0001"+ - "\u0000\u0000\u0000\u0545\u0546\u0006\u009e \u0000\u0546\u014d\u0001\u0000"+ - "\u0000\u0000\u0547\u0548\u0003B\u0019\u0000\u0548\u0549\u0001\u0000\u0000"+ - "\u0000\u0549\u054a\u0006\u009f\u000b\u0000\u054a\u014f\u0001\u0000\u0000"+ - "\u0000\u054b\u054c\u0003D\u001a\u0000\u054c\u054d\u0001\u0000\u0000\u0000"+ - "\u054d\u054e\u0006\u00a0\u000b\u0000\u054e\u0151\u0001\u0000\u0000\u0000"+ - "\u054f\u0550\u0003F\u001b\u0000\u0550\u0551\u0001\u0000\u0000\u0000\u0551"+ - "\u0552\u0006\u00a1\u000b\u0000\u0552\u0153\u0001\u0000\u0000\u0000\u0553"+ - "\u0554\u0003H\u001c\u0000\u0554\u0555\u0001\u0000\u0000\u0000\u0555\u0556"+ - "\u0006\u00a2\u0010\u0000\u0556\u0557\u0006\u00a2\f\u0000\u0557\u0155\u0001"+ - "\u0000\u0000\u0000\u0558\u0559\u0007\u0001\u0000\u0000\u0559\u055a\u0007"+ - "\t\u0000\u0000\u055a\u055b\u0007\u000f\u0000\u0000\u055b\u055c\u0007\u0007"+ - "\u0000\u0000\u055c\u0157\u0001\u0000\u0000\u0000\u055d\u055e\u0003B\u0019"+ - "\u0000\u055e\u055f\u0001\u0000\u0000\u0000\u055f\u0560\u0006\u00a4\u000b"+ - "\u0000\u0560\u0159\u0001\u0000\u0000\u0000\u0561\u0562\u0003D\u001a\u0000"+ - "\u0562\u0563\u0001\u0000\u0000\u0000\u0563\u0564\u0006\u00a5\u000b\u0000"+ - "\u0564\u015b\u0001\u0000\u0000\u0000\u0565\u0566\u0003F\u001b\u0000\u0566"+ - "\u0567\u0001\u0000\u0000\u0000\u0567\u0568\u0006\u00a6\u000b\u0000\u0568"+ - "\u015d\u0001\u0000\u0000\u0000\u0569\u056a\u0003\u00b6S\u0000\u056a\u056b"+ - "\u0001\u0000\u0000\u0000\u056b\u056c\u0006\u00a7\u0011\u0000\u056c\u056d"+ - "\u0006\u00a7\f\u0000\u056d\u015f\u0001\u0000\u0000\u0000\u056e\u056f\u0003"+ - "n/\u0000\u056f\u0570\u0001\u0000\u0000\u0000\u0570\u0571\u0006\u00a8\u0012"+ - "\u0000\u0571\u0161\u0001\u0000\u0000\u0000\u0572\u0578\u0003T\"\u0000"+ - "\u0573\u0578\u0003J\u001d\u0000\u0574\u0578\u0003t2\u0000\u0575\u0578"+ - "\u0003L\u001e\u0000\u0576\u0578\u0003Z%\u0000\u0577\u0572\u0001\u0000"+ - "\u0000\u0000\u0577\u0573\u0001\u0000\u0000\u0000\u0577\u0574\u0001\u0000"+ - "\u0000\u0000\u0577\u0575\u0001\u0000\u0000\u0000\u0577\u0576\u0001\u0000"+ - "\u0000\u0000\u0578\u0579\u0001\u0000\u0000\u0000\u0579\u0577\u0001\u0000"+ - "\u0000\u0000\u0579\u057a\u0001\u0000\u0000\u0000\u057a\u0163\u0001\u0000"+ - "\u0000\u0000\u057b\u057c\u0003B\u0019\u0000\u057c\u057d\u0001\u0000\u0000"+ - "\u0000\u057d\u057e\u0006\u00aa\u000b\u0000\u057e\u0165\u0001\u0000\u0000"+ - "\u0000\u057f\u0580\u0003D\u001a\u0000\u0580\u0581\u0001\u0000\u0000\u0000"+ - 
"\u0581\u0582\u0006\u00ab\u000b\u0000\u0582\u0167\u0001\u0000\u0000\u0000"+ - "\u0583\u0584\u0003F\u001b\u0000\u0584\u0585\u0001\u0000\u0000\u0000\u0585"+ - "\u0586\u0006\u00ac\u000b\u0000\u0586\u0169\u0001\u0000\u0000\u0000\u0587"+ - "\u0588\u0003H\u001c\u0000\u0588\u0589\u0001\u0000\u0000\u0000\u0589\u058a"+ - "\u0006\u00ad\u0010\u0000\u058a\u058b\u0006\u00ad\f\u0000\u058b\u016b\u0001"+ - "\u0000\u0000\u0000\u058c\u058d\u0003n/\u0000\u058d\u058e\u0001\u0000\u0000"+ - "\u0000\u058e\u058f\u0006\u00ae\u0012\u0000\u058f\u016d\u0001\u0000\u0000"+ - "\u0000\u0590\u0591\u0003p0\u0000\u0591\u0592\u0001\u0000\u0000\u0000\u0592"+ - "\u0593\u0006\u00af\u0013\u0000\u0593\u016f\u0001\u0000\u0000\u0000\u0594"+ - "\u0595\u0003t2\u0000\u0595\u0596\u0001\u0000\u0000\u0000\u0596\u0597\u0006"+ - "\u00b0\u0017\u0000\u0597\u0171\u0001\u0000\u0000\u0000\u0598\u0599\u0003"+ - "\u011a\u0085\u0000\u0599\u059a\u0001\u0000\u0000\u0000\u059a\u059b\u0006"+ - "\u00b1!\u0000\u059b\u059c\u0006\u00b1\"\u0000\u059c\u0173\u0001\u0000"+ - "\u0000\u0000\u059d\u059e\u0003\u00deg\u0000\u059e\u059f\u0001\u0000\u0000"+ - "\u0000\u059f\u05a0\u0006\u00b2\u0015\u0000\u05a0\u0175\u0001\u0000\u0000"+ - "\u0000\u05a1\u05a2\u0003^\'\u0000\u05a2\u05a3\u0001\u0000\u0000\u0000"+ - "\u05a3\u05a4\u0006\u00b3\u0016\u0000\u05a4\u0177\u0001\u0000\u0000\u0000"+ - "\u05a5\u05a6\u0003B\u0019\u0000\u05a6\u05a7\u0001\u0000\u0000\u0000\u05a7"+ - "\u05a8\u0006\u00b4\u000b\u0000\u05a8\u0179\u0001\u0000\u0000\u0000\u05a9"+ - "\u05aa\u0003D\u001a\u0000\u05aa\u05ab\u0001\u0000\u0000\u0000\u05ab\u05ac"+ - "\u0006\u00b5\u000b\u0000\u05ac\u017b\u0001\u0000\u0000\u0000\u05ad\u05ae"+ - "\u0003F\u001b\u0000\u05ae\u05af\u0001\u0000\u0000\u0000\u05af\u05b0\u0006"+ - "\u00b6\u000b\u0000\u05b0\u017d\u0001\u0000\u0000\u0000\u05b1\u05b2\u0003"+ - "H\u001c\u0000\u05b2\u05b3\u0001\u0000\u0000\u0000\u05b3\u05b4\u0006\u00b7"+ - "\u0010\u0000\u05b4\u05b5\u0006\u00b7\f\u0000\u05b5\u05b6\u0006\u00b7\f"+ - "\u0000\u05b6\u017f\u0001\u0000\u0000\u0000\u05b7\u05b8\u0003p0\u0000\u05b8"+ - "\u05b9\u0001\u0000\u0000\u0000\u05b9\u05ba\u0006\u00b8\u0013\u0000\u05ba"+ - "\u0181\u0001\u0000\u0000\u0000\u05bb\u05bc\u0003t2\u0000\u05bc\u05bd\u0001"+ - "\u0000\u0000\u0000\u05bd\u05be\u0006\u00b9\u0017\u0000\u05be\u0183\u0001"+ - "\u0000\u0000\u0000\u05bf\u05c0\u0003\u00f8t\u0000\u05c0\u05c1\u0001\u0000"+ - "\u0000\u0000\u05c1\u05c2\u0006\u00ba\u001a\u0000\u05c2\u0185\u0001\u0000"+ - "\u0000\u0000\u05c3\u05c4\u0003B\u0019\u0000\u05c4\u05c5\u0001\u0000\u0000"+ - "\u0000\u05c5\u05c6\u0006\u00bb\u000b\u0000\u05c6\u0187\u0001\u0000\u0000"+ - "\u0000\u05c7\u05c8\u0003D\u001a\u0000\u05c8\u05c9\u0001\u0000\u0000\u0000"+ - "\u05c9\u05ca\u0006\u00bc\u000b\u0000\u05ca\u0189\u0001\u0000\u0000\u0000"+ - "\u05cb\u05cc\u0003F\u001b\u0000\u05cc\u05cd\u0001\u0000\u0000\u0000\u05cd"+ - "\u05ce\u0006\u00bd\u000b\u0000\u05ce\u018b\u0001\u0000\u0000\u0000\u05cf"+ - "\u05d0\u0003H\u001c\u0000\u05d0\u05d1\u0001\u0000\u0000\u0000\u05d1\u05d2"+ - "\u0006\u00be\u0010\u0000\u05d2\u05d3\u0006\u00be\f\u0000\u05d3\u018d\u0001"+ - "\u0000\u0000\u0000\u05d4\u05d5\u00036\u0013\u0000\u05d5\u05d6\u0001\u0000"+ - "\u0000\u0000\u05d6\u05d7\u0006\u00bf#\u0000\u05d7\u018f\u0001\u0000\u0000"+ - "\u0000\u05d8\u05d9\u0003\u010c~\u0000\u05d9\u05da\u0001\u0000\u0000\u0000"+ - "\u05da\u05db\u0006\u00c0$\u0000\u05db\u0191\u0001\u0000\u0000\u0000\u05dc"+ - "\u05dd\u0003\u011a\u0085\u0000\u05dd\u05de\u0001\u0000\u0000\u0000\u05de"+ - "\u05df\u0006\u00c1!\u0000\u05df\u05e0\u0006\u00c1\f\u0000\u05e0\u05e1"+ - 
"\u0006\u00c1\u0000\u0000\u05e1\u0193\u0001\u0000\u0000\u0000\u05e2\u05e3"+ - "\u0007\u0014\u0000\u0000\u05e3\u05e4\u0007\u0002\u0000\u0000\u05e4\u05e5"+ - "\u0007\u0001\u0000\u0000\u05e5\u05e6\u0007\t\u0000\u0000\u05e6\u05e7\u0007"+ - "\u0011\u0000\u0000\u05e7\u05e8\u0001\u0000\u0000\u0000\u05e8\u05e9\u0006"+ - "\u00c2\f\u0000\u05e9\u05ea\u0006\u00c2\u0000\u0000\u05ea\u0195\u0001\u0000"+ - "\u0000\u0000\u05eb\u05ec\u0003\u00b8T\u0000\u05ec\u05ed\u0001\u0000\u0000"+ - "\u0000\u05ed\u05ee\u0006\u00c3 \u0000\u05ee\u0197\u0001\u0000\u0000\u0000"+ - "\u05ef\u05f0\u0003\u00bcV\u0000\u05f0\u05f1\u0001\u0000\u0000\u0000\u05f1"+ - "\u05f2\u0006\u00c4\u001f\u0000\u05f2\u0199\u0001\u0000\u0000\u0000\u05f3"+ - "\u05f4\u0003B\u0019\u0000\u05f4\u05f5\u0001\u0000\u0000\u0000\u05f5\u05f6"+ - "\u0006\u00c5\u000b\u0000\u05f6\u019b\u0001\u0000\u0000\u0000\u05f7\u05f8"+ - "\u0003D\u001a\u0000\u05f8\u05f9\u0001\u0000\u0000\u0000\u05f9\u05fa\u0006"+ - "\u00c6\u000b\u0000\u05fa\u019d\u0001\u0000\u0000\u0000\u05fb\u05fc\u0003"+ - "F\u001b\u0000\u05fc\u05fd\u0001\u0000\u0000\u0000\u05fd\u05fe\u0006\u00c7"+ - "\u000b\u0000\u05fe\u019f\u0001\u0000\u0000\u0000\u05ff\u0600\u0003H\u001c"+ - "\u0000\u0600\u0601\u0001\u0000\u0000\u0000\u0601\u0602\u0006\u00c8\u0010"+ - "\u0000\u0602\u0603\u0006\u00c8\f\u0000\u0603\u01a1\u0001\u0000\u0000\u0000"+ - "\u0604\u0605\u0003\u00deg\u0000\u0605\u0606\u0001\u0000\u0000\u0000\u0606"+ - "\u0607\u0006\u00c9\u0015\u0000\u0607\u0608\u0006\u00c9\f\u0000\u0608\u0609"+ - "\u0006\u00c9%\u0000\u0609\u01a3\u0001\u0000\u0000\u0000\u060a\u060b\u0003"+ - "^\'\u0000\u060b\u060c\u0001\u0000\u0000\u0000\u060c\u060d\u0006\u00ca"+ - "\u0016\u0000\u060d\u060e\u0006\u00ca\f\u0000\u060e\u060f\u0006\u00ca%"+ - "\u0000\u060f\u01a5\u0001\u0000\u0000\u0000\u0610\u0611\u0003B\u0019\u0000"+ - "\u0611\u0612\u0001\u0000\u0000\u0000\u0612\u0613\u0006\u00cb\u000b\u0000"+ - "\u0613\u01a7\u0001\u0000\u0000\u0000\u0614\u0615\u0003D\u001a\u0000\u0615"+ - "\u0616\u0001\u0000\u0000\u0000\u0616\u0617\u0006\u00cc\u000b\u0000\u0617"+ - "\u01a9\u0001\u0000\u0000\u0000\u0618\u0619\u0003F\u001b\u0000\u0619\u061a"+ - "\u0001\u0000\u0000\u0000\u061a\u061b\u0006\u00cd\u000b\u0000\u061b\u01ab"+ - "\u0001\u0000\u0000\u0000\u061c\u061d\u0003n/\u0000\u061d\u061e\u0001\u0000"+ - "\u0000\u0000\u061e\u061f\u0006\u00ce\u0012\u0000\u061f\u0620\u0006\u00ce"+ - "\f\u0000\u0620\u0621\u0006\u00ce\t\u0000\u0621\u01ad\u0001\u0000\u0000"+ - "\u0000\u0622\u0623\u0003p0\u0000\u0623\u0624\u0001\u0000\u0000\u0000\u0624"+ - "\u0625\u0006\u00cf\u0013\u0000\u0625\u0626\u0006\u00cf\f\u0000\u0626\u0627"+ - "\u0006\u00cf\t\u0000\u0627\u01af\u0001\u0000\u0000\u0000\u0628\u0629\u0003"+ - "B\u0019\u0000\u0629\u062a\u0001\u0000\u0000\u0000\u062a\u062b\u0006\u00d0"+ - "\u000b\u0000\u062b\u01b1\u0001\u0000\u0000\u0000\u062c\u062d\u0003D\u001a"+ - "\u0000\u062d\u062e\u0001\u0000\u0000\u0000\u062e\u062f\u0006\u00d1\u000b"+ - "\u0000\u062f\u01b3\u0001\u0000\u0000\u0000\u0630\u0631\u0003F\u001b\u0000"+ - "\u0631\u0632\u0001\u0000\u0000\u0000\u0632\u0633\u0006\u00d2\u000b\u0000"+ - "\u0633\u01b5\u0001\u0000\u0000\u0000\u0634\u0635\u0003\u00bcV\u0000\u0635"+ - "\u0636\u0001\u0000\u0000\u0000\u0636\u0637\u0006\u00d3\f\u0000\u0637\u0638"+ - "\u0006\u00d3\u0000\u0000\u0638\u0639\u0006\u00d3\u001f\u0000\u0639\u01b7"+ - "\u0001\u0000\u0000\u0000\u063a\u063b\u0003\u00b8T\u0000\u063b\u063c\u0001"+ - "\u0000\u0000\u0000\u063c\u063d\u0006\u00d4\f\u0000\u063d\u063e\u0006\u00d4"+ - "\u0000\u0000\u063e\u063f\u0006\u00d4 \u0000\u063f\u01b9\u0001\u0000\u0000"+ - 
"\u0000\u0640\u0641\u0003d*\u0000\u0641\u0642\u0001\u0000\u0000\u0000\u0642"+ - "\u0643\u0006\u00d5\f\u0000\u0643\u0644\u0006\u00d5\u0000\u0000\u0644\u0645"+ - "\u0006\u00d5&\u0000\u0645\u01bb\u0001\u0000\u0000\u0000\u0646\u0647\u0003"+ - "H\u001c\u0000\u0647\u0648\u0001\u0000\u0000\u0000\u0648\u0649\u0006\u00d6"+ - "\u0010\u0000\u0649\u064a\u0006\u00d6\f\u0000\u064a\u01bd\u0001\u0000\u0000"+ - "\u0000B\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f"+ - "\r\u000e\u000f\u0293\u029d\u02a1\u02a4\u02ad\u02af\u02ba\u02cd\u02d2\u02db"+ - "\u02e2\u02e7\u02e9\u02f4\u02fc\u02ff\u0301\u0306\u030b\u0311\u0318\u031d"+ - "\u0323\u0326\u032e\u0332\u03b8\u03bd\u03c4\u03c6\u03d6\u03db\u03e0\u03e2"+ - "\u03e8\u0435\u043a\u046b\u046f\u0474\u0479\u047e\u0480\u0484\u0486\u04dd"+ - "\u04e1\u04e6\u0577\u0579\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006"+ - "\u0000\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000"+ - "\u0005\t\u0000\u0005\u000b\u0000\u0005\u000e\u0000\u0005\r\u0000\u0000"+ - "\u0001\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007H\u0000\u0005\u0000"+ - "\u0000\u0007\u001d\u0000\u0007I\u0000\u0007&\u0000\u0007\'\u0000\u0007"+ - "$\u0000\u0007S\u0000\u0007\u001e\u0000\u0007)\u0000\u00075\u0000\u0007"+ - "G\u0000\u0007W\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007a\u0000\u0007"+ - "`\u0000\u0007K\u0000\u0007J\u0000\u0007_\u0000\u0005\f\u0000\u0007\u0014"+ - "\u0000\u0007[\u0000\u0005\u000f\u0000\u0007!\u0000"; + "\u0000\u019c\u0000\u019e\u0000\u01a0z\u01a2{\u01a4|\u01a6\u0000\u01a8"+ + "\u0000\u01aa\u0000\u01ac}\u01ae~\u01b0\u007f\u01b2\u0000\u01b4\u0000\u01b6"+ + "\u0080\u01b8\u0081\u01ba\u0082\u01bc\u0000\u01be\u0000\u01c0\u0000\u01c2"+ + "\u0000\u0010\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b"+ + "\f\r\u000e\u000f$\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000SSss\u0002"+ + "\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002\u0000"+ + "OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002"+ + "\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002\u0000"+ + "MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0002"+ + "\u0000JJjj\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000"+ + "\t\n\r\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004"+ + "\u0000\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002"+ + "\u0000YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b"+ + "\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u0678\u0000\u0010\u0001\u0000\u0000"+ + "\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000"+ + "\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000"+ + "\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000"+ + "\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000"+ + "\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000"+ + "&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001"+ + "\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000"+ + "\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u0000"+ + "4\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000\u00008\u0001"+ + "\u0000\u0000\u0000\u0000:\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000"+ + "\u0000\u0000>\u0001\u0000\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000"+ + "B\u0001\u0000\u0000\u0000\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001"+ + 
"\u0000\u0000\u0000\u0001H\u0001\u0000\u0000\u0000\u0001^\u0001\u0000\u0000"+ + "\u0000\u0001`\u0001\u0000\u0000\u0000\u0001b\u0001\u0000\u0000\u0000\u0001"+ + "d\u0001\u0000\u0000\u0000\u0001f\u0001\u0000\u0000\u0000\u0001h\u0001"+ + "\u0000\u0000\u0000\u0001j\u0001\u0000\u0000\u0000\u0001l\u0001\u0000\u0000"+ + "\u0000\u0001n\u0001\u0000\u0000\u0000\u0001p\u0001\u0000\u0000\u0000\u0001"+ + "r\u0001\u0000\u0000\u0000\u0001t\u0001\u0000\u0000\u0000\u0001v\u0001"+ + "\u0000\u0000\u0000\u0001x\u0001\u0000\u0000\u0000\u0001z\u0001\u0000\u0000"+ + "\u0000\u0001|\u0001\u0000\u0000\u0000\u0001~\u0001\u0000\u0000\u0000\u0001"+ + "\u0080\u0001\u0000\u0000\u0000\u0001\u0082\u0001\u0000\u0000\u0000\u0001"+ + "\u0084\u0001\u0000\u0000\u0000\u0001\u0086\u0001\u0000\u0000\u0000\u0001"+ + "\u0088\u0001\u0000\u0000\u0000\u0001\u008a\u0001\u0000\u0000\u0000\u0001"+ + "\u008c\u0001\u0000\u0000\u0000\u0001\u008e\u0001\u0000\u0000\u0000\u0001"+ + "\u0090\u0001\u0000\u0000\u0000\u0001\u0092\u0001\u0000\u0000\u0000\u0001"+ + "\u0094\u0001\u0000\u0000\u0000\u0001\u0096\u0001\u0000\u0000\u0000\u0001"+ + "\u0098\u0001\u0000\u0000\u0000\u0001\u009a\u0001\u0000\u0000\u0000\u0001"+ + "\u009c\u0001\u0000\u0000\u0000\u0001\u009e\u0001\u0000\u0000\u0000\u0001"+ + "\u00a0\u0001\u0000\u0000\u0000\u0001\u00a2\u0001\u0000\u0000\u0000\u0001"+ + "\u00a4\u0001\u0000\u0000\u0000\u0001\u00a6\u0001\u0000\u0000\u0000\u0001"+ + "\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa\u0001\u0000\u0000\u0000\u0001"+ + "\u00ac\u0001\u0000\u0000\u0000\u0001\u00ae\u0001\u0000\u0000\u0000\u0001"+ + "\u00b0\u0001\u0000\u0000\u0000\u0001\u00b2\u0001\u0000\u0000\u0000\u0001"+ + "\u00b4\u0001\u0000\u0000\u0000\u0001\u00b6\u0001\u0000\u0000\u0000\u0001"+ + "\u00b8\u0001\u0000\u0000\u0000\u0001\u00bc\u0001\u0000\u0000\u0000\u0001"+ + "\u00be\u0001\u0000\u0000\u0000\u0001\u00c0\u0001\u0000\u0000\u0000\u0001"+ + "\u00c2\u0001\u0000\u0000\u0000\u0002\u00c4\u0001\u0000\u0000\u0000\u0002"+ + "\u00c6\u0001\u0000\u0000\u0000\u0002\u00c8\u0001\u0000\u0000\u0000\u0002"+ + "\u00ca\u0001\u0000\u0000\u0000\u0002\u00cc\u0001\u0000\u0000\u0000\u0003"+ + "\u00ce\u0001\u0000\u0000\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003"+ + "\u00d2\u0001\u0000\u0000\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003"+ + "\u00d6\u0001\u0000\u0000\u0000\u0003\u00d8\u0001\u0000\u0000\u0000\u0003"+ + "\u00da\u0001\u0000\u0000\u0000\u0003\u00de\u0001\u0000\u0000\u0000\u0003"+ + "\u00e0\u0001\u0000\u0000\u0000\u0003\u00e2\u0001\u0000\u0000\u0000\u0003"+ + "\u00e4\u0001\u0000\u0000\u0000\u0003\u00e6\u0001\u0000\u0000\u0000\u0003"+ + "\u00e8\u0001\u0000\u0000\u0000\u0004\u00ea\u0001\u0000\u0000\u0000\u0004"+ + "\u00ec\u0001\u0000\u0000\u0000\u0004\u00ee\u0001\u0000\u0000\u0000\u0004"+ + "\u00f0\u0001\u0000\u0000\u0000\u0004\u00f2\u0001\u0000\u0000\u0000\u0004"+ + "\u00f8\u0001\u0000\u0000\u0000\u0004\u00fa\u0001\u0000\u0000\u0000\u0004"+ + "\u00fc\u0001\u0000\u0000\u0000\u0004\u00fe\u0001\u0000\u0000\u0000\u0005"+ + "\u0100\u0001\u0000\u0000\u0000\u0005\u0102\u0001\u0000\u0000\u0000\u0005"+ + "\u0104\u0001\u0000\u0000\u0000\u0005\u0106\u0001\u0000\u0000\u0000\u0005"+ + "\u0108\u0001\u0000\u0000\u0000\u0005\u010a\u0001\u0000\u0000\u0000\u0005"+ + "\u010c\u0001\u0000\u0000\u0000\u0005\u010e\u0001\u0000\u0000\u0000\u0005"+ + "\u0110\u0001\u0000\u0000\u0000\u0005\u0112\u0001\u0000\u0000\u0000\u0005"+ + "\u0114\u0001\u0000\u0000\u0000\u0006\u0116\u0001\u0000\u0000\u0000\u0006"+ + "\u0118\u0001\u0000\u0000\u0000\u0006\u011a\u0001\u0000\u0000\u0000\u0006"+ + 
"\u011c\u0001\u0000\u0000\u0000\u0006\u0120\u0001\u0000\u0000\u0000\u0006"+ + "\u0122\u0001\u0000\u0000\u0000\u0006\u0124\u0001\u0000\u0000\u0000\u0006"+ + "\u0126\u0001\u0000\u0000\u0000\u0006\u0128\u0001\u0000\u0000\u0000\u0007"+ + "\u012a\u0001\u0000\u0000\u0000\u0007\u012c\u0001\u0000\u0000\u0000\u0007"+ + "\u012e\u0001\u0000\u0000\u0000\u0007\u0130\u0001\u0000\u0000\u0000\u0007"+ + "\u0132\u0001\u0000\u0000\u0000\u0007\u0134\u0001\u0000\u0000\u0000\u0007"+ + "\u0136\u0001\u0000\u0000\u0000\u0007\u0138\u0001\u0000\u0000\u0000\u0007"+ + "\u013a\u0001\u0000\u0000\u0000\u0007\u013c\u0001\u0000\u0000\u0000\u0007"+ + "\u013e\u0001\u0000\u0000\u0000\u0007\u0140\u0001\u0000\u0000\u0000\b\u0142"+ + "\u0001\u0000\u0000\u0000\b\u0144\u0001\u0000\u0000\u0000\b\u0146\u0001"+ + "\u0000\u0000\u0000\b\u0148\u0001\u0000\u0000\u0000\b\u014a\u0001\u0000"+ + "\u0000\u0000\b\u014c\u0001\u0000\u0000\u0000\b\u014e\u0001\u0000\u0000"+ + "\u0000\b\u0150\u0001\u0000\u0000\u0000\b\u0152\u0001\u0000\u0000\u0000"+ + "\t\u0154\u0001\u0000\u0000\u0000\t\u0156\u0001\u0000\u0000\u0000\t\u0158"+ + "\u0001\u0000\u0000\u0000\t\u015a\u0001\u0000\u0000\u0000\t\u015c\u0001"+ + "\u0000\u0000\u0000\n\u015e\u0001\u0000\u0000\u0000\n\u0160\u0001\u0000"+ + "\u0000\u0000\n\u0162\u0001\u0000\u0000\u0000\n\u0164\u0001\u0000\u0000"+ + "\u0000\n\u0166\u0001\u0000\u0000\u0000\n\u0168\u0001\u0000\u0000\u0000"+ + "\u000b\u016a\u0001\u0000\u0000\u0000\u000b\u016c\u0001\u0000\u0000\u0000"+ + "\u000b\u016e\u0001\u0000\u0000\u0000\u000b\u0170\u0001\u0000\u0000\u0000"+ + "\u000b\u0172\u0001\u0000\u0000\u0000\u000b\u0174\u0001\u0000\u0000\u0000"+ + "\u000b\u0176\u0001\u0000\u0000\u0000\u000b\u0178\u0001\u0000\u0000\u0000"+ + "\u000b\u017a\u0001\u0000\u0000\u0000\u000b\u017c\u0001\u0000\u0000\u0000"+ + "\f\u017e\u0001\u0000\u0000\u0000\f\u0180\u0001\u0000\u0000\u0000\f\u0182"+ + "\u0001\u0000\u0000\u0000\f\u0184\u0001\u0000\u0000\u0000\f\u0186\u0001"+ + "\u0000\u0000\u0000\f\u0188\u0001\u0000\u0000\u0000\f\u018a\u0001\u0000"+ + "\u0000\u0000\r\u018c\u0001\u0000\u0000\u0000\r\u018e\u0001\u0000\u0000"+ + "\u0000\r\u0190\u0001\u0000\u0000\u0000\r\u0192\u0001\u0000\u0000\u0000"+ + "\r\u0194\u0001\u0000\u0000\u0000\r\u0196\u0001\u0000\u0000\u0000\r\u0198"+ + "\u0001\u0000\u0000\u0000\r\u019a\u0001\u0000\u0000\u0000\r\u019c\u0001"+ + "\u0000\u0000\u0000\r\u019e\u0001\u0000\u0000\u0000\r\u01a0\u0001\u0000"+ + "\u0000\u0000\r\u01a2\u0001\u0000\u0000\u0000\r\u01a4\u0001\u0000\u0000"+ + "\u0000\u000e\u01a6\u0001\u0000\u0000\u0000\u000e\u01a8\u0001\u0000\u0000"+ + "\u0000\u000e\u01aa\u0001\u0000\u0000\u0000\u000e\u01ac\u0001\u0000\u0000"+ + "\u0000\u000e\u01ae\u0001\u0000\u0000\u0000\u000e\u01b0\u0001\u0000\u0000"+ + "\u0000\u000f\u01b2\u0001\u0000\u0000\u0000\u000f\u01b4\u0001\u0000\u0000"+ + "\u0000\u000f\u01b6\u0001\u0000\u0000\u0000\u000f\u01b8\u0001\u0000\u0000"+ + "\u0000\u000f\u01ba\u0001\u0000\u0000\u0000\u000f\u01bc\u0001\u0000\u0000"+ + "\u0000\u000f\u01be\u0001\u0000\u0000\u0000\u000f\u01c0\u0001\u0000\u0000"+ + "\u0000\u000f\u01c2\u0001\u0000\u0000\u0000\u0010\u01c4\u0001\u0000\u0000"+ + "\u0000\u0012\u01ce\u0001\u0000\u0000\u0000\u0014\u01d5\u0001\u0000\u0000"+ + "\u0000\u0016\u01de\u0001\u0000\u0000\u0000\u0018\u01e5\u0001\u0000\u0000"+ + "\u0000\u001a\u01ef\u0001\u0000\u0000\u0000\u001c\u01f6\u0001\u0000\u0000"+ + "\u0000\u001e\u01fd\u0001\u0000\u0000\u0000 \u0204\u0001\u0000\u0000\u0000"+ + "\"\u020c\u0001\u0000\u0000\u0000$\u0218\u0001\u0000\u0000\u0000&\u0221"+ + 
"\u0001\u0000\u0000\u0000(\u0227\u0001\u0000\u0000\u0000*\u022e\u0001\u0000"+ + "\u0000\u0000,\u0235\u0001\u0000\u0000\u0000.\u023d\u0001\u0000\u0000\u0000"+ + "0\u0245\u0001\u0000\u0000\u00002\u0254\u0001\u0000\u0000\u00004\u0260"+ + "\u0001\u0000\u0000\u00006\u026b\u0001\u0000\u0000\u00008\u0273\u0001\u0000"+ + "\u0000\u0000:\u027b\u0001\u0000\u0000\u0000<\u0283\u0001\u0000\u0000\u0000"+ + ">\u028c\u0001\u0000\u0000\u0000@\u0297\u0001\u0000\u0000\u0000B\u029d"+ + "\u0001\u0000\u0000\u0000D\u02ae\u0001\u0000\u0000\u0000F\u02be\u0001\u0000"+ + "\u0000\u0000H\u02c4\u0001\u0000\u0000\u0000J\u02c8\u0001\u0000\u0000\u0000"+ + "L\u02ca\u0001\u0000\u0000\u0000N\u02cc\u0001\u0000\u0000\u0000P\u02cf"+ + "\u0001\u0000\u0000\u0000R\u02d1\u0001\u0000\u0000\u0000T\u02da\u0001\u0000"+ + "\u0000\u0000V\u02dc\u0001\u0000\u0000\u0000X\u02e1\u0001\u0000\u0000\u0000"+ + "Z\u02e3\u0001\u0000\u0000\u0000\\\u02e8\u0001\u0000\u0000\u0000^\u0307"+ + "\u0001\u0000\u0000\u0000`\u030a\u0001\u0000\u0000\u0000b\u0338\u0001\u0000"+ + "\u0000\u0000d\u033a\u0001\u0000\u0000\u0000f\u033d\u0001\u0000\u0000\u0000"+ + "h\u0341\u0001\u0000\u0000\u0000j\u0345\u0001\u0000\u0000\u0000l\u0347"+ + "\u0001\u0000\u0000\u0000n\u034a\u0001\u0000\u0000\u0000p\u034c\u0001\u0000"+ + "\u0000\u0000r\u034e\u0001\u0000\u0000\u0000t\u0353\u0001\u0000\u0000\u0000"+ + "v\u0355\u0001\u0000\u0000\u0000x\u035b\u0001\u0000\u0000\u0000z\u0361"+ + "\u0001\u0000\u0000\u0000|\u0364\u0001\u0000\u0000\u0000~\u0367\u0001\u0000"+ + "\u0000\u0000\u0080\u036c\u0001\u0000\u0000\u0000\u0082\u0371\u0001\u0000"+ + "\u0000\u0000\u0084\u0373\u0001\u0000\u0000\u0000\u0086\u0377\u0001\u0000"+ + "\u0000\u0000\u0088\u037c\u0001\u0000\u0000\u0000\u008a\u0382\u0001\u0000"+ + "\u0000\u0000\u008c\u0385\u0001\u0000\u0000\u0000\u008e\u0387\u0001\u0000"+ + "\u0000\u0000\u0090\u038d\u0001\u0000\u0000\u0000\u0092\u038f\u0001\u0000"+ + "\u0000\u0000\u0094\u0394\u0001\u0000\u0000\u0000\u0096\u0397\u0001\u0000"+ + "\u0000\u0000\u0098\u039a\u0001\u0000\u0000\u0000\u009a\u039d\u0001\u0000"+ + "\u0000\u0000\u009c\u039f\u0001\u0000\u0000\u0000\u009e\u03a2\u0001\u0000"+ + "\u0000\u0000\u00a0\u03a4\u0001\u0000\u0000\u0000\u00a2\u03a7\u0001\u0000"+ + "\u0000\u0000\u00a4\u03a9\u0001\u0000\u0000\u0000\u00a6\u03ab\u0001\u0000"+ + "\u0000\u0000\u00a8\u03ad\u0001\u0000\u0000\u0000\u00aa\u03af\u0001\u0000"+ + "\u0000\u0000\u00ac\u03b1\u0001\u0000\u0000\u0000\u00ae\u03b4\u0001\u0000"+ + "\u0000\u0000\u00b0\u03b7\u0001\u0000\u0000\u0000\u00b2\u03cc\u0001\u0000"+ + "\u0000\u0000\u00b4\u03ce\u0001\u0000\u0000\u0000\u00b6\u03d3\u0001\u0000"+ + "\u0000\u0000\u00b8\u03e8\u0001\u0000\u0000\u0000\u00ba\u03ea\u0001\u0000"+ + "\u0000\u0000\u00bc\u03f2\u0001\u0000\u0000\u0000\u00be\u03f4\u0001\u0000"+ + "\u0000\u0000\u00c0\u03f8\u0001\u0000\u0000\u0000\u00c2\u03fc\u0001\u0000"+ + "\u0000\u0000\u00c4\u0400\u0001\u0000\u0000\u0000\u00c6\u0405\u0001\u0000"+ + "\u0000\u0000\u00c8\u040a\u0001\u0000\u0000\u0000\u00ca\u040e\u0001\u0000"+ + "\u0000\u0000\u00cc\u0412\u0001\u0000\u0000\u0000\u00ce\u0416\u0001\u0000"+ + "\u0000\u0000\u00d0\u041b\u0001\u0000\u0000\u0000\u00d2\u041f\u0001\u0000"+ + "\u0000\u0000\u00d4\u0423\u0001\u0000\u0000\u0000\u00d6\u0427\u0001\u0000"+ + "\u0000\u0000\u00d8\u042b\u0001\u0000\u0000\u0000\u00da\u042f\u0001\u0000"+ + "\u0000\u0000\u00dc\u043b\u0001\u0000\u0000\u0000\u00de\u043e\u0001\u0000"+ + "\u0000\u0000\u00e0\u0442\u0001\u0000\u0000\u0000\u00e2\u0446\u0001\u0000"+ + "\u0000\u0000\u00e4\u044a\u0001\u0000\u0000\u0000\u00e6\u044e\u0001\u0000"+ + 
"\u0000\u0000\u00e8\u0452\u0001\u0000\u0000\u0000\u00ea\u0456\u0001\u0000"+ + "\u0000\u0000\u00ec\u045b\u0001\u0000\u0000\u0000\u00ee\u045f\u0001\u0000"+ + "\u0000\u0000\u00f0\u0463\u0001\u0000\u0000\u0000\u00f2\u0468\u0001\u0000"+ + "\u0000\u0000\u00f4\u0471\u0001\u0000\u0000\u0000\u00f6\u0486\u0001\u0000"+ + "\u0000\u0000\u00f8\u048a\u0001\u0000\u0000\u0000\u00fa\u048e\u0001\u0000"+ + "\u0000\u0000\u00fc\u0492\u0001\u0000\u0000\u0000\u00fe\u0496\u0001\u0000"+ + "\u0000\u0000\u0100\u049a\u0001\u0000\u0000\u0000\u0102\u049f\u0001\u0000"+ + "\u0000\u0000\u0104\u04a3\u0001\u0000\u0000\u0000\u0106\u04a7\u0001\u0000"+ + "\u0000\u0000\u0108\u04ab\u0001\u0000\u0000\u0000\u010a\u04b0\u0001\u0000"+ + "\u0000\u0000\u010c\u04b5\u0001\u0000\u0000\u0000\u010e\u04b8\u0001\u0000"+ + "\u0000\u0000\u0110\u04bc\u0001\u0000\u0000\u0000\u0112\u04c0\u0001\u0000"+ + "\u0000\u0000\u0114\u04c4\u0001\u0000\u0000\u0000\u0116\u04c8\u0001\u0000"+ + "\u0000\u0000\u0118\u04cd\u0001\u0000\u0000\u0000\u011a\u04d2\u0001\u0000"+ + "\u0000\u0000\u011c\u04d7\u0001\u0000\u0000\u0000\u011e\u04de\u0001\u0000"+ + "\u0000\u0000\u0120\u04e7\u0001\u0000\u0000\u0000\u0122\u04ee\u0001\u0000"+ + "\u0000\u0000\u0124\u04f2\u0001\u0000\u0000\u0000\u0126\u04f6\u0001\u0000"+ + "\u0000\u0000\u0128\u04fa\u0001\u0000\u0000\u0000\u012a\u04fe\u0001\u0000"+ + "\u0000\u0000\u012c\u0504\u0001\u0000\u0000\u0000\u012e\u0508\u0001\u0000"+ + "\u0000\u0000\u0130\u050c\u0001\u0000\u0000\u0000\u0132\u0510\u0001\u0000"+ + "\u0000\u0000\u0134\u0514\u0001\u0000\u0000\u0000\u0136\u0518\u0001\u0000"+ + "\u0000\u0000\u0138\u051c\u0001\u0000\u0000\u0000\u013a\u0521\u0001\u0000"+ + "\u0000\u0000\u013c\u0526\u0001\u0000\u0000\u0000\u013e\u052a\u0001\u0000"+ + "\u0000\u0000\u0140\u052e\u0001\u0000\u0000\u0000\u0142\u0532\u0001\u0000"+ + "\u0000\u0000\u0144\u0537\u0001\u0000\u0000\u0000\u0146\u053b\u0001\u0000"+ + "\u0000\u0000\u0148\u0540\u0001\u0000\u0000\u0000\u014a\u0545\u0001\u0000"+ + "\u0000\u0000\u014c\u0549\u0001\u0000\u0000\u0000\u014e\u054d\u0001\u0000"+ + "\u0000\u0000\u0150\u0551\u0001\u0000\u0000\u0000\u0152\u0555\u0001\u0000"+ + "\u0000\u0000\u0154\u0559\u0001\u0000\u0000\u0000\u0156\u055e\u0001\u0000"+ + "\u0000\u0000\u0158\u0563\u0001\u0000\u0000\u0000\u015a\u0567\u0001\u0000"+ + "\u0000\u0000\u015c\u056b\u0001\u0000\u0000\u0000\u015e\u056f\u0001\u0000"+ + "\u0000\u0000\u0160\u0574\u0001\u0000\u0000\u0000\u0162\u057d\u0001\u0000"+ + "\u0000\u0000\u0164\u0581\u0001\u0000\u0000\u0000\u0166\u0585\u0001\u0000"+ + "\u0000\u0000\u0168\u0589\u0001\u0000\u0000\u0000\u016a\u058d\u0001\u0000"+ + "\u0000\u0000\u016c\u0592\u0001\u0000\u0000\u0000\u016e\u0596\u0001\u0000"+ + "\u0000\u0000\u0170\u059a\u0001\u0000\u0000\u0000\u0172\u059e\u0001\u0000"+ + "\u0000\u0000\u0174\u05a3\u0001\u0000\u0000\u0000\u0176\u05a7\u0001\u0000"+ + "\u0000\u0000\u0178\u05ab\u0001\u0000\u0000\u0000\u017a\u05af\u0001\u0000"+ + "\u0000\u0000\u017c\u05b3\u0001\u0000\u0000\u0000\u017e\u05b7\u0001\u0000"+ + "\u0000\u0000\u0180\u05bd\u0001\u0000\u0000\u0000\u0182\u05c1\u0001\u0000"+ + "\u0000\u0000\u0184\u05c5\u0001\u0000\u0000\u0000\u0186\u05c9\u0001\u0000"+ + "\u0000\u0000\u0188\u05cd\u0001\u0000\u0000\u0000\u018a\u05d1\u0001\u0000"+ + "\u0000\u0000\u018c\u05d5\u0001\u0000\u0000\u0000\u018e\u05da\u0001\u0000"+ + "\u0000\u0000\u0190\u05de\u0001\u0000\u0000\u0000\u0192\u05e2\u0001\u0000"+ + "\u0000\u0000\u0194\u05e8\u0001\u0000\u0000\u0000\u0196\u05f1\u0001\u0000"+ + "\u0000\u0000\u0198\u05f5\u0001\u0000\u0000\u0000\u019a\u05f9\u0001\u0000"+ + 
"\u0000\u0000\u019c\u05fd\u0001\u0000\u0000\u0000\u019e\u0601\u0001\u0000"+ + "\u0000\u0000\u01a0\u0605\u0001\u0000\u0000\u0000\u01a2\u0609\u0001\u0000"+ + "\u0000\u0000\u01a4\u060d\u0001\u0000\u0000\u0000\u01a6\u0611\u0001\u0000"+ + "\u0000\u0000\u01a8\u0616\u0001\u0000\u0000\u0000\u01aa\u061c\u0001\u0000"+ + "\u0000\u0000\u01ac\u0622\u0001\u0000\u0000\u0000\u01ae\u0626\u0001\u0000"+ + "\u0000\u0000\u01b0\u062a\u0001\u0000\u0000\u0000\u01b2\u062e\u0001\u0000"+ + "\u0000\u0000\u01b4\u0634\u0001\u0000\u0000\u0000\u01b6\u063a\u0001\u0000"+ + "\u0000\u0000\u01b8\u063e\u0001\u0000\u0000\u0000\u01ba\u0642\u0001\u0000"+ + "\u0000\u0000\u01bc\u0646\u0001\u0000\u0000\u0000\u01be\u064c\u0001\u0000"+ + "\u0000\u0000\u01c0\u0652\u0001\u0000\u0000\u0000\u01c2\u0658\u0001\u0000"+ + "\u0000\u0000\u01c4\u01c5\u0007\u0000\u0000\u0000\u01c5\u01c6\u0007\u0001"+ + "\u0000\u0000\u01c6\u01c7\u0007\u0002\u0000\u0000\u01c7\u01c8\u0007\u0002"+ + "\u0000\u0000\u01c8\u01c9\u0007\u0003\u0000\u0000\u01c9\u01ca\u0007\u0004"+ + "\u0000\u0000\u01ca\u01cb\u0007\u0005\u0000\u0000\u01cb\u01cc\u0001\u0000"+ + "\u0000\u0000\u01cc\u01cd\u0006\u0000\u0000\u0000\u01cd\u0011\u0001\u0000"+ + "\u0000\u0000\u01ce\u01cf\u0007\u0000\u0000\u0000\u01cf\u01d0\u0007\u0006"+ + "\u0000\u0000\u01d0\u01d1\u0007\u0007\u0000\u0000\u01d1\u01d2\u0007\b\u0000"+ + "\u0000\u01d2\u01d3\u0001\u0000\u0000\u0000\u01d3\u01d4\u0006\u0001\u0001"+ + "\u0000\u01d4\u0013\u0001\u0000\u0000\u0000\u01d5\u01d6\u0007\u0003\u0000"+ + "\u0000\u01d6\u01d7\u0007\t\u0000\u0000\u01d7\u01d8\u0007\u0006\u0000\u0000"+ + "\u01d8\u01d9\u0007\u0001\u0000\u0000\u01d9\u01da\u0007\u0004\u0000\u0000"+ + "\u01da\u01db\u0007\n\u0000\u0000\u01db\u01dc\u0001\u0000\u0000\u0000\u01dc"+ + "\u01dd\u0006\u0002\u0002\u0000\u01dd\u0015\u0001\u0000\u0000\u0000\u01de"+ + "\u01df\u0007\u0003\u0000\u0000\u01df\u01e0\u0007\u000b\u0000\u0000\u01e0"+ + "\u01e1\u0007\f\u0000\u0000\u01e1\u01e2\u0007\r\u0000\u0000\u01e2\u01e3"+ + "\u0001\u0000\u0000\u0000\u01e3\u01e4\u0006\u0003\u0000\u0000\u01e4\u0017"+ + "\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0003\u0000\u0000\u01e6\u01e7"+ + "\u0007\u000e\u0000\u0000\u01e7\u01e8\u0007\b\u0000\u0000\u01e8\u01e9\u0007"+ + "\r\u0000\u0000\u01e9\u01ea\u0007\f\u0000\u0000\u01ea\u01eb\u0007\u0001"+ + "\u0000\u0000\u01eb\u01ec\u0007\t\u0000\u0000\u01ec\u01ed\u0001\u0000\u0000"+ + "\u0000\u01ed\u01ee\u0006\u0004\u0003\u0000\u01ee\u0019\u0001\u0000\u0000"+ + "\u0000\u01ef\u01f0\u0007\u000f\u0000\u0000\u01f0\u01f1\u0007\u0006\u0000"+ + "\u0000\u01f1\u01f2\u0007\u0007\u0000\u0000\u01f2\u01f3\u0007\u0010\u0000"+ + "\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000\u01f4\u01f5\u0006\u0005\u0004"+ + "\u0000\u01f5\u001b\u0001\u0000\u0000\u0000\u01f6\u01f7\u0007\u0011\u0000"+ + "\u0000\u01f7\u01f8\u0007\u0006\u0000\u0000\u01f8\u01f9\u0007\u0007\u0000"+ + "\u0000\u01f9\u01fa\u0007\u0012\u0000\u0000\u01fa\u01fb\u0001\u0000\u0000"+ + "\u0000\u01fb\u01fc\u0006\u0006\u0000\u0000\u01fc\u001d\u0001\u0000\u0000"+ + "\u0000\u01fd\u01fe\u0007\u0012\u0000\u0000\u01fe\u01ff\u0007\u0003\u0000"+ + "\u0000\u01ff\u0200\u0007\u0003\u0000\u0000\u0200\u0201\u0007\b\u0000\u0000"+ + "\u0201\u0202\u0001\u0000\u0000\u0000\u0202\u0203\u0006\u0007\u0001\u0000"+ + "\u0203\u001f\u0001\u0000\u0000\u0000\u0204\u0205\u0007\r\u0000\u0000\u0205"+ + "\u0206\u0007\u0001\u0000\u0000\u0206\u0207\u0007\u0010\u0000\u0000\u0207"+ + "\u0208\u0007\u0001\u0000\u0000\u0208\u0209\u0007\u0005\u0000\u0000\u0209"+ + "\u020a\u0001\u0000\u0000\u0000\u020a\u020b\u0006\b\u0000\u0000\u020b!"+ + 
"\u0001\u0000\u0000\u0000\u020c\u020d\u0007\u0010\u0000\u0000\u020d\u020e"+ + "\u0007\u000b\u0000\u0000\u020e\u020f\u0005_\u0000\u0000\u020f\u0210\u0007"+ + "\u0003\u0000\u0000\u0210\u0211\u0007\u000e\u0000\u0000\u0211\u0212\u0007"+ + "\b\u0000\u0000\u0212\u0213\u0007\f\u0000\u0000\u0213\u0214\u0007\t\u0000"+ + "\u0000\u0214\u0215\u0007\u0000\u0000\u0000\u0215\u0216\u0001\u0000\u0000"+ + "\u0000\u0216\u0217\u0006\t\u0005\u0000\u0217#\u0001\u0000\u0000\u0000"+ + "\u0218\u0219\u0007\u0006\u0000\u0000\u0219\u021a\u0007\u0003\u0000\u0000"+ + "\u021a\u021b\u0007\t\u0000\u0000\u021b\u021c\u0007\f\u0000\u0000\u021c"+ + "\u021d\u0007\u0010\u0000\u0000\u021d\u021e\u0007\u0003\u0000\u0000\u021e"+ + "\u021f\u0001\u0000\u0000\u0000\u021f\u0220\u0006\n\u0006\u0000\u0220%"+ + "\u0001\u0000\u0000\u0000\u0221\u0222\u0007\u0006\u0000\u0000\u0222\u0223"+ + "\u0007\u0007\u0000\u0000\u0223\u0224\u0007\u0013\u0000\u0000\u0224\u0225"+ + "\u0001\u0000\u0000\u0000\u0225\u0226\u0006\u000b\u0000\u0000\u0226\'\u0001"+ + "\u0000\u0000\u0000\u0227\u0228\u0007\u0002\u0000\u0000\u0228\u0229\u0007"+ + "\n\u0000\u0000\u0229\u022a\u0007\u0007\u0000\u0000\u022a\u022b\u0007\u0013"+ + "\u0000\u0000\u022b\u022c\u0001\u0000\u0000\u0000\u022c\u022d\u0006\f\u0007"+ + "\u0000\u022d)\u0001\u0000\u0000\u0000\u022e\u022f\u0007\u0002\u0000\u0000"+ + "\u022f\u0230\u0007\u0007\u0000\u0000\u0230\u0231\u0007\u0006\u0000\u0000"+ + "\u0231\u0232\u0007\u0005\u0000\u0000\u0232\u0233\u0001\u0000\u0000\u0000"+ + "\u0233\u0234\u0006\r\u0000\u0000\u0234+\u0001\u0000\u0000\u0000\u0235"+ + "\u0236\u0007\u0002\u0000\u0000\u0236\u0237\u0007\u0005\u0000\u0000\u0237"+ + "\u0238\u0007\f\u0000\u0000\u0238\u0239\u0007\u0005\u0000\u0000\u0239\u023a"+ + "\u0007\u0002\u0000\u0000\u023a\u023b\u0001\u0000\u0000\u0000\u023b\u023c"+ + "\u0006\u000e\u0000\u0000\u023c-\u0001\u0000\u0000\u0000\u023d\u023e\u0007"+ + "\u0013\u0000\u0000\u023e\u023f\u0007\n\u0000\u0000\u023f\u0240\u0007\u0003"+ + "\u0000\u0000\u0240\u0241\u0007\u0006\u0000\u0000\u0241\u0242\u0007\u0003"+ + "\u0000\u0000\u0242\u0243\u0001\u0000\u0000\u0000\u0243\u0244\u0006\u000f"+ + "\u0000\u0000\u0244/\u0001\u0000\u0000\u0000\u0245\u0246\u0004\u0010\u0000"+ + "\u0000\u0246\u0247\u0007\u0001\u0000\u0000\u0247\u0248\u0007\t\u0000\u0000"+ + "\u0248\u0249\u0007\r\u0000\u0000\u0249\u024a\u0007\u0001\u0000\u0000\u024a"+ + "\u024b\u0007\t\u0000\u0000\u024b\u024c\u0007\u0003\u0000\u0000\u024c\u024d"+ + "\u0007\u0002\u0000\u0000\u024d\u024e\u0007\u0005\u0000\u0000\u024e\u024f"+ + "\u0007\f\u0000\u0000\u024f\u0250\u0007\u0005\u0000\u0000\u0250\u0251\u0007"+ + "\u0002\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0253\u0006"+ + "\u0010\u0000\u0000\u02531\u0001\u0000\u0000\u0000\u0254\u0255\u0004\u0011"+ + "\u0001\u0000\u0255\u0256\u0007\r\u0000\u0000\u0256\u0257\u0007\u0007\u0000"+ + "\u0000\u0257\u0258\u0007\u0007\u0000\u0000\u0258\u0259\u0007\u0012\u0000"+ + "\u0000\u0259\u025a\u0007\u0014\u0000\u0000\u025a\u025b\u0007\b\u0000\u0000"+ + "\u025b\u025c\u0005_\u0000\u0000\u025c\u025d\u0005\u8001\uf414\u0000\u0000"+ + "\u025d\u025e\u0001\u0000\u0000\u0000\u025e\u025f\u0006\u0011\b\u0000\u025f"+ + "3\u0001\u0000\u0000\u0000\u0260\u0261\u0004\u0012\u0002\u0000\u0261\u0262"+ + "\u0007\u0010\u0000\u0000\u0262\u0263\u0007\u0003\u0000\u0000\u0263\u0264"+ + "\u0007\u0005\u0000\u0000\u0264\u0265\u0007\u0006\u0000\u0000\u0265\u0266"+ + "\u0007\u0001\u0000\u0000\u0266\u0267\u0007\u0004\u0000\u0000\u0267\u0268"+ + "\u0007\u0002\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u026a"+ + 
"\u0006\u0012\t\u0000\u026a5\u0001\u0000\u0000\u0000\u026b\u026c\u0004"+ + "\u0013\u0003\u0000\u026c\u026d\u0007\u0015\u0000\u0000\u026d\u026e\u0007"+ + "\u0007\u0000\u0000\u026e\u026f\u0007\u0001\u0000\u0000\u026f\u0270\u0007"+ + "\t\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000\u0271\u0272\u0006\u0013"+ + "\n\u0000\u02727\u0001\u0000\u0000\u0000\u0273\u0274\u0004\u0014\u0004"+ + "\u0000\u0274\u0275\u0007\u000f\u0000\u0000\u0275\u0276\u0007\u0014\u0000"+ + "\u0000\u0276\u0277\u0007\r\u0000\u0000\u0277\u0278\u0007\r\u0000\u0000"+ + "\u0278\u0279\u0001\u0000\u0000\u0000\u0279\u027a\u0006\u0014\n\u0000\u027a"+ + "9\u0001\u0000\u0000\u0000\u027b\u027c\u0004\u0015\u0005\u0000\u027c\u027d"+ + "\u0007\r\u0000\u0000\u027d\u027e\u0007\u0003\u0000\u0000\u027e\u027f\u0007"+ + "\u000f\u0000\u0000\u027f\u0280\u0007\u0005\u0000\u0000\u0280\u0281\u0001"+ + "\u0000\u0000\u0000\u0281\u0282\u0006\u0015\n\u0000\u0282;\u0001\u0000"+ + "\u0000\u0000\u0283\u0284\u0004\u0016\u0006\u0000\u0284\u0285\u0007\u0006"+ + "\u0000\u0000\u0285\u0286\u0007\u0001\u0000\u0000\u0286\u0287\u0007\u0011"+ + "\u0000\u0000\u0287\u0288\u0007\n\u0000\u0000\u0288\u0289\u0007\u0005\u0000"+ + "\u0000\u0289\u028a\u0001\u0000\u0000\u0000\u028a\u028b\u0006\u0016\n\u0000"+ + "\u028b=\u0001\u0000\u0000\u0000\u028c\u028d\u0004\u0017\u0007\u0000\u028d"+ + "\u028e\u0007\r\u0000\u0000\u028e\u028f\u0007\u0007\u0000\u0000\u028f\u0290"+ + "\u0007\u0007\u0000\u0000\u0290\u0291\u0007\u0012\u0000\u0000\u0291\u0292"+ + "\u0007\u0014\u0000\u0000\u0292\u0293\u0007\b\u0000\u0000\u0293\u0294\u0001"+ + "\u0000\u0000\u0000\u0294\u0295\u0006\u0017\n\u0000\u0295?\u0001\u0000"+ + "\u0000\u0000\u0296\u0298\b\u0016\u0000\u0000\u0297\u0296\u0001\u0000\u0000"+ + "\u0000\u0298\u0299\u0001\u0000\u0000\u0000\u0299\u0297\u0001\u0000\u0000"+ + "\u0000\u0299\u029a\u0001\u0000\u0000\u0000\u029a\u029b\u0001\u0000\u0000"+ + "\u0000\u029b\u029c\u0006\u0018\u0000\u0000\u029cA\u0001\u0000\u0000\u0000"+ + "\u029d\u029e\u0005/\u0000\u0000\u029e\u029f\u0005/\u0000\u0000\u029f\u02a3"+ + "\u0001\u0000\u0000\u0000\u02a0\u02a2\b\u0017\u0000\u0000\u02a1\u02a0\u0001"+ + "\u0000\u0000\u0000\u02a2\u02a5\u0001\u0000\u0000\u0000\u02a3\u02a1\u0001"+ + "\u0000\u0000\u0000\u02a3\u02a4\u0001\u0000\u0000\u0000\u02a4\u02a7\u0001"+ + "\u0000\u0000\u0000\u02a5\u02a3\u0001\u0000\u0000\u0000\u02a6\u02a8\u0005"+ + "\r\u0000\u0000\u02a7\u02a6\u0001\u0000\u0000\u0000\u02a7\u02a8\u0001\u0000"+ + "\u0000\u0000\u02a8\u02aa\u0001\u0000\u0000\u0000\u02a9\u02ab\u0005\n\u0000"+ + "\u0000\u02aa\u02a9\u0001\u0000\u0000\u0000\u02aa\u02ab\u0001\u0000\u0000"+ + "\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ac\u02ad\u0006\u0019\u000b"+ + "\u0000\u02adC\u0001\u0000\u0000\u0000\u02ae\u02af\u0005/\u0000\u0000\u02af"+ + "\u02b0\u0005*\u0000\u0000\u02b0\u02b5\u0001\u0000\u0000\u0000\u02b1\u02b4"+ + "\u0003D\u001a\u0000\u02b2\u02b4\t\u0000\u0000\u0000\u02b3\u02b1\u0001"+ + "\u0000\u0000\u0000\u02b3\u02b2\u0001\u0000\u0000\u0000\u02b4\u02b7\u0001"+ + "\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000\u0000\u02b5\u02b3\u0001"+ + "\u0000\u0000\u0000\u02b6\u02b8\u0001\u0000\u0000\u0000\u02b7\u02b5\u0001"+ + "\u0000\u0000\u0000\u02b8\u02b9\u0005*\u0000\u0000\u02b9\u02ba\u0005/\u0000"+ + "\u0000\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb\u02bc\u0006\u001a\u000b"+ + "\u0000\u02bcE\u0001\u0000\u0000\u0000\u02bd\u02bf\u0007\u0018\u0000\u0000"+ + "\u02be\u02bd\u0001\u0000\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000\u0000"+ + "\u02c0\u02be\u0001\u0000\u0000\u0000\u02c0\u02c1\u0001\u0000\u0000\u0000"+ + 
"\u02c1\u02c2\u0001\u0000\u0000\u0000\u02c2\u02c3\u0006\u001b\u000b\u0000"+ + "\u02c3G\u0001\u0000\u0000\u0000\u02c4\u02c5\u0005|\u0000\u0000\u02c5\u02c6"+ + "\u0001\u0000\u0000\u0000\u02c6\u02c7\u0006\u001c\f\u0000\u02c7I\u0001"+ + "\u0000\u0000\u0000\u02c8\u02c9\u0007\u0019\u0000\u0000\u02c9K\u0001\u0000"+ + "\u0000\u0000\u02ca\u02cb\u0007\u001a\u0000\u0000\u02cbM\u0001\u0000\u0000"+ + "\u0000\u02cc\u02cd\u0005\\\u0000\u0000\u02cd\u02ce\u0007\u001b\u0000\u0000"+ + "\u02ceO\u0001\u0000\u0000\u0000\u02cf\u02d0\b\u001c\u0000\u0000\u02d0"+ + "Q\u0001\u0000\u0000\u0000\u02d1\u02d3\u0007\u0003\u0000\u0000\u02d2\u02d4"+ + "\u0007\u001d\u0000\u0000\u02d3\u02d2\u0001\u0000\u0000\u0000\u02d3\u02d4"+ + "\u0001\u0000\u0000\u0000\u02d4\u02d6\u0001\u0000\u0000\u0000\u02d5\u02d7"+ + "\u0003J\u001d\u0000\u02d6\u02d5\u0001\u0000\u0000\u0000\u02d7\u02d8\u0001"+ + "\u0000\u0000\u0000\u02d8\u02d6\u0001\u0000\u0000\u0000\u02d8\u02d9\u0001"+ + "\u0000\u0000\u0000\u02d9S\u0001\u0000\u0000\u0000\u02da\u02db\u0005@\u0000"+ + "\u0000\u02dbU\u0001\u0000\u0000\u0000\u02dc\u02dd\u0005`\u0000\u0000\u02dd"+ + "W\u0001\u0000\u0000\u0000\u02de\u02e2\b\u001e\u0000\u0000\u02df\u02e0"+ + "\u0005`\u0000\u0000\u02e0\u02e2\u0005`\u0000\u0000\u02e1\u02de\u0001\u0000"+ + "\u0000\u0000\u02e1\u02df\u0001\u0000\u0000\u0000\u02e2Y\u0001\u0000\u0000"+ + "\u0000\u02e3\u02e4\u0005_\u0000\u0000\u02e4[\u0001\u0000\u0000\u0000\u02e5"+ + "\u02e9\u0003L\u001e\u0000\u02e6\u02e9\u0003J\u001d\u0000\u02e7\u02e9\u0003"+ + "Z%\u0000\u02e8\u02e5\u0001\u0000\u0000\u0000\u02e8\u02e6\u0001\u0000\u0000"+ + "\u0000\u02e8\u02e7\u0001\u0000\u0000\u0000\u02e9]\u0001\u0000\u0000\u0000"+ + "\u02ea\u02ef\u0005\"\u0000\u0000\u02eb\u02ee\u0003N\u001f\u0000\u02ec"+ + "\u02ee\u0003P \u0000\u02ed\u02eb\u0001\u0000\u0000\u0000\u02ed\u02ec\u0001"+ + "\u0000\u0000\u0000\u02ee\u02f1\u0001\u0000\u0000\u0000\u02ef\u02ed\u0001"+ + "\u0000\u0000\u0000\u02ef\u02f0\u0001\u0000\u0000\u0000\u02f0\u02f2\u0001"+ + "\u0000\u0000\u0000\u02f1\u02ef\u0001\u0000\u0000\u0000\u02f2\u0308\u0005"+ + "\"\u0000\u0000\u02f3\u02f4\u0005\"\u0000\u0000\u02f4\u02f5\u0005\"\u0000"+ + "\u0000\u02f5\u02f6\u0005\"\u0000\u0000\u02f6\u02fa\u0001\u0000\u0000\u0000"+ + "\u02f7\u02f9\b\u0017\u0000\u0000\u02f8\u02f7\u0001\u0000\u0000\u0000\u02f9"+ + "\u02fc\u0001\u0000\u0000\u0000\u02fa\u02fb\u0001\u0000\u0000\u0000\u02fa"+ + "\u02f8\u0001\u0000\u0000\u0000\u02fb\u02fd\u0001\u0000\u0000\u0000\u02fc"+ + "\u02fa\u0001\u0000\u0000\u0000\u02fd\u02fe\u0005\"\u0000\u0000\u02fe\u02ff"+ + "\u0005\"\u0000\u0000\u02ff\u0300\u0005\"\u0000\u0000\u0300\u0302\u0001"+ + "\u0000\u0000\u0000\u0301\u0303\u0005\"\u0000\u0000\u0302\u0301\u0001\u0000"+ + "\u0000\u0000\u0302\u0303\u0001\u0000\u0000\u0000\u0303\u0305\u0001\u0000"+ + "\u0000\u0000\u0304\u0306\u0005\"\u0000\u0000\u0305\u0304\u0001\u0000\u0000"+ + "\u0000\u0305\u0306\u0001\u0000\u0000\u0000\u0306\u0308\u0001\u0000\u0000"+ + "\u0000\u0307\u02ea\u0001\u0000\u0000\u0000\u0307\u02f3\u0001\u0000\u0000"+ + "\u0000\u0308_\u0001\u0000\u0000\u0000\u0309\u030b\u0003J\u001d\u0000\u030a"+ + "\u0309\u0001\u0000\u0000\u0000\u030b\u030c\u0001\u0000\u0000\u0000\u030c"+ + "\u030a\u0001\u0000\u0000\u0000\u030c\u030d\u0001\u0000\u0000\u0000\u030d"+ + "a\u0001\u0000\u0000\u0000\u030e\u0310\u0003J\u001d\u0000\u030f\u030e\u0001"+ + "\u0000\u0000\u0000\u0310\u0311\u0001\u0000\u0000\u0000\u0311\u030f\u0001"+ + "\u0000\u0000\u0000\u0311\u0312\u0001\u0000\u0000\u0000\u0312\u0313\u0001"+ + "\u0000\u0000\u0000\u0313\u0317\u0003t2\u0000\u0314\u0316\u0003J\u001d"+ + 
"\u0000\u0315\u0314\u0001\u0000\u0000\u0000\u0316\u0319\u0001\u0000\u0000"+ + "\u0000\u0317\u0315\u0001\u0000\u0000\u0000\u0317\u0318\u0001\u0000\u0000"+ + "\u0000\u0318\u0339\u0001\u0000\u0000\u0000\u0319\u0317\u0001\u0000\u0000"+ + "\u0000\u031a\u031c\u0003t2\u0000\u031b\u031d\u0003J\u001d\u0000\u031c"+ + "\u031b\u0001\u0000\u0000\u0000\u031d\u031e\u0001\u0000\u0000\u0000\u031e"+ + "\u031c\u0001\u0000\u0000\u0000\u031e\u031f\u0001\u0000\u0000\u0000\u031f"+ + "\u0339\u0001\u0000\u0000\u0000\u0320\u0322\u0003J\u001d\u0000\u0321\u0320"+ + "\u0001\u0000\u0000\u0000\u0322\u0323\u0001\u0000\u0000\u0000\u0323\u0321"+ + "\u0001\u0000\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u032c"+ + "\u0001\u0000\u0000\u0000\u0325\u0329\u0003t2\u0000\u0326\u0328\u0003J"+ + "\u001d\u0000\u0327\u0326\u0001\u0000\u0000\u0000\u0328\u032b\u0001\u0000"+ + "\u0000\u0000\u0329\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001\u0000"+ + "\u0000\u0000\u032a\u032d\u0001\u0000\u0000\u0000\u032b\u0329\u0001\u0000"+ + "\u0000\u0000\u032c\u0325\u0001\u0000\u0000\u0000\u032c\u032d\u0001\u0000"+ + "\u0000\u0000\u032d\u032e\u0001\u0000\u0000\u0000\u032e\u032f\u0003R!\u0000"+ + "\u032f\u0339\u0001\u0000\u0000\u0000\u0330\u0332\u0003t2\u0000\u0331\u0333"+ + "\u0003J\u001d\u0000\u0332\u0331\u0001\u0000\u0000\u0000\u0333\u0334\u0001"+ + "\u0000\u0000\u0000\u0334\u0332\u0001\u0000\u0000\u0000\u0334\u0335\u0001"+ + "\u0000\u0000\u0000\u0335\u0336\u0001\u0000\u0000\u0000\u0336\u0337\u0003"+ + "R!\u0000\u0337\u0339\u0001\u0000\u0000\u0000\u0338\u030f\u0001\u0000\u0000"+ + "\u0000\u0338\u031a\u0001\u0000\u0000\u0000\u0338\u0321\u0001\u0000\u0000"+ + "\u0000\u0338\u0330\u0001\u0000\u0000\u0000\u0339c\u0001\u0000\u0000\u0000"+ + "\u033a\u033b\u0007\u001f\u0000\u0000\u033b\u033c\u0007 \u0000\u0000\u033c"+ + "e\u0001\u0000\u0000\u0000\u033d\u033e\u0007\f\u0000\u0000\u033e\u033f"+ + "\u0007\t\u0000\u0000\u033f\u0340\u0007\u0000\u0000\u0000\u0340g\u0001"+ + "\u0000\u0000\u0000\u0341\u0342\u0007\f\u0000\u0000\u0342\u0343\u0007\u0002"+ + "\u0000\u0000\u0343\u0344\u0007\u0004\u0000\u0000\u0344i\u0001\u0000\u0000"+ + "\u0000\u0345\u0346\u0005=\u0000\u0000\u0346k\u0001\u0000\u0000\u0000\u0347"+ + "\u0348\u0005:\u0000\u0000\u0348\u0349\u0005:\u0000\u0000\u0349m\u0001"+ + "\u0000\u0000\u0000\u034a\u034b\u0005:\u0000\u0000\u034bo\u0001\u0000\u0000"+ + "\u0000\u034c\u034d\u0005,\u0000\u0000\u034dq\u0001\u0000\u0000\u0000\u034e"+ + "\u034f\u0007\u0000\u0000\u0000\u034f\u0350\u0007\u0003\u0000\u0000\u0350"+ + "\u0351\u0007\u0002\u0000\u0000\u0351\u0352\u0007\u0004\u0000\u0000\u0352"+ + "s\u0001\u0000\u0000\u0000\u0353\u0354\u0005.\u0000\u0000\u0354u\u0001"+ + "\u0000\u0000\u0000\u0355\u0356\u0007\u000f\u0000\u0000\u0356\u0357\u0007"+ + "\f\u0000\u0000\u0357\u0358\u0007\r\u0000\u0000\u0358\u0359\u0007\u0002"+ + "\u0000\u0000\u0359\u035a\u0007\u0003\u0000\u0000\u035aw\u0001\u0000\u0000"+ + "\u0000\u035b\u035c\u0007\u000f\u0000\u0000\u035c\u035d\u0007\u0001\u0000"+ + "\u0000\u035d\u035e\u0007\u0006\u0000\u0000\u035e\u035f\u0007\u0002\u0000"+ + "\u0000\u035f\u0360\u0007\u0005\u0000\u0000\u0360y\u0001\u0000\u0000\u0000"+ + "\u0361\u0362\u0007\u0001\u0000\u0000\u0362\u0363\u0007\t\u0000\u0000\u0363"+ + "{\u0001\u0000\u0000\u0000\u0364\u0365\u0007\u0001\u0000\u0000\u0365\u0366"+ + "\u0007\u0002\u0000\u0000\u0366}\u0001\u0000\u0000\u0000\u0367\u0368\u0007"+ + "\r\u0000\u0000\u0368\u0369\u0007\f\u0000\u0000\u0369\u036a\u0007\u0002"+ + "\u0000\u0000\u036a\u036b\u0007\u0005\u0000\u0000\u036b\u007f\u0001\u0000"+ + 
"\u0000\u0000\u036c\u036d\u0007\r\u0000\u0000\u036d\u036e\u0007\u0001\u0000"+ + "\u0000\u036e\u036f\u0007\u0012\u0000\u0000\u036f\u0370\u0007\u0003\u0000"+ + "\u0000\u0370\u0081\u0001\u0000\u0000\u0000\u0371\u0372\u0005(\u0000\u0000"+ + "\u0372\u0083\u0001\u0000\u0000\u0000\u0373\u0374\u0007\t\u0000\u0000\u0374"+ + "\u0375\u0007\u0007\u0000\u0000\u0375\u0376\u0007\u0005\u0000\u0000\u0376"+ + "\u0085\u0001\u0000\u0000\u0000\u0377\u0378\u0007\t\u0000\u0000\u0378\u0379"+ + "\u0007\u0014\u0000\u0000\u0379\u037a\u0007\r\u0000\u0000\u037a\u037b\u0007"+ + "\r\u0000\u0000\u037b\u0087\u0001\u0000\u0000\u0000\u037c\u037d\u0007\t"+ + "\u0000\u0000\u037d\u037e\u0007\u0014\u0000\u0000\u037e\u037f\u0007\r\u0000"+ + "\u0000\u037f\u0380\u0007\r\u0000\u0000\u0380\u0381\u0007\u0002\u0000\u0000"+ + "\u0381\u0089\u0001\u0000\u0000\u0000\u0382\u0383\u0007\u0007\u0000\u0000"+ + "\u0383\u0384\u0007\u0006\u0000\u0000\u0384\u008b\u0001\u0000\u0000\u0000"+ + "\u0385\u0386\u0005?\u0000\u0000\u0386\u008d\u0001\u0000\u0000\u0000\u0387"+ + "\u0388\u0007\u0006\u0000\u0000\u0388\u0389\u0007\r\u0000\u0000\u0389\u038a"+ + "\u0007\u0001\u0000\u0000\u038a\u038b\u0007\u0012\u0000\u0000\u038b\u038c"+ + "\u0007\u0003\u0000\u0000\u038c\u008f\u0001\u0000\u0000\u0000\u038d\u038e"+ + "\u0005)\u0000\u0000\u038e\u0091\u0001\u0000\u0000\u0000\u038f\u0390\u0007"+ + "\u0005\u0000\u0000\u0390\u0391\u0007\u0006\u0000\u0000\u0391\u0392\u0007"+ + "\u0014\u0000\u0000\u0392\u0393\u0007\u0003\u0000\u0000\u0393\u0093\u0001"+ + "\u0000\u0000\u0000\u0394\u0395\u0005=\u0000\u0000\u0395\u0396\u0005=\u0000"+ + "\u0000\u0396\u0095\u0001\u0000\u0000\u0000\u0397\u0398\u0005=\u0000\u0000"+ + "\u0398\u0399\u0005~\u0000\u0000\u0399\u0097\u0001\u0000\u0000\u0000\u039a"+ + "\u039b\u0005!\u0000\u0000\u039b\u039c\u0005=\u0000\u0000\u039c\u0099\u0001"+ + "\u0000\u0000\u0000\u039d\u039e\u0005<\u0000\u0000\u039e\u009b\u0001\u0000"+ + "\u0000\u0000\u039f\u03a0\u0005<\u0000\u0000\u03a0\u03a1\u0005=\u0000\u0000"+ + "\u03a1\u009d\u0001\u0000\u0000\u0000\u03a2\u03a3\u0005>\u0000\u0000\u03a3"+ + "\u009f\u0001\u0000\u0000\u0000\u03a4\u03a5\u0005>\u0000\u0000\u03a5\u03a6"+ + "\u0005=\u0000\u0000\u03a6\u00a1\u0001\u0000\u0000\u0000\u03a7\u03a8\u0005"+ + "+\u0000\u0000\u03a8\u00a3\u0001\u0000\u0000\u0000\u03a9\u03aa\u0005-\u0000"+ + "\u0000\u03aa\u00a5\u0001\u0000\u0000\u0000\u03ab\u03ac\u0005*\u0000\u0000"+ + "\u03ac\u00a7\u0001\u0000\u0000\u0000\u03ad\u03ae\u0005/\u0000\u0000\u03ae"+ + "\u00a9\u0001\u0000\u0000\u0000\u03af\u03b0\u0005%\u0000\u0000\u03b0\u00ab"+ + "\u0001\u0000\u0000\u0000\u03b1\u03b2\u0004N\b\u0000\u03b2\u03b3\u0005"+ + "{\u0000\u0000\u03b3\u00ad\u0001\u0000\u0000\u0000\u03b4\u03b5\u0004O\t"+ + "\u0000\u03b5\u03b6\u0005}\u0000\u0000\u03b6\u00af\u0001\u0000\u0000\u0000"+ + "\u03b7\u03b8\u0003.\u000f\u0000\u03b8\u03b9\u0001\u0000\u0000\u0000\u03b9"+ + "\u03ba\u0006P\r\u0000\u03ba\u00b1\u0001\u0000\u0000\u0000\u03bb\u03be"+ + "\u0003\u008c>\u0000\u03bc\u03bf\u0003L\u001e\u0000\u03bd\u03bf\u0003Z"+ + "%\u0000\u03be\u03bc\u0001\u0000\u0000\u0000\u03be\u03bd\u0001\u0000\u0000"+ + "\u0000\u03bf\u03c3\u0001\u0000\u0000\u0000\u03c0\u03c2\u0003\\&\u0000"+ + "\u03c1\u03c0\u0001\u0000\u0000\u0000\u03c2\u03c5\u0001\u0000\u0000\u0000"+ + "\u03c3\u03c1\u0001\u0000\u0000\u0000\u03c3\u03c4\u0001\u0000\u0000\u0000"+ + "\u03c4\u03cd\u0001\u0000\u0000\u0000\u03c5\u03c3\u0001\u0000\u0000\u0000"+ + "\u03c6\u03c8\u0003\u008c>\u0000\u03c7\u03c9\u0003J\u001d\u0000\u03c8\u03c7"+ + "\u0001\u0000\u0000\u0000\u03c9\u03ca\u0001\u0000\u0000\u0000\u03ca\u03c8"+ + 
"\u0001\u0000\u0000\u0000\u03ca\u03cb\u0001\u0000\u0000\u0000\u03cb\u03cd"+ + "\u0001\u0000\u0000\u0000\u03cc\u03bb\u0001\u0000\u0000\u0000\u03cc\u03c6"+ + "\u0001\u0000\u0000\u0000\u03cd\u00b3\u0001\u0000\u0000\u0000\u03ce\u03cf"+ + "\u0005[\u0000\u0000\u03cf\u03d0\u0001\u0000\u0000\u0000\u03d0\u03d1\u0006"+ + "R\u0000\u0000\u03d1\u03d2\u0006R\u0000\u0000\u03d2\u00b5\u0001\u0000\u0000"+ + "\u0000\u03d3\u03d4\u0005]\u0000\u0000\u03d4\u03d5\u0001\u0000\u0000\u0000"+ + "\u03d5\u03d6\u0006S\f\u0000\u03d6\u03d7\u0006S\f\u0000\u03d7\u00b7\u0001"+ + "\u0000\u0000\u0000\u03d8\u03dc\u0003L\u001e\u0000\u03d9\u03db\u0003\\"+ + "&\u0000\u03da\u03d9\u0001\u0000\u0000\u0000\u03db\u03de\u0001\u0000\u0000"+ + "\u0000\u03dc\u03da\u0001\u0000\u0000\u0000\u03dc\u03dd\u0001\u0000\u0000"+ + "\u0000\u03dd\u03e9\u0001\u0000\u0000\u0000\u03de\u03dc\u0001\u0000\u0000"+ + "\u0000\u03df\u03e2\u0003Z%\u0000\u03e0\u03e2\u0003T\"\u0000\u03e1\u03df"+ + "\u0001\u0000\u0000\u0000\u03e1\u03e0\u0001\u0000\u0000\u0000\u03e2\u03e4"+ + "\u0001\u0000\u0000\u0000\u03e3\u03e5\u0003\\&\u0000\u03e4\u03e3\u0001"+ + "\u0000\u0000\u0000\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6\u03e4\u0001"+ + "\u0000\u0000\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7\u03e9\u0001"+ + "\u0000\u0000\u0000\u03e8\u03d8\u0001\u0000\u0000\u0000\u03e8\u03e1\u0001"+ + "\u0000\u0000\u0000\u03e9\u00b9\u0001\u0000\u0000\u0000\u03ea\u03ec\u0003"+ + "V#\u0000\u03eb\u03ed\u0003X$\u0000\u03ec\u03eb\u0001\u0000\u0000\u0000"+ + "\u03ed\u03ee\u0001\u0000\u0000\u0000\u03ee\u03ec\u0001\u0000\u0000\u0000"+ + "\u03ee\u03ef\u0001\u0000\u0000\u0000\u03ef\u03f0\u0001\u0000\u0000\u0000"+ + "\u03f0\u03f1\u0003V#\u0000\u03f1\u00bb\u0001\u0000\u0000\u0000\u03f2\u03f3"+ + "\u0003\u00baU\u0000\u03f3\u00bd\u0001\u0000\u0000\u0000\u03f4\u03f5\u0003"+ + "B\u0019\u0000\u03f5\u03f6\u0001\u0000\u0000\u0000\u03f6\u03f7\u0006W\u000b"+ + "\u0000\u03f7\u00bf\u0001\u0000\u0000\u0000\u03f8\u03f9\u0003D\u001a\u0000"+ + "\u03f9\u03fa\u0001\u0000\u0000\u0000\u03fa\u03fb\u0006X\u000b\u0000\u03fb"+ + "\u00c1\u0001\u0000\u0000\u0000\u03fc\u03fd\u0003F\u001b\u0000\u03fd\u03fe"+ + "\u0001\u0000\u0000\u0000\u03fe\u03ff\u0006Y\u000b\u0000\u03ff\u00c3\u0001"+ + "\u0000\u0000\u0000\u0400\u0401\u0003\u00b4R\u0000\u0401\u0402\u0001\u0000"+ + "\u0000\u0000\u0402\u0403\u0006Z\u000e\u0000\u0403\u0404\u0006Z\u000f\u0000"+ + "\u0404\u00c5\u0001\u0000\u0000\u0000\u0405\u0406\u0003H\u001c\u0000\u0406"+ + "\u0407\u0001\u0000\u0000\u0000\u0407\u0408\u0006[\u0010\u0000\u0408\u0409"+ + "\u0006[\f\u0000\u0409\u00c7\u0001\u0000\u0000\u0000\u040a\u040b\u0003"+ + "F\u001b\u0000\u040b\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006\\"+ + "\u000b\u0000\u040d\u00c9\u0001\u0000\u0000\u0000\u040e\u040f\u0003B\u0019"+ + "\u0000\u040f\u0410\u0001\u0000\u0000\u0000\u0410\u0411\u0006]\u000b\u0000"+ + "\u0411\u00cb\u0001\u0000\u0000\u0000\u0412\u0413\u0003D\u001a\u0000\u0413"+ + "\u0414\u0001\u0000\u0000\u0000\u0414\u0415\u0006^\u000b\u0000\u0415\u00cd"+ + "\u0001\u0000\u0000\u0000\u0416\u0417\u0003H\u001c\u0000\u0417\u0418\u0001"+ + "\u0000\u0000\u0000\u0418\u0419\u0006_\u0010\u0000\u0419\u041a\u0006_\f"+ + "\u0000\u041a\u00cf\u0001\u0000\u0000\u0000\u041b\u041c\u0003\u00b4R\u0000"+ + "\u041c\u041d\u0001\u0000\u0000\u0000\u041d\u041e\u0006`\u000e\u0000\u041e"+ + "\u00d1\u0001\u0000\u0000\u0000\u041f\u0420\u0003\u00b6S\u0000\u0420\u0421"+ + "\u0001\u0000\u0000\u0000\u0421\u0422\u0006a\u0011\u0000\u0422\u00d3\u0001"+ + "\u0000\u0000\u0000\u0423\u0424\u0003n/\u0000\u0424\u0425\u0001\u0000\u0000"+ + 
"\u0000\u0425\u0426\u0006b\u0012\u0000\u0426\u00d5\u0001\u0000\u0000\u0000"+ + "\u0427\u0428\u0003p0\u0000\u0428\u0429\u0001\u0000\u0000\u0000\u0429\u042a"+ + "\u0006c\u0013\u0000\u042a\u00d7\u0001\u0000\u0000\u0000\u042b\u042c\u0003"+ + "j-\u0000\u042c\u042d\u0001\u0000\u0000\u0000\u042d\u042e\u0006d\u0014"+ + "\u0000\u042e\u00d9\u0001\u0000\u0000\u0000\u042f\u0430\u0007\u0010\u0000"+ + "\u0000\u0430\u0431\u0007\u0003\u0000\u0000\u0431\u0432\u0007\u0005\u0000"+ + "\u0000\u0432\u0433\u0007\f\u0000\u0000\u0433\u0434\u0007\u0000\u0000\u0000"+ + "\u0434\u0435\u0007\f\u0000\u0000\u0435\u0436\u0007\u0005\u0000\u0000\u0436"+ + "\u0437\u0007\f\u0000\u0000\u0437\u00db\u0001\u0000\u0000\u0000\u0438\u043c"+ + "\b!\u0000\u0000\u0439\u043a\u0005/\u0000\u0000\u043a\u043c\b\"\u0000\u0000"+ + "\u043b\u0438\u0001\u0000\u0000\u0000\u043b\u0439\u0001\u0000\u0000\u0000"+ + "\u043c\u00dd\u0001\u0000\u0000\u0000\u043d\u043f\u0003\u00dcf\u0000\u043e"+ + "\u043d\u0001\u0000\u0000\u0000\u043f\u0440\u0001\u0000\u0000\u0000\u0440"+ + "\u043e\u0001\u0000\u0000\u0000\u0440\u0441\u0001\u0000\u0000\u0000\u0441"+ + "\u00df\u0001\u0000\u0000\u0000\u0442\u0443\u0003\u00deg\u0000\u0443\u0444"+ + "\u0001\u0000\u0000\u0000\u0444\u0445\u0006h\u0015\u0000\u0445\u00e1\u0001"+ + "\u0000\u0000\u0000\u0446\u0447\u0003^\'\u0000\u0447\u0448\u0001\u0000"+ + "\u0000\u0000\u0448\u0449\u0006i\u0016\u0000\u0449\u00e3\u0001\u0000\u0000"+ + "\u0000\u044a\u044b\u0003B\u0019\u0000\u044b\u044c\u0001\u0000\u0000\u0000"+ + "\u044c\u044d\u0006j\u000b\u0000\u044d\u00e5\u0001\u0000\u0000\u0000\u044e"+ + "\u044f\u0003D\u001a\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450\u0451"+ + "\u0006k\u000b\u0000\u0451\u00e7\u0001\u0000\u0000\u0000\u0452\u0453\u0003"+ + "F\u001b\u0000\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455\u0006l\u000b"+ + "\u0000\u0455\u00e9\u0001\u0000\u0000\u0000\u0456\u0457\u0003H\u001c\u0000"+ + "\u0457\u0458\u0001\u0000\u0000\u0000\u0458\u0459\u0006m\u0010\u0000\u0459"+ + "\u045a\u0006m\f\u0000\u045a\u00eb\u0001\u0000\u0000\u0000\u045b\u045c"+ + "\u0003t2\u0000\u045c\u045d\u0001\u0000\u0000\u0000\u045d\u045e\u0006n"+ + "\u0017\u0000\u045e\u00ed\u0001\u0000\u0000\u0000\u045f\u0460\u0003p0\u0000"+ + "\u0460\u0461\u0001\u0000\u0000\u0000\u0461\u0462\u0006o\u0013\u0000\u0462"+ + "\u00ef\u0001\u0000\u0000\u0000\u0463\u0464\u0004p\n\u0000\u0464\u0465"+ + "\u0003\u008c>\u0000\u0465\u0466\u0001\u0000\u0000\u0000\u0466\u0467\u0006"+ + "p\u0018\u0000\u0467\u00f1\u0001\u0000\u0000\u0000\u0468\u0469\u0004q\u000b"+ + "\u0000\u0469\u046a\u0003\u00b2Q\u0000\u046a\u046b\u0001\u0000\u0000\u0000"+ + "\u046b\u046c\u0006q\u0019\u0000\u046c\u00f3\u0001\u0000\u0000\u0000\u046d"+ + "\u0472\u0003L\u001e\u0000\u046e\u0472\u0003J\u001d\u0000\u046f\u0472\u0003"+ + "Z%\u0000\u0470\u0472\u0003\u00a6K\u0000\u0471\u046d\u0001\u0000\u0000"+ + "\u0000\u0471\u046e\u0001\u0000\u0000\u0000\u0471\u046f\u0001\u0000\u0000"+ + "\u0000\u0471\u0470\u0001\u0000\u0000\u0000\u0472\u00f5\u0001\u0000\u0000"+ + "\u0000\u0473\u0476\u0003L\u001e\u0000\u0474\u0476\u0003\u00a6K\u0000\u0475"+ + "\u0473\u0001\u0000\u0000\u0000\u0475\u0474\u0001\u0000\u0000\u0000\u0476"+ + "\u047a\u0001\u0000\u0000\u0000\u0477\u0479\u0003\u00f4r\u0000\u0478\u0477"+ + "\u0001\u0000\u0000\u0000\u0479\u047c\u0001\u0000\u0000\u0000\u047a\u0478"+ + "\u0001\u0000\u0000\u0000\u047a\u047b\u0001\u0000\u0000\u0000\u047b\u0487"+ + "\u0001\u0000\u0000\u0000\u047c\u047a\u0001\u0000\u0000\u0000\u047d\u0480"+ + "\u0003Z%\u0000\u047e\u0480\u0003T\"\u0000\u047f\u047d\u0001\u0000\u0000"+ + 
"\u0000\u047f\u047e\u0001\u0000\u0000\u0000\u0480\u0482\u0001\u0000\u0000"+ + "\u0000\u0481\u0483\u0003\u00f4r\u0000\u0482\u0481\u0001\u0000\u0000\u0000"+ + "\u0483\u0484\u0001\u0000\u0000\u0000\u0484\u0482\u0001\u0000\u0000\u0000"+ + "\u0484\u0485\u0001\u0000\u0000\u0000\u0485\u0487\u0001\u0000\u0000\u0000"+ + "\u0486\u0475\u0001\u0000\u0000\u0000\u0486\u047f\u0001\u0000\u0000\u0000"+ + "\u0487\u00f7\u0001\u0000\u0000\u0000\u0488\u048b\u0003\u00f6s\u0000\u0489"+ + "\u048b\u0003\u00baU\u0000\u048a\u0488\u0001\u0000\u0000\u0000\u048a\u0489"+ + "\u0001\u0000\u0000\u0000\u048b\u048c\u0001\u0000\u0000\u0000\u048c\u048a"+ + "\u0001\u0000\u0000\u0000\u048c\u048d\u0001\u0000\u0000\u0000\u048d\u00f9"+ + "\u0001\u0000\u0000\u0000\u048e\u048f\u0003B\u0019\u0000\u048f\u0490\u0001"+ + "\u0000\u0000\u0000\u0490\u0491\u0006u\u000b\u0000\u0491\u00fb\u0001\u0000"+ + "\u0000\u0000\u0492\u0493\u0003D\u001a\u0000\u0493\u0494\u0001\u0000\u0000"+ + "\u0000\u0494\u0495\u0006v\u000b\u0000\u0495\u00fd\u0001\u0000\u0000\u0000"+ + "\u0496\u0497\u0003F\u001b\u0000\u0497\u0498\u0001\u0000\u0000\u0000\u0498"+ + "\u0499\u0006w\u000b\u0000\u0499\u00ff\u0001\u0000\u0000\u0000\u049a\u049b"+ + "\u0003H\u001c\u0000\u049b\u049c\u0001\u0000\u0000\u0000\u049c\u049d\u0006"+ + "x\u0010\u0000\u049d\u049e\u0006x\f\u0000\u049e\u0101\u0001\u0000\u0000"+ + "\u0000\u049f\u04a0\u0003j-\u0000\u04a0\u04a1\u0001\u0000\u0000\u0000\u04a1"+ + "\u04a2\u0006y\u0014\u0000\u04a2\u0103\u0001\u0000\u0000\u0000\u04a3\u04a4"+ + "\u0003p0\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000\u04a5\u04a6\u0006z"+ + "\u0013\u0000\u04a6\u0105\u0001\u0000\u0000\u0000\u04a7\u04a8\u0003t2\u0000"+ + "\u04a8\u04a9\u0001\u0000\u0000\u0000\u04a9\u04aa\u0006{\u0017\u0000\u04aa"+ + "\u0107\u0001\u0000\u0000\u0000\u04ab\u04ac\u0004|\f\u0000\u04ac\u04ad"+ + "\u0003\u008c>\u0000\u04ad\u04ae\u0001\u0000\u0000\u0000\u04ae\u04af\u0006"+ + "|\u0018\u0000\u04af\u0109\u0001\u0000\u0000\u0000\u04b0\u04b1\u0004}\r"+ + "\u0000\u04b1\u04b2\u0003\u00b2Q\u0000\u04b2\u04b3\u0001\u0000\u0000\u0000"+ + "\u04b3\u04b4\u0006}\u0019\u0000\u04b4\u010b\u0001\u0000\u0000\u0000\u04b5"+ + "\u04b6\u0007\f\u0000\u0000\u04b6\u04b7\u0007\u0002\u0000\u0000\u04b7\u010d"+ + "\u0001\u0000\u0000\u0000\u04b8\u04b9\u0003\u00f8t\u0000\u04b9\u04ba\u0001"+ + "\u0000\u0000\u0000\u04ba\u04bb\u0006\u007f\u001a\u0000\u04bb\u010f\u0001"+ + "\u0000\u0000\u0000\u04bc\u04bd\u0003B\u0019\u0000\u04bd\u04be\u0001\u0000"+ + "\u0000\u0000\u04be\u04bf\u0006\u0080\u000b\u0000\u04bf\u0111\u0001\u0000"+ + "\u0000\u0000\u04c0\u04c1\u0003D\u001a\u0000\u04c1\u04c2\u0001\u0000\u0000"+ + "\u0000\u04c2\u04c3\u0006\u0081\u000b\u0000\u04c3\u0113\u0001\u0000\u0000"+ + "\u0000\u04c4\u04c5\u0003F\u001b\u0000\u04c5\u04c6\u0001\u0000\u0000\u0000"+ + "\u04c6\u04c7\u0006\u0082\u000b\u0000\u04c7\u0115\u0001\u0000\u0000\u0000"+ + "\u04c8\u04c9\u0003H\u001c\u0000\u04c9\u04ca\u0001\u0000\u0000\u0000\u04ca"+ + "\u04cb\u0006\u0083\u0010\u0000\u04cb\u04cc\u0006\u0083\f\u0000\u04cc\u0117"+ + "\u0001\u0000\u0000\u0000\u04cd\u04ce\u0003\u00b4R\u0000\u04ce\u04cf\u0001"+ + "\u0000\u0000\u0000\u04cf\u04d0\u0006\u0084\u000e\u0000\u04d0\u04d1\u0006"+ + "\u0084\u001b\u0000\u04d1\u0119\u0001\u0000\u0000\u0000\u04d2\u04d3\u0007"+ + "\u0007\u0000\u0000\u04d3\u04d4\u0007\t\u0000\u0000\u04d4\u04d5\u0001\u0000"+ + "\u0000\u0000\u04d5\u04d6\u0006\u0085\u001c\u0000\u04d6\u011b\u0001\u0000"+ + "\u0000\u0000\u04d7\u04d8\u0007\u0013\u0000\u0000\u04d8\u04d9\u0007\u0001"+ + "\u0000\u0000\u04d9\u04da\u0007\u0005\u0000\u0000\u04da\u04db\u0007\n\u0000"+ + 
"\u0000\u04db\u04dc\u0001\u0000\u0000\u0000\u04dc\u04dd\u0006\u0086\u001c"+ + "\u0000\u04dd\u011d\u0001\u0000\u0000\u0000\u04de\u04df\b#\u0000\u0000"+ + "\u04df\u011f\u0001\u0000\u0000\u0000\u04e0\u04e2\u0003\u011e\u0087\u0000"+ + "\u04e1\u04e0\u0001\u0000\u0000\u0000\u04e2\u04e3\u0001\u0000\u0000\u0000"+ + "\u04e3\u04e1\u0001\u0000\u0000\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000"+ + "\u04e4\u04e5\u0001\u0000\u0000\u0000\u04e5\u04e6\u0003n/\u0000\u04e6\u04e8"+ + "\u0001\u0000\u0000\u0000\u04e7\u04e1\u0001\u0000\u0000\u0000\u04e7\u04e8"+ + "\u0001\u0000\u0000\u0000\u04e8\u04ea\u0001\u0000\u0000\u0000\u04e9\u04eb"+ + "\u0003\u011e\u0087\u0000\u04ea\u04e9\u0001\u0000\u0000\u0000\u04eb\u04ec"+ + "\u0001\u0000\u0000\u0000\u04ec\u04ea\u0001\u0000\u0000\u0000\u04ec\u04ed"+ + "\u0001\u0000\u0000\u0000\u04ed\u0121\u0001\u0000\u0000\u0000\u04ee\u04ef"+ + "\u0003\u0120\u0088\u0000\u04ef\u04f0\u0001\u0000\u0000\u0000\u04f0\u04f1"+ + "\u0006\u0089\u001d\u0000\u04f1\u0123\u0001\u0000\u0000\u0000\u04f2\u04f3"+ + "\u0003B\u0019\u0000\u04f3\u04f4\u0001\u0000\u0000\u0000\u04f4\u04f5\u0006"+ + "\u008a\u000b\u0000\u04f5\u0125\u0001\u0000\u0000\u0000\u04f6\u04f7\u0003"+ + "D\u001a\u0000\u04f7\u04f8\u0001\u0000\u0000\u0000\u04f8\u04f9\u0006\u008b"+ + "\u000b\u0000\u04f9\u0127\u0001\u0000\u0000\u0000\u04fa\u04fb\u0003F\u001b"+ + "\u0000\u04fb\u04fc\u0001\u0000\u0000\u0000\u04fc\u04fd\u0006\u008c\u000b"+ + "\u0000\u04fd\u0129\u0001\u0000\u0000\u0000\u04fe\u04ff\u0003H\u001c\u0000"+ + "\u04ff\u0500\u0001\u0000\u0000\u0000\u0500\u0501\u0006\u008d\u0010\u0000"+ + "\u0501\u0502\u0006\u008d\f\u0000\u0502\u0503\u0006\u008d\f\u0000\u0503"+ + "\u012b\u0001\u0000\u0000\u0000\u0504\u0505\u0003j-\u0000\u0505\u0506\u0001"+ + "\u0000\u0000\u0000\u0506\u0507\u0006\u008e\u0014\u0000\u0507\u012d\u0001"+ + "\u0000\u0000\u0000\u0508\u0509\u0003p0\u0000\u0509\u050a\u0001\u0000\u0000"+ + "\u0000\u050a\u050b\u0006\u008f\u0013\u0000\u050b\u012f\u0001\u0000\u0000"+ + "\u0000\u050c\u050d\u0003t2\u0000\u050d\u050e\u0001\u0000\u0000\u0000\u050e"+ + "\u050f\u0006\u0090\u0017\u0000\u050f\u0131\u0001\u0000\u0000\u0000\u0510"+ + "\u0511\u0003\u011c\u0086\u0000\u0511\u0512\u0001\u0000\u0000\u0000\u0512"+ + "\u0513\u0006\u0091\u001e\u0000\u0513\u0133\u0001\u0000\u0000\u0000\u0514"+ + "\u0515\u0003\u00f8t\u0000\u0515\u0516\u0001\u0000\u0000\u0000\u0516\u0517"+ + "\u0006\u0092\u001a\u0000\u0517\u0135\u0001\u0000\u0000\u0000\u0518\u0519"+ + "\u0003\u00bcV\u0000\u0519\u051a\u0001\u0000\u0000\u0000\u051a\u051b\u0006"+ + "\u0093\u001f\u0000\u051b\u0137\u0001\u0000\u0000\u0000\u051c\u051d\u0004"+ + "\u0094\u000e\u0000\u051d\u051e\u0003\u008c>\u0000\u051e\u051f\u0001\u0000"+ + "\u0000\u0000\u051f\u0520\u0006\u0094\u0018\u0000\u0520\u0139\u0001\u0000"+ + "\u0000\u0000\u0521\u0522\u0004\u0095\u000f\u0000\u0522\u0523\u0003\u00b2"+ + "Q\u0000\u0523\u0524\u0001\u0000\u0000\u0000\u0524\u0525\u0006\u0095\u0019"+ + "\u0000\u0525\u013b\u0001\u0000\u0000\u0000\u0526\u0527\u0003B\u0019\u0000"+ + "\u0527\u0528\u0001\u0000\u0000\u0000\u0528\u0529\u0006\u0096\u000b\u0000"+ + "\u0529\u013d\u0001\u0000\u0000\u0000\u052a\u052b\u0003D\u001a\u0000\u052b"+ + "\u052c\u0001\u0000\u0000\u0000\u052c\u052d\u0006\u0097\u000b\u0000\u052d"+ + "\u013f\u0001\u0000\u0000\u0000\u052e\u052f\u0003F\u001b\u0000\u052f\u0530"+ + "\u0001\u0000\u0000\u0000\u0530\u0531\u0006\u0098\u000b\u0000\u0531\u0141"+ + "\u0001\u0000\u0000\u0000\u0532\u0533\u0003H\u001c\u0000\u0533\u0534\u0001"+ + "\u0000\u0000\u0000\u0534\u0535\u0006\u0099\u0010\u0000\u0535\u0536\u0006"+ + 
"\u0099\f\u0000\u0536\u0143\u0001\u0000\u0000\u0000\u0537\u0538\u0003t"+ + "2\u0000\u0538\u0539\u0001\u0000\u0000\u0000\u0539\u053a\u0006\u009a\u0017"+ + "\u0000\u053a\u0145\u0001\u0000\u0000\u0000\u053b\u053c\u0004\u009b\u0010"+ + "\u0000\u053c\u053d\u0003\u008c>\u0000\u053d\u053e\u0001\u0000\u0000\u0000"+ + "\u053e\u053f\u0006\u009b\u0018\u0000\u053f\u0147\u0001\u0000\u0000\u0000"+ + "\u0540\u0541\u0004\u009c\u0011\u0000\u0541\u0542\u0003\u00b2Q\u0000\u0542"+ + "\u0543\u0001\u0000\u0000\u0000\u0543\u0544\u0006\u009c\u0019\u0000\u0544"+ + "\u0149\u0001\u0000\u0000\u0000\u0545\u0546\u0003\u00bcV\u0000\u0546\u0547"+ + "\u0001\u0000\u0000\u0000\u0547\u0548\u0006\u009d\u001f\u0000\u0548\u014b"+ + "\u0001\u0000\u0000\u0000\u0549\u054a\u0003\u00b8T\u0000\u054a\u054b\u0001"+ + "\u0000\u0000\u0000\u054b\u054c\u0006\u009e \u0000\u054c\u014d\u0001\u0000"+ + "\u0000\u0000\u054d\u054e\u0003B\u0019\u0000\u054e\u054f\u0001\u0000\u0000"+ + "\u0000\u054f\u0550\u0006\u009f\u000b\u0000\u0550\u014f\u0001\u0000\u0000"+ + "\u0000\u0551\u0552\u0003D\u001a\u0000\u0552\u0553\u0001\u0000\u0000\u0000"+ + "\u0553\u0554\u0006\u00a0\u000b\u0000\u0554\u0151\u0001\u0000\u0000\u0000"+ + "\u0555\u0556\u0003F\u001b\u0000\u0556\u0557\u0001\u0000\u0000\u0000\u0557"+ + "\u0558\u0006\u00a1\u000b\u0000\u0558\u0153\u0001\u0000\u0000\u0000\u0559"+ + "\u055a\u0003H\u001c\u0000\u055a\u055b\u0001\u0000\u0000\u0000\u055b\u055c"+ + "\u0006\u00a2\u0010\u0000\u055c\u055d\u0006\u00a2\f\u0000\u055d\u0155\u0001"+ + "\u0000\u0000\u0000\u055e\u055f\u0007\u0001\u0000\u0000\u055f\u0560\u0007"+ + "\t\u0000\u0000\u0560\u0561\u0007\u000f\u0000\u0000\u0561\u0562\u0007\u0007"+ + "\u0000\u0000\u0562\u0157\u0001\u0000\u0000\u0000\u0563\u0564\u0003B\u0019"+ + "\u0000\u0564\u0565\u0001\u0000\u0000\u0000\u0565\u0566\u0006\u00a4\u000b"+ + "\u0000\u0566\u0159\u0001\u0000\u0000\u0000\u0567\u0568\u0003D\u001a\u0000"+ + "\u0568\u0569\u0001\u0000\u0000\u0000\u0569\u056a\u0006\u00a5\u000b\u0000"+ + "\u056a\u015b\u0001\u0000\u0000\u0000\u056b\u056c\u0003F\u001b\u0000\u056c"+ + "\u056d\u0001\u0000\u0000\u0000\u056d\u056e\u0006\u00a6\u000b\u0000\u056e"+ + "\u015d\u0001\u0000\u0000\u0000\u056f\u0570\u0003\u00b6S\u0000\u0570\u0571"+ + "\u0001\u0000\u0000\u0000\u0571\u0572\u0006\u00a7\u0011\u0000\u0572\u0573"+ + "\u0006\u00a7\f\u0000\u0573\u015f\u0001\u0000\u0000\u0000\u0574\u0575\u0003"+ + "n/\u0000\u0575\u0576\u0001\u0000\u0000\u0000\u0576\u0577\u0006\u00a8\u0012"+ + "\u0000\u0577\u0161\u0001\u0000\u0000\u0000\u0578\u057e\u0003T\"\u0000"+ + "\u0579\u057e\u0003J\u001d\u0000\u057a\u057e\u0003t2\u0000\u057b\u057e"+ + "\u0003L\u001e\u0000\u057c\u057e\u0003Z%\u0000\u057d\u0578\u0001\u0000"+ + "\u0000\u0000\u057d\u0579\u0001\u0000\u0000\u0000\u057d\u057a\u0001\u0000"+ + "\u0000\u0000\u057d\u057b\u0001\u0000\u0000\u0000\u057d\u057c\u0001\u0000"+ + "\u0000\u0000\u057e\u057f\u0001\u0000\u0000\u0000\u057f\u057d\u0001\u0000"+ + "\u0000\u0000\u057f\u0580\u0001\u0000\u0000\u0000\u0580\u0163\u0001\u0000"+ + "\u0000\u0000\u0581\u0582\u0003B\u0019\u0000\u0582\u0583\u0001\u0000\u0000"+ + "\u0000\u0583\u0584\u0006\u00aa\u000b\u0000\u0584\u0165\u0001\u0000\u0000"+ + "\u0000\u0585\u0586\u0003D\u001a\u0000\u0586\u0587\u0001\u0000\u0000\u0000"+ + "\u0587\u0588\u0006\u00ab\u000b\u0000\u0588\u0167\u0001\u0000\u0000\u0000"+ + "\u0589\u058a\u0003F\u001b\u0000\u058a\u058b\u0001\u0000\u0000\u0000\u058b"+ + "\u058c\u0006\u00ac\u000b\u0000\u058c\u0169\u0001\u0000\u0000\u0000\u058d"+ + "\u058e\u0003H\u001c\u0000\u058e\u058f\u0001\u0000\u0000\u0000\u058f\u0590"+ + 
"\u0006\u00ad\u0010\u0000\u0590\u0591\u0006\u00ad\f\u0000\u0591\u016b\u0001"+ + "\u0000\u0000\u0000\u0592\u0593\u0003n/\u0000\u0593\u0594\u0001\u0000\u0000"+ + "\u0000\u0594\u0595\u0006\u00ae\u0012\u0000\u0595\u016d\u0001\u0000\u0000"+ + "\u0000\u0596\u0597\u0003p0\u0000\u0597\u0598\u0001\u0000\u0000\u0000\u0598"+ + "\u0599\u0006\u00af\u0013\u0000\u0599\u016f\u0001\u0000\u0000\u0000\u059a"+ + "\u059b\u0003t2\u0000\u059b\u059c\u0001\u0000\u0000\u0000\u059c\u059d\u0006"+ + "\u00b0\u0017\u0000\u059d\u0171\u0001\u0000\u0000\u0000\u059e\u059f\u0003"+ + "\u011a\u0085\u0000\u059f\u05a0\u0001\u0000\u0000\u0000\u05a0\u05a1\u0006"+ + "\u00b1!\u0000\u05a1\u05a2\u0006\u00b1\"\u0000\u05a2\u0173\u0001\u0000"+ + "\u0000\u0000\u05a3\u05a4\u0003\u00deg\u0000\u05a4\u05a5\u0001\u0000\u0000"+ + "\u0000\u05a5\u05a6\u0006\u00b2\u0015\u0000\u05a6\u0175\u0001\u0000\u0000"+ + "\u0000\u05a7\u05a8\u0003^\'\u0000\u05a8\u05a9\u0001\u0000\u0000\u0000"+ + "\u05a9\u05aa\u0006\u00b3\u0016\u0000\u05aa\u0177\u0001\u0000\u0000\u0000"+ + "\u05ab\u05ac\u0003B\u0019\u0000\u05ac\u05ad\u0001\u0000\u0000\u0000\u05ad"+ + "\u05ae\u0006\u00b4\u000b\u0000\u05ae\u0179\u0001\u0000\u0000\u0000\u05af"+ + "\u05b0\u0003D\u001a\u0000\u05b0\u05b1\u0001\u0000\u0000\u0000\u05b1\u05b2"+ + "\u0006\u00b5\u000b\u0000\u05b2\u017b\u0001\u0000\u0000\u0000\u05b3\u05b4"+ + "\u0003F\u001b\u0000\u05b4\u05b5\u0001\u0000\u0000\u0000\u05b5\u05b6\u0006"+ + "\u00b6\u000b\u0000\u05b6\u017d\u0001\u0000\u0000\u0000\u05b7\u05b8\u0003"+ + "H\u001c\u0000\u05b8\u05b9\u0001\u0000\u0000\u0000\u05b9\u05ba\u0006\u00b7"+ + "\u0010\u0000\u05ba\u05bb\u0006\u00b7\f\u0000\u05bb\u05bc\u0006\u00b7\f"+ + "\u0000\u05bc\u017f\u0001\u0000\u0000\u0000\u05bd\u05be\u0003p0\u0000\u05be"+ + "\u05bf\u0001\u0000\u0000\u0000\u05bf\u05c0\u0006\u00b8\u0013\u0000\u05c0"+ + "\u0181\u0001\u0000\u0000\u0000\u05c1\u05c2\u0003t2\u0000\u05c2\u05c3\u0001"+ + "\u0000\u0000\u0000\u05c3\u05c4\u0006\u00b9\u0017\u0000\u05c4\u0183\u0001"+ + "\u0000\u0000\u0000\u05c5\u05c6\u0003\u00f8t\u0000\u05c6\u05c7\u0001\u0000"+ + "\u0000\u0000\u05c7\u05c8\u0006\u00ba\u001a\u0000\u05c8\u0185\u0001\u0000"+ + "\u0000\u0000\u05c9\u05ca\u0003B\u0019\u0000\u05ca\u05cb\u0001\u0000\u0000"+ + "\u0000\u05cb\u05cc\u0006\u00bb\u000b\u0000\u05cc\u0187\u0001\u0000\u0000"+ + "\u0000\u05cd\u05ce\u0003D\u001a\u0000\u05ce\u05cf\u0001\u0000\u0000\u0000"+ + "\u05cf\u05d0\u0006\u00bc\u000b\u0000\u05d0\u0189\u0001\u0000\u0000\u0000"+ + "\u05d1\u05d2\u0003F\u001b\u0000\u05d2\u05d3\u0001\u0000\u0000\u0000\u05d3"+ + "\u05d4\u0006\u00bd\u000b\u0000\u05d4\u018b\u0001\u0000\u0000\u0000\u05d5"+ + "\u05d6\u0003H\u001c\u0000\u05d6\u05d7\u0001\u0000\u0000\u0000\u05d7\u05d8"+ + "\u0006\u00be\u0010\u0000\u05d8\u05d9\u0006\u00be\f\u0000\u05d9\u018d\u0001"+ + "\u0000\u0000\u0000\u05da\u05db\u00036\u0013\u0000\u05db\u05dc\u0001\u0000"+ + "\u0000\u0000\u05dc\u05dd\u0006\u00bf#\u0000\u05dd\u018f\u0001\u0000\u0000"+ + "\u0000\u05de\u05df\u0003\u010c~\u0000\u05df\u05e0\u0001\u0000\u0000\u0000"+ + "\u05e0\u05e1\u0006\u00c0$\u0000\u05e1\u0191\u0001\u0000\u0000\u0000\u05e2"+ + "\u05e3\u0003\u011a\u0085\u0000\u05e3\u05e4\u0001\u0000\u0000\u0000\u05e4"+ + "\u05e5\u0006\u00c1!\u0000\u05e5\u05e6\u0006\u00c1\f\u0000\u05e6\u05e7"+ + "\u0006\u00c1\u0000\u0000\u05e7\u0193\u0001\u0000\u0000\u0000\u05e8\u05e9"+ + "\u0007\u0014\u0000\u0000\u05e9\u05ea\u0007\u0002\u0000\u0000\u05ea\u05eb"+ + "\u0007\u0001\u0000\u0000\u05eb\u05ec\u0007\t\u0000\u0000\u05ec\u05ed\u0007"+ + "\u0011\u0000\u0000\u05ed\u05ee\u0001\u0000\u0000\u0000\u05ee\u05ef\u0006"+ + 
"\u00c2\f\u0000\u05ef\u05f0\u0006\u00c2\u0000\u0000\u05f0\u0195\u0001\u0000"+ + "\u0000\u0000\u05f1\u05f2\u0003\u00deg\u0000\u05f2\u05f3\u0001\u0000\u0000"+ + "\u0000\u05f3\u05f4\u0006\u00c3\u0015\u0000\u05f4\u0197\u0001\u0000\u0000"+ + "\u0000\u05f5\u05f6\u0003^\'\u0000\u05f6\u05f7\u0001\u0000\u0000\u0000"+ + "\u05f7\u05f8\u0006\u00c4\u0016\u0000\u05f8\u0199\u0001\u0000\u0000\u0000"+ + "\u05f9\u05fa\u0003n/\u0000\u05fa\u05fb\u0001\u0000\u0000\u0000\u05fb\u05fc"+ + "\u0006\u00c5\u0012\u0000\u05fc\u019b\u0001\u0000\u0000\u0000\u05fd\u05fe"+ + "\u0003\u00b8T\u0000\u05fe\u05ff\u0001\u0000\u0000\u0000\u05ff\u0600\u0006"+ + "\u00c6 \u0000\u0600\u019d\u0001\u0000\u0000\u0000\u0601\u0602\u0003\u00bc"+ + "V\u0000\u0602\u0603\u0001\u0000\u0000\u0000\u0603\u0604\u0006\u00c7\u001f"+ + "\u0000\u0604\u019f\u0001\u0000\u0000\u0000\u0605\u0606\u0003B\u0019\u0000"+ + "\u0606\u0607\u0001\u0000\u0000\u0000\u0607\u0608\u0006\u00c8\u000b\u0000"+ + "\u0608\u01a1\u0001\u0000\u0000\u0000\u0609\u060a\u0003D\u001a\u0000\u060a"+ + "\u060b\u0001\u0000\u0000\u0000\u060b\u060c\u0006\u00c9\u000b\u0000\u060c"+ + "\u01a3\u0001\u0000\u0000\u0000\u060d\u060e\u0003F\u001b\u0000\u060e\u060f"+ + "\u0001\u0000\u0000\u0000\u060f\u0610\u0006\u00ca\u000b\u0000\u0610\u01a5"+ + "\u0001\u0000\u0000\u0000\u0611\u0612\u0003H\u001c\u0000\u0612\u0613\u0001"+ + "\u0000\u0000\u0000\u0613\u0614\u0006\u00cb\u0010\u0000\u0614\u0615\u0006"+ + "\u00cb\f\u0000\u0615\u01a7\u0001\u0000\u0000\u0000\u0616\u0617\u0003\u00de"+ + "g\u0000\u0617\u0618\u0001\u0000\u0000\u0000\u0618\u0619\u0006\u00cc\u0015"+ + "\u0000\u0619\u061a\u0006\u00cc\f\u0000\u061a\u061b\u0006\u00cc%\u0000"+ + "\u061b\u01a9\u0001\u0000\u0000\u0000\u061c\u061d\u0003^\'\u0000\u061d"+ + "\u061e\u0001\u0000\u0000\u0000\u061e\u061f\u0006\u00cd\u0016\u0000\u061f"+ + "\u0620\u0006\u00cd\f\u0000\u0620\u0621\u0006\u00cd%\u0000\u0621\u01ab"+ + "\u0001\u0000\u0000\u0000\u0622\u0623\u0003B\u0019\u0000\u0623\u0624\u0001"+ + "\u0000\u0000\u0000\u0624\u0625\u0006\u00ce\u000b\u0000\u0625\u01ad\u0001"+ + "\u0000\u0000\u0000\u0626\u0627\u0003D\u001a\u0000\u0627\u0628\u0001\u0000"+ + "\u0000\u0000\u0628\u0629\u0006\u00cf\u000b\u0000\u0629\u01af\u0001\u0000"+ + "\u0000\u0000\u062a\u062b\u0003F\u001b\u0000\u062b\u062c\u0001\u0000\u0000"+ + "\u0000\u062c\u062d\u0006\u00d0\u000b\u0000\u062d\u01b1\u0001\u0000\u0000"+ + "\u0000\u062e\u062f\u0003n/\u0000\u062f\u0630\u0001\u0000\u0000\u0000\u0630"+ + "\u0631\u0006\u00d1\u0012\u0000\u0631\u0632\u0006\u00d1\f\u0000\u0632\u0633"+ + "\u0006\u00d1\t\u0000\u0633\u01b3\u0001\u0000\u0000\u0000\u0634\u0635\u0003"+ + "p0\u0000\u0635\u0636\u0001\u0000\u0000\u0000\u0636\u0637\u0006\u00d2\u0013"+ + "\u0000\u0637\u0638\u0006\u00d2\f\u0000\u0638\u0639\u0006\u00d2\t\u0000"+ + "\u0639\u01b5\u0001\u0000\u0000\u0000\u063a\u063b\u0003B\u0019\u0000\u063b"+ + "\u063c\u0001\u0000\u0000\u0000\u063c\u063d\u0006\u00d3\u000b\u0000\u063d"+ + "\u01b7\u0001\u0000\u0000\u0000\u063e\u063f\u0003D\u001a\u0000\u063f\u0640"+ + "\u0001\u0000\u0000\u0000\u0640\u0641\u0006\u00d4\u000b\u0000\u0641\u01b9"+ + "\u0001\u0000\u0000\u0000\u0642\u0643\u0003F\u001b\u0000\u0643\u0644\u0001"+ + "\u0000\u0000\u0000\u0644\u0645\u0006\u00d5\u000b\u0000\u0645\u01bb\u0001"+ + "\u0000\u0000\u0000\u0646\u0647\u0003\u00bcV\u0000\u0647\u0648\u0001\u0000"+ + "\u0000\u0000\u0648\u0649\u0006\u00d6\f\u0000\u0649\u064a\u0006\u00d6\u0000"+ + "\u0000\u064a\u064b\u0006\u00d6\u001f\u0000\u064b\u01bd\u0001\u0000\u0000"+ + "\u0000\u064c\u064d\u0003\u00b8T\u0000\u064d\u064e\u0001\u0000\u0000\u0000"+ + 
"\u064e\u064f\u0006\u00d7\f\u0000\u064f\u0650\u0006\u00d7\u0000\u0000\u0650"+ + "\u0651\u0006\u00d7 \u0000\u0651\u01bf\u0001\u0000\u0000\u0000\u0652\u0653"+ + "\u0003d*\u0000\u0653\u0654\u0001\u0000\u0000\u0000\u0654\u0655\u0006\u00d8"+ + "\f\u0000\u0655\u0656\u0006\u00d8\u0000\u0000\u0656\u0657\u0006\u00d8&"+ + "\u0000\u0657\u01c1\u0001\u0000\u0000\u0000\u0658\u0659\u0003H\u001c\u0000"+ + "\u0659\u065a\u0001\u0000\u0000\u0000\u065a\u065b\u0006\u00d9\u0010\u0000"+ + "\u065b\u065c\u0006\u00d9\f\u0000\u065c\u01c3\u0001\u0000\u0000\u0000B"+ + "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e"+ + "\u000f\u0299\u02a3\u02a7\u02aa\u02b3\u02b5\u02c0\u02d3\u02d8\u02e1\u02e8"+ + "\u02ed\u02ef\u02fa\u0302\u0305\u0307\u030c\u0311\u0317\u031e\u0323\u0329"+ + "\u032c\u0334\u0338\u03be\u03c3\u03ca\u03cc\u03dc\u03e1\u03e6\u03e8\u03ee"+ + "\u043b\u0440\u0471\u0475\u047a\u047f\u0484\u0486\u048a\u048c\u04e3\u04e7"+ + "\u04ec\u057d\u057f\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006\u0000"+ + "\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005"+ + "\t\u0000\u0005\u000b\u0000\u0005\u000e\u0000\u0005\r\u0000\u0000\u0001"+ + "\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007H\u0000\u0005\u0000\u0000"+ + "\u0007\u001d\u0000\u0007I\u0000\u0007&\u0000\u0007\'\u0000\u0007$\u0000"+ + "\u0007S\u0000\u0007\u001e\u0000\u0007)\u0000\u00075\u0000\u0007G\u0000"+ + "\u0007W\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007a\u0000\u0007`\u0000"+ + "\u0007K\u0000\u0007J\u0000\u0007_\u0000\u0005\f\u0000\u0007\u0014\u0000"+ + "\u0007[\u0000\u0005\u000f\u0000\u0007!\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index c9db129e08ba2..9bed77ff31168 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -334,4 +334,4 @@ joinPredicate atn: -[4, 1, 130, 651, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 
5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 207, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 215, 8, 5, 10, 5, 12, 5, 218, 9, 5, 1, 6, 1, 6, 3, 6, 222, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 229, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 234, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 239, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 249, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 255, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 263, 8, 9, 10, 9, 12, 9, 266, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 276, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 281, 8, 10, 10, 10, 12, 10, 284, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 292, 8, 11, 10, 11, 12, 11, 295, 9, 11, 1, 11, 1, 11, 3, 11, 299, 8, 11, 3, 11, 301, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 312, 8, 13, 10, 13, 12, 13, 315, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 5, 17, 331, 8, 17, 10, 17, 12, 17, 334, 9, 17, 1, 18, 1, 18, 1, 18, 3, 18, 339, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 347, 8, 19, 10, 19, 12, 19, 350, 9, 19, 1, 19, 3, 19, 353, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 358, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 370, 8, 23, 10, 23, 12, 23, 373, 9, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 379, 8, 24, 10, 24, 12, 24, 382, 9, 24, 1, 24, 3, 24, 385, 8, 24, 1, 24, 1, 24, 3, 24, 389, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 396, 8, 26, 1, 26, 1, 26, 3, 26, 400, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 405, 8, 27, 10, 27, 12, 27, 408, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 413, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 418, 8, 29, 10, 29, 12, 29, 421, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 426, 8, 30, 10, 30, 12, 30, 429, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 434, 8, 31, 10, 31, 12, 31, 437, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 444, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 459, 8, 34, 10, 34, 12, 34, 462, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 470, 8, 34, 10, 34, 12, 34, 473, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 481, 8, 34, 10, 34, 12, 34, 484, 9, 34, 1, 34, 1, 34, 3, 34, 488, 8, 34, 1, 35, 1, 35, 3, 35, 492, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 497, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 506, 8, 38, 10, 38, 12, 38, 509, 9, 38, 1, 39, 1, 39, 3, 39, 513, 8, 39, 1, 39, 1, 39, 3, 39, 517, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 529, 8, 42, 10, 42, 12, 42, 532, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 542, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 554, 8, 47, 10, 47, 12, 47, 557, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 567, 8, 50, 1, 51, 3, 51, 570, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 575, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 597, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 603, 8, 58, 10, 58, 12, 58, 606, 9, 58, 3, 58, 608, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 613, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 626, 8, 61, 1, 62, 3, 62, 629, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 638, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 644, 8, 64, 10, 64, 12, 64, 647, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 
10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 83, 83, 1, 0, 74, 75, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 678, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 206, 1, 0, 0, 0, 12, 233, 1, 0, 0, 0, 14, 235, 1, 0, 0, 0, 16, 248, 1, 0, 0, 0, 18, 254, 1, 0, 0, 0, 20, 275, 1, 0, 0, 0, 22, 285, 1, 0, 0, 0, 24, 304, 1, 0, 0, 0, 26, 306, 1, 0, 0, 0, 28, 318, 1, 0, 0, 0, 30, 322, 1, 0, 0, 0, 32, 324, 1, 0, 0, 0, 34, 327, 1, 0, 0, 0, 36, 338, 1, 0, 0, 0, 38, 342, 1, 0, 0, 0, 40, 357, 1, 0, 0, 0, 42, 361, 1, 0, 0, 0, 44, 363, 1, 0, 0, 0, 46, 365, 1, 0, 0, 0, 48, 374, 1, 0, 0, 0, 50, 390, 1, 0, 0, 0, 52, 393, 1, 0, 0, 0, 54, 401, 1, 0, 0, 0, 56, 409, 1, 0, 0, 0, 58, 414, 1, 0, 0, 0, 60, 422, 1, 0, 0, 0, 62, 430, 1, 0, 0, 0, 64, 438, 1, 0, 0, 0, 66, 443, 1, 0, 0, 0, 68, 487, 1, 0, 0, 0, 70, 491, 1, 0, 0, 0, 72, 496, 1, 0, 0, 0, 74, 498, 1, 0, 0, 0, 76, 501, 1, 0, 0, 0, 78, 510, 1, 0, 0, 0, 80, 518, 1, 0, 0, 0, 82, 521, 1, 0, 0, 0, 84, 524, 1, 0, 0, 0, 86, 533, 1, 0, 0, 0, 88, 537, 1, 0, 0, 0, 90, 543, 1, 0, 0, 0, 92, 547, 1, 0, 0, 0, 94, 550, 1, 0, 0, 0, 96, 558, 1, 0, 0, 0, 98, 562, 1, 0, 0, 0, 100, 566, 1, 0, 0, 0, 102, 569, 1, 0, 0, 0, 104, 574, 1, 0, 0, 0, 106, 578, 1, 0, 0, 0, 108, 580, 1, 0, 0, 0, 110, 582, 1, 0, 0, 0, 112, 585, 1, 0, 0, 0, 114, 589, 1, 0, 0, 0, 116, 592, 1, 0, 0, 0, 118, 612, 1, 0, 0, 0, 120, 616, 1, 0, 0, 0, 122, 621, 1, 0, 0, 0, 124, 628, 1, 0, 0, 0, 126, 634, 1, 0, 0, 0, 128, 639, 1, 0, 0, 0, 130, 648, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 29, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 38, 19, 0, 148, 153, 3, 32, 16, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 207, 3, 10, 5, 8, 180, 207, 3, 16, 8, 0, 181, 207, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 
188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 207, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 207, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 177, 1, 0, 0, 0, 206, 180, 1, 0, 0, 0, 206, 181, 1, 0, 0, 0, 206, 182, 1, 0, 0, 0, 206, 198, 1, 0, 0, 0, 206, 205, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 5, 0, 0, 209, 210, 5, 34, 0, 0, 210, 215, 3, 10, 5, 6, 211, 212, 10, 4, 0, 0, 212, 213, 5, 52, 0, 0, 213, 215, 3, 10, 5, 5, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 221, 3, 16, 8, 0, 220, 222, 5, 49, 0, 0, 221, 220, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 5, 47, 0, 0, 224, 225, 3, 106, 53, 0, 225, 234, 1, 0, 0, 0, 226, 228, 3, 16, 8, 0, 227, 229, 5, 49, 0, 0, 228, 227, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 5, 54, 0, 0, 231, 232, 3, 106, 53, 0, 232, 234, 1, 0, 0, 0, 233, 219, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 234, 13, 1, 0, 0, 0, 235, 238, 3, 58, 29, 0, 236, 237, 5, 37, 0, 0, 237, 239, 3, 30, 15, 0, 238, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 241, 5, 38, 0, 0, 241, 242, 3, 68, 34, 0, 242, 15, 1, 0, 0, 0, 243, 249, 3, 18, 9, 0, 244, 245, 3, 18, 9, 0, 245, 246, 3, 108, 54, 0, 246, 247, 3, 18, 9, 0, 247, 249, 1, 0, 0, 0, 248, 243, 1, 0, 0, 0, 248, 244, 1, 0, 0, 0, 249, 17, 1, 0, 0, 0, 250, 251, 6, 9, -1, 0, 251, 255, 3, 20, 10, 0, 252, 253, 7, 0, 0, 0, 253, 255, 3, 18, 9, 3, 254, 250, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 264, 1, 0, 0, 0, 256, 257, 10, 2, 0, 0, 257, 258, 7, 1, 0, 0, 258, 263, 3, 18, 9, 3, 259, 260, 10, 1, 0, 0, 260, 261, 7, 0, 0, 0, 261, 263, 3, 18, 9, 2, 262, 256, 1, 0, 0, 0, 262, 259, 1, 0, 0, 0, 263, 266, 1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 19, 1, 0, 0, 0, 266, 264, 1, 0, 0, 0, 267, 268, 6, 10, -1, 0, 268, 276, 3, 68, 34, 0, 269, 276, 3, 58, 29, 0, 270, 276, 3, 22, 11, 0, 271, 272, 5, 48, 0, 0, 272, 273, 3, 10, 5, 0, 273, 274, 5, 55, 0, 0, 274, 276, 1, 0, 0, 0, 275, 267, 1, 0, 0, 0, 275, 269, 1, 0, 0, 0, 275, 270, 1, 0, 0, 0, 275, 271, 1, 0, 0, 0, 276, 282, 1, 0, 0, 0, 277, 278, 10, 1, 0, 0, 278, 279, 5, 37, 0, 0, 279, 281, 3, 30, 15, 0, 280, 277, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 282, 283, 1, 0, 0, 0, 283, 21, 1, 0, 0, 0, 284, 282, 1, 0, 0, 0, 285, 286, 3, 24, 12, 0, 286, 300, 5, 48, 0, 0, 287, 301, 5, 66, 0, 0, 288, 293, 3, 10, 5, 0, 289, 290, 5, 39, 0, 0, 290, 292, 3, 10, 5, 0, 291, 289, 1, 0, 0, 0, 292, 295, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 298, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 296, 297, 5, 39, 0, 0, 297, 299, 3, 26, 13, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 301, 1, 0, 0, 0, 300, 287, 1, 0, 0, 0, 300, 288, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 303, 5, 55, 0, 0, 303, 23, 1, 0, 0, 0, 304, 305, 3, 72, 36, 0, 305, 25, 1, 0, 0, 0, 306, 307, 4, 13, 10, 0, 307, 308, 5, 69, 0, 0, 308, 313, 3, 28, 14, 0, 309, 310, 5, 39, 0, 0, 310, 312, 3, 28, 14, 0, 311, 309, 1, 0, 0, 0, 312, 315, 1, 0, 0, 0, 313, 311, 1, 0, 0, 0, 313, 314, 1, 0, 0, 0, 314, 316, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 316, 317, 5, 70, 0, 0, 317, 27, 1, 0, 0, 0, 318, 319, 3, 106, 53, 0, 319, 320, 5, 38, 0, 
0, 320, 321, 3, 68, 34, 0, 321, 29, 1, 0, 0, 0, 322, 323, 3, 64, 32, 0, 323, 31, 1, 0, 0, 0, 324, 325, 5, 12, 0, 0, 325, 326, 3, 34, 17, 0, 326, 33, 1, 0, 0, 0, 327, 332, 3, 36, 18, 0, 328, 329, 5, 39, 0, 0, 329, 331, 3, 36, 18, 0, 330, 328, 1, 0, 0, 0, 331, 334, 1, 0, 0, 0, 332, 330, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 35, 1, 0, 0, 0, 334, 332, 1, 0, 0, 0, 335, 336, 3, 58, 29, 0, 336, 337, 5, 36, 0, 0, 337, 339, 1, 0, 0, 0, 338, 335, 1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 340, 1, 0, 0, 0, 340, 341, 3, 10, 5, 0, 341, 37, 1, 0, 0, 0, 342, 343, 5, 6, 0, 0, 343, 348, 3, 40, 20, 0, 344, 345, 5, 39, 0, 0, 345, 347, 3, 40, 20, 0, 346, 344, 1, 0, 0, 0, 347, 350, 1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 348, 349, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351, 353, 3, 46, 23, 0, 352, 351, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 39, 1, 0, 0, 0, 354, 355, 3, 42, 21, 0, 355, 356, 5, 38, 0, 0, 356, 358, 1, 0, 0, 0, 357, 354, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 360, 3, 44, 22, 0, 360, 41, 1, 0, 0, 0, 361, 362, 5, 83, 0, 0, 362, 43, 1, 0, 0, 0, 363, 364, 7, 2, 0, 0, 364, 45, 1, 0, 0, 0, 365, 366, 5, 82, 0, 0, 366, 371, 5, 83, 0, 0, 367, 368, 5, 39, 0, 0, 368, 370, 5, 83, 0, 0, 369, 367, 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 371, 372, 1, 0, 0, 0, 372, 47, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 374, 375, 5, 19, 0, 0, 375, 380, 3, 40, 20, 0, 376, 377, 5, 39, 0, 0, 377, 379, 3, 40, 20, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 385, 3, 54, 27, 0, 384, 383, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 388, 1, 0, 0, 0, 386, 387, 5, 33, 0, 0, 387, 389, 3, 34, 17, 0, 388, 386, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 49, 1, 0, 0, 0, 390, 391, 5, 4, 0, 0, 391, 392, 3, 34, 17, 0, 392, 51, 1, 0, 0, 0, 393, 395, 5, 15, 0, 0, 394, 396, 3, 54, 27, 0, 395, 394, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 399, 1, 0, 0, 0, 397, 398, 5, 33, 0, 0, 398, 400, 3, 34, 17, 0, 399, 397, 1, 0, 0, 0, 399, 400, 1, 0, 0, 0, 400, 53, 1, 0, 0, 0, 401, 406, 3, 56, 28, 0, 402, 403, 5, 39, 0, 0, 403, 405, 3, 56, 28, 0, 404, 402, 1, 0, 0, 0, 405, 408, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 406, 407, 1, 0, 0, 0, 407, 55, 1, 0, 0, 0, 408, 406, 1, 0, 0, 0, 409, 412, 3, 36, 18, 0, 410, 411, 5, 16, 0, 0, 411, 413, 3, 10, 5, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 57, 1, 0, 0, 0, 414, 419, 3, 72, 36, 0, 415, 416, 5, 41, 0, 0, 416, 418, 3, 72, 36, 0, 417, 415, 1, 0, 0, 0, 418, 421, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 419, 420, 1, 0, 0, 0, 420, 59, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 422, 427, 3, 66, 33, 0, 423, 424, 5, 41, 0, 0, 424, 426, 3, 66, 33, 0, 425, 423, 1, 0, 0, 0, 426, 429, 1, 0, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 61, 1, 0, 0, 0, 429, 427, 1, 0, 0, 0, 430, 435, 3, 60, 30, 0, 431, 432, 5, 39, 0, 0, 432, 434, 3, 60, 30, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 63, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 7, 3, 0, 0, 439, 65, 1, 0, 0, 0, 440, 444, 5, 87, 0, 0, 441, 442, 4, 33, 11, 0, 442, 444, 3, 70, 35, 0, 443, 440, 1, 0, 0, 0, 443, 441, 1, 0, 0, 0, 444, 67, 1, 0, 0, 0, 445, 488, 5, 50, 0, 0, 446, 447, 3, 104, 52, 0, 447, 448, 5, 74, 0, 0, 448, 488, 1, 0, 0, 0, 449, 488, 3, 102, 51, 0, 450, 488, 3, 104, 52, 0, 451, 488, 3, 98, 49, 0, 452, 488, 3, 70, 35, 0, 453, 488, 3, 106, 53, 0, 454, 455, 5, 72, 0, 0, 455, 460, 3, 100, 50, 0, 456, 457, 5, 39, 0, 0, 457, 459, 3, 100, 50, 0, 458, 456, 1, 0, 0, 0, 459, 462, 1, 0, 0, 0, 460, 458, 
1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 463, 1, 0, 0, 0, 462, 460, 1, 0, 0, 0, 463, 464, 5, 73, 0, 0, 464, 488, 1, 0, 0, 0, 465, 466, 5, 72, 0, 0, 466, 471, 3, 98, 49, 0, 467, 468, 5, 39, 0, 0, 468, 470, 3, 98, 49, 0, 469, 467, 1, 0, 0, 0, 470, 473, 1, 0, 0, 0, 471, 469, 1, 0, 0, 0, 471, 472, 1, 0, 0, 0, 472, 474, 1, 0, 0, 0, 473, 471, 1, 0, 0, 0, 474, 475, 5, 73, 0, 0, 475, 488, 1, 0, 0, 0, 476, 477, 5, 72, 0, 0, 477, 482, 3, 106, 53, 0, 478, 479, 5, 39, 0, 0, 479, 481, 3, 106, 53, 0, 480, 478, 1, 0, 0, 0, 481, 484, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 485, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 486, 5, 73, 0, 0, 486, 488, 1, 0, 0, 0, 487, 445, 1, 0, 0, 0, 487, 446, 1, 0, 0, 0, 487, 449, 1, 0, 0, 0, 487, 450, 1, 0, 0, 0, 487, 451, 1, 0, 0, 0, 487, 452, 1, 0, 0, 0, 487, 453, 1, 0, 0, 0, 487, 454, 1, 0, 0, 0, 487, 465, 1, 0, 0, 0, 487, 476, 1, 0, 0, 0, 488, 69, 1, 0, 0, 0, 489, 492, 5, 53, 0, 0, 490, 492, 5, 71, 0, 0, 491, 489, 1, 0, 0, 0, 491, 490, 1, 0, 0, 0, 492, 71, 1, 0, 0, 0, 493, 497, 3, 64, 32, 0, 494, 495, 4, 36, 12, 0, 495, 497, 3, 70, 35, 0, 496, 493, 1, 0, 0, 0, 496, 494, 1, 0, 0, 0, 497, 73, 1, 0, 0, 0, 498, 499, 5, 9, 0, 0, 499, 500, 5, 31, 0, 0, 500, 75, 1, 0, 0, 0, 501, 502, 5, 14, 0, 0, 502, 507, 3, 78, 39, 0, 503, 504, 5, 39, 0, 0, 504, 506, 3, 78, 39, 0, 505, 503, 1, 0, 0, 0, 506, 509, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 77, 1, 0, 0, 0, 509, 507, 1, 0, 0, 0, 510, 512, 3, 10, 5, 0, 511, 513, 7, 4, 0, 0, 512, 511, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 516, 1, 0, 0, 0, 514, 515, 5, 51, 0, 0, 515, 517, 7, 5, 0, 0, 516, 514, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 79, 1, 0, 0, 0, 518, 519, 5, 8, 0, 0, 519, 520, 3, 62, 31, 0, 520, 81, 1, 0, 0, 0, 521, 522, 5, 2, 0, 0, 522, 523, 3, 62, 31, 0, 523, 83, 1, 0, 0, 0, 524, 525, 5, 11, 0, 0, 525, 530, 3, 86, 43, 0, 526, 527, 5, 39, 0, 0, 527, 529, 3, 86, 43, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 85, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 534, 3, 60, 30, 0, 534, 535, 5, 91, 0, 0, 535, 536, 3, 60, 30, 0, 536, 87, 1, 0, 0, 0, 537, 538, 5, 1, 0, 0, 538, 539, 3, 20, 10, 0, 539, 541, 3, 106, 53, 0, 540, 542, 3, 94, 47, 0, 541, 540, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 89, 1, 0, 0, 0, 543, 544, 5, 7, 0, 0, 544, 545, 3, 20, 10, 0, 545, 546, 3, 106, 53, 0, 546, 91, 1, 0, 0, 0, 547, 548, 5, 10, 0, 0, 548, 549, 3, 58, 29, 0, 549, 93, 1, 0, 0, 0, 550, 555, 3, 96, 48, 0, 551, 552, 5, 39, 0, 0, 552, 554, 3, 96, 48, 0, 553, 551, 1, 0, 0, 0, 554, 557, 1, 0, 0, 0, 555, 553, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 95, 1, 0, 0, 0, 557, 555, 1, 0, 0, 0, 558, 559, 3, 64, 32, 0, 559, 560, 5, 36, 0, 0, 560, 561, 3, 68, 34, 0, 561, 97, 1, 0, 0, 0, 562, 563, 7, 6, 0, 0, 563, 99, 1, 0, 0, 0, 564, 567, 3, 102, 51, 0, 565, 567, 3, 104, 52, 0, 566, 564, 1, 0, 0, 0, 566, 565, 1, 0, 0, 0, 567, 101, 1, 0, 0, 0, 568, 570, 7, 0, 0, 0, 569, 568, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 5, 32, 0, 0, 572, 103, 1, 0, 0, 0, 573, 575, 7, 0, 0, 0, 574, 573, 1, 0, 0, 0, 574, 575, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 577, 5, 31, 0, 0, 577, 105, 1, 0, 0, 0, 578, 579, 5, 30, 0, 0, 579, 107, 1, 0, 0, 0, 580, 581, 7, 7, 0, 0, 581, 109, 1, 0, 0, 0, 582, 583, 5, 5, 0, 0, 583, 584, 3, 112, 56, 0, 584, 111, 1, 0, 0, 0, 585, 586, 5, 72, 0, 0, 586, 587, 3, 2, 1, 0, 587, 588, 5, 73, 0, 0, 588, 113, 1, 0, 0, 0, 589, 590, 5, 13, 0, 0, 590, 591, 5, 107, 0, 0, 591, 115, 1, 0, 0, 0, 592, 593, 5, 3, 0, 0, 593, 596, 5, 97, 0, 0, 594, 595, 5, 95, 0, 0, 595, 597, 
3, 60, 30, 0, 596, 594, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 607, 1, 0, 0, 0, 598, 599, 5, 96, 0, 0, 599, 604, 3, 118, 59, 0, 600, 601, 5, 39, 0, 0, 601, 603, 3, 118, 59, 0, 602, 600, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 608, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 598, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 608, 117, 1, 0, 0, 0, 609, 610, 3, 60, 30, 0, 610, 611, 5, 36, 0, 0, 611, 613, 1, 0, 0, 0, 612, 609, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 3, 60, 30, 0, 615, 119, 1, 0, 0, 0, 616, 617, 5, 18, 0, 0, 617, 618, 3, 40, 20, 0, 618, 619, 5, 95, 0, 0, 619, 620, 3, 62, 31, 0, 620, 121, 1, 0, 0, 0, 621, 622, 5, 17, 0, 0, 622, 625, 3, 54, 27, 0, 623, 624, 5, 33, 0, 0, 624, 626, 3, 34, 17, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 123, 1, 0, 0, 0, 627, 629, 7, 8, 0, 0, 628, 627, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 631, 5, 20, 0, 0, 631, 632, 3, 126, 63, 0, 632, 633, 3, 128, 64, 0, 633, 125, 1, 0, 0, 0, 634, 637, 3, 64, 32, 0, 635, 636, 5, 91, 0, 0, 636, 638, 3, 64, 32, 0, 637, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 127, 1, 0, 0, 0, 639, 640, 5, 95, 0, 0, 640, 645, 3, 130, 65, 0, 641, 642, 5, 39, 0, 0, 642, 644, 3, 130, 65, 0, 643, 641, 1, 0, 0, 0, 644, 647, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 129, 1, 0, 0, 0, 647, 645, 1, 0, 0, 0, 648, 649, 3, 16, 8, 0, 649, 131, 1, 0, 0, 0, 63, 143, 152, 172, 184, 193, 201, 206, 214, 216, 221, 228, 233, 238, 248, 254, 262, 264, 275, 282, 293, 298, 300, 313, 332, 338, 348, 352, 357, 371, 380, 384, 388, 395, 399, 406, 412, 419, 427, 435, 443, 460, 471, 482, 487, 491, 496, 507, 512, 516, 530, 541, 555, 566, 569, 574, 596, 604, 607, 612, 625, 628, 637, 645] \ No newline at end of file +[4, 1, 130, 651, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 207, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 215, 8, 5, 10, 5, 12, 5, 218, 9, 5, 1, 6, 1, 6, 3, 6, 222, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 229, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 234, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 239, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 249, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 255, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 
9, 1, 9, 5, 9, 263, 8, 9, 10, 9, 12, 9, 266, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 276, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 281, 8, 10, 10, 10, 12, 10, 284, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 292, 8, 11, 10, 11, 12, 11, 295, 9, 11, 1, 11, 1, 11, 3, 11, 299, 8, 11, 3, 11, 301, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 312, 8, 13, 10, 13, 12, 13, 315, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 5, 17, 331, 8, 17, 10, 17, 12, 17, 334, 9, 17, 1, 18, 1, 18, 1, 18, 3, 18, 339, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 347, 8, 19, 10, 19, 12, 19, 350, 9, 19, 1, 19, 3, 19, 353, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 358, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 370, 8, 23, 10, 23, 12, 23, 373, 9, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 379, 8, 24, 10, 24, 12, 24, 382, 9, 24, 1, 24, 3, 24, 385, 8, 24, 1, 24, 1, 24, 3, 24, 389, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 396, 8, 26, 1, 26, 1, 26, 3, 26, 400, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 405, 8, 27, 10, 27, 12, 27, 408, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 413, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 418, 8, 29, 10, 29, 12, 29, 421, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 426, 8, 30, 10, 30, 12, 30, 429, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 434, 8, 31, 10, 31, 12, 31, 437, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 444, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 459, 8, 34, 10, 34, 12, 34, 462, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 470, 8, 34, 10, 34, 12, 34, 473, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 481, 8, 34, 10, 34, 12, 34, 484, 9, 34, 1, 34, 1, 34, 3, 34, 488, 8, 34, 1, 35, 1, 35, 3, 35, 492, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 497, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 506, 8, 38, 10, 38, 12, 38, 509, 9, 38, 1, 39, 1, 39, 3, 39, 513, 8, 39, 1, 39, 1, 39, 3, 39, 517, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 529, 8, 42, 10, 42, 12, 42, 532, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 542, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 554, 8, 47, 10, 47, 12, 47, 557, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 567, 8, 50, 1, 51, 3, 51, 570, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 575, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 597, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 603, 8, 58, 10, 58, 12, 58, 606, 9, 58, 3, 58, 608, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 613, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 626, 8, 61, 1, 62, 3, 62, 629, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 638, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 644, 8, 64, 10, 64, 12, 64, 647, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 83, 83, 1, 0, 74, 75, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 
59, 63, 1, 0, 22, 24, 678, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 206, 1, 0, 0, 0, 12, 233, 1, 0, 0, 0, 14, 235, 1, 0, 0, 0, 16, 248, 1, 0, 0, 0, 18, 254, 1, 0, 0, 0, 20, 275, 1, 0, 0, 0, 22, 285, 1, 0, 0, 0, 24, 304, 1, 0, 0, 0, 26, 306, 1, 0, 0, 0, 28, 318, 1, 0, 0, 0, 30, 322, 1, 0, 0, 0, 32, 324, 1, 0, 0, 0, 34, 327, 1, 0, 0, 0, 36, 338, 1, 0, 0, 0, 38, 342, 1, 0, 0, 0, 40, 357, 1, 0, 0, 0, 42, 361, 1, 0, 0, 0, 44, 363, 1, 0, 0, 0, 46, 365, 1, 0, 0, 0, 48, 374, 1, 0, 0, 0, 50, 390, 1, 0, 0, 0, 52, 393, 1, 0, 0, 0, 54, 401, 1, 0, 0, 0, 56, 409, 1, 0, 0, 0, 58, 414, 1, 0, 0, 0, 60, 422, 1, 0, 0, 0, 62, 430, 1, 0, 0, 0, 64, 438, 1, 0, 0, 0, 66, 443, 1, 0, 0, 0, 68, 487, 1, 0, 0, 0, 70, 491, 1, 0, 0, 0, 72, 496, 1, 0, 0, 0, 74, 498, 1, 0, 0, 0, 76, 501, 1, 0, 0, 0, 78, 510, 1, 0, 0, 0, 80, 518, 1, 0, 0, 0, 82, 521, 1, 0, 0, 0, 84, 524, 1, 0, 0, 0, 86, 533, 1, 0, 0, 0, 88, 537, 1, 0, 0, 0, 90, 543, 1, 0, 0, 0, 92, 547, 1, 0, 0, 0, 94, 550, 1, 0, 0, 0, 96, 558, 1, 0, 0, 0, 98, 562, 1, 0, 0, 0, 100, 566, 1, 0, 0, 0, 102, 569, 1, 0, 0, 0, 104, 574, 1, 0, 0, 0, 106, 578, 1, 0, 0, 0, 108, 580, 1, 0, 0, 0, 110, 582, 1, 0, 0, 0, 112, 585, 1, 0, 0, 0, 114, 589, 1, 0, 0, 0, 116, 592, 1, 0, 0, 0, 118, 612, 1, 0, 0, 0, 120, 616, 1, 0, 0, 0, 122, 621, 1, 0, 0, 0, 124, 628, 1, 0, 0, 0, 126, 634, 1, 0, 0, 0, 128, 639, 1, 0, 0, 0, 130, 648, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 29, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 38, 19, 0, 148, 153, 3, 32, 16, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 207, 3, 10, 5, 8, 180, 207, 3, 16, 8, 0, 181, 207, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 207, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 
207, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 177, 1, 0, 0, 0, 206, 180, 1, 0, 0, 0, 206, 181, 1, 0, 0, 0, 206, 182, 1, 0, 0, 0, 206, 198, 1, 0, 0, 0, 206, 205, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 5, 0, 0, 209, 210, 5, 34, 0, 0, 210, 215, 3, 10, 5, 6, 211, 212, 10, 4, 0, 0, 212, 213, 5, 52, 0, 0, 213, 215, 3, 10, 5, 5, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 221, 3, 16, 8, 0, 220, 222, 5, 49, 0, 0, 221, 220, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 5, 47, 0, 0, 224, 225, 3, 106, 53, 0, 225, 234, 1, 0, 0, 0, 226, 228, 3, 16, 8, 0, 227, 229, 5, 49, 0, 0, 228, 227, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 5, 54, 0, 0, 231, 232, 3, 106, 53, 0, 232, 234, 1, 0, 0, 0, 233, 219, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 234, 13, 1, 0, 0, 0, 235, 238, 3, 58, 29, 0, 236, 237, 5, 37, 0, 0, 237, 239, 3, 30, 15, 0, 238, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 241, 5, 38, 0, 0, 241, 242, 3, 68, 34, 0, 242, 15, 1, 0, 0, 0, 243, 249, 3, 18, 9, 0, 244, 245, 3, 18, 9, 0, 245, 246, 3, 108, 54, 0, 246, 247, 3, 18, 9, 0, 247, 249, 1, 0, 0, 0, 248, 243, 1, 0, 0, 0, 248, 244, 1, 0, 0, 0, 249, 17, 1, 0, 0, 0, 250, 251, 6, 9, -1, 0, 251, 255, 3, 20, 10, 0, 252, 253, 7, 0, 0, 0, 253, 255, 3, 18, 9, 3, 254, 250, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 264, 1, 0, 0, 0, 256, 257, 10, 2, 0, 0, 257, 258, 7, 1, 0, 0, 258, 263, 3, 18, 9, 3, 259, 260, 10, 1, 0, 0, 260, 261, 7, 0, 0, 0, 261, 263, 3, 18, 9, 2, 262, 256, 1, 0, 0, 0, 262, 259, 1, 0, 0, 0, 263, 266, 1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 19, 1, 0, 0, 0, 266, 264, 1, 0, 0, 0, 267, 268, 6, 10, -1, 0, 268, 276, 3, 68, 34, 0, 269, 276, 3, 58, 29, 0, 270, 276, 3, 22, 11, 0, 271, 272, 5, 48, 0, 0, 272, 273, 3, 10, 5, 0, 273, 274, 5, 55, 0, 0, 274, 276, 1, 0, 0, 0, 275, 267, 1, 0, 0, 0, 275, 269, 1, 0, 0, 0, 275, 270, 1, 0, 0, 0, 275, 271, 1, 0, 0, 0, 276, 282, 1, 0, 0, 0, 277, 278, 10, 1, 0, 0, 278, 279, 5, 37, 0, 0, 279, 281, 3, 30, 15, 0, 280, 277, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 282, 283, 1, 0, 0, 0, 283, 21, 1, 0, 0, 0, 284, 282, 1, 0, 0, 0, 285, 286, 3, 24, 12, 0, 286, 300, 5, 48, 0, 0, 287, 301, 5, 66, 0, 0, 288, 293, 3, 10, 5, 0, 289, 290, 5, 39, 0, 0, 290, 292, 3, 10, 5, 0, 291, 289, 1, 0, 0, 0, 292, 295, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 298, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 296, 297, 5, 39, 0, 0, 297, 299, 3, 26, 13, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 301, 1, 0, 0, 0, 300, 287, 1, 0, 0, 0, 300, 288, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 303, 5, 55, 0, 0, 303, 23, 1, 0, 0, 0, 304, 305, 3, 72, 36, 0, 305, 25, 1, 0, 0, 0, 306, 307, 4, 13, 10, 0, 307, 308, 5, 69, 0, 0, 308, 313, 3, 28, 14, 0, 309, 310, 5, 39, 0, 0, 310, 312, 3, 28, 14, 0, 311, 309, 1, 0, 0, 0, 312, 315, 1, 0, 0, 0, 313, 311, 1, 0, 0, 0, 313, 314, 1, 0, 0, 0, 314, 316, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 316, 317, 5, 70, 0, 0, 317, 27, 1, 0, 0, 0, 318, 319, 3, 106, 53, 0, 319, 320, 5, 38, 0, 0, 320, 321, 3, 68, 34, 0, 321, 29, 1, 0, 0, 0, 322, 323, 3, 64, 32, 0, 323, 31, 1, 0, 0, 0, 324, 325, 5, 12, 0, 0, 325, 326, 3, 34, 17, 0, 326, 33, 1, 0, 0, 0, 327, 332, 3, 36, 18, 0, 328, 329, 5, 39, 0, 0, 329, 331, 3, 36, 18, 0, 330, 328, 1, 0, 0, 0, 331, 334, 1, 0, 0, 0, 332, 330, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 35, 1, 0, 0, 0, 334, 332, 1, 0, 0, 0, 335, 336, 3, 58, 29, 0, 336, 337, 5, 36, 0, 0, 
337, 339, 1, 0, 0, 0, 338, 335, 1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 340, 1, 0, 0, 0, 340, 341, 3, 10, 5, 0, 341, 37, 1, 0, 0, 0, 342, 343, 5, 6, 0, 0, 343, 348, 3, 40, 20, 0, 344, 345, 5, 39, 0, 0, 345, 347, 3, 40, 20, 0, 346, 344, 1, 0, 0, 0, 347, 350, 1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 348, 349, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351, 353, 3, 46, 23, 0, 352, 351, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 39, 1, 0, 0, 0, 354, 355, 3, 42, 21, 0, 355, 356, 5, 38, 0, 0, 356, 358, 1, 0, 0, 0, 357, 354, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 360, 3, 44, 22, 0, 360, 41, 1, 0, 0, 0, 361, 362, 5, 83, 0, 0, 362, 43, 1, 0, 0, 0, 363, 364, 7, 2, 0, 0, 364, 45, 1, 0, 0, 0, 365, 366, 5, 82, 0, 0, 366, 371, 5, 83, 0, 0, 367, 368, 5, 39, 0, 0, 368, 370, 5, 83, 0, 0, 369, 367, 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 371, 372, 1, 0, 0, 0, 372, 47, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 374, 375, 5, 19, 0, 0, 375, 380, 3, 40, 20, 0, 376, 377, 5, 39, 0, 0, 377, 379, 3, 40, 20, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 385, 3, 54, 27, 0, 384, 383, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 388, 1, 0, 0, 0, 386, 387, 5, 33, 0, 0, 387, 389, 3, 34, 17, 0, 388, 386, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 49, 1, 0, 0, 0, 390, 391, 5, 4, 0, 0, 391, 392, 3, 34, 17, 0, 392, 51, 1, 0, 0, 0, 393, 395, 5, 15, 0, 0, 394, 396, 3, 54, 27, 0, 395, 394, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 399, 1, 0, 0, 0, 397, 398, 5, 33, 0, 0, 398, 400, 3, 34, 17, 0, 399, 397, 1, 0, 0, 0, 399, 400, 1, 0, 0, 0, 400, 53, 1, 0, 0, 0, 401, 406, 3, 56, 28, 0, 402, 403, 5, 39, 0, 0, 403, 405, 3, 56, 28, 0, 404, 402, 1, 0, 0, 0, 405, 408, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 406, 407, 1, 0, 0, 0, 407, 55, 1, 0, 0, 0, 408, 406, 1, 0, 0, 0, 409, 412, 3, 36, 18, 0, 410, 411, 5, 16, 0, 0, 411, 413, 3, 10, 5, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 57, 1, 0, 0, 0, 414, 419, 3, 72, 36, 0, 415, 416, 5, 41, 0, 0, 416, 418, 3, 72, 36, 0, 417, 415, 1, 0, 0, 0, 418, 421, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 419, 420, 1, 0, 0, 0, 420, 59, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 422, 427, 3, 66, 33, 0, 423, 424, 5, 41, 0, 0, 424, 426, 3, 66, 33, 0, 425, 423, 1, 0, 0, 0, 426, 429, 1, 0, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 61, 1, 0, 0, 0, 429, 427, 1, 0, 0, 0, 430, 435, 3, 60, 30, 0, 431, 432, 5, 39, 0, 0, 432, 434, 3, 60, 30, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 63, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 7, 3, 0, 0, 439, 65, 1, 0, 0, 0, 440, 444, 5, 87, 0, 0, 441, 442, 4, 33, 11, 0, 442, 444, 3, 70, 35, 0, 443, 440, 1, 0, 0, 0, 443, 441, 1, 0, 0, 0, 444, 67, 1, 0, 0, 0, 445, 488, 5, 50, 0, 0, 446, 447, 3, 104, 52, 0, 447, 448, 5, 74, 0, 0, 448, 488, 1, 0, 0, 0, 449, 488, 3, 102, 51, 0, 450, 488, 3, 104, 52, 0, 451, 488, 3, 98, 49, 0, 452, 488, 3, 70, 35, 0, 453, 488, 3, 106, 53, 0, 454, 455, 5, 72, 0, 0, 455, 460, 3, 100, 50, 0, 456, 457, 5, 39, 0, 0, 457, 459, 3, 100, 50, 0, 458, 456, 1, 0, 0, 0, 459, 462, 1, 0, 0, 0, 460, 458, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 463, 1, 0, 0, 0, 462, 460, 1, 0, 0, 0, 463, 464, 5, 73, 0, 0, 464, 488, 1, 0, 0, 0, 465, 466, 5, 72, 0, 0, 466, 471, 3, 98, 49, 0, 467, 468, 5, 39, 0, 0, 468, 470, 3, 98, 49, 0, 469, 467, 1, 0, 0, 0, 470, 473, 1, 0, 0, 0, 471, 469, 1, 0, 0, 0, 471, 472, 1, 0, 0, 0, 472, 474, 1, 0, 0, 0, 473, 471, 1, 0, 0, 0, 474, 475, 5, 73, 0, 0, 475, 488, 1, 0, 0, 0, 476, 477, 5, 72, 
0, 0, 477, 482, 3, 106, 53, 0, 478, 479, 5, 39, 0, 0, 479, 481, 3, 106, 53, 0, 480, 478, 1, 0, 0, 0, 481, 484, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 485, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 486, 5, 73, 0, 0, 486, 488, 1, 0, 0, 0, 487, 445, 1, 0, 0, 0, 487, 446, 1, 0, 0, 0, 487, 449, 1, 0, 0, 0, 487, 450, 1, 0, 0, 0, 487, 451, 1, 0, 0, 0, 487, 452, 1, 0, 0, 0, 487, 453, 1, 0, 0, 0, 487, 454, 1, 0, 0, 0, 487, 465, 1, 0, 0, 0, 487, 476, 1, 0, 0, 0, 488, 69, 1, 0, 0, 0, 489, 492, 5, 53, 0, 0, 490, 492, 5, 71, 0, 0, 491, 489, 1, 0, 0, 0, 491, 490, 1, 0, 0, 0, 492, 71, 1, 0, 0, 0, 493, 497, 3, 64, 32, 0, 494, 495, 4, 36, 12, 0, 495, 497, 3, 70, 35, 0, 496, 493, 1, 0, 0, 0, 496, 494, 1, 0, 0, 0, 497, 73, 1, 0, 0, 0, 498, 499, 5, 9, 0, 0, 499, 500, 5, 31, 0, 0, 500, 75, 1, 0, 0, 0, 501, 502, 5, 14, 0, 0, 502, 507, 3, 78, 39, 0, 503, 504, 5, 39, 0, 0, 504, 506, 3, 78, 39, 0, 505, 503, 1, 0, 0, 0, 506, 509, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 77, 1, 0, 0, 0, 509, 507, 1, 0, 0, 0, 510, 512, 3, 10, 5, 0, 511, 513, 7, 4, 0, 0, 512, 511, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 516, 1, 0, 0, 0, 514, 515, 5, 51, 0, 0, 515, 517, 7, 5, 0, 0, 516, 514, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 79, 1, 0, 0, 0, 518, 519, 5, 8, 0, 0, 519, 520, 3, 62, 31, 0, 520, 81, 1, 0, 0, 0, 521, 522, 5, 2, 0, 0, 522, 523, 3, 62, 31, 0, 523, 83, 1, 0, 0, 0, 524, 525, 5, 11, 0, 0, 525, 530, 3, 86, 43, 0, 526, 527, 5, 39, 0, 0, 527, 529, 3, 86, 43, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 85, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 534, 3, 60, 30, 0, 534, 535, 5, 91, 0, 0, 535, 536, 3, 60, 30, 0, 536, 87, 1, 0, 0, 0, 537, 538, 5, 1, 0, 0, 538, 539, 3, 20, 10, 0, 539, 541, 3, 106, 53, 0, 540, 542, 3, 94, 47, 0, 541, 540, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 89, 1, 0, 0, 0, 543, 544, 5, 7, 0, 0, 544, 545, 3, 20, 10, 0, 545, 546, 3, 106, 53, 0, 546, 91, 1, 0, 0, 0, 547, 548, 5, 10, 0, 0, 548, 549, 3, 58, 29, 0, 549, 93, 1, 0, 0, 0, 550, 555, 3, 96, 48, 0, 551, 552, 5, 39, 0, 0, 552, 554, 3, 96, 48, 0, 553, 551, 1, 0, 0, 0, 554, 557, 1, 0, 0, 0, 555, 553, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 95, 1, 0, 0, 0, 557, 555, 1, 0, 0, 0, 558, 559, 3, 64, 32, 0, 559, 560, 5, 36, 0, 0, 560, 561, 3, 68, 34, 0, 561, 97, 1, 0, 0, 0, 562, 563, 7, 6, 0, 0, 563, 99, 1, 0, 0, 0, 564, 567, 3, 102, 51, 0, 565, 567, 3, 104, 52, 0, 566, 564, 1, 0, 0, 0, 566, 565, 1, 0, 0, 0, 567, 101, 1, 0, 0, 0, 568, 570, 7, 0, 0, 0, 569, 568, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 5, 32, 0, 0, 572, 103, 1, 0, 0, 0, 573, 575, 7, 0, 0, 0, 574, 573, 1, 0, 0, 0, 574, 575, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 577, 5, 31, 0, 0, 577, 105, 1, 0, 0, 0, 578, 579, 5, 30, 0, 0, 579, 107, 1, 0, 0, 0, 580, 581, 7, 7, 0, 0, 581, 109, 1, 0, 0, 0, 582, 583, 5, 5, 0, 0, 583, 584, 3, 112, 56, 0, 584, 111, 1, 0, 0, 0, 585, 586, 5, 72, 0, 0, 586, 587, 3, 2, 1, 0, 587, 588, 5, 73, 0, 0, 588, 113, 1, 0, 0, 0, 589, 590, 5, 13, 0, 0, 590, 591, 5, 107, 0, 0, 591, 115, 1, 0, 0, 0, 592, 593, 5, 3, 0, 0, 593, 596, 5, 97, 0, 0, 594, 595, 5, 95, 0, 0, 595, 597, 3, 60, 30, 0, 596, 594, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 607, 1, 0, 0, 0, 598, 599, 5, 96, 0, 0, 599, 604, 3, 118, 59, 0, 600, 601, 5, 39, 0, 0, 601, 603, 3, 118, 59, 0, 602, 600, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 608, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 598, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 608, 117, 1, 0, 0, 0, 609, 610, 3, 60, 30, 0, 610, 611, 5, 
36, 0, 0, 611, 613, 1, 0, 0, 0, 612, 609, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 3, 60, 30, 0, 615, 119, 1, 0, 0, 0, 616, 617, 5, 18, 0, 0, 617, 618, 3, 40, 20, 0, 618, 619, 5, 95, 0, 0, 619, 620, 3, 62, 31, 0, 620, 121, 1, 0, 0, 0, 621, 622, 5, 17, 0, 0, 622, 625, 3, 54, 27, 0, 623, 624, 5, 33, 0, 0, 624, 626, 3, 34, 17, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 123, 1, 0, 0, 0, 627, 629, 7, 8, 0, 0, 628, 627, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 631, 5, 20, 0, 0, 631, 632, 3, 126, 63, 0, 632, 633, 3, 128, 64, 0, 633, 125, 1, 0, 0, 0, 634, 637, 3, 40, 20, 0, 635, 636, 5, 91, 0, 0, 636, 638, 3, 64, 32, 0, 637, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 127, 1, 0, 0, 0, 639, 640, 5, 95, 0, 0, 640, 645, 3, 130, 65, 0, 641, 642, 5, 39, 0, 0, 642, 644, 3, 130, 65, 0, 643, 641, 1, 0, 0, 0, 644, 647, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 129, 1, 0, 0, 0, 647, 645, 1, 0, 0, 0, 648, 649, 3, 16, 8, 0, 649, 131, 1, 0, 0, 0, 63, 143, 152, 172, 184, 193, 201, 206, 214, 216, 221, 228, 233, 238, 248, 254, 262, 264, 275, 282, 293, 298, 300, 313, 332, 338, 348, 352, 357, 371, 380, 384, 388, 395, 399, 406, 412, 419, 427, 435, 443, 460, 471, 482, 487, 491, 496, 507, 512, 516, 530, 541, 555, 566, 569, 574, 596, 604, 607, 612, 625, 628, 637, 645] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index beb14e1588472..35ace5a34f73f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -5540,15 +5540,15 @@ public final JoinCommandContext joinCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class JoinTargetContext extends ParserRuleContext { - public IdentifierContext index; + public IndexPatternContext index; public IdentifierContext alias; - public List identifier() { - return getRuleContexts(IdentifierContext.class); - } - public IdentifierContext identifier(int i) { - return getRuleContext(IdentifierContext.class,i); + public IndexPatternContext indexPattern() { + return getRuleContext(IndexPatternContext.class,0); } public TerminalNode AS() { return getToken(EsqlBaseParser.AS, 0); } + public IdentifierContext identifier() { + return getRuleContext(IdentifierContext.class,0); + } @SuppressWarnings("this-escape") public JoinTargetContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -5577,7 +5577,7 @@ public final JoinTargetContext joinTarget() throws RecognitionException { enterOuterAlt(_localctx, 1); { setState(634); - ((JoinTargetContext)_localctx).index = identifier(); + ((JoinTargetContext)_localctx).index = indexPattern(); setState(637); _errHandler.sync(this); _la = _input.LA(1); @@ -6216,7 +6216,7 @@ private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _loca "\u0000\u0000\u0274\u0273\u0001\u0000\u0000\u0000\u0274\u0275\u0001\u0000"+ "\u0000\u0000\u0275\u0276\u0001\u0000\u0000\u0000\u0276\u0277\u0005\u0014"+ "\u0000\u0000\u0277\u0278\u0003~?\u0000\u0278\u0279\u0003\u0080@\u0000"+ - "\u0279}\u0001\u0000\u0000\u0000\u027a\u027d\u0003@ \u0000\u027b\u027c"+ + "\u0279}\u0001\u0000\u0000\u0000\u027a\u027d\u0003(\u0014\u0000\u027b\u027c"+ "\u0005[\u0000\u0000\u027c\u027e\u0003@ \u0000\u027d\u027b\u0001\u0000"+ 
"\u0000\u0000\u027d\u027e\u0001\u0000\u0000\u0000\u027e\u007f\u0001\u0000"+ "\u0000\u0000\u027f\u0280\u0005_\u0000\u0000\u0280\u0285\u0003\u0082A\u0000"+ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index cc1ac6ac31385..e7e3527f6b4aa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -523,9 +523,14 @@ public PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { } var target = ctx.joinTarget(); + var rightPattern = visitIndexPattern(List.of(target.index)); + if (rightPattern.contains(WILDCARD)) { + throw new ParsingException(source(target), "invalid index pattern [{}], * is not allowed in LOOKUP JOIN", rightPattern); + } + UnresolvedRelation right = new UnresolvedRelation( source(target), - new TableIdentifier(source(target.index), null, visitIdentifier(target.index)), + new TableIdentifier(source(target.index), null, rightPattern), false, emptyList(), IndexMode.LOOKUP, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index ed1ee71ff1968..89150d6a52534 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -268,7 +268,7 @@ public final void test() throws Throwable { ); assumeFalse( "lookup join disabled for csv tests", - testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V11.capabilityName()) + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName()) ); assumeFalse( "can't use TERM function in csv tests", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java new file mode 100644 index 0000000000000..a1ae1f43ef877 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql; + +import org.elasticsearch.test.ESTestCase; + +public class IdentifierGenerator { + + /** + * Generate random identifier that could be used as a column name + */ + public static String randomIdentifier() { + return ESTestCase.randomIdentifier(); + } + + /** + * Generates one or several coma separated index patterns + */ + public static String randomIndexPatterns(Feature... features) { + return maybeQuote(String.join(",", ESTestCase.randomList(1, 5, () -> randomIndexPattern(features)))); + } + + /** + * Generates a random valid index pattern. + * You may force list of features to be included or excluded using the arguments, eg {@code randomIndexPattern(PATTERN, not(HIDDEN))}. + * Identifier could be an index or alias. It might be hidden or remote or use a pattern. + * See @link valid index patterns + */ + public static String randomIndexPattern(Feature... 
features) { + var validFirstCharacters = "abcdefghijklmnopqrstuvwxyz0123456789!'$^&"; + var validCharacters = validFirstCharacters + "+-_."; + + var index = new StringBuilder(); + if (canAdd(Features.HIDDEN_INDEX, features)) { + index.append('.'); + } + index.append(randomCharacterFrom(validFirstCharacters)); + for (int i = 0; i < ESTestCase.randomIntBetween(1, 100); i++) { + index.append(randomCharacterFrom(validCharacters)); + } + if (canAdd(Features.WILDCARD_PATTERN, features)) { + if (ESTestCase.randomBoolean()) { + index.append('*'); + } else { + index.insert(ESTestCase.randomIntBetween(0, index.length() - 1), '*'); + } + } else if (canAdd(Features.DATE_MATH, features)) { + // https://www.elastic.co/guide/en/elasticsearch/reference/8.17/api-conventions.html#api-date-math-index-names + index.insert(0, "<"); + index.append("-{now/"); + index.append(ESTestCase.randomFrom("d", "M", "M-1M")); + if (ESTestCase.randomBoolean()) { + index.append("{").append(ESTestCase.randomFrom("yyyy.MM", "yyyy.MM.dd")).append("}"); + } + index.append("}>"); + } + + var pattern = maybeQuote(index.toString()); + if (canAdd(Features.CROSS_CLUSTER, features)) { + var cluster = randomIdentifier(); + pattern = maybeQuote(cluster + ":" + pattern); + } + return pattern; + } + + private static char randomCharacterFrom(String str) { + return str.charAt(ESTestCase.randomInt(str.length() - 1)); + } + + public interface Feature {} + + public enum Features implements Feature { + CROSS_CLUSTER, + WILDCARD_PATTERN, + DATE_MATH, + HIDDEN_INDEX + } + + private record ExcludedFeature(Feature feature) implements Feature {} + + public static Feature without(Feature feature) { + return new ExcludedFeature(feature); + } + + private static boolean canAdd(Feature feature, Feature... features) { + for (var f : features) { + if (f.equals(feature)) { + return true; + } + if (f.equals(without(feature))) { + return false; + } + } + return ESTestCase.randomBoolean(); + } + + public static String maybeQuote(String term) { + if (term.contains("\"")) { + return term; + } + return switch (ESTestCase.randomIntBetween(0, 5)) { + case 0 -> "\"" + term + "\""; + case 1 -> "\"\"\"" + term + "\"\"\""; + default -> term;// no quotes are more likely + }; + } + + public static String unquoteIndexPattern(String term) { + return term.replace("\"\"\"", "").replace("\"", ""); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 1c3b3a5c463e7..48366282e4e10 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -2143,7 +2143,7 @@ public void testLookupMatchTypeWrong() { } public void testLookupJoinUnknownIndex() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); String errorMessage = "Unknown index [foobar]"; IndexResolution missingLookupIndex = IndexResolution.invalid(errorMessage); @@ -2172,7 +2172,7 @@ public void testLookupJoinUnknownIndex() { } public void testLookupJoinUnknownField() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); String query = "FROM test | 
LOOKUP JOIN languages_lookup ON last_name"; String errorMessage = "1:45: Unknown column [last_name] in right side of join"; @@ -2195,7 +2195,7 @@ public void testLookupJoinUnknownField() { } public void testMultipleLookupJoinsGiveDifferentAttributes() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); // The field attributes that get contributed by different LOOKUP JOIN commands must have different name ids, // even if they have the same names. Otherwise, things like dependency analysis - like in PruneColumns - cannot work based on @@ -2225,7 +2225,7 @@ public void testMultipleLookupJoinsGiveDifferentAttributes() { } public void testLookupJoinIndexMode() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); var indexResolution = AnalyzerTestUtils.expandedDefaultIndexResolution(); var lookupResolution = AnalyzerTestUtils.defaultLookupResolution(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java index 2ee6cf6136114..859e1d788ff06 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java @@ -113,7 +113,7 @@ public void testTooBigQuery() { } public void testJoinOnConstant() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertEquals( "1:55: JOIN ON clause only supports fields at the moment, found [123]", error("row languages = 1, gender = \"f\" | lookup join test on 123") @@ -129,7 +129,7 @@ public void testJoinOnConstant() { } public void testJoinOnMultipleFields() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertEquals( "1:35: JOIN ON clause only supports one field at the moment, found [2]", error("row languages = 1, gender = \"f\" | lookup join test on gender, languages") @@ -137,7 +137,7 @@ public void testJoinOnMultipleFields() { } public void testJoinTwiceOnTheSameField() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertEquals( "1:35: JOIN ON clause only supports one field at the moment, found [2]", error("row languages = 1, gender = \"f\" | lookup join test on languages, languages") @@ -145,7 +145,7 @@ public void testJoinTwiceOnTheSameField() { } public void testJoinTwiceOnTheSameField_TwoLookups() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertEquals( "1:80: JOIN ON clause only supports one field at the moment, found [2]", error("row languages = 1, gender = \"f\" | lookup join test on languages | eval x = 1 | lookup join test on gender, gender") 
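The LogicalPlanBuilder change earlier in this patch rejects wildcards in the LOOKUP JOIN target, and the StatementParserTests and YAML tests later in the patch pin the resulting error message. A minimal standalone sketch of that validation rule follows; it uses a hypothetical helper class rather than the real ParsingException and source-tracking plumbing, and is only meant to illustrate the check itself.

// Minimal sketch of the LOOKUP JOIN index-pattern check; hypothetical helper class,
// not the actual LogicalPlanBuilder code (which raises ParsingException with query positions).
final class LookupJoinPatternCheck {

    static String validateRightPattern(String rightPattern) {
        // A LOOKUP JOIN must target exactly one concrete lookup-mode index, so '*' is rejected.
        if (rightPattern.contains("*")) {
            throw new IllegalArgumentException(
                "invalid index pattern [" + rightPattern + "], * is not allowed in LOOKUP JOIN"
            );
        }
        return rightPattern;
    }

    public static void main(String[] args) {
        System.out.println(validateRightPattern("languages_lookup")); // accepted
        try {
            validateRightPattern("test-lookup-*"); // rejected, mirroring the YAML tests in this patch
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}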
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index e3214411698b0..c9950bfd34f2c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1983,7 +1983,7 @@ public void testSortByAggregate() { } public void testLookupJoinDataTypeMismatch() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); query("FROM test | EVAL language_code = languages | LOOKUP JOIN languages_lookup ON language_code"); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 95acc84143614..06a08c2b65936 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -4931,7 +4931,7 @@ public void testPlanSanityCheck() throws Exception { } public void testPlanSanityCheckWithBinaryPlans() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); var plan = optimizedPlan(""" FROM test @@ -6006,7 +6006,7 @@ public void testLookupStats() { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownFilterOnJoinKeyWithRename() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); String query = """ FROM test @@ -6048,7 +6048,7 @@ public void testLookupJoinPushDownFilterOnJoinKeyWithRename() { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownFilterOnLeftSideField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); String query = """ FROM test @@ -6091,7 +6091,7 @@ public void testLookupJoinPushDownFilterOnLeftSideField() { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownDisabledForLookupField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); String query = """ FROM test @@ -6135,7 +6135,7 @@ public void testLookupJoinPushDownDisabledForLookupField() { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20] */ public void testLookupJoinPushDownSeparatedForConjunctionBetweenLeftAndRightField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); String query = """ FROM test @@ -6186,7 +6186,7 @@ public void testLookupJoinPushDownSeparatedForConjunctionBetweenLeftAndRightFiel * 
\_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20] */ public void testLookupJoinPushDownDisabledForDisjunctionBetweenLeftAndRightField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); String query = """ FROM test @@ -6236,7 +6236,7 @@ public void testLookupJoinPushDownDisabledForDisjunctionBetweenLeftAndRightField * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18] */ public void testLookupJoinKeepNoLookupFields() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); String commandDiscardingFields = randomBoolean() ? "| KEEP languages" : """ | DROP _meta_field, emp_no, first_name, gender, language_code, @@ -6275,7 +6275,7 @@ public void testLookupJoinKeepNoLookupFields() { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#20, language_name{f}#21] */ public void testMultipleLookupShadowing() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); String query = """ FROM test diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 1eb7f43ee72ba..a51ad384d9488 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -2649,7 +2649,7 @@ public void testVerifierOnMissingReferences() { } public void testVerifierOnMissingReferencesWithBinaryPlans() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); // Do not assert serialization: // This will have a LookupJoinExec, which is not serializable because it doesn't leave the coordinator. 
@@ -7336,7 +7336,7 @@ public void testLookupThenTopN() { } public void testLookupJoinFieldLoading() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); TestDataSource data = dataSetWithLookupIndices(Map.of("lookup_index", List.of("first_name", "foo", "bar", "baz"))); @@ -7413,7 +7413,7 @@ public void testLookupJoinFieldLoading() throws Exception { } public void testLookupJoinFieldLoadingTwoLookups() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); TestDataSource data = dataSetWithLookupIndices( Map.of( @@ -7467,7 +7467,7 @@ public void testLookupJoinFieldLoadingTwoLookups() throws Exception { @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/119082") public void testLookupJoinFieldLoadingTwoLookupsProjectInBetween() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); TestDataSource data = dataSetWithLookupIndices( Map.of( @@ -7508,7 +7508,7 @@ public void testLookupJoinFieldLoadingTwoLookupsProjectInBetween() throws Except @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/118778") public void testLookupJoinFieldLoadingDropAllFields() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); TestDataSource data = dataSetWithLookupIndices(Map.of("lookup_index", List.of("first_name", "foo", "bar", "baz"))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 3b7ae5adcd8b2..ac41c7b0f52bc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -62,6 +62,8 @@ import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; +import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin; import java.util.ArrayList; import java.util.HashMap; @@ -76,6 +78,11 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.paramAsIdentifier; import static org.elasticsearch.xpack.esql.EsqlTestUtils.paramAsPattern; import static org.elasticsearch.xpack.esql.EsqlTestUtils.referenceAttribute; +import static org.elasticsearch.xpack.esql.IdentifierGenerator.Features.WILDCARD_PATTERN; +import static org.elasticsearch.xpack.esql.IdentifierGenerator.randomIndexPattern; +import static org.elasticsearch.xpack.esql.IdentifierGenerator.randomIndexPatterns; +import static org.elasticsearch.xpack.esql.IdentifierGenerator.unquoteIndexPattern; +import static org.elasticsearch.xpack.esql.IdentifierGenerator.without; import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; @@ -2939,4 
+2946,30 @@ public void testNamedFunctionArgumentWithUnsupportedNamedParameterTypes() { ); } } + + public void testValidJoinPattern() { + var basePattern = randomIndexPatterns(); + var joinPattern = randomIndexPattern(without(WILDCARD_PATTERN)); + var onField = randomIdentifier(); + var type = randomFrom("", "LOOKUP "); + + var plan = statement("FROM " + basePattern + " | " + type + " JOIN " + joinPattern + " ON " + onField); + + var join = as(plan, LookupJoin.class); + assertThat(as(join.left(), UnresolvedRelation.class).table().index(), equalTo(unquoteIndexPattern(basePattern))); + assertThat(as(join.right(), UnresolvedRelation.class).table().index(), equalTo(unquoteIndexPattern(joinPattern))); + + var joinType = as(join.config().type(), JoinTypes.UsingJoinType.class); + assertThat(joinType.columns(), hasSize(1)); + assertThat(as(joinType.columns().getFirst(), UnresolvedAttribute.class).name(), equalTo(onField)); + assertThat(joinType.coreJoin().joinName(), equalTo("LEFT OUTER")); + } + + public void testInvalidJoinPatterns() { + var joinPattern = randomIndexPattern(WILDCARD_PATTERN); + expectError( + "FROM " + randomIndexPatterns() + " | JOIN " + joinPattern + " ON " + randomIdentifier(), + "invalid index pattern [" + unquoteIndexPattern(joinPattern) + "], * is not allowed in LOOKUP JOIN" + ); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java index b1c9030db7a43..e7ea479d199d8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java @@ -1365,7 +1365,7 @@ public void testMetrics() { } public void testLookupJoin() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertFieldNames( "FROM employees | KEEP languages | RENAME languages AS language_code | LOOKUP JOIN languages_lookup ON language_code", Set.of("languages", "languages.*", "language_code", "language_code.*"), @@ -1374,7 +1374,7 @@ public void testLookupJoin() { } public void testLookupJoinKeep() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertFieldNames( """ FROM employees @@ -1388,7 +1388,7 @@ public void testLookupJoinKeep() { } public void testLookupJoinKeepWildcard() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertFieldNames( """ FROM employees @@ -1402,7 +1402,7 @@ public void testLookupJoinKeepWildcard() { } public void testMultiLookupJoin() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1415,7 +1415,7 @@ public void testMultiLookupJoin() { } public void testMultiLookupJoinKeepBefore() { - assumeTrue("LOOKUP JOIN available as snapshot only", 
EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1429,7 +1429,7 @@ public void testMultiLookupJoinKeepBefore() { } public void testMultiLookupJoinKeepBetween() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1454,7 +1454,7 @@ public void testMultiLookupJoinKeepBetween() { } public void testMultiLookupJoinKeepAfter() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1481,7 +1481,7 @@ public void testMultiLookupJoinKeepAfter() { } public void testMultiLookupJoinKeepAfterWildcard() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1495,7 +1495,7 @@ public void testMultiLookupJoinKeepAfterWildcard() { } public void testMultiLookupJoinSameIndex() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1509,7 +1509,7 @@ public void testMultiLookupJoinSameIndex() { } public void testMultiLookupJoinSameIndexKeepBefore() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1524,7 +1524,7 @@ public void testMultiLookupJoinSameIndexKeepBefore() { } public void testMultiLookupJoinSameIndexKeepBetween() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1550,7 +1550,7 @@ public void testMultiLookupJoinSameIndexKeepBetween() { } public void testMultiLookupJoinSameIndexKeepAfter() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); assertFieldNames( """ FROM sample_data diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml index 16125df6384c3..e8c9df0d3287e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml @@ -6,7 +6,7 @@ setup: - method: POST path: /_query parameters: [] - capabilities: [join_lookup_v11] + capabilities: [join_lookup_v12] reason: "uses LOOKUP JOIN" - do: indices.create: @@ -81,7 +81,7 @@ basic: - do: esql.query: body: - query: 'FROM test | SORT key | LOOKUP JOIN `test-lookup-1` ON key | LIMIT 3' + query: 
'FROM test | SORT key | LOOKUP JOIN test-lookup-1 ON key | LIMIT 3' - match: {columns.0.name: "key"} - match: {columns.0.type: "long"} @@ -95,7 +95,7 @@ non-lookup index: - do: esql.query: body: - query: 'FROM test-lookup-1 | SORT key | LOOKUP JOIN `test` ON key | LIMIT 3' + query: 'FROM test-lookup-1 | SORT key | LOOKUP JOIN test ON key | LIMIT 3' catch: "bad_request" - match: { error.type: "verification_exception" } @@ -106,7 +106,7 @@ non-lookup index: - do: esql.query: body: - query: 'FROM test | SORT key | LOOKUP JOIN `test-lookup-alias` ON key | LIMIT 3' + query: 'FROM test | SORT key | LOOKUP JOIN test-lookup-alias ON key | LIMIT 3' - match: {columns.0.name: "key"} - match: {columns.0.type: "long"} @@ -120,7 +120,7 @@ alias-repeated-alias: - do: esql.query: body: - query: 'FROM test-lookup-alias | SORT key | LOOKUP JOIN `test-lookup-alias` ON key | LIMIT 3' + query: 'FROM test-lookup-alias | SORT key | LOOKUP JOIN test-lookup-alias ON key | LIMIT 3' - match: {columns.0.name: "key"} - match: {columns.0.type: "long"} @@ -134,7 +134,7 @@ alias-repeated-index: - do: esql.query: body: - query: 'FROM test-lookup-1 | SORT key | LOOKUP JOIN `test-lookup-alias` ON key | LIMIT 3' + query: 'FROM test-lookup-1 | SORT key | LOOKUP JOIN test-lookup-alias ON key | LIMIT 3' - match: {columns.0.name: "key"} - match: {columns.0.type: "long"} @@ -148,7 +148,7 @@ alias-pattern-multiple: - do: esql.query: body: - query: 'FROM test-lookup-1 | LOOKUP JOIN `test-lookup-alias-pattern-multiple` ON key' + query: 'FROM test-lookup-1 | LOOKUP JOIN test-lookup-alias-pattern-multiple ON key' catch: "bad_request" - match: { error.type: "verification_exception" } @@ -159,7 +159,7 @@ alias-pattern-single: - do: esql.query: body: - query: 'FROM test | SORT key | LOOKUP JOIN `test-lookup-alias-pattern-single` ON key | LIMIT 3' + query: 'FROM test | SORT key | LOOKUP JOIN test-lookup-alias-pattern-single ON key | LIMIT 3' - match: {columns.0.name: "key"} - match: {columns.0.type: "long"} @@ -167,3 +167,25 @@ alias-pattern-single: - match: {columns.1.type: "keyword"} - match: {values.0: [1, "cyan"]} - match: {values.1: [2, "yellow"]} + +--- +pattern-multiple: + - do: + esql.query: + body: + query: 'FROM test-lookup-1 | LOOKUP JOIN test-lookup-* ON key' + catch: "bad_request" + + - match: { error.type: "parsing_exception" } + - contains: { error.reason: "line 1:34: invalid index pattern [test-lookup-*], * is not allowed in LOOKUP JOIN" } + +--- +pattern-single: + - do: + esql.query: + body: + query: 'FROM test | SORT key | LOOKUP JOIN test-lookup-1* ON key | LIMIT 3' + catch: "bad_request" + + - match: { error.type: "parsing_exception" } + - contains: { error.reason: "line 1:36: invalid index pattern [test-lookup-1*], * is not allowed in LOOKUP JOIN" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml index 7d1b3a90c6081..ab1ab0ff5d88a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml @@ -6,7 +6,7 @@ setup: - method: POST path: /_query parameters: [] - capabilities: [lookup_join_text, join_lookup_v11] + capabilities: [lookup_join_text, join_lookup_v12] reason: "uses LOOKUP JOIN" - do: indices.create: @@ -69,7 +69,7 @@ keyword-keyword: - do: esql.query: body: - query: 'FROM test | SORT color | LOOKUP JOIN `test-lookup` ON 
color.keyword | LIMIT 3' + query: 'FROM test | SORT color | LOOKUP JOIN test-lookup ON color.keyword | LIMIT 3' - length: { columns: 4 } - length: { values: 3 } @@ -90,7 +90,7 @@ text-keyword: - do: esql.query: body: - query: 'FROM test | SORT color | RENAME color AS x | EVAL color.keyword = x | LOOKUP JOIN `test-lookup` ON color.keyword | LIMIT 3' + query: 'FROM test | SORT color | RENAME color AS x | EVAL color.keyword = x | LOOKUP JOIN test-lookup ON color.keyword | LIMIT 3' - length: { columns: 5 } - length: { values: 3 } @@ -113,20 +113,20 @@ text-text: - do: esql.query: body: - query: 'FROM test | SORT color | LOOKUP JOIN `test-lookup` ON color | LIMIT 3' + query: 'FROM test | SORT color | LOOKUP JOIN test-lookup ON color | LIMIT 3' catch: "bad_request" - match: { error.type: "verification_exception" } - - contains: { error.reason: "Found 1 problem\nline 1:55: JOIN with right field [color] of type [TEXT] is not supported" } + - contains: { error.reason: "Found 1 problem\nline 1:53: JOIN with right field [color] of type [TEXT] is not supported" } --- keyword-text: - do: esql.query: body: - query: 'FROM test | SORT color | EVAL color = color.keyword | LOOKUP JOIN `test-lookup` ON color | LIMIT 3' + query: 'FROM test | SORT color | EVAL color = color.keyword | LOOKUP JOIN test-lookup ON color | LIMIT 3' catch: "bad_request" - match: { error.type: "verification_exception" } - - contains: { error.reason: "Found 1 problem\nline 1:84: JOIN with right field [color] of type [TEXT] is not supported" } + - contains: { error.reason: "Found 1 problem\nline 1:82: JOIN with right field [color] of type [TEXT] is not supported" } From dc195f4db7f8bd09e0782140f0bf31108103a0da Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Fri, 24 Jan 2025 12:59:23 +0100 Subject: [PATCH 007/383] ES|QL: Fix queries with document level security on lookup indexes (#120617) --- docs/changelog/120617.yaml | 5 + x-pack/plugin/build.gradle | 6 + .../xpack/esql/EsqlSecurityIT.java | 149 +++++++++++++++++- .../src/javaRestTest/resources/roles.yml | 47 ++++++ .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../xpack/esql/analysis/Analyzer.java | 30 ---- .../esql/enrich/AbstractLookupService.java | 136 ++++------------ .../esql/enrich/EnrichLookupService.java | 89 ++++++++++- .../esql/enrich/LookupFromIndexService.java | 5 - .../esql/plan/logical/join/LookupJoin.java | 34 +++- .../test/esql/190_lookup_join.yml | 11 ++ .../esql/191_lookup_join_on_datastreams.yml | 68 ++++++++ 12 files changed, 437 insertions(+), 150 deletions(-) create mode 100644 docs/changelog/120617.yaml create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_on_datastreams.yml diff --git a/docs/changelog/120617.yaml b/docs/changelog/120617.yaml new file mode 100644 index 0000000000000..cdf93ef4e71f2 --- /dev/null +++ b/docs/changelog/120617.yaml @@ -0,0 +1,5 @@ +pr: 120617 +summary: Fix queries with document level security on lookup indexes +area: ES|QL +type: bug +issues: [120509] diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 5ab0112d822ce..5987f75f4f198 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -96,5 +96,11 @@ tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("esql/180_match_operator/match with non text field", "Match operator can now be used on non-text fields") task.skipTest("esql/180_match_operator/match with functions", "Error message changed") task.skipTest("esql/40_unsupported_types/semantic_text declared in 
mapping", "The semantic text field format changed") + task.skipTest("esql/190_lookup_join/Alias as lookup index", "LOOKUP JOIN does not support index aliases for now") + task.skipTest("esql/190_lookup_join/alias-repeated-alias", "LOOKUP JOIN does not support index aliases for now") + task.skipTest("esql/190_lookup_join/alias-repeated-index", "LOOKUP JOIN does not support index aliases for now") + task.skipTest("esql/190_lookup_join/alias-pattern-multiple", "LOOKUP JOIN does not support index aliases for now") + task.skipTest("esql/190_lookup_join/alias-pattern-single", "LOOKUP JOIN does not support index aliases for now") + }) diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index 5adac8fdd70d0..a809bd50a45b8 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -57,6 +57,10 @@ public class EsqlSecurityIT extends ESRestTestCase { .user("user4", "x-pack-test-password", "user4", false) .user("user5", "x-pack-test-password", "user5", false) .user("fls_user", "x-pack-test-password", "fls_user", false) + .user("fls_user2", "x-pack-test-password", "fls_user2", false) + .user("fls_user3", "x-pack-test-password", "fls_user3", false) + .user("fls_user4_1", "x-pack-test-password", "fls_user4_1", false) + .user("dls_user", "x-pack-test-password", "dls_user", false) .user("metadata1_read2", "x-pack-test-password", "metadata1_read2", false) .user("alias_user1", "x-pack-test-password", "alias_user1", false) .user("alias_user2", "x-pack-test-password", "alias_user2", false) @@ -92,7 +96,7 @@ private void indexDocument(String index, int id, double value, String org) throw public void indexDocuments() throws IOException { Settings lookupSettings = Settings.builder().put("index.mode", "lookup").build(); String mapping = """ - "properties":{"value": {"type": "double"}, "org": {"type": "keyword"}} + "properties":{"value": {"type": "double"}, "org": {"type": "keyword"}, "other": {"type": "keyword"}} """; createIndex("index", Settings.EMPTY, mapping); @@ -163,6 +167,32 @@ public void indexDocuments() throws IOException { """); assertOK(client().performRequest(aliasRequest)); } + + createMultiRoleUsers(); + } + + private void createMultiRoleUsers() throws IOException { + Request request = new Request("POST", "_security/user/dls_user2"); + request.setJsonEntity(""" + { + "password" : "x-pack-test-password", + "roles" : [ "dls_user", "dls_user2" ], + "full_name" : "Test Role", + "email" : "test.role@example.com" + } + """); + assertOK(client().performRequest(request)); + + request = new Request("POST", "_security/user/fls_user4"); + request.setJsonEntity(""" + { + "password" : "x-pack-test-password", + "roles" : [ "fls_user4_1", "fls_user4_2" ], + "full_name" : "Test Role", + "email" : "test.role@example.com" + } + """); + assertOK(client().performRequest(request)); } protected MapMatcher responseMatcher(Map result) { @@ -553,25 +583,130 @@ public void testLookupJoinIndexAllowed() throws Exception { ); assertThat(respMap.get("values"), equalTo(List.of(List.of(40.0, "sales")))); - // Alias, should find the index and the row - resp = runESQLCommand("alias_user1", "ROW x = 31.0 | EVAL value = x | LOOKUP JOIN lookup-first-alias ON value | KEEP x, org"); + // Aliases are not allowed 
in LOOKUP JOIN + var resp2 = expectThrows( + ResponseException.class, + () -> runESQLCommand("alias_user1", "ROW x = 31.0 | EVAL value = x | LOOKUP JOIN lookup-first-alias ON value | KEEP x, org") + ); + + assertThat(resp2.getMessage(), containsString("Aliases and index patterns are not allowed for LOOKUP JOIN [lookup-first-alias]")); + assertThat(resp2.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); + + // Aliases are not allowed in LOOKUP JOIN, regardless of alias filters + resp2 = expectThrows( + ResponseException.class, + () -> runESQLCommand("alias_user1", "ROW x = 123.0 | EVAL value = x | LOOKUP JOIN lookup-first-alias ON value | KEEP x, org") + ); + assertThat(resp2.getMessage(), containsString("Aliases and index patterns are not allowed for LOOKUP JOIN [lookup-first-alias]")); + assertThat(resp2.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); + } + + @SuppressWarnings("unchecked") + public void testLookupJoinDocLevelSecurity() throws Exception { + assumeTrue( + "Requires LOOKUP JOIN capability", + EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName())) + ); + + Response resp = runESQLCommand("dls_user", "ROW x = 40.0 | EVAL value = x | LOOKUP JOIN lookup-user2 ON value | KEEP x, org"); + assertOK(resp); + Map respMap = entityAsMap(resp); + assertThat( + respMap.get("columns"), + equalTo(List.of(Map.of("name", "x", "type", "double"), Map.of("name", "org", "type", "keyword"))) + ); + + assertThat(respMap.get("values"), equalTo(List.of(Arrays.asList(40.0, null)))); + + resp = runESQLCommand("dls_user", "ROW x = 32.0 | EVAL value = x | LOOKUP JOIN lookup-user2 ON value | KEEP x, org"); + assertOK(resp); + respMap = entityAsMap(resp); + assertThat( + respMap.get("columns"), + equalTo(List.of(Map.of("name", "x", "type", "double"), Map.of("name", "org", "type", "keyword"))) + ); + assertThat(respMap.get("values"), equalTo(List.of(List.of(32.0, "marketing")))); + + // same, but with a user that has two dls roles that allow him more visibility + + resp = runESQLCommand("dls_user2", "ROW x = 40.0 | EVAL value = x | LOOKUP JOIN lookup-user2 ON value | KEEP x, org"); assertOK(resp); respMap = entityAsMap(resp); assertThat( respMap.get("columns"), equalTo(List.of(Map.of("name", "x", "type", "double"), Map.of("name", "org", "type", "keyword"))) ); - assertThat(respMap.get("values"), equalTo(List.of(List.of(31.0, "sales")))); - // Alias, for a row that's filtered out - resp = runESQLCommand("alias_user1", "ROW x = 123.0 | EVAL value = x | LOOKUP JOIN lookup-first-alias ON value | KEEP x, org"); + assertThat(respMap.get("values"), equalTo(List.of(Arrays.asList(40.0, "sales")))); + + resp = runESQLCommand("dls_user2", "ROW x = 32.0 | EVAL value = x | LOOKUP JOIN lookup-user2 ON value | KEEP x, org"); assertOK(resp); respMap = entityAsMap(resp); assertThat( respMap.get("columns"), equalTo(List.of(Map.of("name", "x", "type", "double"), Map.of("name", "org", "type", "keyword"))) ); - assertThat(respMap.get("values"), equalTo(List.of(Arrays.asList(123.0, null)))); + assertThat(respMap.get("values"), equalTo(List.of(List.of(32.0, "marketing")))); + + } + + @SuppressWarnings("unchecked") + public void testLookupJoinFieldLevelSecurity() throws Exception { + assumeTrue( + "Requires LOOKUP JOIN capability", + EsqlSpecTestCase.hasCapabilities(adminClient(), List.of(EsqlCapabilities.Cap.JOIN_LOOKUP_V12.capabilityName())) + ); + + Response resp = runESQLCommand("fls_user2", "ROW x = 
40.0 | EVAL value = x | LOOKUP JOIN lookup-user2 ON value"); + assertOK(resp); + Map respMap = entityAsMap(resp); + assertThat( + respMap.get("columns"), + equalTo( + List.of( + Map.of("name", "x", "type", "double"), + Map.of("name", "value", "type", "double"), + Map.of("name", "org", "type", "keyword") + ) + ) + ); + + resp = runESQLCommand("fls_user3", "ROW x = 40.0 | EVAL value = x | LOOKUP JOIN lookup-user2 ON value"); + assertOK(resp); + respMap = entityAsMap(resp); + assertThat( + respMap.get("columns"), + equalTo( + List.of( + Map.of("name", "x", "type", "double"), + Map.of("name", "value", "type", "double"), + Map.of("name", "org", "type", "keyword"), + Map.of("name", "other", "type", "keyword") + ) + ) + + ); + + resp = runESQLCommand("fls_user4", "ROW x = 40.0 | EVAL value = x | LOOKUP JOIN lookup-user2 ON value"); + assertOK(resp); + respMap = entityAsMap(resp); + assertThat( + respMap.get("columns"), + equalTo( + List.of( + Map.of("name", "x", "type", "double"), + Map.of("name", "value", "type", "double"), + Map.of("name", "org", "type", "keyword") + ) + ) + ); + + ResponseException error = expectThrows( + ResponseException.class, + () -> runESQLCommand("fls_user4_1", "ROW x = 40.0 | EVAL value = x | LOOKUP JOIN lookup-user2 ON value") + ); + assertThat(error.getMessage(), containsString("Unknown column [value] in right side of join")); + assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); } public void testLookupJoinIndexForbidden() throws Exception { diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml b/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml index f46e7ef56f3a1..745ae43cf640c 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml @@ -93,6 +93,53 @@ fls_user: field_security: grant: [ value ] +fls_user2: + cluster: [] + indices: + - names: [ 'lookup-user2' ] + privileges: [ 'read' ] + field_security: + grant: [ "org", "value" ] + +fls_user3: + cluster: [] + indices: + - names: [ 'lookup-user2' ] + privileges: [ 'read' ] + field_security: + grant: [ "org", "value", "other" ] + +fls_user4_1: + cluster: [] + indices: + - names: [ 'lookup-user2' ] + privileges: [ 'read' ] + field_security: + grant: [ "org" ] + +fls_user4_2: + cluster: [] + indices: + - names: [ 'lookup-user2' ] + privileges: [ 'read' ] + field_security: + grant: [ "value" ] + +dls_user: + cluster: [] + indices: + - names: [ 'lookup-user2' ] + privileges: [ 'read' ] + query: '{"match": {"org": "marketing"}}' + +dls_user2: + cluster: [] + indices: + - names: [ 'lookup-user2' ] + privileges: [ 'read' ] + query: '{"match": {"org": "sales"}}' + + logs_foo_all: cluster: [] indices: diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 12a25c9ce2453..182328b54c4c5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -751,7 +751,12 @@ public enum Cap { /** * Support named argument for function in map format. 
*/ - OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION(Build.current().isSnapshot()); + OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION(Build.current().isSnapshot()), + + /** + * Disabled support for index aliases in lookup joins + */ + LOOKUP_JOIN_NO_ALIASES(JOIN_LOOKUP_V12.isEnabled()); private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index fc1b7f6329ab3..552e90e0e90f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -237,36 +237,6 @@ private LogicalPlan resolveIndex(UnresolvedRelation plan, IndexResolution indexR EsIndex esIndex = indexResolution.get(); - if (plan.indexMode().equals(IndexMode.LOOKUP)) { - String indexResolutionMessage = null; - - var indexNameWithModes = esIndex.indexNameWithModes(); - if (indexNameWithModes.size() != 1) { - indexResolutionMessage = "invalid [" - + table - + "] resolution in lookup mode to [" - + indexNameWithModes.size() - + "] indices"; - } else if (indexNameWithModes.values().iterator().next() != IndexMode.LOOKUP) { - indexResolutionMessage = "invalid [" - + table - + "] resolution in lookup mode to an index in [" - + indexNameWithModes.values().iterator().next() - + "] mode"; - } - - if (indexResolutionMessage != null) { - return new UnresolvedRelation( - plan.source(), - plan.table(), - plan.frozen(), - plan.metadataFields(), - plan.indexMode(), - indexResolutionMessage, - plan.commandName() - ); - } - } var attributes = mappingAsAttributes(plan.source(), esIndex.mapping()); attributes.addAll(plan.metadataFields()); return new EsRelation( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java index 961d74794961f..cb2582db2ad33 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.support.ChannelActionListener; -import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -23,7 +22,6 @@ import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.compute.data.Block; @@ -67,15 +65,6 @@ import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.security.SecurityContext; -import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; -import 
org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; -import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.support.Exceptions; -import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -93,7 +82,6 @@ import java.util.Objects; import java.util.concurrent.Executor; import java.util.function.Function; -import java.util.stream.Collectors; import java.util.stream.IntStream; /** @@ -132,10 +120,10 @@ */ public abstract class AbstractLookupService { private final String actionName; - private final ClusterService clusterService; + protected final ClusterService clusterService; private final LookupShardContextFactory lookupShardContextFactory; - private final TransportService transportService; - private final Executor executor; + protected final TransportService transportService; + protected final Executor executor; private final BigArrays bigArrays; private final BlockFactory blockFactory; private final LocalCircuitBreaker.SizeSettings localBreakerSettings; @@ -218,97 +206,43 @@ protected static QueryList termQueryList( * Perform the actual lookup. */ public final void lookupAsync(R request, CancellableTask parentTask, ActionListener> outListener) { - ThreadContext threadContext = transportService.getThreadPool().getThreadContext(); - ActionListener> listener = ContextPreservingActionListener.wrapPreservingContext(outListener, threadContext); - hasPrivilege(listener.delegateFailureAndWrap((delegate, ignored) -> { - ClusterState clusterState = clusterService.state(); - GroupShardsIterator shardIterators = clusterService.operationRouting() - .searchShards(clusterState, new String[] { request.index }, Map.of(), "_local"); - if (shardIterators.size() != 1) { - delegate.onFailure(new EsqlIllegalArgumentException("target index {} has more than one shard", request.index)); - return; - } - ShardIterator shardIt = shardIterators.get(0); - ShardRouting shardRouting = shardIt.nextOrNull(); - ShardId shardId = shardIt.shardId(); - if (shardRouting == null) { - delegate.onFailure(new UnavailableShardsException(shardId, "target index is not available")); - return; - } - DiscoveryNode targetNode = clusterState.nodes().get(shardRouting.currentNodeId()); - T transportRequest = transportRequest(request, shardId); - // TODO: handle retry and avoid forking for the local lookup - try (ThreadContext.StoredContext unused = threadContext.stashWithOrigin(ClientHelper.ENRICH_ORIGIN)) { - transportService.sendChildRequest( - targetNode, - actionName, - transportRequest, - parentTask, - TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>( - delegate.map(LookupResponse::takePages), - in -> readLookupResponse(in, blockFactory), - executor - ) - ); - } - })); - } - - /** - * Get the privilege required to perform the lookup. - *

- * If null is returned, no privilege check will be performed. - *

- */ - @Nullable - protected abstract String getRequiredPrivilege(); - - private void hasPrivilege(ActionListener outListener) { - final Settings settings = clusterService.getSettings(); - String privilegeName = getRequiredPrivilege(); - if (privilegeName == null - || settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()) == false - || XPackSettings.SECURITY_ENABLED.get(settings) == false) { - outListener.onResponse(null); + ClusterState clusterState = clusterService.state(); + GroupShardsIterator shardIterators = clusterService.operationRouting() + .searchShards(clusterState, new String[] { request.index }, Map.of(), "_local"); + if (shardIterators.size() != 1) { + outListener.onFailure(new EsqlIllegalArgumentException("target index {} has more than one shard", request.index)); return; } - final ThreadContext threadContext = transportService.getThreadPool().getThreadContext(); - final SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext); - final User user = securityContext.getUser(); - if (user == null) { - outListener.onFailure(new IllegalStateException("missing or unable to read authentication info on request")); + ShardIterator shardIt = shardIterators.get(0); + ShardRouting shardRouting = shardIt.nextOrNull(); + ShardId shardId = shardIt.shardId(); + if (shardRouting == null) { + outListener.onFailure(new UnavailableShardsException(shardId, "target index is not available")); return; } - HasPrivilegesRequest request = new HasPrivilegesRequest(); - request.username(user.principal()); - request.clusterPrivileges(privilegeName); - request.indexPrivileges(new RoleDescriptor.IndicesPrivileges[0]); - request.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]); - ActionListener listener = outListener.delegateFailureAndWrap((l, resp) -> { - if (resp.isCompleteMatch()) { - l.onResponse(null); - return; - } - String detailed = resp.getClusterPrivileges() - .entrySet() - .stream() - .filter(e -> e.getValue() == false) - .map(e -> "privilege [" + e.getKey() + "] is missing") - .collect(Collectors.joining(", ")); - String message = "user [" - + user.principal() - + "] doesn't have " - + "sufficient privileges to perform enrich lookup: " - + detailed; - l.onFailure(Exceptions.authorizationError(message)); - }); - transportService.sendRequest( - transportService.getLocalNode(), - HasPrivilegesAction.NAME, - request, + DiscoveryNode targetNode = clusterState.nodes().get(shardRouting.currentNodeId()); + T transportRequest = transportRequest(request, shardId); + // TODO: handle retry and avoid forking for the local lookup + sendChildRequest(parentTask, outListener, targetNode, transportRequest); + } + + protected void sendChildRequest( + CancellableTask parentTask, + ActionListener> delegate, + DiscoveryNode targetNode, + T transportRequest + ) { + transportService.sendChildRequest( + targetNode, + actionName, + transportRequest, + parentTask, TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(listener, HasPrivilegesResponse::new, executor) + new ActionListenerResponseHandler<>( + delegate.map(LookupResponse::takePages), + in -> readLookupResponse(in, blockFactory), + executor + ) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index acb4206ad7af8..480b69ecd8e60 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -8,10 +8,16 @@ package org.elasticsearch.xpack.esql.enrich; import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.support.ContextPreservingActionListener; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; @@ -23,9 +29,20 @@ import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; +import org.elasticsearch.xpack.core.security.support.Exceptions; +import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -36,6 +53,7 @@ import java.io.IOException; import java.util.List; +import java.util.stream.Collectors; /** * {@link EnrichLookupService} performs enrich lookup for a given input page. 
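The hunks around this point move the privilege handling out of AbstractLookupService: the base class now exposes sendChildRequest as an overridable hook, EnrichLookupService wraps it with the monitor_enrich check and the enrich origin, and LookupFromIndexService (below) drops its no-op privilege override, so LOOKUP JOIN requests run with the caller's own security context and document and field level security on the lookup index apply. A rough, framework-free sketch of that hook shape, with hypothetical class names standing in for the Elasticsearch types, is shown here.

// Rough sketch of the overridable-send hook introduced by this patch (hypothetical names only).
abstract class BaseLookupSketch {

    final void lookupAsync(String index) {
        // Shared shard routing and validation would happen here, then the request is handed off.
        sendChildRequest(index);
    }

    // Default behaviour: send as the calling user, so DLS/FLS on the lookup index still apply.
    protected void sendChildRequest(String index) {
        System.out.println("LOOKUP JOIN request for [" + index + "] sent with the caller's context");
    }
}

class EnrichLookupSketch extends BaseLookupSketch {

    @Override
    protected void sendChildRequest(String index) {
        // Enrich keeps its dedicated privilege check and only then switches to the enrich origin.
        System.out.println("checking monitor_enrich privilege before enrich lookup on [" + index + "]");
        super.sendChildRequest(index);
    }

    public static void main(String[] args) {
        new EnrichLookupSketch().lookupAsync("languages-enrich");
        new BaseLookupSketch() { }.lookupAsync("languages_lookup"); // LOOKUP JOIN path: no extra check
    }
}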
@@ -90,11 +108,6 @@ protected QueryList queryList(TransportRequest request, SearchExecutionContext c }; } - @Override - protected String getRequiredPrivilege() { - return ClusterPrivilegeResolver.MONITOR_ENRICH.name(); - } - @Override protected LookupResponse createLookupResponse(List pages, BlockFactory blockFactory) throws IOException { if (pages.size() != 1) { @@ -270,4 +283,70 @@ protected void innerRelease() { } } } + + @Override + protected void sendChildRequest( + CancellableTask parentTask, + ActionListener> delegate, + DiscoveryNode targetNode, + TransportRequest transportRequest + ) { + ThreadContext threadContext = transportService.getThreadPool().getThreadContext(); + ActionListener> listener = ContextPreservingActionListener.wrapPreservingContext(delegate, threadContext); + hasEnrichPrivilege(listener.delegateFailureAndWrap((l, ignored) -> { + // Since we just checked the needed privileges + // we can access the index regardless of the user/role that is executing the query + try (ThreadContext.StoredContext unused = threadContext.stashWithOrigin(ClientHelper.ENRICH_ORIGIN)) { + super.sendChildRequest(parentTask, l, targetNode, transportRequest); + } + })); + } + + protected void hasEnrichPrivilege(ActionListener outListener) { + final Settings settings = clusterService.getSettings(); + String privilegeName = ClusterPrivilegeResolver.MONITOR_ENRICH.name(); + if (privilegeName == null + || settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()) == false + || XPackSettings.SECURITY_ENABLED.get(settings) == false) { + outListener.onResponse(null); + return; + } + final ThreadContext threadContext = transportService.getThreadPool().getThreadContext(); + final SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext); + final User user = securityContext.getUser(); + if (user == null) { + outListener.onFailure(new IllegalStateException("missing or unable to read authentication info on request")); + return; + } + HasPrivilegesRequest request = new HasPrivilegesRequest(); + request.username(user.principal()); + request.clusterPrivileges(privilegeName); + request.indexPrivileges(new RoleDescriptor.IndicesPrivileges[0]); + request.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]); + ActionListener listener = outListener.delegateFailureAndWrap((l, resp) -> { + if (resp.isCompleteMatch()) { + l.onResponse(null); + return; + } + String detailed = resp.getClusterPrivileges() + .entrySet() + .stream() + .filter(e -> e.getValue() == false) + .map(e -> "privilege [" + e.getKey() + "] is missing") + .collect(Collectors.joining(", ")); + String message = "user [" + + user.principal() + + "] doesn't have " + + "sufficient privileges to perform enrich lookup: " + + detailed; + l.onFailure(Exceptions.authorizationError(message)); + }); + transportService.sendRequest( + transportService.getLocalNode(), + HasPrivilegesAction.NAME, + request, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(listener, HasPrivilegesResponse::new, executor) + ); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java index 9bea212a56aa8..131d8ddfa5ccd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java @@ -90,11 +90,6 @@ protected 
AbstractLookupService.LookupResponse readLookupResponse(StreamInput in return new LookupResponse(in, blockFactory); } - @Override - protected String getRequiredPrivilege() { - return null; - } - public static class Request extends AbstractLookupService.Request { private final String matchField; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java index 4e009156072df..c29cf0ec7f414 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java @@ -7,9 +7,13 @@ package org.elasticsearch.xpack.esql.plan.logical.join; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.SurrogateLogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.UsingJoinType; @@ -17,12 +21,13 @@ import java.util.List; import static java.util.Collections.emptyList; +import static org.elasticsearch.xpack.esql.common.Failure.fail; import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.LEFT; /** * Lookup join - specialized LEFT (OUTER) JOIN between the main left side and a lookup index (index_mode = lookup) on the right. 
*/ -public class LookupJoin extends Join implements SurrogateLogicalPlan { +public class LookupJoin extends Join implements SurrogateLogicalPlan, PostAnalysisVerificationAware { public LookupJoin(Source source, LogicalPlan left, LogicalPlan right, List joinFields) { this(source, left, right, new UsingJoinType(LEFT, joinFields), emptyList(), emptyList(), emptyList()); @@ -71,4 +76,31 @@ protected NodeInfo info() { config().rightFields() ); } + + @Override + public void postAnalysisVerification(Failures failures) { + super.postAnalysisVerification(failures); + right().forEachDown(EsRelation.class, esr -> { + var indexNameWithModes = esr.indexNameWithModes(); + if (indexNameWithModes.size() != 1) { + failures.add( + fail(esr, "invalid [{}] resolution in lookup mode to [{}] indices", esr.indexPattern(), indexNameWithModes.size()) + ); + } else if (indexNameWithModes.values().iterator().next() != IndexMode.LOOKUP) { + failures.add( + fail( + esr, + "invalid [{}] resolution in lookup mode to an index in [{}] mode", + esr.indexPattern(), + indexNameWithModes.values().iterator().next() + ) + ); + } + + // this check is crucial for security: ES|QL would use the concrete indices, so it would bypass the security on the alias + if (esr.concreteIndices().contains(esr.indexPattern()) == false) { + failures.add(fail(this, "Aliases and index patterns are not allowed for LOOKUP JOIN [{}]", esr.indexPattern())); + } + }); + } } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml index e8c9df0d3287e..f72cdd65b275c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml @@ -102,7 +102,10 @@ non-lookup index: - contains: { error.reason: "Found 1 problem\nline 1:45: invalid [test] resolution in lookup mode to an index in [standard] mode" } --- + "Alias as lookup index": + - skip: + awaits_fix: "LOOKUP JOIN does not support index aliases for now" - do: esql.query: body: @@ -117,6 +120,8 @@ non-lookup index: --- alias-repeated-alias: + - skip: + awaits_fix: "LOOKUP JOIN does not support index aliases for now" - do: esql.query: body: @@ -131,6 +136,8 @@ alias-repeated-alias: --- alias-repeated-index: + - skip: + awaits_fix: "LOOKUP JOIN does not support index aliases for now" - do: esql.query: body: @@ -145,6 +152,8 @@ alias-repeated-index: --- alias-pattern-multiple: + - skip: + awaits_fix: "LOOKUP JOIN does not support index aliases for now" - do: esql.query: body: @@ -156,6 +165,8 @@ alias-pattern-multiple: --- alias-pattern-single: + - skip: + awaits_fix: "LOOKUP JOIN does not support index aliases for now" - do: esql.query: body: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_on_datastreams.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_on_datastreams.yml new file mode 100644 index 0000000000000..6f9b70b0d94f1 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_on_datastreams.yml @@ -0,0 +1,68 @@ +--- +setup: + - requires: + test_runner_features: [capabilities, contains] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [lookup_join_no_aliases] + reason: "uses LOOKUP JOIN" + + - do: + cluster.put_component_template: + name: my_settings + body: + template: + settings: + index: + mode: 
lookup + + + - do: + cluster.put_component_template: + name: my_mappings + body: + template: + mappings: + properties: + "@timestamp": + type: date + x: + type: keyword + + - do: + indices.put_index_template: + name: my_index_template + body: + index_patterns: my_data_stream* + data_stream: {} + composed_of: [ "my_mappings", "my_settings" ] + priority: 500 + + + - do: + bulk: + index: "my_data_stream" + refresh: true + body: + - { "index": { } } + - { "x": "foo", "y": "y1" } + - { "index": { } } + - { "x": "bar", "y": "y2" } + + + +--- +"data streams not supported in LOOKUP JOIN": + - do: + esql.query: + body: + query: 'row x = "foo" | LOOKUP JOIN my_data_stream ON x' + catch: "bad_request" + + - match: { error.type: "verification_exception" } + - contains: { error.reason: "Found 1 problem\nline 1:17: Aliases and index patterns are not allowed for LOOKUP JOIN [my_data_stream]" } + + + From 385e1fdf21fb30c20adf94c29f35703b344b97f5 Mon Sep 17 00:00:00 2001 From: Navarone Feekery <13634519+navarone-feekery@users.noreply.github.com> Date: Fri, 24 Jan 2025 13:10:38 +0100 Subject: [PATCH 008/383] [Search] Add system index descriptors to Connector indices (#118991) Update the .elastic-connectors and .elastic-connectors-sync-jobs indices into system indices --- .../xpack/core/ClientHelper.java | 3 + ...json => elastic-connectors-sync-jobs.json} | 30 ++-- ...-mappings.json => elastic-connectors.json} | 51 ++++--- .../elastic-connectors-settings.json | 14 -- .../elastic-connectors-sync-jobs.json | 14 -- .../connector/elastic-connectors.json | 14 -- .../xpack/application/EnterpriseSearch.java | 9 +- .../connector/ConnectorIndexService.java | 50 +++++- .../connector/ConnectorTemplateRegistry.java | 81 +--------- .../syncjob/ConnectorSyncJobIndexService.java | 47 +++++- .../connector/ConnectorIndexServiceTests.java | 25 ++- .../ConnectorTemplateRegistryTests.java | 144 +----------------- .../connector/ConnectorTestUtils.java | 53 +------ .../ConnectorSyncJobIndexServiceTests.java | 44 ++++-- .../syncjob/ConnectorSyncJobTestUtils.java | 26 ++-- 15 files changed, 230 insertions(+), 375 deletions(-) rename x-pack/plugin/core/template-resources/src/main/resources/{entsearch/connector/elastic-connectors-sync-jobs-mappings.json => elastic-connectors-sync-jobs.json} (88%) rename x-pack/plugin/core/template-resources/src/main/resources/{entsearch/connector/elastic-connectors-mappings.json => elastic-connectors.json} (92%) delete mode 100644 x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-settings.json delete mode 100644 x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs.json delete mode 100644 x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors.json diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java index 9a0d1a58a30a1..680b72cb970c9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java @@ -196,6 +196,9 @@ private static String maybeRewriteSingleAuthenticationHeaderForVersion( public static final String APM_ORIGIN = "apm"; public static final String OTEL_ORIGIN = "otel"; public static final String REINDEX_DATA_STREAM_ORIGIN = "reindex_data_stream"; + // TODO consolidate the Kibana origin with the one defined in 
org/elasticsearch/kibana/KibanaPlugin.java + public static final String KIBANA_ORIGIN = "kibana"; + public static final String CLOUD_ORIGIN = "cloud"; private ClientHelper() {} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/elastic-connectors-sync-jobs.json similarity index 88% rename from x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/elastic-connectors-sync-jobs.json index 4dd6e0681c7cc..7d1e7fa3a0418 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs-mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/elastic-connectors-sync-jobs.json @@ -1,12 +1,16 @@ { - "template": { - "aliases": { - ".elastic-connectors-sync-jobs": {} - }, - "mappings": { - "dynamic": "false", + "settings": { + "index": { + "number_of_shards": "1", + "auto_expand_replicas": "0-1" + } + }, + "mappings": { + "_doc": { + "dynamic": "strict", "_meta": { - "version": ${xpack.application.connector.template.version} + "version": "${elastic-connectors-sync-jobs.version}", + "managed_index_mappings_version": ${elastic-connectors-sync-jobs.managed.index.version} }, "properties": { "cancelation_requested_at": { @@ -21,9 +25,11 @@ "connector": { "properties": { "configuration": { + "dynamic": "false", "type": "object" }, "filtering": { + "dynamic": "false", "properties": { "advanced_snippet": { "properties": { @@ -91,6 +97,7 @@ "type": "keyword" }, "pipeline": { + "dynamic": "false", "properties": { "extract_binary_content": { "type": "boolean" @@ -110,6 +117,7 @@ "type": "keyword" }, "sync_cursor": { + "dynamic": "false", "type": "object" } } @@ -136,6 +144,7 @@ "type": "date" }, "metadata": { + "dynamic": "false", "type": "object" }, "started_at": { @@ -155,10 +164,5 @@ } } } - }, - "_meta": { - "description": "Built-in mappings applied by default to elastic-connectors indices", - "managed": true - }, - "version": ${xpack.application.connector.template.version} + } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/elastic-connectors.json similarity index 92% rename from x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/elastic-connectors.json index 25409dbf8460e..a98018e76f0e0 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/elastic-connectors.json @@ -1,29 +1,35 @@ { - "template": { - "aliases": { - ".elastic-connectors": {} - }, - "mappings": { - "dynamic": "false", + "settings": { + "index": { + "number_of_shards": "1", + "auto_expand_replicas": "0-1" + } + }, + "mappings": { + "_doc": { + "dynamic": "strict", "_meta": { - "pipeline": { - "default_name": "search-default-ingestion", - "default_extract_binary_content": true, - "default_run_ml_inference": true, - "default_reduce_whitespace": true - }, - "version": ${xpack.application.connector.template.version} + "version": "${elastic-connectors.version}", + "managed_index_mappings_version": 
${elastic-connectors.managed.index.version} }, "properties": { "api_key_id": { "type": "keyword" }, + "api_key_secret_id": { + "type": "keyword" + }, "configuration": { + "dynamic": "false", "type": "object" }, "custom_scheduling": { + "dynamic": "false", "type": "object" }, + "deleted": { + "type": "boolean" + }, "description": { "type": "text" }, @@ -31,6 +37,7 @@ "type": "keyword" }, "features": { + "dynamic": "false", "properties": { "filtering_advanced_config": { "type": "boolean" @@ -66,6 +73,7 @@ } }, "filtering": { + "dynamic": "false", "properties": { "active": { "properties": { @@ -78,6 +86,7 @@ "type": "date" }, "value": { + "dynamic": "false", "type": "object" } } @@ -143,6 +152,7 @@ "type": "date" }, "value": { + "dynamic": "false", "type": "object" } } @@ -242,6 +252,7 @@ "type": "keyword" }, "pipeline": { + "dynamic": "false", "properties": { "extract_binary_content": { "type": "boolean" @@ -258,6 +269,7 @@ } }, "scheduling": { + "dynamic": "false", "properties": { "access_control": { "properties": { @@ -298,22 +310,13 @@ "type": "keyword" }, "sync_cursor": { + "dynamic": "false", "type": "object" }, "sync_now": { "type": "boolean" - }, - "deleted": { - "type": "boolean" } } } - }, - "_meta": { - "description": "Built-in mappings applied by default to elastic-connectors indices", - "managed": true - }, - "version": ${xpack.application.connector.template.version} + } } - - diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-settings.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-settings.json deleted file mode 100644 index 6ff9510574281..0000000000000 --- a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-settings.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "template": { - "settings": { - "hidden": true, - "number_of_shards": "1", - "auto_expand_replicas": "0-1" - } - }, - "_meta": { - "description": "Built-in settings applied by default to connector management indices", - "managed": true - }, - "version": ${xpack.application.connector.template.version} -} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs.json deleted file mode 100644 index db5404a30c6e4..0000000000000 --- a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "index_patterns": ["${connectors-sync-jobs.index_pattern}"], - "priority": 100, - "composed_of": [ - "elastic-connectors-settings", - "elastic-connectors-sync-jobs-mappings" - ], - "allow_auto_create": true, - "_meta": { - "description": "Built-in template for elastic-connectors-sync-jobs", - "managed": true - }, - "version": ${xpack.application.connector.template.version} -} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors.json deleted file mode 100644 index 17c0b1eef0610..0000000000000 --- a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "index_patterns": ["${connectors.index_pattern}"], - "priority": 100, - "composed_of": [ - "elastic-connectors-settings", - 
"elastic-connectors-mappings" - ], - "allow_auto_create": true, - "_meta": { - "description": "Built-in template for elastic-connectors", - "managed": true - }, - "version": ${xpack.application.connector.template.version} -} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index df1c76ccf770f..4142d907d0c5c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -46,6 +46,7 @@ import org.elasticsearch.xpack.application.analytics.action.TransportPutAnalyticsCollectionAction; import org.elasticsearch.xpack.application.analytics.ingest.AnalyticsEventIngestConfig; import org.elasticsearch.xpack.application.connector.ConnectorAPIFeature; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import org.elasticsearch.xpack.application.connector.action.DeleteConnectorAction; import org.elasticsearch.xpack.application.connector.action.GetConnectorAction; @@ -124,6 +125,7 @@ import org.elasticsearch.xpack.application.connector.secrets.action.TransportGetConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.TransportPostConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.TransportPutConnectorSecretAction; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.ClaimConnectorSyncJobAction; @@ -477,7 +479,12 @@ public Collection createComponents(PluginServices services) { @Override public Collection getSystemIndexDescriptors(Settings settings) { Collection systemIndices = new ArrayList<>( - List.of(SearchApplicationIndexService.getSystemIndexDescriptor(), QueryRulesIndexService.getSystemIndexDescriptor()) + List.of( + SearchApplicationIndexService.getSystemIndexDescriptor(), + QueryRulesIndexService.getSystemIndexDescriptor(), + ConnectorSyncJobIndexService.getSystemIndexDescriptor(), + ConnectorIndexService.getSystemIndexDescriptor() + ) ); if (ConnectorSecretsFeature.isEnabled()) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 3120124c17523..a9ca8552feeea 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -10,10 +10,12 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; +import 
org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; @@ -33,6 +35,7 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.query.WildcardQueryBuilder; +import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; @@ -59,6 +62,7 @@ import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationState; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; +import org.elasticsearch.xpack.core.template.TemplateUtils; import java.time.Instant; import java.util.ArrayList; @@ -76,6 +80,7 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.application.connector.ConnectorFiltering.fromXContentBytesConnectorFiltering; import static org.elasticsearch.xpack.application.connector.ConnectorFiltering.sortFilteringRulesByOrder; +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTORS_ALLOWED_PRODUCT_ORIGINS; import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.MANAGED_CONNECTOR_INDEX_PREFIX; import static org.elasticsearch.xpack.core.ClientHelper.CONNECTORS_ORIGIN; @@ -87,7 +92,20 @@ public class ConnectorIndexService { // The client to interact with the system index (internal user). private final Client clientWithOrigin; - public static final String CONNECTOR_INDEX_NAME = ConnectorTemplateRegistry.CONNECTOR_INDEX_NAME_PATTERN; + // TODO use proper version IDs (see org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java) + // TODO if this version is updated, a test should be added to + // javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java + private static final int CONNECTORS_INDEX_VERSION = 1; + // TODO rename to CONNECTOR_ALIAS_NAME + public static final String CONNECTOR_INDEX_NAME = ".elastic-connectors"; + public static final String CONNECTOR_INDEX_PREFIX = ".elastic-connectors-v"; + public static final String CONNECTOR_CONCRETE_INDEX_NAME = CONNECTOR_INDEX_PREFIX + CONNECTORS_INDEX_VERSION; + // The index pattern needs a stricter regex to prevent conflicts with .elastic-connectors-sync-jobs + + public static final String CONNECTOR_INDEX_NAME_PATTERN = CONNECTOR_INDEX_PREFIX + "*"; + + private static final String CONNECTORS_MAPPING_VERSION_VARIABLE = "elastic-connectors.version"; + private static final String CONNECTORS_MAPPING_MANAGED_VERSION_VARIABLE = "elastic-connectors.managed.index.version"; /** * @param client A client for executing actions on the connector index @@ -96,6 +114,36 @@ public ConnectorIndexService(Client client) { this.clientWithOrigin = new OriginSettingClient(client, CONNECTORS_ORIGIN); } + /** + * Returns the {@link SystemIndexDescriptor} for the Connector system index. + * + * @return The {@link SystemIndexDescriptor} for the Connector system index. 
+ */ + public static SystemIndexDescriptor getSystemIndexDescriptor() { + PutIndexTemplateRequest request = new PutIndexTemplateRequest(); + String templateSource = TemplateUtils.loadTemplate( + "/elastic-connectors.json", + Version.CURRENT.toString(), + CONNECTORS_MAPPING_VERSION_VARIABLE, + Map.of(CONNECTORS_MAPPING_MANAGED_VERSION_VARIABLE, Integer.toString(CONNECTORS_INDEX_VERSION)) + ); + request.source(templateSource, XContentType.JSON); + + // The index pattern needs a stricter regex to prevent conflicts with .elastic-connectors-sync-jobs + return SystemIndexDescriptor.builder() + .setIndexPattern(CONNECTOR_INDEX_NAME_PATTERN) + .setPrimaryIndex(CONNECTOR_CONCRETE_INDEX_NAME) + .setAliasName(CONNECTOR_INDEX_NAME) + .setDescription("Search connectors") + .setMappings(request.mappings()) + .setSettings(request.settings()) + .setOrigin(CONNECTORS_ORIGIN) + .setType(SystemIndexDescriptor.Type.EXTERNAL_MANAGED) + .setAllowedElasticProductOrigins(CONNECTORS_ALLOWED_PRODUCT_ORIGINS) + .setNetNew() + .build(); + } + /** * Creates or updates the {@link Connector} in the underlying index with a specific doc ID * if connectorId is provided. Otherwise, the connector doc is indexed with auto-generated doc ID. diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java index fd35acc89db5c..97ac05c443ad0 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java @@ -8,25 +8,23 @@ package org.elasticsearch.xpack.application.connector; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.template.IndexTemplateConfig; import org.elasticsearch.xpack.core.template.IndexTemplateRegistry; import org.elasticsearch.xpack.core.template.IngestPipelineConfig; import org.elasticsearch.xpack.core.template.JsonIngestPipelineConfig; -import java.io.IOException; -import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.core.ClientHelper.CLOUD_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.CONNECTORS_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.KIBANA_ORIGIN; public class ConnectorTemplateRegistry extends IndexTemplateRegistry { @@ -34,13 +32,6 @@ public class ConnectorTemplateRegistry extends IndexTemplateRegistry { static final int REGISTRY_VERSION = 3; // Connector indices constants - - public static final String CONNECTOR_INDEX_NAME_PATTERN = ".elastic-connectors-v1"; - public static final String CONNECTOR_TEMPLATE_NAME = "elastic-connectors"; - - public static final String CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN = ".elastic-connectors-sync-jobs-v1"; - public static final String 
CONNECTOR_SYNC_JOBS_TEMPLATE_NAME = "elastic-connectors-sync-jobs"; - public static final String ACCESS_CONTROL_INDEX_PREFIX = ".search-acl-filter-"; public static final String ACCESS_CONTROL_INDEX_NAME_PATTERN = ".search-acl-filter-*"; public static final String ACCESS_CONTROL_TEMPLATE_NAME = "search-acl-filter"; @@ -58,51 +49,8 @@ public class ConnectorTemplateRegistry extends IndexTemplateRegistry { // Variable used to replace template version in index templates public static final String TEMPLATE_VERSION_VARIABLE = "xpack.application.connector.template.version"; - private static final String MAPPINGS_SUFFIX = "-mappings"; - - private static final String SETTINGS_SUFFIX = "-settings"; - - private static final String JSON_EXTENSION = ".json"; - - static final Map COMPONENT_TEMPLATES; - - static { - final Map componentTemplates = new HashMap<>(); - for (IndexTemplateConfig config : List.of( - new IndexTemplateConfig( - CONNECTOR_TEMPLATE_NAME + MAPPINGS_SUFFIX, - ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_TEMPLATE_NAME + MAPPINGS_SUFFIX + JSON_EXTENSION, - REGISTRY_VERSION, - TEMPLATE_VERSION_VARIABLE - ), - new IndexTemplateConfig( - CONNECTOR_TEMPLATE_NAME + SETTINGS_SUFFIX, - ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_TEMPLATE_NAME + SETTINGS_SUFFIX + JSON_EXTENSION, - REGISTRY_VERSION, - TEMPLATE_VERSION_VARIABLE - ), - new IndexTemplateConfig( - CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + MAPPINGS_SUFFIX, - ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + MAPPINGS_SUFFIX + JSON_EXTENSION, - REGISTRY_VERSION, - TEMPLATE_VERSION_VARIABLE - ), - new IndexTemplateConfig( - CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + SETTINGS_SUFFIX, - ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_TEMPLATE_NAME + SETTINGS_SUFFIX + JSON_EXTENSION, - REGISTRY_VERSION, - TEMPLATE_VERSION_VARIABLE - ) - )) { - - try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, config.loadBytes())) { - componentTemplates.put(config.getTemplateName(), ComponentTemplate.parse(parser)); - } catch (IOException e) { - throw new AssertionError(e); - } - } - COMPONENT_TEMPLATES = Map.copyOf(componentTemplates); - } + // Sources allowed to access system indices using X-elastic-product-origin header + public static final List CONNECTORS_ALLOWED_PRODUCT_ORIGINS = List.of(KIBANA_ORIGIN, CONNECTORS_ORIGIN, CLOUD_ORIGIN); @Override protected List getIngestPipelines() { @@ -117,20 +65,6 @@ protected List getIngestPipelines() { } static final Map COMPOSABLE_INDEX_TEMPLATES = parseComposableTemplates( - new IndexTemplateConfig( - CONNECTOR_TEMPLATE_NAME, - ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_TEMPLATE_NAME + ".json", - REGISTRY_VERSION, - TEMPLATE_VERSION_VARIABLE, - Map.of("connectors.index_pattern", CONNECTOR_INDEX_NAME_PATTERN) - ), - new IndexTemplateConfig( - CONNECTOR_SYNC_JOBS_TEMPLATE_NAME, - ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + ".json", - REGISTRY_VERSION, - TEMPLATE_VERSION_VARIABLE, - Map.of("connectors-sync-jobs.index_pattern", CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN) - ), new IndexTemplateConfig( ACCESS_CONTROL_TEMPLATE_NAME, ROOT_TEMPLATE_RESOURCE_PATH + ACCESS_CONTROL_TEMPLATE_NAME + ".json", @@ -154,11 +88,6 @@ protected String getOrigin() { return ENT_SEARCH_ORIGIN; } - @Override - protected Map getComponentTemplateConfigs() { - return COMPONENT_TEMPLATES; - } - @Override protected Map getComposableTemplateConfigs() { return COMPOSABLE_INDEX_TEMPLATES; diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index f46d915a7123f..85de2f900ddff 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -11,10 +11,12 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; @@ -40,6 +42,7 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -49,10 +52,10 @@ import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; -import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import org.elasticsearch.xpack.application.connector.filtering.FilteringRules; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; +import org.elasticsearch.xpack.core.template.TemplateUtils; import java.io.IOException; import java.time.Instant; @@ -69,6 +72,7 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.application.connector.ConnectorIndexService.CONNECTOR_INDEX_NAME; +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTORS_ALLOWED_PRODUCT_ORIGINS; import static org.elasticsearch.xpack.core.ClientHelper.CONNECTORS_ORIGIN; /** @@ -81,7 +85,17 @@ public class ConnectorSyncJobIndexService { // The client to interact with the system index (internal user). 
private final Client clientWithOrigin; - public static final String CONNECTOR_SYNC_JOB_INDEX_NAME = ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN; + // TODO use proper version IDs (see org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java) + // TODO if this version is updated, a test should be added to + // javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java + private static final int CONNECTOR_SYNC_JOB_INDEX_VERSION = 1; + public static final String CONNECTOR_SYNC_JOB_INDEX_NAME = ".elastic-connectors-sync-jobs"; + public static final String CONNECTOR_SYNC_JOB_INDEX_PREFIX = ".elastic-connectors-sync-jobs-v"; + public static final String CONNECTOR_SYNC_JOB_CONCRETE_INDEX_NAME = CONNECTOR_SYNC_JOB_INDEX_PREFIX + CONNECTOR_SYNC_JOB_INDEX_VERSION; + public static final String CONNECTOR_SYNC_JOB_INDEX_NAME_PATTERN = CONNECTOR_SYNC_JOB_INDEX_NAME + "*"; + + private static final String CONNECTOR_SYNC_JOB_MAPPING_VERSION_VARIABLE = "elastic-connectors-sync-jobs.version"; + private static final String CONNECTOR_SYNC_JOB_MAPPING_MANAGED_VERSION_VARIABLE = "elastic-connectors-sync-jobs.managed.index.version"; /** * @param client A client for executing actions on the connectors sync jobs index. @@ -90,6 +104,35 @@ public ConnectorSyncJobIndexService(Client client) { this.clientWithOrigin = new OriginSettingClient(client, CONNECTORS_ORIGIN); } + /** + * Returns the {@link SystemIndexDescriptor} for the Connector system index. + * + * @return The {@link SystemIndexDescriptor} for the Connector system index. + */ + public static SystemIndexDescriptor getSystemIndexDescriptor() { + PutIndexTemplateRequest request = new PutIndexTemplateRequest(); + String templateSource = TemplateUtils.loadTemplate( + "/elastic-connectors-sync-jobs.json", + Version.CURRENT.toString(), + CONNECTOR_SYNC_JOB_MAPPING_VERSION_VARIABLE, + Map.of(CONNECTOR_SYNC_JOB_MAPPING_MANAGED_VERSION_VARIABLE, Integer.toString(CONNECTOR_SYNC_JOB_INDEX_VERSION)) + ); + request.source(templateSource, XContentType.JSON); + + return SystemIndexDescriptor.builder() + .setIndexPattern(CONNECTOR_SYNC_JOB_INDEX_NAME_PATTERN) + .setPrimaryIndex(CONNECTOR_SYNC_JOB_CONCRETE_INDEX_NAME) + .setAliasName(CONNECTOR_SYNC_JOB_INDEX_NAME) + .setDescription("Search connectors sync jobs") + .setMappings(request.mappings()) + .setSettings(request.settings()) + .setOrigin(CONNECTORS_ORIGIN) + .setType(SystemIndexDescriptor.Type.EXTERNAL_MANAGED) + .setAllowedElasticProductOrigins(CONNECTORS_ALLOWED_PRODUCT_ORIGINS) + .setNetNew() + .build(); + } + /** * @param request Request for creating a connector sync job. * @param listener Listener to respond to a successful response or an error. 
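Aside (not part of the patch): both new getSystemIndexDescriptor() methods load their mappings and settings from the relocated JSON resources and rely on TemplateUtils.loadTemplate to fill the ${...} placeholders (the version string plus the managed-index version) before the result is fed into a PutIndexTemplateRequest and the SystemIndexDescriptor builder. The snippet below is a simplified, hypothetical stand-in for that substitution step, not the real TemplateUtils code; it only illustrates how the placeholders seen in elastic-connectors.json and elastic-connectors-sync-jobs.json get resolved.

```java
import java.util.Map;

public class TemplateSubstitutionSketch {
    // Replaces ${variable} placeholders in a mappings/settings template with concrete values,
    // mirroring the role TemplateUtils.loadTemplate plays for the connector system index JSON.
    static String substitute(String template, String versionVariable, String version, Map<String, String> extra) {
        String result = template.replace("${" + versionVariable + "}", version);
        for (Map.Entry<String, String> e : extra.entrySet()) {
            result = result.replace("${" + e.getKey() + "}", e.getValue());
        }
        return result;
    }

    public static void main(String[] args) {
        String template = """
            { "mappings": { "_doc": { "_meta": {
                "version": "${elastic-connectors.version}",
                "managed_index_mappings_version": ${elastic-connectors.managed.index.version}
            } } } }""";
        // Example values only; the real code passes Version.CURRENT and the index version constant.
        System.out.println(substitute(template,
            "elastic-connectors.version", "9.0.0",
            Map.of("elastic-connectors.managed.index.version", "1")));
    }
}
```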
diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 7b6d9c9b14df9..53a8c7ac96944 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -14,7 +14,9 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; +import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.MockScriptPlugin; @@ -59,7 +61,6 @@ import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.getRandomConnectorFeatures; import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.getRandomCronExpression; import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.randomConnectorFeatureEnabled; -import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.registerSimplifiedConnectorIndexTemplates; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; @@ -72,7 +73,6 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { @Before public void setup() { - registerSimplifiedConnectorIndexTemplates(indicesAdmin()); this.connectorIndexService = new ConnectorIndexService(client()); } @@ -80,6 +80,7 @@ public void setup() { protected Collection> getPlugins() { List> plugins = new ArrayList<>(super.getPlugins()); plugins.add(MockPainlessScriptEngine.TestPlugin.class); + plugins.add(ConnectorIndexServiceTests.TestPlugin.class); return plugins; } @@ -1612,4 +1613,24 @@ public void execute() { } } + /** + * Test plugin to register the {@link ConnectorIndexService} system index descriptor. 
+ */ + public static class TestPlugin extends Plugin implements SystemIndexPlugin { + @Override + public Collection getSystemIndexDescriptors(Settings settings) { + return List.of(ConnectorIndexService.getSystemIndexDescriptor()); + } + + @Override + public String getFeatureName() { + return this.getClass().getSimpleName(); + } + + @Override + public String getFeatureDescription() { + return this.getClass().getCanonicalName(); + } + } + } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java index 068b99626af9d..89bdabe78300c 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java @@ -55,15 +55,13 @@ import java.util.stream.Collectors; import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.ACCESS_CONTROL_INDEX_NAME_PATTERN; -import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_INDEX_NAME_PATTERN; -import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.oneOf; import static org.hamcrest.Matchers.sameInstance; +import static org.junit.Assert.assertNotNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; @@ -92,14 +90,6 @@ public void testThatNonExistingComposableTemplatesAreAddedImmediately() throws E DiscoveryNode node = DiscoveryNodeUtils.create("node"); DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); Map existingComponentTemplates = Map.of( - ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-mappings", - ConnectorTemplateRegistry.REGISTRY_VERSION, - ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-settings", - ConnectorTemplateRegistry.REGISTRY_VERSION, - ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-mappings", - ConnectorTemplateRegistry.REGISTRY_VERSION, - ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-settings", - ConnectorTemplateRegistry.REGISTRY_VERSION, ConnectorTemplateRegistry.ACCESS_CONTROL_TEMPLATE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION ); @@ -125,131 +115,6 @@ public void testThatNonExistingComposableTemplatesAreAddedImmediately() throws E }); } - public void testThatNonExistingComponentTemplatesAreAddedImmediately() throws Exception { - DiscoveryNode node = DiscoveryNodeUtils.create("node"); - DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); - - ClusterChangedEvent event = createClusterChangedEvent( - Collections.emptyMap(), - Collections.emptyMap(), - Collections.singletonMap(ConnectorTemplateRegistry.SEARCH_DEFAULT_PIPELINE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION), - Collections.emptyMap(), - nodes - ); - - AtomicInteger calledTimes = new AtomicInteger(0); - client.setVerifier((action, request, listener) -> 
verifyComponentTemplateInstalled(calledTimes, action, request, listener)); - registry.clusterChanged(event); - assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getComponentTemplateConfigs().size()))); - - calledTimes.set(0); - - // attempting to register the event multiple times as a race condition can yield this test flaky, namely: - // when calling registry.clusterChanged(newEvent) the templateCreationsInProgress state that the IndexTemplateRegistry maintains - // might've not yet been updated to reflect that the first template registration was complete, so a second template registration - // will not be issued anymore, leaving calledTimes to 0 - assertBusy(() -> { - // now delete all templates from the cluster state and let's retry - ClusterChangedEvent newEvent = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes); - registry.clusterChanged(newEvent); - assertThat(calledTimes.get(), greaterThan(4)); - }); - } - - public void testThatVersionedOldComponentTemplatesAreUpgraded() throws Exception { - DiscoveryNode node = DiscoveryNodeUtils.create("node"); - DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); - - ClusterChangedEvent event = createClusterChangedEvent( - Collections.emptyMap(), - Collections.singletonMap( - ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-settings", - ConnectorTemplateRegistry.REGISTRY_VERSION - 1 - ), - Collections.singletonMap(ConnectorTemplateRegistry.SEARCH_DEFAULT_PIPELINE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION), - Collections.emptyMap(), - nodes - ); - AtomicInteger calledTimes = new AtomicInteger(0); - client.setVerifier((action, request, listener) -> verifyComponentTemplateInstalled(calledTimes, action, request, listener)); - registry.clusterChanged(event); - assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getComponentTemplateConfigs().size()))); - } - - public void testThatUnversionedOldComponentTemplatesAreUpgraded() throws Exception { - DiscoveryNode node = DiscoveryNodeUtils.create("node"); - DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); - - ClusterChangedEvent event = createClusterChangedEvent( - Collections.emptyMap(), - Collections.singletonMap(ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-mappings", null), - Collections.singletonMap(ConnectorTemplateRegistry.SEARCH_DEFAULT_PIPELINE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION), - Collections.emptyMap(), - nodes - ); - AtomicInteger calledTimes = new AtomicInteger(0); - client.setVerifier((action, request, listener) -> verifyComponentTemplateInstalled(calledTimes, action, request, listener)); - registry.clusterChanged(event); - assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getComponentTemplateConfigs().size()))); - } - - public void testSameOrHigherVersionComponentTemplateNotUpgraded() { - DiscoveryNode node = DiscoveryNodeUtils.create("node"); - DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); - - Map versions = new HashMap<>(); - versions.put(ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-mappings", ConnectorTemplateRegistry.REGISTRY_VERSION); - versions.put(ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-settings", ConnectorTemplateRegistry.REGISTRY_VERSION); - versions.put(ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-mappings", ConnectorTemplateRegistry.REGISTRY_VERSION); - 
versions.put(ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-settings", ConnectorTemplateRegistry.REGISTRY_VERSION); - versions.put(ConnectorTemplateRegistry.ACCESS_CONTROL_TEMPLATE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION); - ClusterChangedEvent sameVersionEvent = createClusterChangedEvent(Collections.emptyMap(), versions, nodes); - client.setVerifier((action, request, listener) -> { - if (action == PutPipelineTransportAction.TYPE) { - // Ignore this, it's verified in another test - return AcknowledgedResponse.TRUE; - } - if (action instanceof PutComponentTemplateAction) { - fail("template should not have been re-installed"); - return null; - } else if (action == ILMActions.PUT) { - // Ignore this, it's verified in another test - return AcknowledgedResponse.TRUE; - } else if (action == TransportPutComposableIndexTemplateAction.TYPE) { - // Ignore this, it's verified in another test - return AcknowledgedResponse.TRUE; - } else { - fail("client called with unexpected request:" + request.toString()); - return null; - } - }); - registry.clusterChanged(sameVersionEvent); - - versions.clear(); - versions.put( - ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-mappings", - ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) - ); - versions.put( - ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-settings", - ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) - ); - versions.put( - ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-mappings", - ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) - ); - versions.put( - ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-settings", - ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) - ); - versions.put( - ConnectorTemplateRegistry.ACCESS_CONTROL_TEMPLATE_NAME, - ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) - ); - ClusterChangedEvent higherVersionEvent = createClusterChangedEvent(Collections.emptyMap(), versions, nodes); - registry.clusterChanged(higherVersionEvent); - } - public void testThatMissingMasterNodeDoesNothing() { DiscoveryNode localNode = DiscoveryNodeUtils.create("node"); DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").add(localNode).build(); @@ -260,7 +125,7 @@ public void testThatMissingMasterNodeDoesNothing() { }); ClusterChangedEvent event = createClusterChangedEvent( - Collections.singletonMap(ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME, null), + Collections.singletonMap(ConnectorTemplateRegistry.ACCESS_CONTROL_TEMPLATE_NAME, null), Collections.emptyMap(), nodes ); @@ -357,10 +222,7 @@ private ActionResponse verifyComposableTemplateInstalled( assertThat(putRequest.indexTemplate().version(), equalTo((long) ConnectorTemplateRegistry.REGISTRY_VERSION)); final List indexPatterns = putRequest.indexTemplate().indexPatterns(); assertThat(indexPatterns, hasSize(1)); - assertThat( - indexPatterns, - contains(oneOf(ACCESS_CONTROL_INDEX_NAME_PATTERN, CONNECTOR_INDEX_NAME_PATTERN, CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN)) - ); + assertThat(indexPatterns, contains(ACCESS_CONTROL_INDEX_NAME_PATTERN)); assertNotNull(listener); return new TestPutIndexTemplateResponse(true); } else { diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index c563bc0a14ee3..3f2f47e190882 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.application.connector; -import org.elasticsearch.client.internal.IndicesAdminClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.XContentType; @@ -27,7 +26,6 @@ import org.elasticsearch.xpack.application.connector.filtering.FilteringValidation; import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationInfo; import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationState; -import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobType; import org.elasticsearch.xpack.core.scheduler.Cron; @@ -47,55 +45,14 @@ import static org.elasticsearch.test.ESTestCase.randomFrom; import static org.elasticsearch.test.ESTestCase.randomInt; import static org.elasticsearch.test.ESTestCase.randomList; -import static org.elasticsearch.test.ESTestCase.randomLong; import static org.elasticsearch.test.ESTestCase.randomLongBetween; -import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_INDEX_NAME_PATTERN; -import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN; -import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME; -import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME; +import static org.elasticsearch.test.ESTestCase.randomNonNegativeLong; +import static org.elasticsearch.test.ESTestCase.randomShort; public final class ConnectorTestUtils { public static final String NULL_STRING = null; - /** - * Registers index templates for instances of {@link Connector} and {@link ConnectorSyncJob} with essential field mappings. This method - * only includes mappings for fields relevant to test cases, specifying field types to ensure correct ES query logic behavior. - * - * @param indicesAdminClient The Elasticsearch indices admin client used for template registration. 
- */ - - public static void registerSimplifiedConnectorIndexTemplates(IndicesAdminClient indicesAdminClient) { - - indicesAdminClient.preparePutTemplate(CONNECTOR_TEMPLATE_NAME) - .setPatterns(List.of(CONNECTOR_INDEX_NAME_PATTERN)) - .setVersion(0) - .setMapping( - "service_type", - "type=keyword,store=true", - "status", - "type=keyword,store=true", - "index_name", - "type=keyword,store=true", - "configuration", - "type=object" - ) - .get(); - - indicesAdminClient.preparePutTemplate(CONNECTOR_SYNC_JOBS_TEMPLATE_NAME) - .setPatterns(List.of(CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN)) - .setVersion(0) - .setMapping( - "job_type", - "type=keyword,store=true", - "connector.id", - "type=keyword,store=true", - "status", - "type=keyword,store=true" - ) - .get(); - } - public static PutConnectorAction.Request getRandomPutConnectorActionRequest() { return new PutConnectorAction.Request( randomAlphaOfLengthBetween(5, 15), @@ -144,9 +101,9 @@ public static ConnectorSyncInfo getRandomConnectorSyncInfo() { return new ConnectorSyncInfo.Builder().setLastAccessControlSyncError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setLastAccessControlSyncScheduledAt(randomFrom(new Instant[] { null, ConnectorTestUtils.randomInstant() })) .setLastAccessControlSyncStatus(randomFrom(new ConnectorSyncStatus[] { null, getRandomSyncStatus() })) - .setLastDeletedDocumentCount(randomLong()) + .setLastDeletedDocumentCount(randomNonNegativeLong()) .setLastIncrementalSyncScheduledAt(randomFrom(new Instant[] { null, ConnectorTestUtils.randomInstant() })) - .setLastIndexedDocumentCount(randomLong()) + .setLastIndexedDocumentCount(randomNonNegativeLong()) .setLastSyncError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setLastSyncScheduledAt(randomFrom(new Instant[] { null, ConnectorTestUtils.randomInstant() })) .setLastSyncStatus(randomFrom(new ConnectorSyncStatus[] { null, getRandomSyncStatus() })) @@ -197,7 +154,7 @@ private static FilteringValidation getRandomFilteringValidationError() { public static ConnectorFiltering getRandomConnectorFiltering() { Instant currentTimestamp = Instant.now(); - int order = randomInt(); + int order = randomShort(); return new ConnectorFiltering.Builder().setActive( new FilteringRules.Builder().setAdvancedSnippet( diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index f6c0a54f107b4..fe6d97a871e0c 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -20,8 +20,11 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -58,7 +61,6 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; 
import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.ACCESS_CONTROL_INDEX_PREFIX; -import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.registerSimplifiedConnectorIndexTemplates; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -86,14 +88,12 @@ protected Collection> getPlugins() { List> plugins = new ArrayList<>(super.getPlugins()); // Reindex plugin is required for testDeleteAllSyncJobsByConnectorId (supports delete_by_query) plugins.add(ReindexPlugin.class); + plugins.add(TestPlugin.class); return plugins; } @Before public void setup() throws Exception { - - registerSimplifiedConnectorIndexTemplates(indicesAdmin()); - connectorOneId = createConnector(ConnectorTestUtils.getRandomConnector()); connectorTwoId = createConnector(ConnectorTestUtils.getRandomConnector()); connectorThreeId = createConnector(ConnectorTestUtils.getRandomConnectorWithDetachedIndex()); @@ -805,18 +805,18 @@ public void testUpdateConnectorSyncJobIngestionStats() throws Exception { Instant requestLastSeen = request.getLastSeen(); Map metadata = request.getMetadata(); - Long deletedDocumentCountAfterUpdate = (Long) syncJobSourceAfterUpdate.get( + Long deletedDocumentCountAfterUpdate = ((Number) syncJobSourceAfterUpdate.get( ConnectorSyncJob.DELETED_DOCUMENT_COUNT_FIELD.getPreferredName() - ); - Long indexedDocumentCountAfterUpdate = (Long) syncJobSourceAfterUpdate.get( + )).longValue(); + Long indexedDocumentCountAfterUpdate = ((Number) syncJobSourceAfterUpdate.get( ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName() - ); - Long indexedDocumentVolumeAfterUpdate = (Long) syncJobSourceAfterUpdate.get( + )).longValue(); + Long indexedDocumentVolumeAfterUpdate = ((Number) syncJobSourceAfterUpdate.get( ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName() - ); - Long totalDocumentCountAfterUpdate = (Long) syncJobSourceAfterUpdate.get( + )).longValue(); + Long totalDocumentCountAfterUpdate = ((Number) syncJobSourceAfterUpdate.get( ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName() - ); + )).longValue(); Instant lastSeenAfterUpdate = Instant.parse( (String) syncJobSourceAfterUpdate.get(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName()) ); @@ -1411,4 +1411,24 @@ private String updateConnectorSyncJobStatusWithoutStateMachineGuard(String syncJ // wait 10 seconds for connector creation return index.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).getId(); } + + /** + * Test plugin to register the {@link ConnectorSyncJobIndexService} system index descriptor. 
+ */ + public static class TestPlugin extends Plugin implements SystemIndexPlugin { + @Override + public Collection getSystemIndexDescriptors(Settings settings) { + return List.of(ConnectorSyncJobIndexService.getSystemIndexDescriptor()); + } + + @Override + public String getFeatureName() { + return this.getClass().getSimpleName(); + } + + @Override + public String getFeatureDescription() { + return this.getClass().getCanonicalName(); + } + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index e72bf04fb7e55..1e6426051e04b 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -36,7 +36,7 @@ import static org.elasticsearch.test.ESTestCase.randomInt; import static org.elasticsearch.test.ESTestCase.randomLong; import static org.elasticsearch.test.ESTestCase.randomMap; -import static org.elasticsearch.test.ESTestCase.randomNonNegativeLong; +import static org.elasticsearch.test.ESTestCase.randomNonNegativeInt; public class ConnectorSyncJobTestUtils { @@ -51,11 +51,11 @@ public static ConnectorSyncJob getRandomConnectorSyncJob() { .setCompletedAt(randomFrom(new Instant[] { null, randomInstantBetween(lowerBoundInstant, upperBoundInstant) })) .setConnector(ConnectorTestUtils.getRandomSyncJobConnectorInfo()) .setCreatedAt(randomInstantBetween(lowerBoundInstant, upperBoundInstant)) - .setDeletedDocumentCount(randomLong()) + .setDeletedDocumentCount(randomNonNegativeInt()) .setError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setId(randomAlphaOfLength(10)) - .setIndexedDocumentCount(randomLong()) - .setIndexedDocumentVolume(randomLong()) + .setIndexedDocumentCount(randomNonNegativeInt()) + .setIndexedDocumentVolume(randomNonNegativeInt()) .setJobType(getRandomConnectorJobType()) .setLastSeen(randomFrom(new Instant[] { null, randomInstantBetween(lowerBoundInstant, upperBoundInstant) })) .setMetadata( @@ -67,7 +67,7 @@ public static ConnectorSyncJob getRandomConnectorSyncJob() { ) .setStartedAt(randomFrom(new Instant[] { null, randomInstantBetween(lowerBoundInstant, upperBoundInstant) })) .setStatus(ConnectorTestUtils.getRandomSyncStatus()) - .setTotalDocumentCount(randomLong()) + .setTotalDocumentCount(randomNonNegativeInt()) .setTriggerMethod(getRandomConnectorSyncJobTriggerMethod()) .setWorkerHostname(randomAlphaOfLength(10)) .build(); @@ -156,10 +156,10 @@ public static UpdateConnectorSyncJobIngestionStatsAction.Request getRandomUpdate return new UpdateConnectorSyncJobIngestionStatsAction.Request( randomAlphaOfLength(10), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), + (long) randomNonNegativeInt(), + (long) randomNonNegativeInt(), + (long) randomNonNegativeInt(), + (long) randomNonNegativeInt(), randomInstantBetween(lowerBoundInstant, upperBoundInstant), randomMap(2, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4))) ); @@ -173,10 +173,10 @@ public static UpdateConnectorSyncJobIngestionStatsAction.Request getRandomUpdate return new UpdateConnectorSyncJobIngestionStatsAction.Request( syncJobId, - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), - 
randomNonNegativeLong(), + (long) randomNonNegativeInt(), + (long) randomNonNegativeInt(), + (long) randomNonNegativeInt(), + (long) randomNonNegativeInt(), randomInstantBetween(lowerBoundInstant, upperBoundInstant), randomMap(2, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4))) ); From 39c4eda6ce362ba75363f83523d61e242c45a2dc Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Fri, 24 Jan 2025 16:31:30 +0400 Subject: [PATCH 009/383] Add back keep_alive to async_search.submit rest-api-spec (#120781) --- docs/changelog/120781.yaml | 5 +++++ .../resources/rest-api-spec/api/async_search.submit.json | 5 +++++ 2 files changed, 10 insertions(+) create mode 100644 docs/changelog/120781.yaml diff --git a/docs/changelog/120781.yaml b/docs/changelog/120781.yaml new file mode 100644 index 0000000000000..67c7d90528d6e --- /dev/null +++ b/docs/changelog/120781.yaml @@ -0,0 +1,5 @@ +pr: 120781 +summary: Add back `keep_alive` to `async_search.submit` rest-api-spec +area: Search +type: bug +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json index 3de0dec85f547..8ae2fff22281c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json @@ -43,6 +43,11 @@ "description":"Control whether the response should be stored in the cluster if it completed within the provided [wait_for_completion] time (default: false)", "default":false }, + "keep_alive": { + "type": "time", + "description": "Update the time interval in which the results (partial or final) for this search will be available", + "default": "5d" + }, "batched_reduce_size":{ "type":"number", "description":"The number of shard results that should be reduced at once on the coordinating node. 
This value should be used as the granularity at which progress results will be made available.", From 32eada688f7de63c75a22c4b9f6464aab75c01b7 Mon Sep 17 00:00:00 2001 From: Alexey Ivanov Date: Fri, 24 Jan 2025 12:41:00 +0000 Subject: [PATCH 010/383] Introduce minimumCompatibilityVersion to BuildVersion (ES-9378) (#119101) --- .../org/elasticsearch/ReleaseVersions.java | 5 +-- .../main/java/org/elasticsearch/Version.java | 20 ++++----- .../elasticsearch/cluster/ClusterState.java | 7 ---- .../org/elasticsearch/env/BuildVersion.java | 6 +++ .../env/DefaultBuildVersion.java | 5 +++ .../elasticsearch/env/NodeEnvironment.java | 29 ++++--------- .../org/elasticsearch/env/NodeMetadata.java | 4 +- .../elasticsearch/env/BuildVersionTests.java | 10 +++++ .../env/NodeEnvironmentTests.java | 41 ++++++++++++------- .../elasticsearch/env/NodeMetadataTests.java | 28 ++++++------- .../index/IndexVersionTests.java | 1 + .../index/mapper/MapperTestCase.java | 2 - 12 files changed, 81 insertions(+), 77 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/ReleaseVersions.java b/server/src/main/java/org/elasticsearch/ReleaseVersions.java index 09d6bdd1b4799..22cd18c7b4ac3 100644 --- a/server/src/main/java/org/elasticsearch/ReleaseVersions.java +++ b/server/src/main/java/org/elasticsearch/ReleaseVersions.java @@ -10,7 +10,7 @@ package org.elasticsearch; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.internal.BuildExtension; import org.elasticsearch.plugins.ExtensionLoader; @@ -114,8 +114,7 @@ private static IntFunction lookupFunction(NavigableMap, ToXContentFragment { VERSION_STRINGS = Map.copyOf(builderByString); } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - // Re-enable this assertion once the rest api version is bumped private static void assertRestApiVersion() { - // assert RestApiVersion.current().major == CURRENT.major && RestApiVersion.previous().major == CURRENT.major - 1 - // : "RestApiVersion must be upgraded " - // + "to reflect major from Version.CURRENT [" - // + CURRENT.major - // + "]" - // + " but is still set to [" - // + RestApiVersion.current().major - // + "]"; + assert RestApiVersion.current().major == CURRENT.major && RestApiVersion.previous().major == CURRENT.major - 1 + : "RestApiVersion must be upgraded " + + "to reflect major from Version.CURRENT [" + + CURRENT.major + + "]" + + " but is still set to [" + + RestApiVersion.current().major + + "]"; } public static Version readVersion(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index 62f2947d06a41..6b222fb8f5bdc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -47,7 +47,6 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.xcontent.ToXContent; @@ -1039,12 +1038,6 @@ public static ClusterState readFrom(StreamInput in, DiscoveryNode localNode) thr return builder.build(); } - /** - * If the cluster state does not contain transport version information, this is the version - * that is inferred 
for all nodes on version 8.8.0 or above. - */ - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final TransportVersion INFERRED_TRANSPORT_VERSION = TransportVersions.V_8_8_0; public static final Version VERSION_INTRODUCING_TRANSPORT_VERSIONS = Version.V_8_8_0; @Override diff --git a/server/src/main/java/org/elasticsearch/env/BuildVersion.java b/server/src/main/java/org/elasticsearch/env/BuildVersion.java index 5c3602283fef3..877a07d9a3ee5 100644 --- a/server/src/main/java/org/elasticsearch/env/BuildVersion.java +++ b/server/src/main/java/org/elasticsearch/env/BuildVersion.java @@ -73,6 +73,12 @@ public abstract class BuildVersion implements ToXContentFragment, Writeable { */ public abstract String toNodeMetadata(); + /** + * Returns the minimum compatible build version based on the current version. + * Ie a node needs to have at least the return version in order to communicate with a node running the current version. + */ + public abstract BuildVersion minimumCompatibilityVersion(); + /** * Create a {@link BuildVersion} from a version ID number. * diff --git a/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java b/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java index 913a352debfb8..39b9278a78c97 100644 --- a/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java +++ b/server/src/main/java/org/elasticsearch/env/DefaultBuildVersion.java @@ -73,6 +73,11 @@ public String toNodeMetadata() { return Integer.toString(version.id()); } + @Override + public BuildVersion minimumCompatibilityVersion() { + return fromVersionId(version.minimumCompatibilityVersion().id); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return builder.value(version.id()); diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index afadb8f5b3011..90e2ae5c62703 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -86,8 +86,6 @@ import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -525,7 +523,7 @@ static void checkForIndexCompatibility(Logger logger, DataPath... dataPaths) thr logger.info("oldest index version recorded in NodeMetadata {}", metadata.oldestIndexVersion()); if (metadata.oldestIndexVersion().before(IndexVersions.MINIMUM_COMPATIBLE)) { - String bestDowngradeVersion = getBestDowngradeVersion(metadata.previousNodeVersion().toString()); + BuildVersion bestDowngradeVersion = getBestDowngradeVersion(metadata.previousNodeVersion()); throw new IllegalStateException( "Cannot start this node because it holds metadata for indices with version [" + metadata.oldestIndexVersion().toReleaseVersion() @@ -1504,28 +1502,17 @@ private static void tryWriteTempFile(Path path) throws IOException { /** * Get a useful version string to direct a user's downgrade operation * - *
If a user is trying to install 8.0 but has incompatible indices, the user should - * downgrade to 7.17.x. We return 7.17.0, unless the user is trying to upgrade from - * a 7.17.x release, in which case we return the last installed version. + *
If a user is trying to install current major N but has incompatible indices, the user should + * downgrade to last minor of the previous major (N-1).last. We return (N-1).last, unless the user is trying to upgrade from + * a (N-1).last.x release, in which case we return the last installed version. * @return Version to downgrade to */ // visible for testing - static String getBestDowngradeVersion(String previousNodeVersion) { - // this method should only be called in the context of an upgrade to 8.x - assert Build.current().version().startsWith("9.") == false; - Pattern pattern = Pattern.compile("^7\\.(\\d+)\\.\\d+$"); - Matcher matcher = pattern.matcher(previousNodeVersion); - if (matcher.matches()) { - try { - int minorVersion = Integer.parseInt(matcher.group(1)); - if (minorVersion >= 17) { - return previousNodeVersion; - } - } catch (NumberFormatException e) { - // continue and return default - } + static BuildVersion getBestDowngradeVersion(BuildVersion previousNodeVersion) { + if (previousNodeVersion.onOrAfterMinimumCompatible()) { + return previousNodeVersion; } - return "7.17.0"; + return BuildVersion.current().minimumCompatibilityVersion(); } } diff --git a/server/src/main/java/org/elasticsearch/env/NodeMetadata.java b/server/src/main/java/org/elasticsearch/env/NodeMetadata.java index c71a3798be1f7..48268b5001f3e 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeMetadata.java +++ b/server/src/main/java/org/elasticsearch/env/NodeMetadata.java @@ -10,7 +10,6 @@ package org.elasticsearch.env; import org.elasticsearch.Build; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.gateway.MetadataStateFormat; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -158,12 +157,11 @@ public void setOldestIndexVersion(int oldestIndexVersion) { this.oldestIndexVersion = IndexVersion.fromId(oldestIndexVersion); } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // version is required in the node metadata from v9 onwards public NodeMetadata build() { final IndexVersion oldestIndexVersion; if (this.nodeVersion == null) { - nodeVersion = BuildVersion.fromVersionId(0); + throw new IllegalStateException("Node version is required in node metadata"); } if (this.previousNodeVersion == null) { previousNodeVersion = nodeVersion; diff --git a/server/src/test/java/org/elasticsearch/env/BuildVersionTests.java b/server/src/test/java/org/elasticsearch/env/BuildVersionTests.java index 9fd889426fd2d..96875ac1a95e5 100644 --- a/server/src/test/java/org/elasticsearch/env/BuildVersionTests.java +++ b/server/src/test/java/org/elasticsearch/env/BuildVersionTests.java @@ -43,6 +43,16 @@ public void testIsFutureVersion() { assertTrue(futureVersion.isFutureVersion()); } + public void testMinimumCompatibilityVersion() { + BuildVersion minCompatible = BuildVersion.fromVersionId(Version.CURRENT.minimumCompatibilityVersion().id()); + assertThat(BuildVersion.current().minimumCompatibilityVersion(), equalTo(minCompatible)); + + BuildVersion previousCompatible = BuildVersion.fromVersionId( + Version.CURRENT.minimumCompatibilityVersion().minimumCompatibilityVersion().id() + ); + assertThat(minCompatible.minimumCompatibilityVersion(), equalTo(previousCompatible)); + } + public static BuildVersion increment(BuildVersion version) { return BuildVersion.fromVersionId(((DefaultBuildVersion) version).version.id() + 1); } diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java 
b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index 42a94ebf8c6ff..82934bda2d259 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -584,7 +584,9 @@ public void testIndexCompatibilityChecks() throws IOException { containsString("it holds metadata for indices with version [" + oldIndexVersion.toReleaseVersion() + "]"), containsString( "Revert this node to version [" - + (previousNodeVersion.major == Version.V_8_0_0.major ? Version.V_7_17_0 : previousNodeVersion) + + (previousNodeVersion.major == Version.CURRENT.major + ? Version.CURRENT.minimumCompatibilityVersion() + : previousNodeVersion) + "]" ) ) @@ -638,20 +640,31 @@ public void testSymlinkDataDirectory() throws Exception { env.close(); } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - @AwaitsFix(bugUrl = "test won't work until we remove and bump minimum index versions") public void testGetBestDowngradeVersion() { - assertThat(NodeEnvironment.getBestDowngradeVersion("7.17.0"), Matchers.equalTo("7.17.0")); - assertThat(NodeEnvironment.getBestDowngradeVersion("7.17.5"), Matchers.equalTo("7.17.5")); - assertThat(NodeEnvironment.getBestDowngradeVersion("7.17.1234"), Matchers.equalTo("7.17.1234")); - assertThat(NodeEnvironment.getBestDowngradeVersion("7.18.0"), Matchers.equalTo("7.18.0")); - assertThat(NodeEnvironment.getBestDowngradeVersion("7.17.x"), Matchers.equalTo("7.17.0")); - assertThat(NodeEnvironment.getBestDowngradeVersion("7.17.5-SNAPSHOT"), Matchers.equalTo("7.17.0")); - assertThat(NodeEnvironment.getBestDowngradeVersion("7.17.6b"), Matchers.equalTo("7.17.0")); - assertThat(NodeEnvironment.getBestDowngradeVersion("7.16.0"), Matchers.equalTo("7.17.0")); - // when we get to version 7.2147483648.0 we will have to rethink our approach, but for now we return 7.17.0 with an integer overflow - assertThat(NodeEnvironment.getBestDowngradeVersion("7." 
+ Integer.MAX_VALUE + "0.0"), Matchers.equalTo("7.17.0")); - assertThat(NodeEnvironment.getBestDowngradeVersion("foo"), Matchers.equalTo("7.17.0")); + assertThat( + NodeEnvironment.getBestDowngradeVersion(BuildVersion.fromString("8.18.0")), + Matchers.equalTo(BuildVersion.fromString("8.18.0")) + ); + assertThat( + NodeEnvironment.getBestDowngradeVersion(BuildVersion.fromString("8.18.5")), + Matchers.equalTo(BuildVersion.fromString("8.18.5")) + ); + assertThat( + NodeEnvironment.getBestDowngradeVersion(BuildVersion.fromString("8.18.12")), + Matchers.equalTo(BuildVersion.fromString("8.18.12")) + ); + assertThat( + NodeEnvironment.getBestDowngradeVersion(BuildVersion.fromString("8.19.0")), + Matchers.equalTo(BuildVersion.fromString("8.19.0")) + ); + assertThat( + NodeEnvironment.getBestDowngradeVersion(BuildVersion.fromString("8.17.0")), + Matchers.equalTo(BuildVersion.fromString("8.18.0")) + ); + assertThat( + NodeEnvironment.getBestDowngradeVersion(BuildVersion.fromString("7.17.0")), + Matchers.equalTo(BuildVersion.fromString("8.18.0")) + ); } private void verifyFailsOnShardData(Settings settings, Path indexPath, String shardDataDirName) { diff --git a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java index eccdd1c6ffea7..dab2932369c21 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java @@ -9,12 +9,11 @@ package org.elasticsearch.env; import org.elasticsearch.Build; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.core.Tuple; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.gateway.MetadataStateFormat; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; import org.elasticsearch.test.VersionUtils; @@ -28,6 +27,7 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; @@ -80,22 +80,20 @@ public void testEqualsHashcodeSerialization() { ); } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - @AwaitsFix(bugUrl = "as mentioned in the comment below, the behavior here is changing for 9.0 so this test needs updating") - public void testReadsFormatWithoutVersion() throws IOException { - // the behaviour tested here is only appropriate if the current version is compatible with versions 7 and earlier - assertTrue(IndexVersions.MINIMUM_COMPATIBLE.onOrBefore(IndexVersions.V_7_0_0)); - // when the current version is incompatible with version 7, the behaviour should change to reject files like the given resource - // which do not have the version field - + public void testFailsToReadFormatWithoutVersion() throws IOException { final Path tempDir = createTempDir(); final Path stateDir = Files.createDirectory(tempDir.resolve(MetadataStateFormat.STATE_DIR_NAME)); final InputStream resource = this.getClass().getResourceAsStream("testReadsFormatWithoutVersion.binary"); assertThat(resource, notNullValue()); Files.copy(resource, stateDir.resolve(NodeMetadata.FORMAT.getStateFileName(between(0, Integer.MAX_VALUE)))); - final NodeMetadata nodeMetadata = NodeMetadata.FORMAT.loadLatestState(logger, 
xContentRegistry(), tempDir); - assertThat(nodeMetadata.nodeId(), equalTo("y6VUVMSaStO4Tz-B5BxcOw")); - assertThat(nodeMetadata.nodeVersion(), equalTo(BuildVersion.fromVersionId(0))); + + ElasticsearchException ex = expectThrows( + ElasticsearchException.class, + () -> NodeMetadata.FORMAT.loadLatestState(logger, xContentRegistry(), tempDir) + ); + Throwable rootCause = ex.getRootCause(); + assertThat(rootCause, instanceOf(IllegalStateException.class)); + assertThat("Node version is required in node metadata", equalTo(rootCause.getMessage())); } public void testUpgradesLegitimateVersions() { @@ -155,11 +153,9 @@ public void testDoesNotUpgradeAncientVersion() { ); } - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - @AwaitsFix(bugUrl = "Needs to be updated for 9.0 version bump") public void testUpgradeMarksPreviousVersion() { final String nodeId = randomAlphaOfLength(10); - final Version version = VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.V_8_0_0); + final Version version = VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.V_9_0_0); final BuildVersion buildVersion = BuildVersion.fromVersionId(version.id()); final NodeMetadata nodeMetadata = new NodeMetadata(nodeId, buildVersion, IndexVersion.current()).upgradeToCurrentVersion(); diff --git a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java index 8575b87c36799..f6ebd33aae9dd 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java @@ -153,6 +153,7 @@ public void testMax() { public void testGetMinimumCompatibleIndexVersion() { assertThat(IndexVersion.getMinimumCompatibleIndexVersion(7170099), equalTo(IndexVersion.fromId(6000099))); assertThat(IndexVersion.getMinimumCompatibleIndexVersion(8000099), equalTo(IndexVersion.fromId(7000099))); + assertThat(IndexVersion.getMinimumCompatibleIndexVersion(9000099), equalTo(IndexVersion.fromId(8000099))); assertThat(IndexVersion.getMinimumCompatibleIndexVersion(10000000), equalTo(IndexVersion.fromId(9000000))); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index a62af5729a096..809660c5e9af8 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -105,8 +105,6 @@ */ public abstract class MapperTestCase extends MapperServiceTestCase { - public static final IndexVersion DEPRECATED_BOOST_INDEX_VERSION = IndexVersions.V_7_10_0; - protected abstract void minimalMapping(XContentBuilder b) throws IOException; /** From 2fea5939cfa4ed5a945a08ea5cf334fa72c6cccb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Fri, 24 Jan 2025 14:45:35 +0100 Subject: [PATCH 011/383] [Profiling] Rename profiling.host.name to host.name in profiling-hosts (#120783) --- .../profiling/component-template/profiling-hosts.json | 6 +++--- .../resources/data/profiling-hosts.ndjson | 4 ++-- .../resources/rest-api-spec/test/profiling/10_basic.yml | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json 
b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json index b0c99bf8a0cea..e23ae42ce1f4d 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json @@ -40,6 +40,9 @@ "id": { "type": "keyword" }, + "name": { + "type": "keyword" + }, "type": { "type": "keyword" } @@ -177,9 +180,6 @@ "tags": { "type": "keyword" }, - "name": { - "type": "keyword" - }, "machine": { "type": "keyword" }, diff --git a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson index e12a670a79d18..8cd5425c86994 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson +++ b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson @@ -1,4 +1,4 @@ {"create": {"_index": "profiling-hosts","_id":"eLH27YsBj2lLi3tJYlvr"}} -{"profiling.project.id":100,"host.id":"8457605156473051743","@timestamp":1700504426,"ecs.version":"1.12.0","profiling.agent.build_timestamp":1688111067,"profiling.instance.private_ipv4s":["192.168.1.2"],"ec2.instance_life_cycle":"on-demand","profiling.agent.config.map_scale_factor":0,"host.type":"i3.2xlarge","profiling.host.ip":"192.168.1.2","profiling.agent.config.bpf_log_level":0,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.file":"/etc/prodfiler/prodfiler.conf","ec2.local_ipv4":"192.168.1.2","profiling.agent.config.no_kernel_version_check":false,"host.arch":"amd64","profiling.host.tags":["cloud_provider:aws","cloud_environment:qa","cloud_region:eu-west-1"],"profiling.agent.config.probabilistic_threshold":100,"profiling.agent.config.disable_tls":false,"profiling.agent.config.tracers":"all","profiling.agent.start_time":1700090045589,"profiling.agent.config.max_elements_per_interval":800,"cloud.provider":"aws","cloud.region":"eu-west-1","profiling.agent.config.present_cpu_cores":8,"profiling.host.kernel_version":"9.9.9-0-aws","profiling.agent.config.bpf_log_size":65536,"profiling.agent.config.known_traces_entries":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.verbose":false,"profiling.agent.config.probabilistic_interval":"1m0s","ec2.placement.availability_zone_id":"euw1-az1","ec2.security_groups":"","ec2.local_hostname":"ip-192-168-1-2.eu-west-1.compute.internal","ec2.placement.availability_zone":"eu-west-1c","profiling.agent.config.upload_symbols":false,"profiling.host.sysctl.kernel.bpf_stats_enabled":0,"profiling.host.name":"ip-192-168-1-2","ec2.mac":"00:11:22:33:44:55","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-aws","profiling.agent.config.cache_directory":"/var/cache/optimyze/","profiling.agent.version":"v8.12.0","ec2.hostname":"ip-192-168-1-2.eu-west-1.compute.internal","profiling.agent.config.elastic_mode":false,"ec2.ami_id":"ami-aaaaaaaaaaa","ec2.instance_id":"i-0b999999999999999"} 
+{"profiling.project.id":100,"host.id":"8457605156473051743","@timestamp":1700504426,"ecs.version":"1.12.0","profiling.agent.build_timestamp":1688111067,"profiling.instance.private_ipv4s":["192.168.1.2"],"ec2.instance_life_cycle":"on-demand","profiling.agent.config.map_scale_factor":0,"host.type":"i3.2xlarge","profiling.host.ip":"192.168.1.2","profiling.agent.config.bpf_log_level":0,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.file":"/etc/prodfiler/prodfiler.conf","ec2.local_ipv4":"192.168.1.2","profiling.agent.config.no_kernel_version_check":false,"host.arch":"amd64","profiling.host.tags":["cloud_provider:aws","cloud_environment:qa","cloud_region:eu-west-1"],"profiling.agent.config.probabilistic_threshold":100,"profiling.agent.config.disable_tls":false,"profiling.agent.config.tracers":"all","profiling.agent.start_time":1700090045589,"profiling.agent.config.max_elements_per_interval":800,"cloud.provider":"aws","cloud.region":"eu-west-1","profiling.agent.config.present_cpu_cores":8,"profiling.host.kernel_version":"9.9.9-0-aws","profiling.agent.config.bpf_log_size":65536,"profiling.agent.config.known_traces_entries":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.verbose":false,"profiling.agent.config.probabilistic_interval":"1m0s","ec2.placement.availability_zone_id":"euw1-az1","ec2.security_groups":"","ec2.local_hostname":"ip-192-168-1-2.eu-west-1.compute.internal","ec2.placement.availability_zone":"eu-west-1c","profiling.agent.config.upload_symbols":false,"profiling.host.sysctl.kernel.bpf_stats_enabled":0,"host.name":"ip-192-168-1-2","ec2.mac":"00:11:22:33:44:55","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-aws","profiling.agent.config.cache_directory":"/var/cache/optimyze/","profiling.agent.version":"v8.12.0","ec2.hostname":"ip-192-168-1-2.eu-west-1.compute.internal","profiling.agent.config.elastic_mode":false,"ec2.ami_id":"ami-aaaaaaaaaaa","ec2.instance_id":"i-0b999999999999999"} {"create": {"_index": "profiling-hosts", "_id": "u_fHlYwBkmZvQ6tVo1Lr"}} 
-{"profiling.project.id":100,"host.id":"7416508186220657211","@timestamp":1703319912,"ecs.version":"1.12.0","profiling.agent.version":"8.11.0","profiling.agent.config.map_scale_factor":0,"profiling.agent.config.probabilistic_threshold":100,"profiling.host.name":"ip-192-186-1-3","profiling.agent.config.no_kernel_version_check":false,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.elastic_mode":false,"host.type":"Standard_D4s_v3","azure.compute.environment":"AzurePublicCloud","profiling.agent.config.bpf_log_level":0,"profiling.agent.config.known_traces_entries":65536,"profiling.agent.config.ca_address":"example.com:443","profiling.agent.config.tags":"cloud_provider:azure;cloud_environment:qa;cloud_region:eastus2","profiling.host.tags":["cloud_provider:azure","cloud_environment:qa","cloud_region:eastus2"],"profiling.host.kernel_version":"9.9.9-0-azure","profiling.agent.revision":"head-52cc2030","azure.compute.subscriptionid":"1-2-3-4-5","profiling.host.sysctl.kernel.bpf_stats_enabled":0,"host.arch":"amd64","azure.compute.zone":"3","profiling.agent.config.cache_directory":"/var/cache/Elastic/universal-profiling","azure.compute.name":"example-qa-eastus2-001-v1-zone3_6","profiling.agent.config.probabilistic_interval":"1m0s","cloud.provider":"azure","cloud.region":"eastus2","azure.compute.version":"1234.20230510.233254","profiling.instance.private_ipv4s":["192.168.1.3"],"profiling.agent.build_timestamp":1699000836,"profiling.agent.config.file":"/etc/Elastic/universal-profiling/pf-host-agent.conf","profiling.agent.config.bpf_log_size":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.tracers":"all","profiling.agent.config.present_cpu_cores":4,"profiling.agent.start_time":1702306987358,"profiling.agent.config.disable_tls":false,"azure.compute.ostype":"Linux","profiling.host.ip":"192.168.1.3","profiling.agent.config.max_elements_per_interval":400,"profiling.agent.config.upload_symbols":false,"azure.compute.tags":"bootstrap-version:v1;ece-id:001;environment:qa;identifier:v1;initial-config:;managed-by:terraform;monitored-by:core-infrastructure;owner:core-infrastructure;region_type:ess;role:blueprint;secondary_role:;vars-identifier:eastus2-001-v1","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-azure","profiling.agent.config.verbose":false,"azure.compute.vmid":"1-2-3-4-5"} 
+{"profiling.project.id":100,"host.id":"7416508186220657211","@timestamp":1703319912,"ecs.version":"1.12.0","profiling.agent.version":"8.11.0","profiling.agent.config.map_scale_factor":0,"profiling.agent.config.probabilistic_threshold":100,"host.name":"ip-192-186-1-3","profiling.agent.config.no_kernel_version_check":false,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.elastic_mode":false,"host.type":"Standard_D4s_v3","azure.compute.environment":"AzurePublicCloud","profiling.agent.config.bpf_log_level":0,"profiling.agent.config.known_traces_entries":65536,"profiling.agent.config.ca_address":"example.com:443","profiling.agent.config.tags":"cloud_provider:azure;cloud_environment:qa;cloud_region:eastus2","profiling.host.tags":["cloud_provider:azure","cloud_environment:qa","cloud_region:eastus2"],"profiling.host.kernel_version":"9.9.9-0-azure","profiling.agent.revision":"head-52cc2030","azure.compute.subscriptionid":"1-2-3-4-5","profiling.host.sysctl.kernel.bpf_stats_enabled":0,"host.arch":"amd64","azure.compute.zone":"3","profiling.agent.config.cache_directory":"/var/cache/Elastic/universal-profiling","azure.compute.name":"example-qa-eastus2-001-v1-zone3_6","profiling.agent.config.probabilistic_interval":"1m0s","cloud.provider":"azure","cloud.region":"eastus2","azure.compute.version":"1234.20230510.233254","profiling.instance.private_ipv4s":["192.168.1.3"],"profiling.agent.build_timestamp":1699000836,"profiling.agent.config.file":"/etc/Elastic/universal-profiling/pf-host-agent.conf","profiling.agent.config.bpf_log_size":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.tracers":"all","profiling.agent.config.present_cpu_cores":4,"profiling.agent.start_time":1702306987358,"profiling.agent.config.disable_tls":false,"azure.compute.ostype":"Linux","profiling.host.ip":"192.168.1.3","profiling.agent.config.max_elements_per_interval":400,"profiling.agent.config.upload_symbols":false,"azure.compute.tags":"bootstrap-version:v1;ece-id:001;environment:qa;identifier:v1;initial-config:;managed-by:terraform;monitored-by:core-infrastructure;owner:core-infrastructure;region_type:ess;role:blueprint;secondary_role:;vars-identifier:eastus2-001-v1","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-azure","profiling.agent.config.verbose":false,"azure.compute.vmid":"1-2-3-4-5"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml index 4a9212b8a7158..7d93e7f6e08c7 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml @@ -116,7 +116,7 @@ setup: - {"create": {"_index": "profiling-executables", "_id": "lHp5_WAgpLy2alrUVab6HA"}} - {"@timestamp": "1698624000", "Executable": {"build": {"id": "c5f89ea1c68710d2a493bb604c343a92c4f8ddeb"}, "file": {"name": "vmlinux"}}, "Symbolization": {"next_time": "4852491791"}, "ecs": {"version": "1.12.0"}} - {"create": {"_index": "profiling-hosts", "_id": "eLH27YsBj2lLi3tJYlvr"}} - - {"profiling.project.id": 100, "host.id": "8457605156473051743", "@timestamp": 1700504426, "ecs.version": "1.12.0", "profiling.agent.build_timestamp": 1688111067, "profiling.instance.private_ipv4s": ["192.168.1.2"], "ec2.instance_life_cycle": "on-demand", "profiling.agent.config.map_scale_factor": 0, "host.type": "i3.2xlarge", "profiling.host.ip": "192.168.1.2", 
"profiling.agent.config.bpf_log_level": 0, "profiling.host.sysctl.net.core.bpf_jit_enable": 1, "profiling.agent.config.file": "/etc/prodfiler/prodfiler.conf", "ec2.local_ipv4": "192.168.1.2", "profiling.agent.config.no_kernel_version_check": false, "host.arch": "amd64", "profiling.host.tags": ["cloud_provider:aws", "cloud_environment:qa", "cloud_region:eu-west-1"], "profiling.agent.config.probabilistic_threshold": 100, "profiling.agent.config.disable_tls": false, "profiling.agent.config.tracers": "all", "profiling.agent.start_time": 1700090045589, "profiling.agent.config.max_elements_per_interval": 800, "cloud.provider": "aws", "cloud.region": "eu-west-1", "profiling.agent.config.present_cpu_cores": 8, "profiling.host.kernel_version": "9.9.9-0-aws", "profiling.agent.config.bpf_log_size": 65536, "profiling.agent.config.known_traces_entries": 65536, "profiling.host.sysctl.kernel.unprivileged_bpf_disabled": 1, "profiling.agent.config.verbose": false, "profiling.agent.config.probabilistic_interval": "1m0s", "ec2.placement.availability_zone_id": "euw1-az1", "ec2.security_groups": "", "ec2.local_hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "ec2.placement.availability_zone": "eu-west-1c", "profiling.agent.config.upload_symbols": false, "profiling.host.sysctl.kernel.bpf_stats_enabled": 0, "profiling.host.name": "ip-192-168-1-2", "ec2.mac": "00:11:22:33:44:55", "profiling.host.kernel_proc_version": "Linux version 9.9.9-0-aws", "profiling.agent.config.cache_directory": "/var/cache/optimyze/", "profiling.agent.version": "v8.12.0", "ec2.hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "profiling.agent.config.elastic_mode": false, "ec2.ami_id": "ami-aaaaaaaaaaa", "ec2.instance_id": "i-0b999999999999999" } + - {"profiling.project.id": 100, "host.id": "8457605156473051743", "@timestamp": 1700504426, "ecs.version": "1.12.0", "profiling.agent.build_timestamp": 1688111067, "profiling.instance.private_ipv4s": ["192.168.1.2"], "ec2.instance_life_cycle": "on-demand", "profiling.agent.config.map_scale_factor": 0, "host.type": "i3.2xlarge", "profiling.host.ip": "192.168.1.2", "profiling.agent.config.bpf_log_level": 0, "profiling.host.sysctl.net.core.bpf_jit_enable": 1, "profiling.agent.config.file": "/etc/prodfiler/prodfiler.conf", "ec2.local_ipv4": "192.168.1.2", "profiling.agent.config.no_kernel_version_check": false, "host.arch": "amd64", "profiling.host.tags": ["cloud_provider:aws", "cloud_environment:qa", "cloud_region:eu-west-1"], "profiling.agent.config.probabilistic_threshold": 100, "profiling.agent.config.disable_tls": false, "profiling.agent.config.tracers": "all", "profiling.agent.start_time": 1700090045589, "profiling.agent.config.max_elements_per_interval": 800, "cloud.provider": "aws", "cloud.region": "eu-west-1", "profiling.agent.config.present_cpu_cores": 8, "profiling.host.kernel_version": "9.9.9-0-aws", "profiling.agent.config.bpf_log_size": 65536, "profiling.agent.config.known_traces_entries": 65536, "profiling.host.sysctl.kernel.unprivileged_bpf_disabled": 1, "profiling.agent.config.verbose": false, "profiling.agent.config.probabilistic_interval": "1m0s", "ec2.placement.availability_zone_id": "euw1-az1", "ec2.security_groups": "", "ec2.local_hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "ec2.placement.availability_zone": "eu-west-1c", "profiling.agent.config.upload_symbols": false, "profiling.host.sysctl.kernel.bpf_stats_enabled": 0, "host.name": "ip-192-168-1-2", "ec2.mac": "00:11:22:33:44:55", "profiling.host.kernel_proc_version": "Linux version 9.9.9-0-aws", 
"profiling.agent.config.cache_directory": "/var/cache/optimyze/", "profiling.agent.version": "v8.12.0", "ec2.hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "profiling.agent.config.elastic_mode": false, "ec2.ami_id": "ami-aaaaaaaaaaa", "ec2.instance_id": "i-0b999999999999999" } - {"index": {"_index": "test-events"}} - {"@timestamp": "1700504427", "events": ["S07KmaoGhvNte78xwwRbZQ"]} --- From 66db8c7d4cafd057dc25fadecdab311ccec6180e Mon Sep 17 00:00:00 2001 From: Dimitris Rempapis Date: Fri, 24 Jan 2025 15:47:44 +0200 Subject: [PATCH 012/383] Test/107515 RestoreTemplateWithMatchOnlyTextMapperIT (#120392) Update the way of comparing stateMaps for equality --- docs/changelog/120392.yaml | 6 ++++++ .../datastreams/DataStreamsSnapshotsIT.java | 14 ++------------ .../org/elasticsearch/test/ESIntegTestCase.java | 9 ++++----- 3 files changed, 12 insertions(+), 17 deletions(-) create mode 100644 docs/changelog/120392.yaml diff --git a/docs/changelog/120392.yaml b/docs/changelog/120392.yaml new file mode 100644 index 0000000000000..69587b4d48241 --- /dev/null +++ b/docs/changelog/120392.yaml @@ -0,0 +1,6 @@ +pr: 120392 +summary: Test/107515 restore template with match only text mapper it fail +area: Search +type: bug +issues: + - 107515 diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java index e78d9b4f2b8cf..53d75454339a9 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java @@ -1137,11 +1137,8 @@ public void testPartialRestoreSnapshotThatIncludesDataStream() { /** * This test is a copy of the {@link #testPartialRestoreSnapshotThatIncludesDataStream()} the only difference - * is that one include the global state and one doesn't. In general this shouldn't matter that's why it used to be - * a random parameter of the test, but because of #107515 it fails when we include the global state. Keep them - * separate until this is fixed. + * is that one include the global state and one doesn't. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107515") public void testPartialRestoreSnapshotThatIncludesDataStreamWithGlobalState() { final String snapshot = "test-snapshot"; final String indexWithoutDataStream = "test-idx-no-ds"; @@ -1307,11 +1304,8 @@ public void testExcludeDSFromSnapshotWhenExcludingAnyOfItsIndices() { /** * This test is a copy of the {@link #testExcludeDSFromSnapshotWhenExcludingAnyOfItsIndices()} ()} the only difference - * is that one include the global state and one doesn't. In general this shouldn't matter that's why it used to be - * a random parameter of the test, but because of #107515 it fails when we include the global state. Keep them - * separate until this is fixed. + * is that one include the global state and one doesn't. 
*/ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107515") public void testExcludeDSFromSnapshotWhenExcludingItsIndicesWithGlobalState() { final String snapshot = "test-snapshot"; final String indexWithoutDataStream = "test-idx-no-ds"; @@ -1477,10 +1471,6 @@ public void testWarningHeaderAbsentOnRestoreWithTemplates() throws Exception { } - /** - * This test is muted as it's awaiting the same fix as {@link #testPartialRestoreSnapshotThatIncludesDataStreamWithGlobalState()} - */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107515") public void testWarningHeaderOnRestoreTemplateFromSnapshot() throws Exception { String datastreamName = "ds"; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index bdfc0a693f7f4..bb259cb9b9788 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1243,7 +1243,6 @@ protected final void doEnsureClusterStateConsistency(NamedWriteableRegistry name namedWriteableRegistry ); Map masterStateMap = convertToMap(masterClusterState); - int masterClusterStateSize = ClusterState.Builder.toBytes(masterClusterState).length; String masterId = masterClusterState.nodes().getMasterNodeId(); for (SubscribableListener localStateListener : localStates) { localStateListener.andThenAccept(localClusterStateResponse -> { @@ -1255,7 +1254,6 @@ protected final void doEnsureClusterStateConsistency(NamedWriteableRegistry name namedWriteableRegistry ); final Map localStateMap = convertToMap(localClusterState); - final int localClusterStateSize = ClusterState.Builder.toBytes(localClusterState).length; // Check that the non-master node has the same version of the cluster state as the master and // that the master node matches the master (otherwise there is no requirement for the cluster state to // match) @@ -1267,9 +1265,10 @@ protected final void doEnsureClusterStateConsistency(NamedWriteableRegistry name masterClusterState.stateUUID(), localClusterState.stateUUID() ); - // We cannot compare serialization bytes since serialization order of maps is not guaranteed - // but we can compare serialization sizes - they should be the same - assertEquals("cluster state size does not match", masterClusterStateSize, localClusterStateSize); + + // Compare the stateMaps for equality. 
+ assertNull(XContentTestUtils.differenceBetweenMapsIgnoringArrayOrder(masterStateMap, localStateMap)); + // Compare JSON serialization assertNull( "cluster state JSON serialization does not match", From 856a4d7cebc4330e5e27225597275f3d9738bfed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Fri, 24 Jan 2025 14:51:02 +0100 Subject: [PATCH 013/383] LTR - Fix explain failure when index has multiple shards (#120717) --- docs/changelog/120717.yaml | 6 + .../elasticsearch/search/SearchService.java | 54 +++-- .../integration/LearningToRankExplainIT.java | 223 ++++++++++++++++++ .../xpack/ml/LocalStateMachineLearning.java | 10 + 4 files changed, 269 insertions(+), 24 deletions(-) create mode 100644 docs/changelog/120717.yaml create mode 100644 x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankExplainIT.java diff --git a/docs/changelog/120717.yaml b/docs/changelog/120717.yaml new file mode 100644 index 0000000000000..c5609e7e3df5f --- /dev/null +++ b/docs/changelog/120717.yaml @@ -0,0 +1,6 @@ +pr: 120717 +summary: Fix LTR rescorer throws 'local model reference is null' on multi-shards index when explained is enabled +area: Ranking +type: bug +issues: + - 120739 diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 7f3747d321972..efa27b2f3448c 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -957,32 +957,38 @@ public void executeFetchPhase(ShardFetchRequest request, SearchShardTask task, A final ReaderContext readerContext = findReaderContext(request.contextId(), request); final ShardSearchRequest shardSearchRequest = readerContext.getShardSearchRequest(request.getShardSearchRequest()); final Releasable markAsUsed = readerContext.markAsUsed(getKeepAlive(shardSearchRequest)); - runAsync(getExecutor(readerContext.indexShard()), () -> { - try (SearchContext searchContext = createContext(readerContext, shardSearchRequest, task, ResultsType.FETCH, false)) { - if (request.lastEmittedDoc() != null) { - searchContext.scrollContext().lastEmittedDoc = request.lastEmittedDoc(); - } - searchContext.assignRescoreDocIds(readerContext.getRescoreDocIds(request.getRescoreDocIds())); - searchContext.searcher().setAggregatedDfs(readerContext.getAggregatedDfs(request.getAggregatedDfs())); - try ( - SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(searchContext, true, System.nanoTime()) - ) { - fetchPhase.execute(searchContext, request.docIds(), request.getRankDocks()); - if (readerContext.singleSession()) { - freeReaderContext(request.contextId()); + rewriteAndFetchShardRequest(readerContext.indexShard(), shardSearchRequest, listener.delegateFailure((l, rewritten) -> { + runAsync(getExecutor(readerContext.indexShard()), () -> { + try (SearchContext searchContext = createContext(readerContext, rewritten, task, ResultsType.FETCH, false)) { + if (request.lastEmittedDoc() != null) { + searchContext.scrollContext().lastEmittedDoc = request.lastEmittedDoc(); } - executor.success(); + searchContext.assignRescoreDocIds(readerContext.getRescoreDocIds(request.getRescoreDocIds())); + searchContext.searcher().setAggregatedDfs(readerContext.getAggregatedDfs(request.getAggregatedDfs())); + try ( + SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor( + searchContext, + true, + System.nanoTime() + ) + ) { + 
fetchPhase.execute(searchContext, request.docIds(), request.getRankDocks()); + if (readerContext.singleSession()) { + freeReaderContext(request.contextId()); + } + executor.success(); + } + var fetchResult = searchContext.fetchResult(); + // inc-ref fetch result because we close the SearchContext that references it in this try-with-resources block + fetchResult.incRef(); + return fetchResult; + } catch (Exception e) { + assert TransportActions.isShardNotAvailableException(e) == false : new AssertionError(e); + // we handle the failure in the failure listener below + throw e; } - var fetchResult = searchContext.fetchResult(); - // inc-ref fetch result because we close the SearchContext that references it in this try-with-resources block - fetchResult.incRef(); - return fetchResult; - } catch (Exception e) { - assert TransportActions.isShardNotAvailableException(e) == false : new AssertionError(e); - // we handle the failure in the failure listener below - throw e; - } - }, wrapFailureListener(listener, readerContext, markAsUsed)); + }, wrapFailureListener(l, readerContext, markAsUsed)); + })); } protected void checkCancelled(SearchShardTask task) { diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankExplainIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankExplainIT.java new file mode 100644 index 0000000000000..d05f4a37d5501 --- /dev/null +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankExplainIT.java @@ -0,0 +1,223 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.integration; + +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Predicates; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelDefinition; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TargetType; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ensemble.Ensemble; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree.Tree; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree.TreeNode; +import org.elasticsearch.xpack.core.ml.job.config.Operator; +import org.elasticsearch.xpack.core.ml.utils.QueryProvider; +import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +public class LearningToRankExplainIT extends BaseMlIntegTestCase { + + private static final String LTR_SEARCH_INDEX = "ltr-search-index"; + private static final String LTR_MODEL = "ltr-model"; + private static final int NUMBER_OF_NODES = 3; + private static final String DEFAULT_SEARCH_REQUEST_BODY = """ + { + "query": { + "match": { "product": { "query": "TV" } } + }, + "rescore": { + "window_size": 10, + "learning_to_rank": { + "model_id": "ltr-model", + "params": { "keyword": "TV" } + } + } + }"""; + + @Before + public void setupCluster() throws IOException { + internalCluster().ensureAtLeastNumDataNodes(NUMBER_OF_NODES); + ensureStableCluster(); + createLtrModel(); + } + + public void testLtrExplainWithSingleShard() throws IOException { + runLtrExplainTest(1, 1, 2, new float[] { 15f, 11f }); + } + + public void testLtrExplainWithMultipleShards() throws IOException { + runLtrExplainTest(randomIntBetween(2, NUMBER_OF_NODES), 0, 2, new float[] { 15f, 11f }); + } + + public void testLtrExplainWithReplicas() throws IOException { + runLtrExplainTest(1, randomIntBetween(1, NUMBER_OF_NODES - 1), 2, new float[] { 15f, 11f }); + } + + public void testLtrExplainWithMultipleShardsAndReplicas() throws IOException { + runLtrExplainTest(randomIntBetween(2, NUMBER_OF_NODES), randomIntBetween(1, NUMBER_OF_NODES - 1), 2, new float[] { 15f, 11f }); + } + + private void runLtrExplainTest(int numberOfShards, int numberOfReplicas, long expectedTotalHits, float[] expectedScores) + throws IOException { + createLtrIndex(numberOfShards, numberOfReplicas); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, 
DEFAULT_SEARCH_REQUEST_BODY)) { + assertResponse( + client().prepareSearch(LTR_SEARCH_INDEX) + .setSource(new SearchSourceBuilder().parseXContent(parser, true, Predicates.always())) + .setExplain(true), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(expectedTotalHits)); + for (int i = 0; i < expectedScores.length; i++) { + // Check expected score + SearchHit hit = searchResponse.getHits().getHits()[i]; + assertThat(hit.getScore(), equalTo(expectedScores[i])); + + // Check explanation is present and contains the right data + assertThat(hit.getExplanation(), notNullValue()); + assertThat(hit.getExplanation().getValue().floatValue(), equalTo(hit.getScore())); + assertThat(hit.getExplanation().getDescription(), equalTo("rescored using LTR model ltr-model")); + } + } + ); + } + } + + private void createLtrIndex(int numberOfShards, int numberOfReplicas) { + client().admin() + .indices() + .prepareCreate(LTR_SEARCH_INDEX) + .setSettings( + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numberOfReplicas) + .build() + ) + .setMapping("product", "type=keyword", "best_seller", "type=boolean") + .get(); + + BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); + IndexRequest indexRequest = new IndexRequest(LTR_SEARCH_INDEX); + indexRequest.source("product", "TV", "best_seller", true); + bulkRequestBuilder.add(indexRequest); + + indexRequest = new IndexRequest(LTR_SEARCH_INDEX); + indexRequest.source("product", "TV", "best_seller", false); + bulkRequestBuilder.add(indexRequest); + + indexRequest = new IndexRequest(LTR_SEARCH_INDEX); + indexRequest.source("product", "VCR", "best_seller", true); + bulkRequestBuilder.add(indexRequest); + + indexRequest = new IndexRequest(LTR_SEARCH_INDEX); + indexRequest.source("product", "VCR", "best_seller", true); + bulkRequestBuilder.add(indexRequest); + + indexRequest = new IndexRequest(LTR_SEARCH_INDEX); + indexRequest.source("product", "Laptop", "best_seller", true); + bulkRequestBuilder.add(indexRequest); + + BulkResponse bulkResponse = bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + assertThat(bulkResponse.hasFailures(), is(false)); + } + + private void createLtrModel() throws IOException { + client().execute( + PutTrainedModelAction.INSTANCE, + new PutTrainedModelAction.Request( + TrainedModelConfig.builder() + .setModelId(LTR_MODEL) + .setInferenceConfig( + LearningToRankConfig.builder(LearningToRankConfig.EMPTY_PARAMS) + .setLearningToRankFeatureExtractorBuilders( + List.of( + new QueryExtractorBuilder( + "best_seller", + QueryProvider.fromParsedQuery(QueryBuilders.termQuery("best_seller", "true")) + ), + new QueryExtractorBuilder( + "product_match", + QueryProvider.fromParsedQuery(QueryBuilders.termQuery("product", "{{keyword}}")) + ) + ) + ) + .build() + ) + .setParsedDefinition( + new TrainedModelDefinition.Builder().setPreProcessors(Collections.emptyList()) + .setTrainedModel( + Ensemble.builder() + .setFeatureNames(List.of("best_seller", "product_bm25")) + .setTargetType(TargetType.REGRESSION) + .setTrainedModels( + List.of( + Tree.builder() + .setFeatureNames(List.of("best_seller")) + .setTargetType(TargetType.REGRESSION) + .setRoot( + TreeNode.builder(0) + .setSplitFeature(0) + .setSplitGain(12d) + .setThreshold(1d) + .setOperator(Operator.GTE) + .setDefaultLeft(true) + .setLeftChild(1) + .setRightChild(2) + ) + .addLeaf(1, 1) + .addLeaf(2, 5) + .build(), + Tree.builder() + 
.setFeatureNames(List.of("product_match")) + .setTargetType(TargetType.REGRESSION) + .setRoot( + TreeNode.builder(0) + .setSplitFeature(0) + .setSplitGain(12d) + .setThreshold(1d) + .setOperator(Operator.LT) + .setDefaultLeft(true) + .setLeftChild(1) + .setRightChild(2) + ) + .addLeaf(1, 10) + .addLeaf(2, 1) + .build() + ) + ) + .build() + ) + ) + .validate(true) + .build(), + false + ) + ).actionGet(); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java index ff1a1d19779df..426716d0399c5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java @@ -95,6 +95,16 @@ protected SSLService getSslService() { }); } + @Override + public List> getQueries() { + return mlPlugin.getQueries(); + } + + @Override + public List> getRescorers() { + return mlPlugin.getRescorers(); + } + @Override public List getAggregations() { return mlPlugin.getAggregations(); From 5e662c507e6e123e53546f435cb4b1a08f2d367a Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 24 Jan 2025 09:22:21 -0500 Subject: [PATCH 014/383] Optimize IngestDocMetadata isAvailable (#120753) --- docs/changelog/120753.yaml | 5 +++ .../ingest/common/RenameProcessorTests.java | 43 +++++++++---------- .../elasticsearch/ingest/IngestCtxMap.java | 2 +- .../ingest/IngestDocMetadata.java | 31 ++++++++++--- .../elasticsearch/ingest/IngestService.java | 5 ++- .../java/org/elasticsearch/script/CtxMap.java | 7 ++- .../ingest/IngestCtxMapTests.java | 39 +++++++---------- .../ingest/TestIngestCtxMetadata.java | 27 ------------ .../ingest/TestIngestDocument.java | 39 +++-------------- .../ingest/InferenceProcessorTests.java | 15 ++++--- 10 files changed, 92 insertions(+), 121 deletions(-) create mode 100644 docs/changelog/120753.yaml delete mode 100644 test/framework/src/main/java/org/elasticsearch/ingest/TestIngestCtxMetadata.java diff --git a/docs/changelog/120753.yaml b/docs/changelog/120753.yaml new file mode 100644 index 0000000000000..4885ab4be9add --- /dev/null +++ b/docs/changelog/120753.yaml @@ -0,0 +1,5 @@ +pr: 120753 +summary: Optimize `IngestDocMetadata` `isAvailable` +area: Ingest Node +type: enhancement +issues: [] diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java index 1ebc5f16a65d3..bc7caf93b0036 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestIngestDocument; import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.script.Metadata; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -140,42 +139,40 @@ public void testRenameExistingFieldNullValue() throws Exception { public void testRenameAtomicOperationSetFails() throws Exception { Map metadata = new HashMap<>(); - metadata.put("list", List.of("item")); - - IngestDocument ingestDocument = TestIngestDocument.ofMetadataWithValidator( - metadata, - Map.of("new_field", new Metadata.FieldProperty<>(Object.class, true, true, (k, v) -> { - if (v != null) { - 
throw new UnsupportedOperationException(); - } - }), "list", new Metadata.FieldProperty<>(Object.class, true, true, null)) - ); - Processor processor = createRenameProcessor("list", "new_field", false, false); + metadata.put("_index", "foobar"); + + IngestDocument ingestDocument = TestIngestDocument.withDefaultVersion(metadata); + Processor processor = createRenameProcessor("_index", "_version_type", false, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); - } catch (UnsupportedOperationException e) { + } catch (IllegalArgumentException e) { // the set failed, the old field has not been removed - assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); - assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(false)); + assertThat( + e.getMessage(), + equalTo( + "_version_type must be a null or one of [internal, external, external_gte] " + + "but was [foobar] with type [java.lang.String]" + ) + ); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("_index"), equalTo(true)); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("_version_type"), equalTo(false)); } } public void testRenameAtomicOperationRemoveFails() throws Exception { Map metadata = new HashMap<>(); - metadata.put("list", List.of("item")); + metadata.put("foo", "bar"); - IngestDocument ingestDocument = TestIngestDocument.ofMetadataWithValidator( - metadata, - Map.of("list", new Metadata.FieldProperty<>(Object.class, false, true, null)) - ); - Processor processor = createRenameProcessor("list", "new_field", false, false); + IngestDocument ingestDocument = TestIngestDocument.withDefaultVersion(metadata); + Processor processor = createRenameProcessor("_version", "new_field", false, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); } catch (IllegalArgumentException e) { - // the set failed, the old field has not been removed - assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); + // the remove failed, the old field has not been removed + assertThat(e.getMessage(), equalTo("_version cannot be removed")); + assertThat(ingestDocument.getSourceAndMetadata().containsKey("_version"), equalTo(true)); assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(false)); } } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestCtxMap.java b/server/src/main/java/org/elasticsearch/ingest/IngestCtxMap.java index d903cc8e52144..a5a1612246a29 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestCtxMap.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestCtxMap.java @@ -29,7 +29,7 @@ * * The map is expected to be used by processors, server code should the typed getter and setters where possible. 
*/ -class IngestCtxMap extends CtxMap { +final class IngestCtxMap extends CtxMap { /** * Create an IngestCtxMap with the given metadata, source and default validators diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocMetadata.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocMetadata.java index 5afeb5079a43f..d7533e26f3df0 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocMetadata.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocMetadata.java @@ -19,7 +19,7 @@ import java.util.Map; import java.util.stream.Collectors; -class IngestDocMetadata extends Metadata { +final class IngestDocMetadata extends Metadata { static final Map> PROPERTIES = Map.of( INDEX, @@ -42,6 +42,17 @@ class IngestDocMetadata extends Metadata { new FieldProperty<>(Map.class).withWritable().withNullable() ); + private static final char UNDERSCORE = '_'; + static { + // there's an optimization here in the overridden isAvailable below, but it only works if the first character of each of these + // keys starts with an underscore, since we know all the keys up front, though, we can just make sure that's always true + for (String key : PROPERTIES.keySet()) { + if (key.charAt(0) != UNDERSCORE) { + throw new IllegalArgumentException("IngestDocMetadata keys must begin with an underscore, but found [" + key + "]"); + } + } + } + protected final ZonedDateTime timestamp; IngestDocMetadata(String index, String id, long version, String routing, VersionType versionType, ZonedDateTime timestamp) { @@ -49,11 +60,7 @@ class IngestDocMetadata extends Metadata { } IngestDocMetadata(Map metadata, ZonedDateTime timestamp) { - this(metadata, PROPERTIES, timestamp); - } - - IngestDocMetadata(Map metadata, Map> properties, ZonedDateTime timestamp) { - super(metadata, properties); + super(metadata, PROPERTIES); this.timestamp = timestamp; } @@ -100,4 +107,16 @@ private static void versionTypeValidator(String key, String value) { + "]" ); } + + @Override + public boolean isAvailable(String key) { + // the key cannot be null or empty because of the nature of the calling code, and this is already validated in IngestDocument + assert key != null && key.isEmpty() == false; + // we can avoid a map lookup on most keys since we know that the only keys that are 'metadata keys' for an ingest document + // must be keys that start with an underscore + if (key.charAt(0) != UNDERSCORE) { + return false; + } + return super.isAvailable(key); + } } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 86522742a66c0..b819a1686d23c 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -1204,8 +1204,9 @@ private static void updateIndexRequestMetadata(final IndexRequest request, final request.id(metadata.getId()); request.routing(metadata.getRouting()); request.version(metadata.getVersion()); - if (metadata.getVersionType() != null) { - request.versionType(VersionType.fromString(metadata.getVersionType())); + String versionType; + if ((versionType = metadata.getVersionType()) != null) { + request.versionType(VersionType.fromString(versionType)); } Number number; if ((number = metadata.getIfSeqNo()) != null) { diff --git a/server/src/main/java/org/elasticsearch/script/CtxMap.java b/server/src/main/java/org/elasticsearch/script/CtxMap.java index e790eed097f35..1496d70cf39a1 100644 --- 
a/server/src/main/java/org/elasticsearch/script/CtxMap.java +++ b/server/src/main/java/org/elasticsearch/script/CtxMap.java @@ -14,6 +14,7 @@ import java.util.AbstractCollection; import java.util.AbstractMap; import java.util.AbstractSet; +import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; @@ -150,10 +151,12 @@ public Object remove(Object key) { @Override public void clear() { // AbstractMap uses entrySet().clear(), it should be quicker to run through the validators, then call the wrapped maps clear - for (String key : metadata.keySet()) { + for (String key : new ArrayList<>(metadata.keySet())) { // copy the key set to get around the ConcurrentModificationException metadata.remove(key); } - // TODO: this is just bogus, there isn't any case where metadata won't trip a failure above? + // note: this is actually bogus in the general case, though! for this to work there must be some Metadata or subclass of Metadata + // for which all the FieldPoperty properties of the metadata are nullable and therefore could have been removed in the previous + // loop -- does such a class even exist? (that is, is there any *real* CtxMap for which the previous loop didn't throw?) source.clear(); } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestCtxMapTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestCtxMapTests.java index 2ae9dbfb68599..5a8505e6bb375 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestCtxMapTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestCtxMapTests.java @@ -110,7 +110,7 @@ public void testRemoveSource() { source.put("abc", 123); source.put("def", 456); source.put("hij", 789); - map = new IngestCtxMap(source, new TestIngestCtxMetadata(new HashMap<>(), new HashMap<>())); + map = new IngestCtxMap(source, new IngestDocMetadata(new HashMap<>(Map.of("_version", 1L)), null)); // Make sure there isn't a ConcurrentModificationException when removing a key from the iterator String removedKey = null; @@ -129,31 +129,18 @@ public void testRemoveSource() { } public void testRemove() { - String cannotRemove = "cannotRemove"; - String canRemove = "canRemove"; - Map metadata = new HashMap<>(); - metadata.put(cannotRemove, "value"); - map = new IngestCtxMap( - new HashMap<>(), - new TestIngestCtxMetadata( - metadata, - Map.of( - cannotRemove, - new Metadata.FieldProperty<>(String.class, false, true, null), - canRemove, - new Metadata.FieldProperty<>(String.class, true, true, null) - ) - ) - ); - String msg = "cannotRemove cannot be removed"; + String cannotRemove = "_version"; // writable, but not *nullable* + String canRemove = "_id"; // writable, and *nullable* + map = new IngestCtxMap(new HashMap<>(), new IngestDocMetadata(new HashMap<>(Map.of(cannotRemove, 1L)), null)); + String msg = "_version cannot be removed"; IllegalArgumentException err = expectThrows(IllegalArgumentException.class, () -> map.remove(cannotRemove)); assertEquals(msg, err.getMessage()); err = expectThrows(IllegalArgumentException.class, () -> map.put(cannotRemove, null)); - assertEquals("cannotRemove cannot be null", err.getMessage()); + assertEquals("_version cannot be null", err.getMessage()); err = expectThrows(IllegalArgumentException.class, () -> map.entrySet().iterator().next().setValue(null)); - assertEquals("cannotRemove cannot be null", err.getMessage()); + assertEquals("_version cannot be null", err.getMessage()); err = expectThrows(IllegalArgumentException.class, () -> { Iterator> it = 
map.entrySet().iterator(); @@ -176,6 +163,10 @@ public void testRemove() { err = expectThrows(IllegalArgumentException.class, () -> map.clear()); assertEquals(msg, err.getMessage()); + // depending on iteration order, this may have been removed, so put it back before checking the size + map.put(canRemove, "value"); + assertEquals("value", map.get(canRemove)); + assertEquals(2, map.size()); map.entrySet().remove(new TestEntry(canRemove, "value")); @@ -205,7 +196,7 @@ public void testEntryAndIterator() { source.put("foo", "bar"); source.put("baz", "qux"); source.put("noz", "zon"); - map = new IngestCtxMap(source, TestIngestCtxMetadata.withNullableVersion(metadata)); + map = new IngestCtxMap(source, new IngestDocMetadata(metadata, null)); md = map.getMetadata(); for (Map.Entry entry : map.entrySet()) { @@ -240,8 +231,10 @@ public void testEntryAndIterator() { assertTrue(map.containsKey("noz")); assertEquals(3, map.entrySet().size()); assertEquals(3, map.size()); - map.clear(); - assertEquals(0, map.size()); + + // since an IngestCtxMap must have a _version (and the _version cannot be null), we can't just .clear() + map.entrySet().removeIf(e -> e.getKey().equals("_version") == false); + assertEquals(1, map.size()); } public void testContainsValue() { diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestCtxMetadata.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestCtxMetadata.java deleted file mode 100644 index 7dccecbde6a08..0000000000000 --- a/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestCtxMetadata.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.ingest; - -import org.elasticsearch.script.Metadata; - -import java.util.HashMap; -import java.util.Map; - -public class TestIngestCtxMetadata extends IngestDocMetadata { - public TestIngestCtxMetadata(Map map, Map> properties) { - super(map, Map.copyOf(properties), null); - } - - public static TestIngestCtxMetadata withNullableVersion(Map map) { - Map> updatedProperties = new HashMap<>(IngestDocMetadata.PROPERTIES); - updatedProperties.replace(VERSION, new Metadata.FieldProperty<>(Number.class, true, true, FieldProperty.LONGABLE_NUMBER)); - return new TestIngestCtxMetadata(map, updatedProperties); - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java index 798c824d3be9b..aa1833a659da3 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java @@ -10,10 +10,8 @@ package org.elasticsearch.ingest; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.VersionType; -import org.elasticsearch.script.Metadata; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -24,53 +22,30 @@ */ public class TestIngestDocument { public static final long DEFAULT_VERSION = 12345L; - private static String VERSION = IngestDocument.Metadata.VERSION.getFieldName(); - - /** - * Create an IngestDocument for testing that pass an empty mutable map for ingestMetaata - */ - public static IngestDocument withNullableVersion(Map sourceAndMetadata) { - return ofIngestWithNullableVersion(sourceAndMetadata, new HashMap<>()); - } + private static final String VERSION = IngestDocument.Metadata.VERSION.getFieldName(); /** * Create an {@link IngestDocument} from the given sourceAndMetadata and ingestMetadata and a version validator that allows null * _versions. Normally null _version is not allowed, but many tests don't care about that invariant. */ - public static IngestDocument ofIngestWithNullableVersion(Map sourceAndMetadata, Map ingestMetadata) { - Map source = new HashMap<>(sourceAndMetadata); - Map metadata = Maps.newHashMapWithExpectedSize(IngestDocument.Metadata.values().length); - for (IngestDocument.Metadata m : IngestDocument.Metadata.values()) { - String key = m.getFieldName(); - if (sourceAndMetadata.containsKey(key)) { - metadata.put(key, source.remove(key)); - } - } - return new IngestDocument(new IngestCtxMap(source, TestIngestCtxMetadata.withNullableVersion(metadata)), ingestMetadata); - } - - /** - * Create an {@link IngestDocument} with {@link #DEFAULT_VERSION} as the _version metadata, if _version is not already present. - */ - public static IngestDocument withDefaultVersion(Map sourceAndMetadata) { + public static IngestDocument withDefaultVersion(Map sourceAndMetadata, Map ingestMetadata) { if (sourceAndMetadata.containsKey(VERSION) == false) { sourceAndMetadata = new HashMap<>(sourceAndMetadata); sourceAndMetadata.put(VERSION, DEFAULT_VERSION); } - return new IngestDocument(sourceAndMetadata, new HashMap<>()); + return new IngestDocument(sourceAndMetadata, ingestMetadata); } /** - * Create an IngestDocument with a metadata map and validators. The metadata map is passed by reference, not copied, so callers - * can observe changes to the map directly. 
+ * Create an {@link IngestDocument} with {@link #DEFAULT_VERSION} as the _version metadata, if _version is not already present. */ - public static IngestDocument ofMetadataWithValidator(Map metadata, Map> properties) { - return new IngestDocument(new IngestCtxMap(new HashMap<>(), new TestIngestCtxMetadata(metadata, properties)), new HashMap<>()); + public static IngestDocument withDefaultVersion(Map sourceAndMetadata) { + return withDefaultVersion(sourceAndMetadata, new HashMap<>()); } /** * Create an empty ingest document for testing. - * + *

* Adds the required {@code "_version"} metadata key with value {@link #DEFAULT_VERSION}. */ public static IngestDocument emptyIngestDocument() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java index 6b0b589ace606..248e15f066fdb 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java @@ -298,12 +298,13 @@ public void testGenerateRequestWithEmptyMapping() { Map source = new HashMap<>() { { + put("_version", 345); put("value1", 1); put("value2", 4); put("categorical", "foo"); } }; - IngestDocument document = TestIngestDocument.ofIngestWithNullableVersion(source, new HashMap<>()); + IngestDocument document = TestIngestDocument.withDefaultVersion(source, new HashMap<>()); var request = processor.buildRequest(document); assertThat(request.getObjectsToInfer().get(0), equalTo(source)); @@ -311,7 +312,7 @@ public void testGenerateRequestWithEmptyMapping() { assertEquals(TrainedModelPrefixStrings.PrefixType.INGEST, request.getPrefixType()); Map ingestMetadata = Collections.singletonMap("_value", 3); - document = TestIngestDocument.ofIngestWithNullableVersion(source, ingestMetadata); + document = TestIngestDocument.withDefaultVersion(source, ingestMetadata); Map expected = new HashMap<>(source); expected.put("_ingest", ingestMetadata); @@ -344,12 +345,14 @@ public void testGenerateWithMapping() { ); Map source = Maps.newMapWithExpectedSize(3); + source.put("_version", 234); source.put("value1", 1); source.put("categorical", "foo"); source.put("un_touched", "bar"); - IngestDocument document = TestIngestDocument.withNullableVersion(source); + IngestDocument document = TestIngestDocument.withDefaultVersion(source); Map expectedMap = Maps.newMapWithExpectedSize(5); + expectedMap.put("_version", 234); expectedMap.put("new_value1", 1); expectedMap.put("value1", 1); expectedMap.put("categorical", "foo"); @@ -361,7 +364,7 @@ public void testGenerateWithMapping() { assertEquals(TrainedModelPrefixStrings.PrefixType.INGEST, request.getPrefixType()); Map ingestMetadata = Collections.singletonMap("_value", "baz"); - document = TestIngestDocument.ofIngestWithNullableVersion(source, ingestMetadata); + document = TestIngestDocument.withDefaultVersion(source, ingestMetadata); expectedMap = new HashMap<>(expectedMap); expectedMap.put("metafield", "baz"); expectedMap.put("_ingest", ingestMetadata); @@ -392,12 +395,14 @@ public void testGenerateWithMappingNestedFields() { ); Map source = Maps.newMapWithExpectedSize(3); + source.put("_version", 987); source.put("value1", Collections.singletonMap("foo", 1)); source.put("categorical.bar", "foo"); source.put("un_touched", "bar"); - IngestDocument document = TestIngestDocument.withNullableVersion(source); + IngestDocument document = TestIngestDocument.withDefaultVersion(source); Map expectedMap = Maps.newMapWithExpectedSize(5); + expectedMap.put("_version", 987); expectedMap.put("new_value1", 1); expectedMap.put("value1", Collections.singletonMap("foo", 1)); expectedMap.put("categorical.bar", "foo"); From 39603ec1e297a8a2d0552672b0bd27d2bbae2873 Mon Sep 17 00:00:00 2001 From: Iraklis Psaroudakis Date: Fri, 24 Jan 2025 17:01:01 +0200 Subject: [PATCH 015/383] Reset engine for hollow shards (#120649) Introduces a function in the 
IndexShard to reset the engine. By default, all Engine implementation will throw in core ES. In stateless, we will extend the prepareForEngineReset() function in order to make a hollow commit or an unhollow commit. Based on the commit, the subsequent new engine will be a hollow or unhollow indexing engine. The reset function takes care to close the engine, which waits for all operations to drain. This, along with the fact we will have blocked ingestion in stateless, and there should be no searches in the indexing tier, should ensure there are no unexpected asynchronous side-effects. Relates ES-10600 --- .../elasticsearch/index/engine/Engine.java | 12 ++++++++ .../elasticsearch/index/shard/IndexShard.java | 29 +++++++++++++++++++ .../index/shard/IndexShardTests.java | 29 ++++++++++++++++++- 3 files changed, 69 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 394de0684c104..36fd18144ad6e 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -75,6 +75,7 @@ import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.DenseVectorStats; import org.elasticsearch.index.shard.DocsStats; +import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardLongFieldRange; import org.elasticsearch.index.shard.SparseVectorStats; @@ -2334,4 +2335,15 @@ public record FlushResult(boolean flushPerformed, long generation) { public static final long UNKNOWN_GENERATION = -1L; public static final FlushResult NO_FLUSH = new FlushResult(false, UNKNOWN_GENERATION); } + + /** + * Ensures the engine is in a state that it can be closed by a call to {@link IndexShard#resetEngine()}. + * + * In general, resetting the engine should be done with care, to consider any + * in-progress operations and listeners (e.g., primary term and generation listeners). + * At the moment, this is implemented in serverless for a special case that ensures the engine is prepared for reset. + */ + public void prepareForEngineReset() throws IOException { + throw new UnsupportedOperationException("does not support engine reset"); + } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index ab1c936d1c469..bfa286858f8ba 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -4307,6 +4307,35 @@ public void afterRefresh(boolean didRefresh) { } } + /** + * Reset the current engine to a new one. + * + * Calls {@link Engine#prepareForEngineReset()} on the current engine, then closes it, and loads a new engine without + * doing any translog recovery. + * + * In general, resetting the engine should be done with care, to consider any in-progress operations and listeners. + * At the moment, this is implemented in serverless for a special case that ensures the engine is prepared for reset. 
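+     *
+     * In outline, the body below takes the engine mutex, calls prepareForEngineReset() on the current engine,
+     * builds a fresh EngineConfig, closes the current engine, creates the replacement and publishes it via
+     * onNewEngine(), and then applies onSettingsChanged(); any failure is turned into failShard(...) rather
+     * than leaving the shard with a half-swapped engine.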
+ */ + public void resetEngine() { + assert Thread.holdsLock(mutex) == false : "resetting engine under mutex"; + assert waitForEngineOrClosedShardListeners.isDone(); + try { + synchronized (engineMutex) { + final var currentEngine = getEngine(); + currentEngine.prepareForEngineReset(); + var engineConfig = newEngineConfig(replicationTracker); + verifyNotClosed(); + IOUtils.close(currentEngine); + var newEngine = createEngine(engineConfig); + currentEngineReference.set(newEngine); + onNewEngine(newEngine); + } + onSettingsChanged(); + } catch (Exception e) { + failShard("unable to reset engine", e); + } + } + /** * Rollback the current engine to the safe commit, then replay local translog up to the global checkpoint. */ diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 7d436ab5d8d22..4549a329d499a 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -4497,7 +4497,7 @@ public void testSupplyTombstoneDoc() throws Exception { closeShards(shard); } - public void testResetEngine() throws Exception { + public void testResetEngineToGlobalCheckpoint() throws Exception { IndexShard shard = newStartedShard(false); indexOnReplicaWithGaps(shard, between(0, 1000), Math.toIntExact(shard.getLocalCheckpoint())); long maxSeqNoBeforeRollback = shard.seqNoStats().getMaxSeqNo(); @@ -4559,6 +4559,33 @@ public void testResetEngine() throws Exception { closeShard(shard, false); } + public void testResetEngine() throws Exception { + var newEngineCreated = new CountDownLatch(2); + var indexShard = newStartedShard(true, Settings.EMPTY, config -> { + try { + return new ReadOnlyEngine(config, null, null, true, Function.identity(), true, true) { + @Override + public void prepareForEngineReset() throws IOException { + ; + } + }; + } finally { + newEngineCreated.countDown(); + } + }); + var newEngineNotification = new CountDownLatch(1); + indexShard.waitForEngineOrClosedShard(ActionListener.running(newEngineNotification::countDown)); + + var onAcquired = new PlainActionFuture(); + indexShard.acquireAllPrimaryOperationsPermits(onAcquired, TimeValue.timeValueMinutes(1L)); + try (var permits = safeGet(onAcquired)) { + indexShard.resetEngine(); + } + safeAwait(newEngineCreated); + safeAwait(newEngineNotification); + closeShard(indexShard, false); + } + /** * This test simulates a scenario seen rarely in ConcurrentSeqNoVersioningIT. Closing a shard while engine is inside * resetEngineToGlobalCheckpoint can lead to check index failure in integration tests. From 484a95043d4dc8303e7b80228a2357d5495951e3 Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Fri, 24 Jan 2025 09:35:54 -0600 Subject: [PATCH 016/383] Refresh source index before reindexing data stream index (#120752) Add step to the ReindexDatastreamIndexAction which refreshes the source index after setting it to read-only but before calling reindex. Without doing a refresh it is possible for docs from the source index to be missing from the destination index. This happens because the docs arrived before the source index is set to read-only, but because the index hasn't refreshed, the reindex action cannot see these updates. 
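For reviewers, a minimal sketch of the write-then-miss race this patch closes, expressed with the helpers already used in ReindexDatastreamIndexTransportActionIT below; the index name and the surrounding test scaffolding are assumptions for illustration, not part of the change:

    // index a doc, but do not refresh: it is durable yet not visible to searches or to reindex
    var sourceIndex = "logs-race-demo";
    indicesAdmin().create(new CreateIndexRequest(sourceIndex)).actionGet();
    client().index(new IndexRequest(sourceIndex).source("{ \"foo1\": \"cheese\" }", XContentType.JSON)).actionGet();

    // the action makes the source read-only and reindexes it; without the refresh step added in this patch,
    // the document above could be absent from the destination, since reindex cannot see unrefreshed docs
    var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex))
        .actionGet()
        .getDestIndex();

    // with the refresh now performed between the write block and the reindex, the doc is present
    indicesAdmin().refresh(new RefreshRequest(destIndex)).actionGet();
    assertHitCount(prepareSearch(destIndex).setSize(0), 1);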
--- docs/changelog/120752.yaml | 6 ++++ muted-tests.yml | 3 -- ...indexDatastreamIndexTransportActionIT.java | 33 ++++++++----------- ...ReindexDataStreamIndexTransportAction.java | 9 +++++ 4 files changed, 29 insertions(+), 22 deletions(-) create mode 100644 docs/changelog/120752.yaml diff --git a/docs/changelog/120752.yaml b/docs/changelog/120752.yaml new file mode 100644 index 0000000000000..674d2190244b1 --- /dev/null +++ b/docs/changelog/120752.yaml @@ -0,0 +1,6 @@ +pr: 120752 +summary: Refresh source index before reindexing data stream index +area: Data streams +type: bug +issues: + - 120314 diff --git a/muted-tests.yml b/muted-tests.yml index 596001b5aac1a..1c58cd98d1e78 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -205,9 +205,6 @@ tests: - class: org.elasticsearch.oldrepos.OldRepositoryAccessIT method: testOldSourceOnlyRepoAccess issue: https://github.com/elastic/elasticsearch/issues/120080 -- class: org.elasticsearch.xpack.migrate.action.ReindexDatastreamIndexTransportActionIT - method: testTsdbStartEndSet - issue: https://github.com/elastic/elasticsearch/issues/120314 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=snapshot/10_basic/Failed to snapshot indices with synthetic source} issue: https://github.com/elastic/elasticsearch/issues/120332 diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java index cfd4f0901336d..40464d2a43220 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java @@ -87,6 +87,7 @@ public void testDestIndexDeletedIfExists() throws Exception { var destIndex = ReindexDataStreamIndexTransportAction.generateDestIndexName(sourceIndex); indicesAdmin().create(new CreateIndexRequest(destIndex)).actionGet(); indexDocs(destIndex, 10); + indicesAdmin().refresh(new RefreshRequest(destIndex)).actionGet(); assertHitCount(prepareSearch(destIndex).setSize(0), 10); // call reindex @@ -195,19 +196,7 @@ public void testMappingsAddedToDestIndex() throws Exception { assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); - String mapping = """ - { - "_doc":{ - "dynamic":"strict", - "properties":{ - "foo1":{ - "type":"text" - } - } - } - } - """; - indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(mapping)).actionGet(); + indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(MAPPING)).actionGet(); // call reindex var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(sourceIndex)) @@ -337,6 +326,12 @@ public void testSettingsAndMappingsFromTemplate() throws IOException { var sourceIndex = "logs-" + randomAlphaOfLength(20).toLowerCase(Locale.ROOT); indicesAdmin().create(new CreateIndexRequest(sourceIndex)).actionGet(); + { + var indexRequest = new IndexRequest(sourceIndex); + indexRequest.source("{ \"foo1\": \"cheese\" }", XContentType.JSON); + client().index(indexRequest).actionGet(); + } + // call reindex var destIndex = client().execute(ReindexDataStreamIndexAction.INSTANCE, new 
ReindexDataStreamIndexAction.Request(sourceIndex)) .actionGet() @@ -359,6 +354,9 @@ public void testSettingsAndMappingsFromTemplate() throws IOException { // sanity check specific value from dest mapping assertEquals("text", XContentMapValues.extractValue("properties.foo1.type", destMappings)); } + + // verify doc was successfully added + assertHitCount(prepareSearch(destIndex).setSize(0), 1); } private static final String TSDB_MAPPING = """ @@ -455,12 +453,10 @@ public void testTsdbStartEndSet() throws Exception { assertEquals(startTime, destStart); assertEquals(endTime, destEnd); - } - // TODO more logsdb/tsdb specific tests - // TODO more data stream specific tests (how are data streams indices are different from regular indices?) - // TODO check other IndexMetadata fields that need to be fixed after the fact - // TODO what happens if don't have necessary perms for a given index? + // verify doc was successfully added + assertHitCount(prepareSearch(destIndex).setSize(0), 1); + } private static void cleanupMetadataBlocks(String index) { var settings = Settings.builder() @@ -483,7 +479,6 @@ private static void indexDocs(String index, int numDocs) { } BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet(); assertThat(bulkResponse.getItems().length, equalTo(numDocs)); - indicesAdmin().refresh(new RefreshRequest(index)).actionGet(); } private static String formatInstant(Instant instant) { diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java index b915eb3cd3e28..d3fe27006e82e 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -140,6 +141,7 @@ protected void doExecute( } SubscribableListener.newForked(l -> setBlockWrites(sourceIndexName, l, taskId)) + .andThen(l -> refresh(sourceIndexName, l, taskId)) .andThen(l -> deleteDestIfExists(destIndexName, l, taskId)) .andThen(l -> createIndex(sourceIndex, destIndexName, l, taskId)) .andThen(l -> reindex(sourceIndexName, destIndexName, l, taskId)) @@ -175,6 +177,13 @@ public void onFailure(Exception e) { }, parentTaskId); } + private void refresh(String sourceIndexName, ActionListener listener, TaskId parentTaskId) { + logger.debug("Refreshing source index [{}]", sourceIndexName); + var refreshRequest = new RefreshRequest(sourceIndexName); + refreshRequest.setParentTask(parentTaskId); + client.execute(RefreshAction.INSTANCE, refreshRequest, listener); + } + private void deleteDestIfExists(String destIndexName, ActionListener listener, TaskId parentTaskId) { logger.debug("Attempting to delete index [{}]", destIndexName); var deleteIndexRequest = new DeleteIndexRequest(destIndexName).indicesOptions(IGNORE_MISSING_OPTIONS) From 29afce6e003aed9db8067c2720c4e6902dd886ef Mon Sep 17 00:00:00 2001 From: 
elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 25 Jan 2025 02:38:47 +1100 Subject: [PATCH 017/383] Mute org.elasticsearch.xpack.ml.integration.LearningToRankExplainIT testLtrExplainWithMultipleShardsAndReplicas #120805 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1c58cd98d1e78..0843e52cfe390 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -243,6 +243,9 @@ tests: - class: org.elasticsearch.action.search.SearchProgressActionListenerIT method: testSearchProgressWithHits issue: https://github.com/elastic/elasticsearch/issues/120671 +- class: org.elasticsearch.xpack.ml.integration.LearningToRankExplainIT + method: testLtrExplainWithMultipleShardsAndReplicas + issue: https://github.com/elastic/elasticsearch/issues/120805 # Examples: # From 5508d896175b365aa3cfd20cad140a7041985cf2 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 24 Jan 2025 15:51:25 +0000 Subject: [PATCH 018/383] Use `HARD_CODED_MACHINE_LEARNING_MASTER_NODE_TIMEOUT` in more places (#120701) This hard-coded timeout is trappy, but its removal is not as imminent as `TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT` (see #107984). This commit removes a handful more usages of the trappy timeout bringing us closer to its removal. --- .../org/elasticsearch/xpack/ml/MlInitializationService.java | 2 +- .../xpack/ml/inference/TrainedModelStatsService.java | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index 99df16bcd3dc2..a21f59f11540f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -155,7 +155,7 @@ public void clusterChanged(ClusterChangedEvent event) { AnnotationIndex.createAnnotationsIndexIfNecessary( client, event.state(), - MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, + MachineLearning.HARD_CODED_MACHINE_LEARNING_MASTER_NODE_TIMEOUT, ActionListener.wrap(r -> isIndexCreationInProgress.set(false), e -> { if (e.getMessage().equals(previousException)) { logger.debug("Error creating ML annotations index or aliases", e); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java index 4ee294bcf0d8c..67f2ea74464d0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -256,14 +255,14 @@ private void createStatsIndexIfNecessary() { client, clusterState, indexNameExpressionResolver, - MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, + MachineLearning.HARD_CODED_MACHINE_LEARNING_MASTER_NODE_TIMEOUT, ActionListener.wrap( r -> 
ElasticsearchMappings.addDocMappingIfMissing( MlStatsIndex.writeAlias(), MlStatsIndex::wrappedMapping, client, clusterState, - MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, + MachineLearning.HARD_CODED_MACHINE_LEARNING_MASTER_NODE_TIMEOUT, listener, MlStatsIndex.STATS_INDEX_MAPPINGS_VERSION ), From ca83ae53af0721ba93813dd555e9125814cb0661 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 25 Jan 2025 03:10:50 +1100 Subject: [PATCH 019/383] Mute org.elasticsearch.xpack.test.rest.XPackRestIT test {p0=ml/3rd_party_deployment/Test start deployment fails while model download in progress} #120810 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 0843e52cfe390..269368c5914ba 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -246,6 +246,9 @@ tests: - class: org.elasticsearch.xpack.ml.integration.LearningToRankExplainIT method: testLtrExplainWithMultipleShardsAndReplicas issue: https://github.com/elastic/elasticsearch/issues/120805 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=ml/3rd_party_deployment/Test start deployment fails while model download in progress} + issue: https://github.com/elastic/elasticsearch/issues/120810 # Examples: # From 60f78e45a75c66bb8e345aa7e1fc557952e013ab Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 24 Jan 2025 16:18:56 +0000 Subject: [PATCH 020/383] Test & document raw transport handshakes (#120785) Updates the `TransportHandshaker` code comments to reflect the new handshake format introduced in #120744, and adds some low-level tests to verify the bytes on the wire are as described in those docs. --- .../transport/TransportHandshaker.java | 40 ++- .../bootstrap/test-framework.policy | 1 + .../TransportHandshakerRawMessageTests.java | 251 ++++++++++++++++++ 3 files changed, 289 insertions(+), 3 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java diff --git a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java index a5973e4001444..1a9043d093feb 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java @@ -53,7 +53,7 @@ final class TransportHandshaker { * rely on them matching the real transport protocol (which itself matched the release version numbers), but these days that's no longer * true. * - * Here are some example messages, broken down to show their structure: + * Here are some example messages, broken down to show their structure. See TransportHandshakerRawMessageTests for supporting tests. 
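 *
 * (A note for reading the payload bytes below: the version fields are standard Elasticsearch vInts, i.e.
 * 7 data bits per byte, least-significant group first, with the 0x80 continuation bit set on every byte
 * except the last. For example c3 f9 eb 03 decodes as 0x43 + (0x79 << 7) + (0x6b << 14) + (0x03 << 21),
 * which is 8060099, matching the annotations in the examples.)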
* * ## v6080099 Request: * @@ -87,7 +87,7 @@ final class TransportHandshaker { * c3 f9 eb 03 -- max acceptable protocol version (vInt: 00000011 11101011 11111001 11000011 == 8060099) * * - * ## v7170099 and v8800000 Requests: + * ## v7170099 Requests: * * 45 53 -- 'ES' marker * 00 00 00 31 -- total message length @@ -106,7 +106,7 @@ final class TransportHandshaker { * 04 -- payload length * c3 f9 eb 03 -- max acceptable protocol version (vInt: 00000011 11101011 11111001 11000011 == 8060099) * - * ## v7170099 and v8800000 Responses: + * ## v7170099 Responses: * * 45 53 -- 'ES' marker * 00 00 00 17 -- total message length @@ -118,6 +118,40 @@ final class TransportHandshaker { * 00 -- no response headers [1] * c3 f9 eb 03 -- max acceptable protocol version (vInt: 00000011 11101011 11111001 11000011 == 8060099) * + * ## v8800000 Requests: + * + * 45 53 -- 'ES' marker + * 00 00 00 36 -- total message length + * 00 00 00 00 00 00 00 01 -- request ID + * 08 -- status flags (0b1000 == handshake request) + * 00 86 47 00 -- handshake protocol version (0x6d6833 == 7170099) + * 00 00 00 19 -- length of variable portion of header + * 00 -- no request headers [1] + * 00 -- no response headers [1] + * 16 -- action string size + * 69 6e 74 65 72 6e 61 6c } + * 3a 74 63 70 2f 68 61 6e }- ASCII representation of HANDSHAKE_ACTION_NAME + * 64 73 68 61 6b 65 } + * 00 -- no parent task ID [3] + * 0a -- payload length + * e8 8f 9b 04 -- requesting node transport version (vInt: 00000100 10011011 10001111 11101000 == 8833000) + * 05 -- requesting node release version string length + * 39 2e 30 2e 30 -- requesting node release version string "9.0.0" + * + * ## v8800000 Responses: + * + * 45 53 -- 'ES' marker + * 00 00 00 1d -- total message length + * 00 00 00 00 00 00 00 01 -- request ID (copied from request) + * 09 -- status flags (0b1001 == handshake response) + * 00 86 47 00 -- handshake protocol version (0x864700 == 8800000, copied from request) + * 00 00 00 02 -- length of following variable portion of header + * 00 -- no request headers [1] + * 00 -- no response headers [1] + * e8 8f 9b 04 -- responding node transport version (vInt: 00000100 10011011 10001111 11101000 == 8833000) + * 05 -- responding node release version string length + * 39 2e 30 2e 30 -- responding node release version string "9.0.0" + * * [1] Thread context headers should be empty; see org.elasticsearch.common.util.concurrent.ThreadContext.ThreadContextStruct.writeTo * for their structure. 
* [2] A list of strings, which can safely be ignored diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index 462fab651c211..77aae99907dfc 100644 --- a/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -127,6 +127,7 @@ grant codeBase "${codebase.netty-transport}" { grant { permission java.net.SocketPermission "127.0.0.1", "accept, connect, resolve"; + permission java.net.SocketPermission "[0:0:0:0:0:0:0:1]", "accept, connect, resolve"; permission java.nio.file.LinkPermission "symbolic"; // needed for keystore tests permission java.lang.RuntimePermission "accessUserInformation"; diff --git a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java new file mode 100644 index 0000000000000..de44ca70f2005 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java @@ -0,0 +1,251 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.transport; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Build; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.InputStreamStreamInput; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.core.UpdateForV10; +import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.TransportVersionUtils; + +import java.net.InetAddress; +import java.net.ServerSocket; +import java.net.Socket; +import java.nio.charset.StandardCharsets; +import java.security.AccessController; +import java.security.PrivilegedExceptionAction; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThan; + +public class TransportHandshakerRawMessageTests extends ESSingleNodeTestCase { + + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // remove support for v7 handshakes in v9 + public void testV7Handshake() throws Exception { + final BytesRef handshakeRequestBytes; + final var requestId = randomNonNegativeLong(); + try (var outputStream = new BytesStreamOutput()) { + outputStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); + outputStream.writeLong(requestId); + outputStream.writeByte(TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0))); + outputStream.writeInt(TransportHandshaker.V7_HANDSHAKE_VERSION.id()); + outputStream.writeByte((byte) 0); // no 
request headers; + outputStream.writeByte((byte) 0); // no response headers; + outputStream.writeStringArray(new String[] { "x-pack" }); // one feature + outputStream.writeString("internal:tcp/handshake"); + outputStream.writeByte((byte) 0); // no parent task ID; + + final var requestNodeTransportVersionId = TransportVersionUtils.randomCompatibleVersion(random()).id(); + assertThat(requestNodeTransportVersionId, allOf(greaterThanOrEqualTo(1 << 22), lessThan(1 << 28))); // 4-byte vInt + outputStream.writeByte((byte) 4); // payload length + outputStream.writeVInt(requestNodeTransportVersionId); + + handshakeRequestBytes = outputStream.bytes().toBytesRef(); + } + + final BytesRef handshakeResponseBytes; + try (var socket = openTransportConnection()) { + var streamOutput = new OutputStreamStreamOutput(socket.getOutputStream()); + streamOutput.write("ES".getBytes(StandardCharsets.US_ASCII)); + streamOutput.writeInt(handshakeRequestBytes.length); + streamOutput.writeBytes(handshakeRequestBytes.bytes, handshakeRequestBytes.offset, handshakeRequestBytes.length); + streamOutput.flush(); + + var streamInput = new InputStreamStreamInput(socket.getInputStream()); + assertEquals((byte) 'E', streamInput.readByte()); + assertEquals((byte) 'S', streamInput.readByte()); + var responseLength = streamInput.readInt(); + handshakeResponseBytes = streamInput.readBytesRef(responseLength); + } + + try (var inputStream = new BytesArray(handshakeResponseBytes).streamInput()) { + assertEquals(requestId, inputStream.readLong()); + assertEquals(TransportStatus.setResponse(TransportStatus.setHandshake((byte) 0)), inputStream.readByte()); + assertEquals(TransportHandshaker.V7_HANDSHAKE_VERSION.id(), inputStream.readInt()); + assertEquals((byte) 0, inputStream.readByte()); // no request headers + assertEquals((byte) 0, inputStream.readByte()); // no response headers + inputStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); + assertEquals(TransportVersion.current().id(), inputStream.readVInt()); + assertEquals(-1, inputStream.read()); + } + } + + @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // remove support for v8 handshakes in v10 + public void testV8Handshake() throws Exception { + final BytesRef handshakeRequestBytes; + final var requestId = randomNonNegativeLong(); + try (var outputStream = new BytesStreamOutput()) { + outputStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + outputStream.writeLong(requestId); + outputStream.writeByte(TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0))); + outputStream.writeInt(TransportHandshaker.V8_HANDSHAKE_VERSION.id()); + outputStream.writeInt(0x1a); // length of variable-length header, always 0x1a + outputStream.writeByte((byte) 0); // no request headers; + outputStream.writeByte((byte) 0); // no response headers; + outputStream.writeByte((byte) 0); // no features; + outputStream.writeString("internal:tcp/handshake"); + outputStream.writeByte((byte) 0); // no parent task ID; + + final var requestNodeTransportVersionId = TransportVersionUtils.randomCompatibleVersion(random()).id(); + assertThat(requestNodeTransportVersionId, allOf(greaterThanOrEqualTo(1 << 22), lessThan(1 << 28))); // 4-byte vInt + outputStream.writeByte((byte) 4); // payload length + outputStream.writeVInt(requestNodeTransportVersionId); + + handshakeRequestBytes = outputStream.bytes().toBytesRef(); + } + + final BytesRef handshakeResponseBytes; + try (var socket = openTransportConnection()) { + var streamOutput = new 
OutputStreamStreamOutput(socket.getOutputStream()); + streamOutput.write("ES".getBytes(StandardCharsets.US_ASCII)); + streamOutput.writeInt(handshakeRequestBytes.length); + streamOutput.writeBytes(handshakeRequestBytes.bytes, handshakeRequestBytes.offset, handshakeRequestBytes.length); + streamOutput.flush(); + + var streamInput = new InputStreamStreamInput(socket.getInputStream()); + assertEquals((byte) 'E', streamInput.readByte()); + assertEquals((byte) 'S', streamInput.readByte()); + var responseLength = streamInput.readInt(); + handshakeResponseBytes = streamInput.readBytesRef(responseLength); + } + + try (var inputStream = new BytesArray(handshakeResponseBytes).streamInput()) { + assertEquals(requestId, inputStream.readLong()); + assertEquals(TransportStatus.setResponse(TransportStatus.setHandshake((byte) 0)), inputStream.readByte()); + assertEquals(TransportHandshaker.V8_HANDSHAKE_VERSION.id(), inputStream.readInt()); + assertEquals(2, inputStream.readInt()); // length of variable-length header, always 0x02 + assertEquals((byte) 0, inputStream.readByte()); // no request headers + assertEquals((byte) 0, inputStream.readByte()); // no response headers + inputStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + assertEquals(TransportVersion.current().id(), inputStream.readVInt()); + assertEquals(-1, inputStream.read()); + } + } + + @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // remove support for v9 handshakes in v11 + public void testV9Handshake() throws Exception { + final BytesRef handshakeRequestBytes; + final var requestId = randomNonNegativeLong(); + try (var outputStream = new BytesStreamOutput()) { + outputStream.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); + outputStream.writeLong(requestId); + outputStream.writeByte(TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0))); + outputStream.writeInt(TransportHandshaker.V9_HANDSHAKE_VERSION.id()); + outputStream.writeInt(0x19); // length of variable-length header, always 0x19 + outputStream.writeByte((byte) 0); // no request headers; + outputStream.writeByte((byte) 0); // no response headers; + outputStream.writeString("internal:tcp/handshake"); + outputStream.writeByte((byte) 0); // no parent task ID; + + final var requestNodeTransportVersionId = TransportVersionUtils.randomCompatibleVersion(random()).id(); + assertThat(requestNodeTransportVersionId, allOf(greaterThanOrEqualTo(1 << 22), lessThan(1 << 28))); // 4-byte vInt + final var releaseVersionLength = between(0, 127 - 5); // so that its length, and the length of the payload, is a one-byte vInt + final var requestNodeReleaseVersion = randomAlphaOfLength(releaseVersionLength); + outputStream.writeByte((byte) (4 + 1 + releaseVersionLength)); // payload length + outputStream.writeVInt(requestNodeTransportVersionId); + outputStream.writeString(requestNodeReleaseVersion); + + handshakeRequestBytes = outputStream.bytes().toBytesRef(); + } + + final BytesRef handshakeResponseBytes; + try (var socket = openTransportConnection()) { + var streamOutput = new OutputStreamStreamOutput(socket.getOutputStream()); + streamOutput.write("ES".getBytes(StandardCharsets.US_ASCII)); + streamOutput.writeInt(handshakeRequestBytes.length); + streamOutput.writeBytes(handshakeRequestBytes.bytes, handshakeRequestBytes.offset, handshakeRequestBytes.length); + streamOutput.flush(); + + var streamInput = new InputStreamStreamInput(socket.getInputStream()); + assertEquals((byte) 'E', streamInput.readByte()); + assertEquals((byte) 'S', 
streamInput.readByte()); + var responseLength = streamInput.readInt(); + handshakeResponseBytes = streamInput.readBytesRef(responseLength); + } + + try (var inputStream = new BytesArray(handshakeResponseBytes).streamInput()) { + assertEquals(requestId, inputStream.readLong()); + assertEquals(TransportStatus.setResponse(TransportStatus.setHandshake((byte) 0)), inputStream.readByte()); + assertEquals(TransportHandshaker.V9_HANDSHAKE_VERSION.id(), inputStream.readInt()); + assertEquals(2, inputStream.readInt()); // length of variable-length header, always 0x02 + assertEquals((byte) 0, inputStream.readByte()); // no request headers + assertEquals((byte) 0, inputStream.readByte()); // no response headers + inputStream.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); + assertEquals(TransportVersion.current().id(), inputStream.readVInt()); + assertEquals(Build.current().version(), inputStream.readString()); + assertEquals(-1, inputStream.read()); + } + } + + public void testOutboundHandshake() throws Exception { + final BytesRef handshakeRequestBytes; + + try (var serverSocket = new ServerSocket(0, 1, InetAddress.getLoopbackAddress())) { + getInstanceFromNode(TransportService.class).openConnection( + DiscoveryNodeUtils.builder(randomIdentifier()) + .address(new TransportAddress(serverSocket.getInetAddress(), serverSocket.getLocalPort())) + .build(), + ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG, null, null, null, null, null), + ActionListener.noop() + ); + + try ( + var acceptedSocket = serverSocket.accept(); + var streamInput = new InputStreamStreamInput(acceptedSocket.getInputStream()) + ) { + assertEquals((byte) 'E', streamInput.readByte()); + assertEquals((byte) 'S', streamInput.readByte()); + var responseLength = streamInput.readInt(); + handshakeRequestBytes = streamInput.readBytesRef(responseLength); + } + } + + final BytesRef payloadBytes; + + try (var inputStream = new BytesArray(handshakeRequestBytes).streamInput()) { + assertThat(inputStream.readLong(), greaterThan(0L)); + assertEquals(TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0)), inputStream.readByte()); + assertEquals(TransportHandshaker.V8_HANDSHAKE_VERSION.id(), inputStream.readInt()); + assertEquals(0x1a, inputStream.readInt()); // length of variable-length header, always 0x1a + assertEquals((byte) 0, inputStream.readByte()); // no request headers + assertEquals((byte) 0, inputStream.readByte()); // no response headers + assertEquals((byte) 0, inputStream.readByte()); // no features + assertEquals("internal:tcp/handshake", inputStream.readString()); + assertEquals((byte) 0, inputStream.readByte()); // no parent task + inputStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + payloadBytes = inputStream.readBytesRef(); + assertEquals(-1, inputStream.read()); + } + + try (var inputStream = new BytesArray(payloadBytes).streamInput()) { + inputStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + assertEquals(TransportVersion.current().id(), inputStream.readVInt()); + assertEquals(-1, inputStream.read()); + } + } + + private Socket openTransportConnection() throws Exception { + final var transportAddress = randomFrom(getInstanceFromNode(TransportService.class).boundAddress().boundAddresses()).address(); + return AccessController.doPrivileged( + (PrivilegedExceptionAction) (() -> new Socket(transportAddress.getAddress(), transportAddress.getPort())) + ); + } +} From 83dd34f19cb90cca31091c147348a70abcf3762d Mon Sep 17 00:00:00 2001 
From: Joe Gallo Date: Fri, 24 Jan 2025 11:39:24 -0500 Subject: [PATCH 021/383] Use getOrDefault in IngestDocument rather than containsKey+get (#120571) --- .../elasticsearch/ingest/IngestDocument.java | 36 +++++++++++-------- .../java/org/elasticsearch/script/CtxMap.java | 12 +++++++ .../org/elasticsearch/script/Metadata.java | 7 ++++ .../ingest/IngestCtxMapTests.java | 12 +++++++ .../org/elasticsearch/script/CtxMapTests.java | 34 ++++++++++++++++++ .../elasticsearch/script/MetadataTests.java | 9 +++++ 6 files changed, 96 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 0614e9e92edf2..7982024911beb 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -56,6 +56,9 @@ public final class IngestDocument { // This is the maximum number of nested pipelines that can be within a pipeline. If there are more, we bail out with an error public static final int MAX_PIPELINES = Integer.parseInt(System.getProperty("es.ingest.max_pipelines", "100")); + // a 'not found' sentinel value for use in getOrDefault calls in order to avoid containsKey-and-then-get + private static final Object NOT_FOUND = new Object(); + private final IngestCtxMap ctxMap; private final Map ingestMetadata; @@ -376,11 +379,15 @@ private static ResolveResult resolve(String pathElement, String fullPath, Object if (context == null) { return ResolveResult.error("cannot resolve [" + pathElement + "] from null as part of path [" + fullPath + "]"); } - if (context instanceof Map map) { - if (map.containsKey(pathElement)) { - return ResolveResult.success(map.get(pathElement)); + if (context instanceof Map) { + @SuppressWarnings("unchecked") + Map map = (Map) context; + Object object = map.getOrDefault(pathElement, NOT_FOUND); // getOrDefault is faster than containsKey + get + if (object == NOT_FOUND) { + return ResolveResult.error("field [" + pathElement + "] not present as part of path [" + fullPath + "]"); + } else { + return ResolveResult.success(object); } - return ResolveResult.error("field [" + pathElement + "] not present as part of path [" + fullPath + "]"); } if (context instanceof List list) { int index; @@ -547,12 +554,13 @@ private void setFieldValue(String path, Object value, boolean append, boolean al if (context instanceof Map) { @SuppressWarnings("unchecked") Map map = (Map) context; - if (map.containsKey(pathElement)) { - context = map.get(pathElement); - } else { - HashMap newMap = new HashMap<>(); + Object object = map.getOrDefault(pathElement, NOT_FOUND); // getOrDefault is faster than containsKey + get + if (object == NOT_FOUND) { + Map newMap = new HashMap<>(); map.put(pathElement, newMap); context = newMap; + } else { + context = object; } } else if (context instanceof List list) { int index; @@ -591,16 +599,16 @@ private void setFieldValue(String path, Object value, boolean append, boolean al @SuppressWarnings("unchecked") Map map = (Map) context; if (append) { - if (map.containsKey(leafKey)) { - Object object = map.get(leafKey); + Object object = map.getOrDefault(leafKey, NOT_FOUND); // getOrDefault is faster than containsKey + get + if (object == NOT_FOUND) { + List list = new ArrayList<>(); + appendValues(list, value); + map.put(leafKey, list); + } else { Object list = appendValues(object, value, allowDuplicates); if (list != object) { map.put(leafKey, list); } - } else { - List list 
= new ArrayList<>(); - appendValues(list, value); - map.put(leafKey, list); } return; } diff --git a/server/src/main/java/org/elasticsearch/script/CtxMap.java b/server/src/main/java/org/elasticsearch/script/CtxMap.java index 1496d70cf39a1..342e37efcaedf 100644 --- a/server/src/main/java/org/elasticsearch/script/CtxMap.java +++ b/server/src/main/java/org/elasticsearch/script/CtxMap.java @@ -196,6 +196,18 @@ public Object get(Object key) { return directSourceAccess() ? source.get(key) : (SOURCE.equals(key) ? source : null); } + @Override + public Object getOrDefault(Object key, Object defaultValue) { + // uses map directly to avoid Map's implementation that is just get and then containsKey and so could require two isAvailable calls + if (key instanceof String str) { + if (metadata.isAvailable(str)) { + return metadata.getOrDefault(str, defaultValue); + } + return directSourceAccess() ? source.getOrDefault(key, defaultValue) : (SOURCE.equals(key) ? source : defaultValue); + } + return defaultValue; + } + /** * Set of entries of the wrapped map that calls the appropriate validator before changing an entries value or removing an entry. * diff --git a/server/src/main/java/org/elasticsearch/script/Metadata.java b/server/src/main/java/org/elasticsearch/script/Metadata.java index dc5ae51e45af0..fc2a59f7171f2 100644 --- a/server/src/main/java/org/elasticsearch/script/Metadata.java +++ b/server/src/main/java/org/elasticsearch/script/Metadata.java @@ -240,6 +240,13 @@ public Object get(String key) { return map.get(key); } + /** + * Get the value associated with {@param key}, otherwise return {@param defaultValue} + */ + public Object getOrDefault(String key, Object defaultValue) { + return map.getOrDefault(key, defaultValue); + } + /** * Remove the mapping associated with {@param key} * @throws IllegalArgumentException if {@link #isAvailable(String)} is false or the key cannot be removed. 
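The change above boils down to one idiom: a single getOrDefault lookup guarded by a private sentinel object, so a key that is genuinely absent can be told apart from a key that is mapped to null, without paying for containsKey plus get. A minimal standalone sketch of that idiom, using a plain java.util.Map and hypothetical names rather than the actual IngestDocument code:

import java.util.HashMap;
import java.util.Map;

public class SentinelLookupSketch {
    // A value no caller can ever store, so it safely marks "key not present".
    private static final Object NOT_FOUND = new Object();

    // One map lookup instead of containsKey followed by get; a stored null still comes back
    // as null, while a genuinely missing key comes back as the sentinel.
    static String describe(Map<String, Object> map, String key) {
        Object value = map.getOrDefault(key, NOT_FOUND);
        if (value == NOT_FOUND) {
            return "field [" + key + "] not present";
        }
        return "field [" + key + "] = " + value;
    }

    public static void main(String[] args) {
        Map<String, Object> map = new HashMap<>();
        map.put("foo", "bar");
        map.put("explicit-null", null);
        System.out.println(describe(map, "foo"));           // field [foo] = bar
        System.out.println(describe(map, "explicit-null")); // field [explicit-null] = null
        System.out.println(describe(map, "missing"));       // field [missing] not present
    }
}

The hunks in this commit apply the same idea inside the path-resolution and set-field loops, and expose matching getOrDefault overrides on CtxMap and Metadata so callers there get the same single-lookup behaviour.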
diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestCtxMapTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestCtxMapTests.java index 5a8505e6bb375..33c3ae889040b 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestCtxMapTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestCtxMapTests.java @@ -329,6 +329,18 @@ public void testHandlesAllVersionTypes() { assertNull(md.getVersionType()); } + public void testGetOrDefault() { + map = new IngestCtxMap(Map.of("foo", "bar"), new IngestDocMetadata(Map.of("_version", 5L), null)); + + // it does the expected thing for fields that are present + assertThat(map.getOrDefault("_version", -1L), equalTo(5L)); + assertThat(map.getOrDefault("foo", "wat"), equalTo("bar")); + + // it does the expected thing for fields that are not present + assertThat(map.getOrDefault("_version_type", "something"), equalTo("something")); + assertThat(map.getOrDefault("baz", "quux"), equalTo("quux")); + } + private static class TestEntry implements Map.Entry { String key; Object value; diff --git a/server/src/test/java/org/elasticsearch/script/CtxMapTests.java b/server/src/test/java/org/elasticsearch/script/CtxMapTests.java index 05c6d22f2d4ec..69b98c940fd9b 100644 --- a/server/src/test/java/org/elasticsearch/script/CtxMapTests.java +++ b/server/src/test/java/org/elasticsearch/script/CtxMapTests.java @@ -17,7 +17,10 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.script.Metadata.LongField; +import static org.elasticsearch.script.Metadata.VERSION; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; public class CtxMapTests extends ESTestCase { CtxMap map; @@ -29,6 +32,37 @@ public void setUp() throws Exception { map = new CtxMap<>(new HashMap<>(), new Metadata(Map.of(), Map.of())); } + @SuppressWarnings("unchecked") + public void testGetOrDefault() { + { + map = new CtxMap<>(Map.of("foo", "bar"), new Metadata(Map.of("_version", 5L), Map.of(VERSION, LongField.withWritable()))); + + // it does the expected thing for fields that are present + assertThat(map.getOrDefault("_version", -1L), equalTo(5L)); + assertThat(((Map) map.getOrDefault("_source", Map.of())).getOrDefault("foo", "wat"), equalTo("bar")); + + // it does the expected thing for fields that are not present + assertThat(map.getOrDefault("_version_type", "something"), equalTo("something")); + assertThat(map.getOrDefault("baz", "quux"), equalTo("quux")); + } + { + map = new CtxMap<>(Map.of("foo", "bar"), new Metadata(Map.of("_version", 5L), Map.of(VERSION, LongField.withWritable()))) { + @Override + protected boolean directSourceAccess() { + return true; + } + }; + + // it does the expected thing for fields that are present + assertThat(map.getOrDefault("_version", -1L), equalTo(5L)); + assertThat(map.getOrDefault("foo", "wat"), equalTo("bar")); + + // it does the expected thing for fields that are not present + assertThat(map.getOrDefault("_version_type", "something"), equalTo("something")); + assertThat(map.getOrDefault("baz", "quux"), equalTo("quux")); + } + } + public void testAddingJunkToCtx() { IllegalArgumentException err = expectThrows(IllegalArgumentException.class, () -> map.put("junk", "stuff")); assertEquals("Cannot put key [junk] with value [stuff] into ctx", err.getMessage()); diff --git a/server/src/test/java/org/elasticsearch/script/MetadataTests.java b/server/src/test/java/org/elasticsearch/script/MetadataTests.java index 80b3dcbf16b66..7830b9d15fdd0 100644 --- 
a/server/src/test/java/org/elasticsearch/script/MetadataTests.java +++ b/server/src/test/java/org/elasticsearch/script/MetadataTests.java @@ -16,6 +16,8 @@ import java.util.Map; import java.util.Set; +import static org.hamcrest.Matchers.equalTo; + public class MetadataTests extends ESTestCase { Metadata md; private static final Metadata.FieldProperty STRING_PROP = new Metadata.FieldProperty<>(String.class, true, true, null); @@ -279,4 +281,11 @@ public void testImmutablePropertiesMap() { new Metadata(Map.of(), Map.of()); new Metadata(Map.of(), Map.copyOf(new HashMap<>())); } + + public void testGetOrDefault() { + md = new Metadata(new HashMap<>(Map.of("foo", "bar")), Map.of("foo", STRING_PROP, "baz", STRING_PROP)); + assertThat(md.getOrDefault("foo", "wat"), equalTo("bar")); + assertThat(md.getOrDefault("bar", "wat"), equalTo("wat")); + assertThat(md.getOrDefault("yo", "wat"), equalTo("wat")); + } } From 190720d2d89266f8ca033a006d596970d73d75d4 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Fri, 24 Jan 2025 17:46:27 +0100 Subject: [PATCH 022/383] Special handling for invalid global labels for APM agent (#120795) --- .../telemetry/apm/internal/APMAgentSettings.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java index 9d4822aa9c4d6..68adc97b74449 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java @@ -90,6 +90,11 @@ public void initAgentSystemProperties(Settings settings) { */ @SuppressForbidden(reason = "Need to be able to manipulate APM agent-related properties to set them dynamically") public void setAgentSetting(String key, String value) { + if (key.startsWith("global_labels.")) { + // Invalid agent setting, leftover from flattening global labels in APMJVMOptions + // https://github.com/elastic/elasticsearch/issues/120791 + return; + } final String completeKey = "elastic.apm." + Objects.requireNonNull(key); AccessController.doPrivileged((PrivilegedAction) () -> { if (value == null || value.isEmpty()) { @@ -242,8 +247,8 @@ private static Setting concreteAgentSetting(String namespace, String qua return new Setting<>(qualifiedKey, "", (value) -> { if (qualifiedKey.equals("_na_") == false && PERMITTED_AGENT_KEYS.contains(namespace) == false) { if (namespace.startsWith("global_labels.")) { - // The nested labels syntax is transformed in APMJvmOptions. - // Ignore these here to not fail if not correctly removed. 
+ // Invalid agent setting, leftover from flattening global labels in APMJVMOptions + // https://github.com/elastic/elasticsearch/issues/120791 return value; } throw new IllegalArgumentException("Configuration [" + qualifiedKey + "] is either prohibited or unknown."); From 10e96bde5d6515bc6cfdcb37e619189fb45021c2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 25 Jan 2025 03:50:24 +1100 Subject: [PATCH 023/383] Mute org.elasticsearch.xpack.test.rest.XPackRestIT org.elasticsearch.xpack.test.rest.XPackRestIT #120816 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 269368c5914ba..d206bc63d7473 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -249,6 +249,8 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/3rd_party_deployment/Test start deployment fails while model download in progress} issue: https://github.com/elastic/elasticsearch/issues/120810 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + issue: https://github.com/elastic/elasticsearch/issues/120816 # Examples: # From 060c8337e73a4d3d8a4522287c9032dccbedc22d Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Fri, 24 Jan 2025 18:00:46 +0100 Subject: [PATCH 024/383] ESQL: Simplify TableIdentifier class + rename to IndexPattern (#120797) This class is confusing: - It contains an **unused** `cluster` attribute - we never separate out the cluster, it remains in the `index` field. Also, in the constructor, this field is called `catalog`, which is a concept entirely absent from ESQL at the moment. - It can refer to multiple indices, even multiple wildcard patterns, but doesn't mention this neither in its name nor javadoc. - It has little to do with tables, which is likely a remnant of this class' usage in SQL, before the `esql.core` split. This PR removes the `cluster` attribute, renames the class to `IndexPattern`, and adds javadoc to clarify that it can also contain stuff like `remote1:idx1,remote-*:idx-*`. 
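As a rough illustration of why the raw pattern can stay in a single field: the string the user wrote, remote prefixes and commas included, is carried around unchanged, and only a later grouping step (IndicesExpressionGrouper in the real code) splits it per cluster. The sketch below uses made-up names and a simplified single-colon split, so treat it as a toy model of that grouping rather than the actual resolution logic:

import java.util.LinkedHashMap;
import java.util.Map;

public class IndexPatternGroupingSketch {
    // Hypothetical stand-in for the real IndicesExpressionGrouper; "(local)" is a made-up marker
    // and real remote-cluster expressions have more cases than a single ':' split.
    static Map<String, String> groupByCluster(String indexPattern) {
        Map<String, String> grouped = new LinkedHashMap<>();
        for (String expression : indexPattern.split(",")) {
            int separator = expression.indexOf(':');
            String cluster = separator < 0 ? "(local)" : expression.substring(0, separator);
            String indices = separator < 0 ? expression : expression.substring(separator + 1);
            grouped.merge(cluster, indices, (existing, added) -> existing + "," + added);
        }
        return grouped;
    }

    public static void main(String[] args) {
        // The whole pattern travels through the plan as one opaque string ...
        String indexPattern = "idx1,remote1:idx2*,remote-*:logs-*";
        // ... and is only broken apart per cluster when resolution actually needs it.
        System.out.println(groupByCluster(indexPattern));
        // {(local)=idx1, remote1=idx2*, remote-*=logs-*}
    }
}

The IndexPattern class added in this commit is just a Source plus that raw string, as the diff below shows.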
--- .../xpack/esql/analysis/Analyzer.java | 14 ++-- .../xpack/esql/analysis/PreAnalyzer.java | 2 +- .../xpack/esql/analysis/TableInfo.java | 8 +- .../xpack/esql/parser/LogicalPlanBuilder.java | 8 +- .../xpack/esql/plan/IndexPattern.java | 57 +++++++++++++++ .../xpack/esql/plan/TableIdentifier.java | 73 ------------------- .../esql/plan/logical/UnresolvedRelation.java | 24 +++--- .../xpack/esql/session/EsqlSession.java | 27 ++++--- .../esql/session/EsqlSessionCCSUtils.java | 2 +- .../elasticsearch/xpack/esql/CsvTests.java | 2 +- .../xpack/esql/analysis/AnalyzerTests.java | 4 +- .../parser/AbstractStatementParserTests.java | 4 +- .../esql/parser/StatementParserTests.java | 20 ++--- .../session/EsqlSessionCCSUtilsTests.java | 10 +-- 14 files changed, 120 insertions(+), 135 deletions(-) create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/IndexPattern.java delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/TableIdentifier.java diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 552e90e0e90f9..4f5ff35b84054 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -64,7 +64,7 @@ import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.parser.ParsingException; -import org.elasticsearch.xpack.esql.plan.TableIdentifier; +import org.elasticsearch.xpack.esql.plan.IndexPattern; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Drop; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -202,7 +202,9 @@ private static class ResolveTable extends ParameterizedAnalyzerRule { List list = p.indexMode() == IndexMode.LOOKUP ? 
lookupIndices : indices; - list.add(new TableInfo(p.table())); + list.add(new TableInfo(p.indexPattern())); }); plan.forEachUp(Enrich.class, unresolvedEnriches::add); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/TableInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/TableInfo.java index eff658e8997b0..38d368bd2bfad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/TableInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/TableInfo.java @@ -7,17 +7,17 @@ package org.elasticsearch.xpack.esql.analysis; -import org.elasticsearch.xpack.esql.plan.TableIdentifier; +import org.elasticsearch.xpack.esql.plan.IndexPattern; public class TableInfo { - private final TableIdentifier id; + private final IndexPattern id; - public TableInfo(TableIdentifier id) { + public TableInfo(IndexPattern id) { this.id = id; } - public TableIdentifier id() { + public IndexPattern id() { return id; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index e7e3527f6b4aa..ba74bf467f2aa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -34,7 +34,7 @@ import org.elasticsearch.xpack.esql.expression.Order; import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; -import org.elasticsearch.xpack.esql.plan.TableIdentifier; +import org.elasticsearch.xpack.esql.plan.IndexPattern; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Drop; @@ -255,7 +255,7 @@ public LogicalPlan visitRowCommand(EsqlBaseParser.RowCommandContext ctx) { @Override public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { Source source = source(ctx); - TableIdentifier table = new TableIdentifier(source, null, visitIndexPattern(ctx.indexPattern())); + IndexPattern table = new IndexPattern(source, visitIndexPattern(ctx.indexPattern())); Map metadataMap = new LinkedHashMap<>(); if (ctx.metadata() != null) { for (var c : ctx.metadata().UNQUOTED_SOURCE()) { @@ -468,7 +468,7 @@ public LogicalPlan visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) throw new IllegalArgumentException("METRICS command currently requires a snapshot build"); } Source source = source(ctx); - TableIdentifier table = new TableIdentifier(source, null, visitIndexPattern(ctx.indexPattern())); + IndexPattern table = new IndexPattern(source, visitIndexPattern(ctx.indexPattern())); if (ctx.aggregates == null && ctx.grouping == null) { return new UnresolvedRelation(source, table, false, List.of(), IndexMode.STANDARD, null, "METRICS"); @@ -530,7 +530,7 @@ public PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { UnresolvedRelation right = new UnresolvedRelation( source(target), - new TableIdentifier(source(target.index), null, rightPattern), + new IndexPattern(source(target.index), rightPattern), false, emptyList(), IndexMode.LOOKUP, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/IndexPattern.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/IndexPattern.java new file mode 100644 index 0000000000000..fdaac1c1cc64c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/IndexPattern.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.plan; + +import org.elasticsearch.xpack.esql.core.tree.Source; + +import java.util.Objects; + +/** + * Contains an index pattern together with its {@link Source}. Can also be a comma-separated list, like {@code idx-*,remote:other-idx*}. + */ +public class IndexPattern { + + private final Source source; + private final String indexPattern; + + public IndexPattern(Source source, String indexPattern) { + this.source = source; + this.indexPattern = indexPattern; + } + + public String indexPattern() { + return indexPattern; + } + + @Override + public int hashCode() { + return Objects.hash(indexPattern); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + IndexPattern other = (IndexPattern) obj; + return Objects.equals(indexPattern, other.indexPattern); + } + + public Source source() { + return source; + } + + @Override + public String toString() { + return indexPattern; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/TableIdentifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/TableIdentifier.java deleted file mode 100644 index 532d93eec48af..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/TableIdentifier.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.plan; - -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.Objects; - -import static org.elasticsearch.transport.RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR; - -public class TableIdentifier { - - private final Source source; - - private final String cluster; - private final String index; - - public TableIdentifier(Source source, String catalog, String index) { - this.source = source; - this.cluster = catalog; - this.index = index; - } - - public String cluster() { - return cluster; - } - - public String index() { - return index; - } - - @Override - public int hashCode() { - return Objects.hash(cluster, index); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - TableIdentifier other = (TableIdentifier) obj; - return Objects.equals(index, other.index) && Objects.equals(cluster, other.cluster); - } - - public Source source() { - return source; - } - - public String qualifiedIndex() { - return cluster != null ? 
cluster + REMOTE_CLUSTER_INDEX_SEPARATOR + index : index; - } - - @Override - public String toString() { - StringBuilder builder = new StringBuilder(); - if (cluster != null) { - builder.append(cluster); - builder.append(REMOTE_CLUSTER_INDEX_SEPARATOR); - } - builder.append(index); - return builder.toString(); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java index 384c3f7a340ae..0a20e1dd9080d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.plan.TableIdentifier; +import org.elasticsearch.xpack.esql.plan.IndexPattern; import java.util.Collections; import java.util.List; @@ -22,7 +22,7 @@ public class UnresolvedRelation extends LeafPlan implements Unresolvable { - private final TableIdentifier table; + private final IndexPattern indexPattern; private final boolean frozen; private final List metadataFields; /* @@ -40,7 +40,7 @@ public class UnresolvedRelation extends LeafPlan implements Unresolvable { public UnresolvedRelation( Source source, - TableIdentifier table, + IndexPattern indexPattern, boolean frozen, List metadataFields, IndexMode indexMode, @@ -48,11 +48,11 @@ public UnresolvedRelation( String commandName ) { super(source); - this.table = table; + this.indexPattern = indexPattern; this.frozen = frozen; this.metadataFields = metadataFields; this.indexMode = indexMode; - this.unresolvedMsg = unresolvedMessage == null ? "Unknown index [" + table.index() + "]" : unresolvedMessage; + this.unresolvedMsg = unresolvedMessage == null ? 
"Unknown index [" + indexPattern.indexPattern() + "]" : unresolvedMessage; this.commandName = commandName; } @@ -68,11 +68,11 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, UnresolvedRelation::new, table, frozen, metadataFields, indexMode, unresolvedMsg, commandName); + return NodeInfo.create(this, UnresolvedRelation::new, indexPattern, frozen, metadataFields, indexMode, unresolvedMsg, commandName); } - public TableIdentifier table() { - return table; + public IndexPattern indexPattern() { + return indexPattern; } public boolean frozen() { @@ -124,7 +124,7 @@ public String unresolvedMessage() { @Override public int hashCode() { - return Objects.hash(source(), table, metadataFields, indexMode, unresolvedMsg); + return Objects.hash(source(), indexPattern, metadataFields, indexMode, unresolvedMsg); } @Override @@ -138,7 +138,7 @@ public boolean equals(Object obj) { } UnresolvedRelation other = (UnresolvedRelation) obj; - return Objects.equals(table, other.table) + return Objects.equals(indexPattern, other.indexPattern) && Objects.equals(frozen, other.frozen) && Objects.equals(metadataFields, other.metadataFields) && indexMode == other.indexMode @@ -147,11 +147,11 @@ public boolean equals(Object obj) { @Override public List nodeProperties() { - return singletonList(table); + return singletonList(indexPattern); } @Override public String toString() { - return UNRESOLVED_PREFIX + table.index(); + return UNRESOLVED_PREFIX + indexPattern.indexPattern(); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index b10f766babb36..b40e49df49c84 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -56,7 +56,7 @@ import org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizer; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.parser.QueryParams; -import org.elasticsearch.xpack.esql.plan.TableIdentifier; +import org.elasticsearch.xpack.esql.plan.IndexPattern; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Keep; @@ -321,7 +321,9 @@ public void analyzedPlan( EsqlSessionCCSUtils.checkForCcsLicense(executionInfo, indices, indicesExpressionGrouper, verifier.licenseState()); final Set targetClusters = enrichPolicyResolver.groupIndicesPerCluster( - indices.stream().flatMap(t -> Arrays.stream(Strings.commaDelimitedListToStringArray(t.id().index()))).toArray(String[]::new) + indices.stream() + .flatMap(t -> Arrays.stream(Strings.commaDelimitedListToStringArray(t.id().indexPattern()))) + .toArray(String[]::new) ).keySet(); var listener = SubscribableListener.newForked( @@ -373,14 +375,14 @@ public void analyzedPlan( } private void preAnalyzeLookupIndex(TableInfo tableInfo, PreAnalysisResult result, ActionListener listener) { - TableIdentifier table = tableInfo.id(); - Set fieldNames = result.wildcardJoinIndices().contains(table.index()) ? IndexResolver.ALL_FIELDS : result.fieldNames; + IndexPattern table = tableInfo.id(); + Set fieldNames = result.wildcardJoinIndices().contains(table.indexPattern()) ? 
IndexResolver.ALL_FIELDS : result.fieldNames; // call the EsqlResolveFieldsAction (field-caps) to resolve indices and get field types indexResolver.resolveAsMergedMapping( - table.index(), + table.indexPattern(), fieldNames, null, - listener.map(indexResolution -> result.addLookupIndexResolution(table.index(), indexResolution)) + listener.map(indexResolution -> result.addLookupIndexResolution(table.indexPattern(), indexResolution)) ); // TODO: Verify that the resolved index actually has indexMode: "lookup" } @@ -400,9 +402,12 @@ private void preAnalyzeIndices( // known to be unavailable from the enrich policy API call Map unavailableClusters = result.enrichResolution.getUnavailableClusters(); TableInfo tableInfo = indices.get(0); - TableIdentifier table = tableInfo.id(); + IndexPattern table = tableInfo.id(); - Map clusterIndices = indicesExpressionGrouper.groupIndices(IndicesOptions.DEFAULT, table.index()); + Map clusterIndices = indicesExpressionGrouper.groupIndices( + IndicesOptions.DEFAULT, + table.indexPattern() + ); for (Map.Entry entry : clusterIndices.entrySet()) { final String clusterAlias = entry.getKey(); String indexExpr = Strings.arrayToCommaDelimitedString(entry.getValue().indices()); @@ -431,7 +436,9 @@ private void preAnalyzeIndices( String indexExpressionToResolve = EsqlSessionCCSUtils.createIndexExpressionFromAvailableClusters(executionInfo); if (indexExpressionToResolve.isEmpty()) { // if this was a pure remote CCS request (no local indices) and all remotes are offline, return an empty IndexResolution - listener.onResponse(result.withIndexResolution(IndexResolution.valid(new EsIndex(table.index(), Map.of(), Map.of())))); + listener.onResponse( + result.withIndexResolution(IndexResolution.valid(new EsIndex(table.indexPattern(), Map.of(), Map.of()))) + ); } else { // call the EsqlResolveFieldsAction (field-caps) to resolve indices and get field types indexResolver.resolveAsMergedMapping( @@ -588,7 +595,7 @@ static PreAnalysisResult fieldNames(LogicalPlan parsed, Set enrichPolicy } if (keepCommandReferences.isEmpty()) { // No KEEP commands after the JOIN, so we need to mark this index for "*" field resolution - wildcardJoinIndices.add(((UnresolvedRelation) join.right()).table().index()); + wildcardJoinIndices.add(((UnresolvedRelation) join.right()).indexPattern().indexPattern()); } else { // Keep commands can reference the join columns with names that shadow aliases, so we block their removal keepJoinReferences.addAll(keepCommandReferences); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java index 304a54741d44b..6be243456e040 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java @@ -311,7 +311,7 @@ public static void checkForCcsLicense( for (TableInfo tableInfo : indices) { Map groupedIndices; try { - groupedIndices = indicesGrouper.groupIndices(IndicesOptions.DEFAULT, tableInfo.id().index()); + groupedIndices = indicesGrouper.groupIndices(IndicesOptions.DEFAULT, tableInfo.id().indexPattern()); } catch (NoSuchRemoteClusterException e) { if (EsqlLicenseChecker.isCcsAllowed(licenseState)) { throw e; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 
89150d6a52534..ae9c12fd7c711 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -462,7 +462,7 @@ private static CsvTestsDataLoader.MultiIndexTestDataset testDatasets(LogicalPlan throw new IllegalArgumentException("unexpected index resolution to multiple entries [" + preAnalysis.indices.size() + "]"); } - String indexName = indices.get(0).id().index(); + String indexName = indices.get(0).id().indexPattern(); List datasets = new ArrayList<>(); if (indexName.endsWith("*")) { String indexPrefix = indexName.substring(0, indexName.length() - 1); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 48366282e4e10..b01a82819e2ea 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -41,7 +41,7 @@ import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.parser.QueryParams; -import org.elasticsearch.xpack.esql.plan.TableIdentifier; +import org.elasticsearch.xpack.esql.plan.IndexPattern; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; @@ -98,7 +98,7 @@ public class AnalyzerTests extends ESTestCase { private static final UnresolvedRelation UNRESOLVED_RELATION = new UnresolvedRelation( EMPTY, - new TableIdentifier(EMPTY, null, "idx"), + new IndexPattern(EMPTY, "idx"), false, List.of(), IndexMode.STANDARD, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java index 31ea4f2712b98..111c553d34a0e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; -import org.elasticsearch.xpack.esql.plan.TableIdentifier; +import org.elasticsearch.xpack.esql.plan.IndexPattern; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; @@ -72,7 +72,7 @@ static UnresolvedFunction function(String name, List args) { } static UnresolvedRelation relation(String name) { - return new UnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, name), false, List.of(), IndexMode.STANDARD, null, "FROM"); + return new UnresolvedRelation(EMPTY, new IndexPattern(EMPTY, name), false, List.of(), IndexMode.STANDARD, null, "FROM"); } static Literal integer(int i) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index ac41c7b0f52bc..792b43433e1ee 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -42,7 +42,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; -import org.elasticsearch.xpack.esql.plan.TableIdentifier; +import org.elasticsearch.xpack.esql.plan.IndexPattern; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Drop; @@ -2052,7 +2052,7 @@ private void assertStringAsIndexPattern(String string, String statement) { LogicalPlan from = statement(statement); assertThat(from, instanceOf(UnresolvedRelation.class)); UnresolvedRelation table = (UnresolvedRelation) from; - assertThat(table.table().index(), is(string)); + assertThat(table.indexPattern().indexPattern(), is(string)); } private void assertStringAsLookupIndexPattern(String string, String statement) { @@ -2283,20 +2283,12 @@ public void testInvalidAlias() { } private LogicalPlan unresolvedRelation(String index) { - return new UnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, index), false, List.of(), IndexMode.STANDARD, null, "FROM"); + return new UnresolvedRelation(EMPTY, new IndexPattern(EMPTY, index), false, List.of(), IndexMode.STANDARD, null, "FROM"); } private LogicalPlan unresolvedTSRelation(String index) { List metadata = List.of(new MetadataAttribute(EMPTY, MetadataAttribute.TSID_FIELD, DataType.KEYWORD, false)); - return new UnresolvedRelation( - EMPTY, - new TableIdentifier(EMPTY, null, index), - false, - metadata, - IndexMode.TIME_SERIES, - null, - "FROM TS" - ); + return new UnresolvedRelation(EMPTY, new IndexPattern(EMPTY, index), false, metadata, IndexMode.TIME_SERIES, null, "FROM TS"); } public void testMetricWithGroupKeyAsAgg() { @@ -2956,8 +2948,8 @@ public void testValidJoinPattern() { var plan = statement("FROM " + basePattern + " | " + type + " JOIN " + joinPattern + " ON " + onField); var join = as(plan, LookupJoin.class); - assertThat(as(join.left(), UnresolvedRelation.class).table().index(), equalTo(unquoteIndexPattern(basePattern))); - assertThat(as(join.right(), UnresolvedRelation.class).table().index(), equalTo(unquoteIndexPattern(joinPattern))); + assertThat(as(join.left(), UnresolvedRelation.class).indexPattern().indexPattern(), equalTo(unquoteIndexPattern(basePattern))); + assertThat(as(join.right(), UnresolvedRelation.class).indexPattern().indexPattern(), equalTo(unquoteIndexPattern(joinPattern))); var joinType = as(join.config().type(), JoinTypes.UsingJoinType.class); assertThat(joinType.columns(), hasSize(1)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java index 05d04ff1315e6..a84e5b144e64c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.index.EsIndex; import 
org.elasticsearch.xpack.esql.index.IndexResolution; -import org.elasticsearch.xpack.esql.plan.TableIdentifier; +import org.elasticsearch.xpack.esql.plan.IndexPattern; import org.elasticsearch.xpack.esql.type.EsFieldTests; import java.util.ArrayList; @@ -702,7 +702,7 @@ public void testCheckForCcsLicense() { // local only search does not require an enterprise license { List indices = new ArrayList<>(); - indices.add(new TableInfo(new TableIdentifier(EMPTY, null, randomFrom("idx", "idx1,idx2*")))); + indices.add(new TableInfo(new IndexPattern(EMPTY, randomFrom("idx", "idx1,idx2*")))); checkForCcsLicense(executionInfo, indices, indicesGrouper, enterpriseLicenseValid); checkForCcsLicense(executionInfo, indices, indicesGrouper, platinumLicenseValid); @@ -727,10 +727,10 @@ public void testCheckForCcsLicense() { List indices = new ArrayList<>(); final String indexExprWithRemotes = randomFrom("remote:idx", "idx1,remote:idx2*,remote:logs,c*:idx4"); if (randomBoolean()) { - indices.add(new TableInfo(new TableIdentifier(EMPTY, null, indexExprWithRemotes))); + indices.add(new TableInfo(new IndexPattern(EMPTY, indexExprWithRemotes))); } else { - indices.add(new TableInfo(new TableIdentifier(EMPTY, null, randomFrom("idx", "idx1,idx2*")))); - indices.add(new TableInfo(new TableIdentifier(EMPTY, null, indexExprWithRemotes))); + indices.add(new TableInfo(new IndexPattern(EMPTY, randomFrom("idx", "idx1,idx2*")))); + indices.add(new TableInfo(new IndexPattern(EMPTY, indexExprWithRemotes))); } // licenses that work From ae7f3b6931d09cd0ae48e1b7f03cd9c5795ee628 Mon Sep 17 00:00:00 2001 From: Pawan Kartik Date: Fri, 24 Jan 2025 17:09:21 +0000 Subject: [PATCH 025/383] Fix and unmute `CrossClusterEsqlRCS2UnavailableRemotesIT` tests (#120402) * Fix and unmute `CrossClusterEsqlRCS2UnavailableRemotesIT` tests * [CI] Auto commit changes from spotless * Fix test --------- Co-authored-by: elasticsearchmachine --- muted-tests.yml | 3 --- ...rossClusterEsqlRCS2UnavailableRemotesIT.java | 17 ++++++++++++++--- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index d206bc63d7473..06ed24e068e4f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -131,9 +131,6 @@ tests: - class: org.elasticsearch.xpack.ml.integration.RegressionIT method: testTwoJobsWithSameRandomizeSeedUseSameTrainingSet issue: https://github.com/elastic/elasticsearch/issues/117805 -- class: org.elasticsearch.xpack.remotecluster.CrossClusterEsqlRCS2UnavailableRemotesIT - method: testEsqlRcs2UnavailableRemoteScenarios - issue: https://github.com/elastic/elasticsearch/issues/117419 - class: org.elasticsearch.xpack.esql.action.EsqlActionTaskIT method: testCancelRequestWhenFailingFetchingPages issue: https://github.com/elastic/elasticsearch/issues/118193 diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java index b62d82c47f753..50bd386b2172c 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java @@ -25,10 +25,12 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicReference; +import static 
org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.oneOf; public class CrossClusterEsqlRCS2UnavailableRemotesIT extends AbstractRemoteClusterSecurityTestCase { private static final AtomicReference> API_KEY_MAP_REF = new AtomicReference<>(); @@ -179,8 +181,10 @@ private void remoteClusterShutdownWithSkipUnavailableTrue() throws Exception { Map failuresMap = (Map) remoteClusterFailures.get(0); Map reason = (Map) failuresMap.get("reason"); - assertThat(reason.get("type").toString(), equalTo("connect_transport_exception")); - assertThat(reason.get("reason").toString(), containsString("Unable to connect to [my_remote_cluster]")); + assertThat( + reason.get("type").toString(), + oneOf("node_disconnected_exception", "connect_transport_exception", "node_not_connected_exception") + ); } finally { fulfillingCluster.start(); closeFulfillingClusterClient(); @@ -201,7 +205,14 @@ private void remoteClusterShutdownWithSkipUnavailableFalse() throws Exception { // A simple query that targets our remote cluster. String query = "FROM *,my_remote_cluster:* | LIMIT 10"; ResponseException ex = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(esqlRequest(query))); - assertThat(ex.getMessage(), containsString("connect_transport_exception")); + assertThat( + ex.getMessage(), + anyOf( + containsString("node_disconnected_exception"), + containsString("connect_transport_exception"), + containsString("node_not_connected_exception") + ) + ); } finally { fulfillingCluster.start(); closeFulfillingClusterClient(); From 05513550e7150d17827b04d5d69ccebdd2360c13 Mon Sep 17 00:00:00 2001 From: Pawan Kartik Date: Fri, 24 Jan 2025 17:11:33 +0000 Subject: [PATCH 026/383] Fix and unmute `CrossClusterEsqlRCS1UnavailableRemotesIT` tests (#120388) * Unmute `CrossClusterEsqlRCS1UnavailableRemotesIT.testEsqlRcs1UnavailableRemoteScenarios` * Track `node_not_connected_exception` --- muted-tests.yml | 3 --- .../CrossClusterEsqlRCS1UnavailableRemotesIT.java | 10 +++++++++- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 06ed24e068e4f..dba2e055e7351 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -146,9 +146,6 @@ tests: - class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests method: testBottomFieldSort issue: https://github.com/elastic/elasticsearch/issues/118214 -- class: org.elasticsearch.xpack.remotecluster.CrossClusterEsqlRCS1UnavailableRemotesIT - method: testEsqlRcs1UnavailableRemoteScenarios - issue: https://github.com/elastic/elasticsearch/issues/118350 - class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests method: testSearcherId issue: https://github.com/elastic/elasticsearch/issues/118374 diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1UnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1UnavailableRemotesIT.java index c7623779ee214..9759b1440c3e8 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1UnavailableRemotesIT.java +++ 
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1UnavailableRemotesIT.java @@ -26,6 +26,7 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.greaterThan; public class CrossClusterEsqlRCS1UnavailableRemotesIT extends AbstractRemoteClusterSecurityTestCase { @@ -178,7 +179,14 @@ private void remoteClusterShutdownWithSkipUnavailableFalse() throws Exception { // A simple query that targets our remote cluster. String query = "FROM *,my_remote_cluster:* | LIMIT 10"; ResponseException ex = expectThrows(ResponseException.class, () -> client().performRequest(esqlRequest(query))); - assertThat(ex.getMessage(), containsString("connect_transport_exception")); + assertThat( + ex.getMessage(), + anyOf( + containsString("connect_transport_exception"), + containsString("node_disconnected_exception"), + containsString("node_not_connected_exception") + ) + ); } finally { fulfillingCluster.start(); closeFulfillingClusterClient(); From 969cd70aa00f0c104b5b94dabee05ac84be0e0d9 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Fri, 24 Jan 2025 19:36:59 +0200 Subject: [PATCH 027/383] Restore source matching in randomized logsdb tests (#120773) Applies the fix in `SourceMatcher` from #120756, along with disabling `SCALED_FLOAT` and `HALF_FLOAT` that have accuracy issues leading to false positives. --- .../org/elasticsearch/index/mapper/DocumentParser.java | 8 -------- .../index/mapper/IgnoredSourceFieldMapperTests.java | 4 +--- .../elasticsearch/logsdb/datageneration/FieldType.java | 8 +------- .../datasource/DefaultMappingParametersHandler.java | 3 +-- .../xpack/logsdb/qa/matchers/source/SourceMatcher.java | 3 +-- 5 files changed, 4 insertions(+), 22 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 9ddb6f0d496a0..5a417c541d716 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -729,28 +729,20 @@ private static void parseNonDynamicArray( XContentParser parser = context.parser(); XContentParser.Token token; - int elements = 0; while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.START_OBJECT) { - elements = Integer.MAX_VALUE; parseObject(context, lastFieldName); } else if (token == XContentParser.Token.START_ARRAY) { - elements = Integer.MAX_VALUE; parseArray(context, lastFieldName); } else if (token == XContentParser.Token.VALUE_NULL) { - elements++; parseNullValue(context, lastFieldName); } else if (token == null) { throwEOFOnParseArray(arrayFieldName, context); } else { assert token.isValue(); - elements++; parseValue(context, lastFieldName); } } - if (elements <= 1 && canRemoveSingleLeafElement) { - context.removeLastIgnoredField(fullPath); - } postProcessDynamicArrayMapping(context, lastFieldName); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java index 14902aa419b9f..2b36c0ce0b5a4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java @@ -743,9 +743,7 @@ public void testIndexStoredArraySourceSingleLeafElement() throws IOException { b.startObject("int_value").field("type", "integer").endObject(); })).documentMapper(); var syntheticSource = syntheticSource(documentMapper, b -> b.array("int_value", new int[] { 10 })); - assertEquals("{\"int_value\":10}", syntheticSource); - ParsedDocument doc = documentMapper.parse(source(syntheticSource)); - assertNull(doc.rootDoc().getField("_ignored_source")); + assertEquals("{\"int_value\":[10]}", syntheticSource); } public void testIndexStoredArraySourceSingleLeafElementAndNull() throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java index 96b75f29382e2..07744851aba3e 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java @@ -13,11 +13,9 @@ import org.elasticsearch.logsdb.datageneration.fields.leaf.ByteFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.DoubleFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.FloatFieldDataGenerator; -import org.elasticsearch.logsdb.datageneration.fields.leaf.HalfFloatFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.IntegerFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.KeywordFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.LongFieldDataGenerator; -import org.elasticsearch.logsdb.datageneration.fields.leaf.ScaledFloatFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.ShortFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.UnsignedLongFieldDataGenerator; @@ -32,9 +30,7 @@ public enum FieldType { SHORT("short"), BYTE("byte"), DOUBLE("double"), - FLOAT("float"), - HALF_FLOAT("half_float"), - SCALED_FLOAT("scaled_float"); + FLOAT("float"); private final String name; @@ -52,8 +48,6 @@ public FieldDataGenerator generator(String fieldName, DataSource dataSource) { case BYTE -> new ByteFieldDataGenerator(fieldName, dataSource); case DOUBLE -> new DoubleFieldDataGenerator(fieldName, dataSource); case FLOAT -> new FloatFieldDataGenerator(fieldName, dataSource); - case HALF_FLOAT -> new HalfFloatFieldDataGenerator(fieldName, dataSource); - case SCALED_FLOAT -> new ScaledFloatFieldDataGenerator(fieldName, dataSource); }; } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java index b639108ea6ad2..db13867fe71ad 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java @@ -32,8 +32,7 @@ public DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceReques return new DataSourceResponse.LeafMappingParametersGenerator(switch (request.fieldType()) { case KEYWORD -> keywordMapping(request, map); - case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, HALF_FLOAT, UNSIGNED_LONG -> plain(map); - case SCALED_FLOAT -> 
scaledFloatMapping(map); + case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, UNSIGNED_LONG -> plain(map); }); } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceMatcher.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceMatcher.java index cd2bb361d065d..d4d53a85c6e88 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceMatcher.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceMatcher.java @@ -104,7 +104,7 @@ private MatchResult compareSource(Map> actual, Map> actual, Map Date: Fri, 24 Jan 2025 17:44:40 +0000 Subject: [PATCH 028/383] Remove support for types field in watcher search (#120748) In 8.x, setting the `input.search.request.types` field in the payload when creating a watcher to an empty array was allowed, although it resulted in a deprecation warning and had no effect (and any value other than an empty array would result in an error). In 9.x, support for this field is entirely removed, and the empty array will also result in an error. We have already introduced a script to be run as part of the upgrade which removes the field from existing watches in https://github.com/elastic/elasticsearch/pull/120371. This also removes an unrelated TODO in passing, because we are not going to do that (the functionality it refers to exists and is not deprecated so cannot be removed). ES-9747 #close #comment Types in search request removed in https://github.com/elastic/elasticsearch/pull/120748 --- docs/changelog/120748.yaml | 15 +++++++++++++++ .../search/WatcherSearchTemplateRequest.java | 14 -------------- .../search/WatcherSearchTemplateRequestTests.java | 10 ++++++---- 3 files changed, 21 insertions(+), 18 deletions(-) create mode 100644 docs/changelog/120748.yaml diff --git a/docs/changelog/120748.yaml b/docs/changelog/120748.yaml new file mode 100644 index 0000000000000..e2ec312f189b0 --- /dev/null +++ b/docs/changelog/120748.yaml @@ -0,0 +1,15 @@ +pr: 120748 +summary: Removing support for types field in watcher search +area: Watcher +type: breaking +issues: [] +breaking: + title: Removing support for types field in watcher search + area: REST API + details: >- + Previously, setting the `input.search.request.types` field in the payload when creating a watcher to an empty array + was allowed, although it resulted in a deprecation warning and had no effect (and any value other than an empty + array would result in an error). Now, support for this field is entirely removed, and the empty array will also + result in an error. + impact: Users should stop setting this field (which did not have any effect anyway). 
+ notable: false diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java index 15208f86a5e2b..f62830f2345ea 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.Nullable; import org.elasticsearch.script.Script; @@ -173,7 +172,6 @@ public static WatcherSearchTemplateRequest fromXContent(XContentParser parser, S IndicesOptions indicesOptions = DEFAULT_INDICES_OPTIONS; BytesReference searchSource = null; Script template = null; - // TODO this is to retain BWC compatibility in 7.0 and can be removed for 8.0 boolean totalHitsAsInt = true; XContentParser.Token token; @@ -196,17 +194,6 @@ public static WatcherSearchTemplateRequest fromXContent(XContentParser parser, S ); } } - } else if (TYPES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - // Tolerate an empty types array, because some watches created internally in 6.x have - // an empty types array in their search, and it's clearly equivalent to typeless. - if (parser.nextToken() != XContentParser.Token.END_ARRAY) { - throw new ElasticsearchParseException( - "could not read search request. unsupported non-empty array field [" + currentFieldName + "]" - ); - } - // Empty types arrays still generate the same deprecation warning they did in 7.x. - // Ideally they should be removed from the definition. - deprecationLogger.critical(DeprecationCategory.PARSING, "watcher_search_input", TYPES_DEPRECATION_MESSAGE); } else { throw new ElasticsearchParseException( "could not read search request. 
unexpected array field [" + currentFieldName + "]" @@ -289,7 +276,6 @@ public int hashCode() { } private static final ParseField INDICES_FIELD = new ParseField("indices"); - private static final ParseField TYPES_FIELD = new ParseField("types"); private static final ParseField BODY_FIELD = new ParseField("body"); private static final ParseField SEARCH_TYPE_FIELD = new ParseField("search_type"); private static final ParseField INDICES_OPTIONS_FIELD = new ParseField("indices_options"); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java index 620580ee09824..96d8201b37a15 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java @@ -17,7 +17,6 @@ import java.util.Map; import static java.util.Collections.singletonMap; -import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -52,8 +51,11 @@ public void testFromXContentWithEmptyTypes() throws IOException { """; try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { parser.nextToken(); - WatcherSearchTemplateRequest result = WatcherSearchTemplateRequest.fromXContent(parser, randomFrom(SearchType.values())); - assertThat(result.getIndices(), arrayContaining(".ml-anomalies-*")); + ElasticsearchParseException e = expectThrows( + ElasticsearchParseException.class, + () -> WatcherSearchTemplateRequest.fromXContent(parser, randomFrom(SearchType.values())) + ); + assertThat(e.getMessage(), is("could not read search request. unexpected array field [types]")); } } @@ -74,7 +76,7 @@ public void testFromXContentWithNonEmptyTypes() throws IOException { ElasticsearchParseException.class, () -> WatcherSearchTemplateRequest.fromXContent(parser, randomFrom(SearchType.values())) ); - assertThat(e.getMessage(), is("could not read search request. unsupported non-empty array field [types]")); + assertThat(e.getMessage(), is("could not read search request. unexpected array field [types]")); } } From 4783d1f991633dbc3b7397fe13c63b6b7d70dc37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Fri, 24 Jan 2025 18:56:04 +0100 Subject: [PATCH 029/383] LTR sometines throw NullPointerException: Cannot read field "approximation" because "top" is null (#120809) * Add check on the DisiPriorityQueue size. * Update docs/changelog/120809.yaml * Add a unit test. 
--- docs/changelog/120809.yaml | 6 ++++ .../inference/ltr/QueryFeatureExtractor.java | 7 ++++- .../ltr/QueryFeatureExtractorTests.java | 28 +++++++++++++++++++ 3 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/120809.yaml diff --git a/docs/changelog/120809.yaml b/docs/changelog/120809.yaml new file mode 100644 index 0000000000000..30a3736dc93a4 --- /dev/null +++ b/docs/changelog/120809.yaml @@ -0,0 +1,6 @@ +pr: 120809 +summary: LTR sometines throw `NullPointerException:` Cannot read field "approximation" + because "top" is null +area: Ranking +type: bug +issues: [] diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractor.java index bbc377a67ec0b..08c141c0858ca 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractor.java @@ -55,11 +55,16 @@ public void setNextReader(LeafReaderContext segmentContext) throws IOException { } scorers.add(scorer); } - rankerIterator = new DisjunctionDISIApproximation(disiPriorityQueue); + + rankerIterator = disiPriorityQueue.size() > 0 ? new DisjunctionDISIApproximation(disiPriorityQueue) : null; } @Override public void addFeatures(Map featureMap, int docId) throws IOException { + if (rankerIterator == null) { + return; + } + rankerIterator.advance(docId); for (int i = 0; i < featureNames.size(); i++) { Scorer scorer = scorers.get(i); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractorTests.java index 3878ce5dab087..3b25a266bf412 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractorTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.test.AbstractBuilderTestCase; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import org.elasticsearch.xpack.core.ml.utils.QueryProvider; @@ -31,12 +32,14 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Stream; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.not; +import static org.mockito.Mockito.mock; public class QueryFeatureExtractorTests extends AbstractBuilderTestCase { @@ -125,4 +128,29 @@ public void testQueryExtractor() throws IOException { dir.close(); } + public void testEmptyDisiPriorityQueue() throws IOException { + addDocs( + new String[] { "the quick brown fox", "the slow brown fox", "the grey dog", "yet another string" }, + new int[] { 5, 10, 12, 11 } + ); + + // Scorers returned by weights are null + List featureNames = randomList(1, 10, ESTestCase::randomIdentifier); + List weights = Stream.generate(() -> mock(Weight.class)).limit(featureNames.size()).toList(); + + QueryFeatureExtractor featureExtractor = new QueryFeatureExtractor(featureNames, 
weights); + + for (LeafReaderContext leafReaderContext : searcher.getLeafContexts()) { + int maxDoc = leafReaderContext.reader().maxDoc(); + featureExtractor.setNextReader(leafReaderContext); + for (int i = 0; i < maxDoc; i++) { + Map featureMap = new HashMap<>(); + featureExtractor.addFeatures(featureMap, i); + assertThat(featureMap, anEmptyMap()); + } + } + + reader.close(); + dir.close(); + } } From 095621f80157f6cd34e111d0cefd4d050d577fb7 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 24 Jan 2025 10:59:34 -0800 Subject: [PATCH 030/383] Validate mrjar plugin versions (#120823) The mrjar plugin adds support for sourcesets named in the form mainNN, which adds the appropriate compiler and other settings for that version of Java, and produces a multi-release jar. Having multi-release jars only makes sense for versions of java newer than the minimum compile version. This commit adds validation that the version is not too old. Note that the check is slightly relaxed; it allows mainNN where NN is equal to the min java version. This is due to the desire to keep code using incubating modules separate because warnings must be disabled. --- .../gradle/internal/MrjarPlugin.java | 28 ++++++-- .../qa/test/VersionSpecificNetworkChecks.java | 46 ++++++++++--- .../qa/test/VersionSpecificNetworkChecks.java | 48 -------------- .../qa/test/VersionSpecificNetworkChecks.java | 64 ------------------- 4 files changed, 61 insertions(+), 125 deletions(-) delete mode 100644 libs/entitlement/qa/entitlement-test-plugin/src/main18/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java delete mode 100644 libs/entitlement/qa/entitlement-test-plugin/src/main21/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index 5402e0a04fe8f..b387f019ad386 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -72,18 +72,19 @@ public void apply(Project project) { var javaExtension = project.getExtensions().getByType(JavaPluginExtension.class); var isIdeaSync = System.getProperty("idea.sync.active", "false").equals("true"); var ideaSourceSetsEnabled = project.hasProperty(MRJAR_IDEA_ENABLED) && project.property(MRJAR_IDEA_ENABLED).equals("true"); + int minJavaVersion = Integer.parseInt(buildParams.getMinimumCompilerVersion().getMajorVersion()); // Ignore version-specific source sets if we are importing into IntelliJ and have not explicitly enabled this. 
// Avoids an IntelliJ bug: // https://youtrack.jetbrains.com/issue/IDEA-285640/Compiler-Options-Settings-language-level-is-set-incorrectly-with-JDK-19ea if (isIdeaSync == false || ideaSourceSetsEnabled) { - List mainVersions = findSourceVersions(project); + List mainVersions = findSourceVersions(project, minJavaVersion); List mainSourceSets = new ArrayList<>(); mainSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME); - configurePreviewFeatures(project, javaExtension.getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME), 21); + configurePreviewFeatures(project, javaExtension.getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME), minJavaVersion); List testSourceSets = new ArrayList<>(mainSourceSets); testSourceSets.add(SourceSet.TEST_SOURCE_SET_NAME); - configurePreviewFeatures(project, javaExtension.getSourceSets().getByName(SourceSet.TEST_SOURCE_SET_NAME), 21); + configurePreviewFeatures(project, javaExtension.getSourceSets().getByName(SourceSet.TEST_SOURCE_SET_NAME), minJavaVersion); for (int javaVersion : mainVersions) { String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion; SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion, true); @@ -103,6 +104,7 @@ public void apply(Project project) { } private void configureMrjar(Project project) { + var jarTask = project.getTasks().withType(Jar.class).named(JavaPlugin.JAR_TASK_NAME); jarTask.configure(task -> { task.manifest(manifest -> { manifest.attributes(Map.of("Multi-Release", "true")); }); }); @@ -222,7 +224,7 @@ private void createTestTask( project.getTasks().named("check").configure(checkTask -> checkTask.dependsOn(testTaskProvider)); } - private static List findSourceVersions(Project project) { + private static List findSourceVersions(Project project, int minJavaVersion) { var srcDir = project.getProjectDir().toPath().resolve("src"); List versions = new ArrayList<>(); try (var subdirStream = Files.list(srcDir)) { @@ -231,7 +233,23 @@ private static List findSourceVersions(Project project) { String sourcesetName = sourceSetPath.getFileName().toString(); Matcher sourcesetMatcher = MRJAR_SOURCESET_PATTERN.matcher(sourcesetName); if (sourcesetMatcher.matches()) { - versions.add(Integer.parseInt(sourcesetMatcher.group(1))); + int version = Integer.parseInt(sourcesetMatcher.group(1)); + if (version < minJavaVersion) { + // NOTE: We allow mainNN for the min java version so that incubating modules can be used without warnings. + // It is a workaround for https://bugs.openjdk.org/browse/JDK-8187591. 
Once min java is 22, we + // can use the SuppressWarnings("preview") in the code using incubating modules and this check + // can change to <= + throw new IllegalArgumentException( + "Found src dir '" + + sourcesetName + + "' for Java " + + version + + " but multi-release jar sourceset should have version " + + minJavaVersion + + " or greater" + ); + } + versions.add(version); } } } catch (IOException e) { diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java index 548bce8e2f766..d94597c2d9dd0 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java @@ -14,21 +14,51 @@ import java.net.http.HttpClient; import java.net.http.HttpRequest; import java.net.http.HttpResponse; +import java.net.spi.InetAddressResolver; +import java.net.spi.InetAddressResolverProvider; class VersionSpecificNetworkChecks { - static void createInetAddressResolverProvider() {} + static void createInetAddressResolverProvider() { + var x = new InetAddressResolverProvider() { + @Override + public InetAddressResolver get(Configuration configuration) { + return null; + } + + @Override + public String name() { + return "TEST"; + } + }; + } static void httpClientSend() throws InterruptedException { - HttpClient httpClient = HttpClient.newBuilder().build(); - try { - httpClient.send(HttpRequest.newBuilder(URI.create("http://localhost")).build(), HttpResponse.BodyHandlers.discarding()); - } catch (IOException e) { - // Expected, the send action may fail with these parameters (but after it run the entitlement check in the prologue) + try (HttpClient httpClient = HttpClient.newBuilder().build()) { + // Shutdown the client, so the send action will shortcut before actually executing any network operation + // (but after it run our check in the prologue) + httpClient.shutdown(); + try { + httpClient.send(HttpRequest.newBuilder(URI.create("http://localhost")).build(), HttpResponse.BodyHandlers.discarding()); + } catch (IOException e) { + // Expected, since we shut down the client + } } } static void httpClientSendAsync() { - HttpClient httpClient = HttpClient.newBuilder().build(); - httpClient.sendAsync(HttpRequest.newBuilder(URI.create("http://localhost")).build(), HttpResponse.BodyHandlers.discarding()); + try (HttpClient httpClient = HttpClient.newBuilder().build()) { + // Shutdown the client, so the send action will return before actually executing any network operation + // (but after it run our check in the prologue) + httpClient.shutdown(); + var future = httpClient.sendAsync( + HttpRequest.newBuilder(URI.create("http://localhost")).build(), + HttpResponse.BodyHandlers.discarding() + ); + assert future.isCompletedExceptionally(); + future.exceptionally(ex -> { + assert ex instanceof IOException; + return null; + }); + } } } diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main18/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java b/libs/entitlement/qa/entitlement-test-plugin/src/main18/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java deleted file mode 100644 index 5a456c65d8206..0000000000000 --- 
a/libs/entitlement/qa/entitlement-test-plugin/src/main18/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.entitlement.qa.test; - -import java.io.IOException; -import java.net.URI; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.net.spi.InetAddressResolver; -import java.net.spi.InetAddressResolverProvider; - -class VersionSpecificNetworkChecks { - static void createInetAddressResolverProvider() { - var x = new InetAddressResolverProvider() { - @Override - public InetAddressResolver get(Configuration configuration) { - return null; - } - - @Override - public String name() { - return "TEST"; - } - }; - } - - static void httpClientSend() throws InterruptedException { - HttpClient httpClient = HttpClient.newBuilder().build(); - try { - httpClient.send(HttpRequest.newBuilder(URI.create("http://localhost")).build(), HttpResponse.BodyHandlers.discarding()); - } catch (IOException e) { - // Expected, the send action may fail with these parameters (but after it run the entitlement check in the prologue) - } - } - - static void httpClientSendAsync() { - HttpClient httpClient = HttpClient.newBuilder().build(); - httpClient.sendAsync(HttpRequest.newBuilder(URI.create("http://localhost")).build(), HttpResponse.BodyHandlers.discarding()); - } -} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main21/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java b/libs/entitlement/qa/entitlement-test-plugin/src/main21/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java deleted file mode 100644 index d94597c2d9dd0..0000000000000 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main21/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNetworkChecks.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.entitlement.qa.test; - -import java.io.IOException; -import java.net.URI; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.net.spi.InetAddressResolver; -import java.net.spi.InetAddressResolverProvider; - -class VersionSpecificNetworkChecks { - static void createInetAddressResolverProvider() { - var x = new InetAddressResolverProvider() { - @Override - public InetAddressResolver get(Configuration configuration) { - return null; - } - - @Override - public String name() { - return "TEST"; - } - }; - } - - static void httpClientSend() throws InterruptedException { - try (HttpClient httpClient = HttpClient.newBuilder().build()) { - // Shutdown the client, so the send action will shortcut before actually executing any network operation - // (but after it run our check in the prologue) - httpClient.shutdown(); - try { - httpClient.send(HttpRequest.newBuilder(URI.create("http://localhost")).build(), HttpResponse.BodyHandlers.discarding()); - } catch (IOException e) { - // Expected, since we shut down the client - } - } - } - - static void httpClientSendAsync() { - try (HttpClient httpClient = HttpClient.newBuilder().build()) { - // Shutdown the client, so the send action will return before actually executing any network operation - // (but after it run our check in the prologue) - httpClient.shutdown(); - var future = httpClient.sendAsync( - HttpRequest.newBuilder(URI.create("http://localhost")).build(), - HttpResponse.BodyHandlers.discarding() - ); - assert future.isCompletedExceptionally(); - future.exceptionally(ex -> { - assert ex instanceof IOException; - return null; - }); - } - } -} From 93606d009b71ee001628330d96c5eeb667fdab07 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 25 Jan 2025 06:28:32 +1100 Subject: [PATCH 031/383] Mute org.elasticsearch.xpack.logsdb.qa.StandardVersusStandardReindexedIntoLogsDbChallengeRestIT testEsqlSource #120830 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index dba2e055e7351..ae2264daceac0 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -245,6 +245,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/120810 - class: org.elasticsearch.xpack.test.rest.XPackRestIT issue: https://github.com/elastic/elasticsearch/issues/120816 +- class: org.elasticsearch.xpack.logsdb.qa.StandardVersusStandardReindexedIntoLogsDbChallengeRestIT + method: testEsqlSource + issue: https://github.com/elastic/elasticsearch/issues/120830 # Examples: # From 4117a98c01b6a8821801f9410db806854d37cc5a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 25 Jan 2025 06:28:55 +1100 Subject: [PATCH 032/383] Mute org.elasticsearch.xpack.logsdb.qa.StandardVersusLogsIndexModeRandomDataChallengeRestIT testEsqlSource #120831 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index ae2264daceac0..eec1dbdef7e03 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -248,6 +248,9 @@ tests: - class: org.elasticsearch.xpack.logsdb.qa.StandardVersusStandardReindexedIntoLogsDbChallengeRestIT method: testEsqlSource issue: https://github.com/elastic/elasticsearch/issues/120830 +- class: org.elasticsearch.xpack.logsdb.qa.StandardVersusLogsIndexModeRandomDataChallengeRestIT + method: testEsqlSource + issue: 
https://github.com/elastic/elasticsearch/issues/120831 # Examples: # From ca82e43ed2454c022a9f26b579fca2b1fadf9d3f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 25 Jan 2025 06:29:17 +1100 Subject: [PATCH 033/383] Mute org.elasticsearch.xpack.logsdb.qa.StoredSourceLogsDbVersusReindexedLogsDbChallengeRestIT testEsqlSource #120832 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index eec1dbdef7e03..34b63e8b063fc 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -251,6 +251,9 @@ tests: - class: org.elasticsearch.xpack.logsdb.qa.StandardVersusLogsIndexModeRandomDataChallengeRestIT method: testEsqlSource issue: https://github.com/elastic/elasticsearch/issues/120831 +- class: org.elasticsearch.xpack.logsdb.qa.StoredSourceLogsDbVersusReindexedLogsDbChallengeRestIT + method: testEsqlSource + issue: https://github.com/elastic/elasticsearch/issues/120832 # Examples: # From ea75dbeaa15ef1c6914105ef3c49f707b587ff22 Mon Sep 17 00:00:00 2001 From: mushaoqiong Date: Sat, 25 Jan 2025 04:40:30 +0800 Subject: [PATCH 034/383] Fix cat_component_templates documentation (#120487) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix_cat_component_templates_documentation * add changelog * Update changelog to use correct area --------- Co-authored-by: 广富 Co-authored-by: Lee Hinman Co-authored-by: Elastic Machine --- docs/changelog/120487.yaml | 5 +++++ .../rest/action/cat/RestCatComponentTemplateAction.java | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/120487.yaml diff --git a/docs/changelog/120487.yaml b/docs/changelog/120487.yaml new file mode 100644 index 0000000000000..d728a35615156 --- /dev/null +++ b/docs/changelog/120487.yaml @@ -0,0 +1,5 @@ +pr: 120487 +summary: Fix cat_component_templates documentation +area: CAT APIs +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatComponentTemplateAction.java index 0efb42b56a351..6ec55b1237324 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatComponentTemplateAction.java @@ -54,7 +54,7 @@ public List routes() { @Override protected void documentation(StringBuilder sb) { - sb.append("/_cat/component_templates"); + sb.append("/_cat/component_templates\n"); } @Override From 7d7a9d9fdbe49a9e350d16697490db2c816aaf26 Mon Sep 17 00:00:00 2001 From: Tim Sullivan Date: Fri, 24 Jan 2025 13:48:33 -0700 Subject: [PATCH 035/383] [Index Management] Doc updates for Kibana Reporting built-ins (#120829) * [Index Management] Doc updates for Kibana Reporting built-ins * Update docs/reference/indices/index-templates.asciidoc Co-authored-by: Lee Hinman --------- Co-authored-by: Lee Hinman --- docs/reference/indices/index-templates.asciidoc | 1 + docs/reference/indices/put-component-template.asciidoc | 1 + 2 files changed, 2 insertions(+) diff --git a/docs/reference/indices/index-templates.asciidoc b/docs/reference/indices/index-templates.asciidoc index 90c4a6952446e..b13921d263f71 100644 --- a/docs/reference/indices/index-templates.asciidoc +++ b/docs/reference/indices/index-templates.asciidoc @@ -40,6 +40,7 @@ template with the highest priority is used. 
following index patterns: // tag::built-in-index-template-patterns[] +- `.kibana-reporting*` - `logs-*-*` - `metrics-*-*` - `synthetics-*-*` diff --git a/docs/reference/indices/put-component-template.asciidoc b/docs/reference/indices/put-component-template.asciidoc index 9f129c3507d87..ccdafaf2fd050 100644 --- a/docs/reference/indices/put-component-template.asciidoc +++ b/docs/reference/indices/put-component-template.asciidoc @@ -97,6 +97,7 @@ Name of the component template to create. {es} includes the following built-in component templates: // tag::built-in-component-templates[] +- `kibana-reporting@settings` - `logs@mappings` - `logs@settings` - `metrics@mappings` From 090e0ddbb701adc9810662a967e619252ed16710 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 24 Jan 2025 22:13:33 +0100 Subject: [PATCH 036/383] Deduplicate code for getting cancellation checks out of a search context (#120828) Just deduplicating the logic and moving it to a shared location + no need for a static method like that. --- .../search/aggregations/AggregationPhase.java | 24 +------------------ .../search/internal/SearchContext.java | 16 +++++++++++++ .../search/rescore/RescorePhase.java | 19 +-------------- 3 files changed, 18 insertions(+), 41 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java index 8ca21db1ad9f2..4e1ec3faf6b36 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java @@ -8,13 +8,10 @@ */ package org.elasticsearch.search.aggregations; -import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.search.aggregations.support.TimeSeriesIndexSearcher; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.query.QueryPhase; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -59,7 +56,7 @@ private static AggregatorCollector newAggregatorCollector(SearchContext context) } private static void executeInSortOrder(SearchContext context, BucketCollector collector) { - TimeSeriesIndexSearcher searcher = new TimeSeriesIndexSearcher(context.searcher(), getCancellationChecks(context)); + TimeSeriesIndexSearcher searcher = new TimeSeriesIndexSearcher(context.searcher(), context.getCancellationChecks()); searcher.setMinimumScore(context.minimumScore()); searcher.setProfiler(context); try { @@ -70,23 +67,4 @@ private static void executeInSortOrder(SearchContext context, BucketCollector co } } - private static List getCancellationChecks(SearchContext context) { - List cancellationChecks = new ArrayList<>(); - if (context.lowLevelCancellation()) { - // This searching doesn't live beyond this phase, so we don't need to remove query cancellation - cancellationChecks.add(() -> { - final SearchShardTask task = context.getTask(); - if (task != null) { - task.ensureNotCancelled(); - } - }); - } - - final Runnable timeoutRunnable = QueryPhase.getTimeoutCheck(context); - if (timeoutRunnable != null) { - cancellationChecks.add(timeoutRunnable); - } - - return cancellationChecks; - } } diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 14e2007befa55..7da71b77c6a6f 100644 --- 
a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -41,6 +41,7 @@ import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext; import org.elasticsearch.search.profile.Profilers; +import org.elasticsearch.search.query.QueryPhase; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; import org.elasticsearch.search.rank.feature.RankFeatureResult; @@ -84,6 +85,21 @@ public abstract class SearchContext implements Releasable { protected SearchContext() {} + public final List getCancellationChecks() { + final Runnable timeoutRunnable = QueryPhase.getTimeoutCheck(this); + if (lowLevelCancellation()) { + // This searching doesn't live beyond this phase, so we don't need to remove query cancellation + Runnable c = () -> { + final SearchShardTask task = getTask(); + if (task != null) { + task.ensureNotCancelled(); + } + }; + return timeoutRunnable == null ? List.of(c) : List.of(c, timeoutRunnable); + } + return timeoutRunnable == null ? List.of() : List.of(timeoutRunnable); + } + public abstract void setTask(SearchShardTask task); public abstract SearchShardTask getTask(); diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java index c23df9cdfa441..f8b348b383f01 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java @@ -15,19 +15,16 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldDocs; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.util.Maps; import org.elasticsearch.lucene.grouping.TopFieldGroups; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.query.QueryPhase; import org.elasticsearch.search.query.SearchTimeoutException; import org.elasticsearch.search.sort.ShardDocSortField; import org.elasticsearch.search.sort.SortAndFormats; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -195,21 +192,7 @@ private static boolean topDocsSortedByScore(TopDocs topDocs) { } static Runnable getCancellationChecks(SearchContext context) { - List cancellationChecks = new ArrayList<>(); - if (context.lowLevelCancellation()) { - cancellationChecks.add(() -> { - final SearchShardTask task = context.getTask(); - if (task != null) { - task.ensureNotCancelled(); - } - }); - } - - final Runnable timeoutRunnable = QueryPhase.getTimeoutCheck(context); - if (timeoutRunnable != null) { - cancellationChecks.add(timeoutRunnable); - } - + List cancellationChecks = context.getCancellationChecks(); return () -> { for (var check : cancellationChecks) { check.run(); From 3d4349851bdc9c3af713e86b6b25159a17e126ac Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Fri, 24 Jan 2025 13:45:42 -0800 Subject: [PATCH 037/383] [TEST] Restore scaled_float and half_float data generation (#120756) --- .../logsdb/datageneration/FieldType.java | 8 +- .../DefaultMappingParametersHandler.java | 3 +- 
.../matchers/ArrayEqualMatcher.java | 14 ++- .../matchers/GenericEqualsMatcher.java | 12 +- .../matchers/ListEqualMatcher.java | 14 ++- .../datageneration}/matchers/MatchResult.java | 10 +- .../datageneration}/matchers/Matcher.java | 12 +- .../datageneration}/matchers/Messages.java | 10 +- .../matchers/ObjectMatcher.java | 12 +- .../matchers/source/DynamicFieldMatcher.java | 16 +-- .../matchers/source/FieldSpecificMatcher.java | 73 ++++++------ .../matchers/source/MappingTransforms.java | 10 +- .../matchers/source/SourceMatcher.java | 20 ++-- .../matchers/source/SourceTransforms.java | 10 +- .../datageneration/SourceMatcherTests.java | 108 ++++++++++++++++++ ...ardVersusLogsIndexModeChallengeRestIT.java | 4 +- 16 files changed, 237 insertions(+), 99 deletions(-) rename {x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa => test/framework/src/main/java/org/elasticsearch/logsdb/datageneration}/matchers/ArrayEqualMatcher.java (83%) rename {x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa => test/framework/src/main/java/org/elasticsearch/logsdb/datageneration}/matchers/GenericEqualsMatcher.java (85%) rename {x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa => test/framework/src/main/java/org/elasticsearch/logsdb/datageneration}/matchers/ListEqualMatcher.java (83%) rename {x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa => test/framework/src/main/java/org/elasticsearch/logsdb/datageneration}/matchers/MatchResult.java (73%) rename {x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa => test/framework/src/main/java/org/elasticsearch/logsdb/datageneration}/matchers/Matcher.java (90%) rename {x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa => test/framework/src/main/java/org/elasticsearch/logsdb/datageneration}/matchers/Messages.java (80%) rename {x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa => test/framework/src/main/java/org/elasticsearch/logsdb/datageneration}/matchers/ObjectMatcher.java (68%) rename {x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa => test/framework/src/main/java/org/elasticsearch/logsdb/datageneration}/matchers/source/DynamicFieldMatcher.java (80%) rename {x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa => test/framework/src/main/java/org/elasticsearch/logsdb/datageneration}/matchers/source/FieldSpecificMatcher.java (73%) rename {x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa => test/framework/src/main/java/org/elasticsearch/logsdb/datageneration}/matchers/source/MappingTransforms.java (88%) rename {x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa => test/framework/src/main/java/org/elasticsearch/logsdb/datageneration}/matchers/source/SourceMatcher.java (90%) rename {x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa => test/framework/src/main/java/org/elasticsearch/logsdb/datageneration}/matchers/source/SourceTransforms.java (86%) create mode 100644 test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/SourceMatcherTests.java diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java index 07744851aba3e..96b75f29382e2 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java @@ -13,9 +13,11 @@ import org.elasticsearch.logsdb.datageneration.fields.leaf.ByteFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.DoubleFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.FloatFieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.fields.leaf.HalfFloatFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.IntegerFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.KeywordFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.LongFieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.fields.leaf.ScaledFloatFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.ShortFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.UnsignedLongFieldDataGenerator; @@ -30,7 +32,9 @@ public enum FieldType { SHORT("short"), BYTE("byte"), DOUBLE("double"), - FLOAT("float"); + FLOAT("float"), + HALF_FLOAT("half_float"), + SCALED_FLOAT("scaled_float"); private final String name; @@ -48,6 +52,8 @@ public FieldDataGenerator generator(String fieldName, DataSource dataSource) { case BYTE -> new ByteFieldDataGenerator(fieldName, dataSource); case DOUBLE -> new DoubleFieldDataGenerator(fieldName, dataSource); case FLOAT -> new FloatFieldDataGenerator(fieldName, dataSource); + case HALF_FLOAT -> new HalfFloatFieldDataGenerator(fieldName, dataSource); + case SCALED_FLOAT -> new ScaledFloatFieldDataGenerator(fieldName, dataSource); }; } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java index db13867fe71ad..b639108ea6ad2 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java @@ -32,7 +32,8 @@ public DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceReques return new DataSourceResponse.LeafMappingParametersGenerator(switch (request.fieldType()) { case KEYWORD -> keywordMapping(request, map); - case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, UNSIGNED_LONG -> plain(map); + case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, HALF_FLOAT, UNSIGNED_LONG -> plain(map); + case SCALED_FLOAT -> scaledFloatMapping(map); }); } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ArrayEqualMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/ArrayEqualMatcher.java similarity index 83% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ArrayEqualMatcher.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/ArrayEqualMatcher.java index b98ad65ac4d4f..940bb1b7553fc 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ArrayEqualMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/ArrayEqualMatcher.java @@ -1,11 +1,13 @@ /* * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.xpack.logsdb.qa.matchers; +package org.elasticsearch.logsdb.datageneration.matchers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; @@ -13,8 +15,8 @@ import java.util.Arrays; import java.util.List; -import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintArrays; +import static org.elasticsearch.logsdb.datageneration.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.logsdb.datageneration.matchers.Messages.prettyPrintArrays; class ArrayEqualMatcher extends GenericEqualsMatcher { ArrayEqualMatcher( diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/GenericEqualsMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/GenericEqualsMatcher.java similarity index 85% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/GenericEqualsMatcher.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/GenericEqualsMatcher.java index 933c7eb86f65a..b409d18709f23 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/GenericEqualsMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/GenericEqualsMatcher.java @@ -1,18 +1,20 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.xpack.logsdb.qa.matchers; +package org.elasticsearch.logsdb.datageneration.matchers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; import java.util.List; -import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.logsdb.datageneration.matchers.Messages.formatErrorMessage; public class GenericEqualsMatcher extends Matcher { protected final XContentBuilder actualMappings; diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ListEqualMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/ListEqualMatcher.java similarity index 83% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ListEqualMatcher.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/ListEqualMatcher.java index 447aa21b932c2..093556ea89355 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ListEqualMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/ListEqualMatcher.java @@ -1,19 +1,21 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.xpack.logsdb.qa.matchers; +package org.elasticsearch.logsdb.datageneration.matchers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; import java.util.List; -import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintCollections; +import static org.elasticsearch.logsdb.datageneration.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.logsdb.datageneration.matchers.Messages.prettyPrintCollections; public class ListEqualMatcher extends GenericEqualsMatcher> { public ListEqualMatcher( diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/MatchResult.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/MatchResult.java similarity index 73% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/MatchResult.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/MatchResult.java index a890a0375ef03..77b51890ad691 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/MatchResult.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/MatchResult.java @@ -1,11 +1,13 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.xpack.logsdb.qa.matchers; +package org.elasticsearch.logsdb.datageneration.matchers; import java.util.Objects; diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Matcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/Matcher.java similarity index 90% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Matcher.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/Matcher.java index e08e401c19530..dd87e23351c0d 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Matcher.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/Matcher.java @@ -1,15 +1,17 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.xpack.logsdb.qa.matchers; +package org.elasticsearch.logsdb.datageneration.matchers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.logsdb.datageneration.matchers.source.SourceMatcher; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.logsdb.qa.matchers.source.SourceMatcher; import java.util.List; import java.util.Map; diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Messages.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/Messages.java similarity index 80% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Messages.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/Messages.java index 122e3b2d6261c..6f7ccc69c35e1 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/Messages.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/Messages.java @@ -1,11 +1,13 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.xpack.logsdb.qa.matchers; +package org.elasticsearch.logsdb.datageneration.matchers; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ObjectMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/ObjectMatcher.java similarity index 68% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ObjectMatcher.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/ObjectMatcher.java index f2f08b1dfac14..f922385fc0190 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/ObjectMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/ObjectMatcher.java @@ -1,16 +1,18 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.xpack.logsdb.qa.matchers; +package org.elasticsearch.logsdb.datageneration.matchers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentBuilder; -import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.logsdb.datageneration.matchers.Messages.formatErrorMessage; public class ObjectMatcher extends GenericEqualsMatcher { ObjectMatcher( diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/DynamicFieldMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/DynamicFieldMatcher.java similarity index 80% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/DynamicFieldMatcher.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/DynamicFieldMatcher.java index d6812c41f7611..5bcf53cfa5c9a 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/DynamicFieldMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/DynamicFieldMatcher.java @@ -1,15 +1,17 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.xpack.logsdb.qa.matchers.source; +package org.elasticsearch.logsdb.datageneration.matchers.source; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.logsdb.datageneration.matchers.MatchResult; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.logsdb.qa.matchers.MatchResult; import java.util.List; import java.util.Objects; @@ -18,8 +20,8 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintCollections; +import static org.elasticsearch.logsdb.datageneration.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.logsdb.datageneration.matchers.Messages.prettyPrintCollections; class DynamicFieldMatcher { private final XContentBuilder actualMappings; diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/FieldSpecificMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java similarity index 73% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/FieldSpecificMatcher.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java index 0c970f1b5fd9a..960cc38e55c82 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/FieldSpecificMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java @@ -1,16 +1,18 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.xpack.logsdb.qa.matchers.source; +package org.elasticsearch.logsdb.datageneration.matchers.source; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.logsdb.datageneration.matchers.MatchResult; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.logsdb.qa.matchers.MatchResult; import java.math.BigInteger; import java.util.List; @@ -20,8 +22,8 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintCollections; +import static org.elasticsearch.logsdb.datageneration.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.logsdb.datageneration.matchers.Messages.prettyPrintCollections; interface FieldSpecificMatcher { MatchResult match(List actual, List expected, Map actualMapping, Map expectedMapping); @@ -115,40 +117,39 @@ public MatchResult match( } assert scalingFactor instanceof Number; - var expectedNormalized = normalizeExpected(expected, ((Number) scalingFactor).doubleValue()); - var actualNormalized = normalizeActual(actual); + double scalingFactorDouble = ((Number) scalingFactor).doubleValue(); + // It is possible that we receive a mix of reduced precision values and original values. + // F.e. in case of `synthetic_source_keep: "arrays"` in nested objects only arrays are preserved as is + // and therefore any singleton values have reduced precision. + // Therefore, we need to match either an exact value or a normalized value. + var expectedNormalized = normalizeValues(expected); + var actualNormalized = normalizeValues(actual); + for (var expectedValue : expectedNormalized) { + if (actualNormalized.contains(expectedValue) == false + && actualNormalized.contains(encodeDecodeWithPrecisionLoss(expectedValue, scalingFactorDouble)) == false) { + return MatchResult.noMatch( + formatErrorMessage( + actualMappings, + actualSettings, + expectedMappings, + expectedSettings, + "Values of type [scaled_float] don't match after normalization, normalized " + + prettyPrintCollections(actualNormalized, expectedNormalized) + ) + ); + } + } - return actualNormalized.equals(expectedNormalized) - ? 
MatchResult.match() - : MatchResult.noMatch( - formatErrorMessage( - actualMappings, - actualSettings, - expectedMappings, - expectedSettings, - "Values of type [scaled_float] don't match after normalization, normalized " - + prettyPrintCollections(actualNormalized, expectedNormalized) - ) - ); + return MatchResult.match(); } - private static Set normalizeExpected(List values, double scalingFactor) { - if (values == null) { - return Set.of(); - } - - return values.stream() - .filter(Objects::nonNull) - .map(ScaledFloatMatcher::toDouble) - // Based on logic in ScaledFloatFieldMapper - .map(v -> { - var encoded = Math.round(v * scalingFactor); - return encoded / scalingFactor; - }) - .collect(Collectors.toSet()); + private Double encodeDecodeWithPrecisionLoss(double value, double scalingFactor) { + // Based on logic in ScaledFloatFieldMapper + var encoded = Math.round(value * scalingFactor); + return encoded / scalingFactor; } - private static Set normalizeActual(List values) { + private static Set normalizeValues(List values) { if (values == null) { return Set.of(); } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/MappingTransforms.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/MappingTransforms.java similarity index 88% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/MappingTransforms.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/MappingTransforms.java index dbe73e3c2a4c2..312be273dcd3e 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/MappingTransforms.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/MappingTransforms.java @@ -1,11 +1,13 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.xpack.logsdb.qa.matchers.source; +package org.elasticsearch.logsdb.datageneration.matchers.source; import java.util.ArrayList; import java.util.HashMap; diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java similarity index 90% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceMatcher.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java index d4d53a85c6e88..eb62598712f03 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java @@ -1,27 +1,29 @@ /* * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.xpack.logsdb.qa.matchers.source; +package org.elasticsearch.logsdb.datageneration.matchers.source; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.logsdb.datageneration.matchers.GenericEqualsMatcher; +import org.elasticsearch.logsdb.datageneration.matchers.ListEqualMatcher; +import org.elasticsearch.logsdb.datageneration.matchers.MatchResult; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.logsdb.qa.matchers.GenericEqualsMatcher; -import org.elasticsearch.xpack.logsdb.qa.matchers.ListEqualMatcher; -import org.elasticsearch.xpack.logsdb.qa.matchers.MatchResult; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; -import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.formatErrorMessage; -import static org.elasticsearch.xpack.logsdb.qa.matchers.Messages.prettyPrintCollections; +import static org.elasticsearch.logsdb.datageneration.matchers.Messages.formatErrorMessage; +import static org.elasticsearch.logsdb.datageneration.matchers.Messages.prettyPrintCollections; public class SourceMatcher extends GenericEqualsMatcher>> { private final Map actualNormalizedMapping; diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceTransforms.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceTransforms.java similarity index 86% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceTransforms.java rename to test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceTransforms.java index c21383d411212..c86fe2f90b0d1 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/matchers/source/SourceTransforms.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceTransforms.java @@ -1,11 +1,13 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.xpack.logsdb.qa.matchers.source; +package org.elasticsearch.logsdb.datageneration.matchers.source; import java.util.ArrayList; import java.util.Collections; diff --git a/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/SourceMatcherTests.java b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/SourceMatcherTests.java new file mode 100644 index 0000000000000..74f70bae4d0c1 --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/logsdb/datageneration/SourceMatcherTests.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.logsdb.datageneration; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.logsdb.datageneration.matchers.source.SourceMatcher; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public class SourceMatcherTests extends ESTestCase { + public void testDynamicMatch() throws IOException { + List> values = List.of( + Map.of("aaa", 124, "bbb", false, "ccc", 12.34), + Map.of("aaa", 124, "bbb", false, "ccc", 12.34) + ); + + var sut = new SourceMatcher( + XContentBuilder.builder(XContentType.JSON.xContent()).startObject().endObject(), + Settings.builder(), + XContentBuilder.builder(XContentType.JSON.xContent()).startObject().endObject(), + Settings.builder(), + values, + values, + false + ); + assertTrue(sut.match().isMatch()); + } + + public void testDynamicMismatch() throws IOException { + List> actual = List.of( + Map.of("aaa", 124, "bbb", false, "ccc", 12.34), + Map.of("aaa", 124, "bbb", false, "ccc", 12.34) + ); + List> expected = List.of( + Map.of("aaa", 124, "bbb", false, "ccc", 12.34), + Map.of("aaa", 125, "bbb", false, "ccc", 12.34) + ); + + var sut = new SourceMatcher( + XContentBuilder.builder(XContentType.JSON.xContent()).startObject().endObject(), + Settings.builder(), + XContentBuilder.builder(XContentType.JSON.xContent()).startObject().endObject(), + Settings.builder(), + actual, + expected, + false + ); + assertFalse(sut.match().isMatch()); + } + + public void testMappedMatch() throws IOException { + List> values = List.of( + Map.of("aaa", 124, "bbb", false, "ccc", 12.34), + Map.of("aaa", 124, "bbb", false, "ccc", 12.34) + ); + + var mapping = XContentBuilder.builder(XContentType.JSON.xContent()); + mapping.startObject(); + mapping.startObject("_doc"); + { + mapping.startObject("aaa").field("type", "long").endObject(); + mapping.startObject("bbb").field("type", "boolean").endObject(); + mapping.startObject("ccc").field("type", "half_float").endObject(); + } + mapping.endObject(); + mapping.endObject(); + + var sut = new SourceMatcher(mapping, Settings.builder(), mapping, Settings.builder(), values, values, false); + assertTrue(sut.match().isMatch()); + } + + public void testMappedMismatch() throws IOException { + List> actual = List.of( + Map.of("aaa", 124, "bbb", false, "ccc", 12.34), + Map.of("aaa", 124, "bbb", false, 
"ccc", 12.34) + ); + List> expected = List.of( + Map.of("aaa", 124, "bbb", false, "ccc", 12.34), + Map.of("aaa", 124, "bbb", false, "ccc", 12.35) + ); + + var mapping = XContentBuilder.builder(XContentType.JSON.xContent()); + mapping.startObject(); + mapping.startObject("_doc"); + { + mapping.startObject("aaa").field("type", "long").endObject(); + mapping.startObject("bbb").field("type", "boolean").endObject(); + mapping.startObject("ccc").field("type", "half_float").endObject(); + } + mapping.endObject(); + mapping.endObject(); + + var sut = new SourceMatcher(mapping, Settings.builder(), mapping, Settings.builder(), actual, expected, false); + assertFalse(sut.match().isMatch()); + } +} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index 1e4c28e72aaeb..f18e57c229345 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -17,6 +17,8 @@ import org.elasticsearch.common.time.FormatNames; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.logsdb.datageneration.matchers.MatchResult; +import org.elasticsearch.logsdb.datageneration.matchers.Matcher; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; @@ -27,8 +29,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.logsdb.qa.matchers.MatchResult; -import org.elasticsearch.xpack.logsdb.qa.matchers.Matcher; import org.hamcrest.Matchers; import java.io.IOException; From 160bdab8fd23d6f11bbd310c92b5606d0b4eab1c Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Fri, 24 Jan 2025 15:56:29 -0600 Subject: [PATCH 038/383] Removing the reindex data stream feature flag (#120677) --- .../test/cluster/FeatureFlag.java | 3 +- .../action/CreateIndexFromSourceActionIT.java | 17 --- .../ReindexDataStreamTransportActionIT.java | 3 - ...indexDatastreamIndexTransportActionIT.java | 26 ----- .../xpack/migrate/MigratePlugin.java | 101 ++++++++---------- .../action/ReindexDataStreamAction.java | 2 - .../rest/RestMigrationReindexAction.java | 9 +- .../rest-api-spec/test/migrate/10_reindex.yml | 8 +- .../test/migrate/20_reindex_status.yml | 4 +- .../test/migrate/30_create_from.yml | 10 +- 10 files changed, 55 insertions(+), 128 deletions(-) diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index 896b245c8e920..5630c33ad559c 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -19,8 +19,7 @@ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), 
SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null), - INFERENCE_UNIFIED_API_ENABLED("es.inference_unified_feature_flag_enabled=true", Version.fromString("8.18.0"), null), - MIGRATION_REINDEX_ENABLED("es.reindex_data_stream_feature_flag_enabled=true", Version.fromString("8.18.0"), null); + INFERENCE_UNIFIED_API_ENABLED("es.inference_unified_feature_flag_enabled=true", Version.fromString("8.18.0"), null); public final String systemProperty; public final Version from; diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java index 3b523857eb6e1..5220e17618a34 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/CreateIndexFromSourceActionIT.java @@ -32,7 +32,6 @@ import java.util.Map; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.REINDEX_DATA_STREAM_FEATURE_FLAG; public class CreateIndexFromSourceActionIT extends ESIntegTestCase { @@ -42,8 +41,6 @@ protected Collection> nodePlugins() { } public void testOldSettingsManuallyFiltered() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var numShards = randomIntBetween(1, 10); var staticSettings = Settings.builder() // setting to filter @@ -77,8 +74,6 @@ public void testOldSettingsManuallyFiltered() throws Exception { } public void testDestIndexCreated() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); @@ -96,8 +91,6 @@ public void testDestIndexCreated() throws Exception { } public void testSettingsCopiedFromSource() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - // start with a static setting var numShards = randomIntBetween(1, 10); var staticSettings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards).build(); @@ -122,8 +115,6 @@ public void testSettingsCopiedFromSource() throws Exception { } public void testMappingsCopiedFromSource() { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); String mapping = """ { @@ -157,8 +148,6 @@ public void testMappingsCopiedFromSource() { } public void testSettingsOverridden() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var numShardsSource = randomIntBetween(1, 10); var numReplicasSource = randomIntBetween(0, 10); var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); @@ -191,8 +180,6 @@ public void testSettingsOverridden() throws Exception { } public void testSettingsNullOverride() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var sourceIndex = 
randomAlphaOfLength(20).toLowerCase(Locale.ROOT); var sourceSettings = Settings.builder() .put(IndexMetadata.SETTING_BLOCKS_WRITE, true) @@ -223,8 +210,6 @@ public void testSettingsNullOverride() throws Exception { } public void testRemoveIndexBlocksByDefault() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); var sourceSettings = Settings.builder() @@ -257,8 +242,6 @@ public void testRemoveIndexBlocksByDefault() throws Exception { } public void testMappingsOverridden() { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); String sourceMapping = """ { diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java index 04caf3dbaa9d1..b6ff76095ac16 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java @@ -40,7 +40,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; -import static org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.REINDEX_DATA_STREAM_FEATURE_FLAG; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -52,7 +51,6 @@ protected Collection> nodePlugins() { } public void testNonExistentDataStream() { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); String nonExistentDataStreamName = randomAlphaOfLength(50); ReindexDataStreamRequest reindexDataStreamRequest = new ReindexDataStreamRequest( ReindexDataStreamAction.Mode.UPGRADE, @@ -65,7 +63,6 @@ public void testNonExistentDataStream() { } public void testAlreadyUpToDateDataStream() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); String dataStreamName = randomAlphaOfLength(50).toLowerCase(Locale.ROOT); ReindexDataStreamRequest reindexDataStreamRequest = new ReindexDataStreamRequest( ReindexDataStreamAction.Mode.UPGRADE, diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java index 40464d2a43220..0ad7dc45d4df8 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java @@ -53,7 +53,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.REINDEX_DATA_STREAM_FEATURE_FLAG; import static 
org.hamcrest.Matchers.equalTo; public class ReindexDatastreamIndexTransportActionIT extends ESIntegTestCase { @@ -77,8 +76,6 @@ protected Collection> nodePlugins() { } public void testDestIndexDeletedIfExists() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - // empty source index var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); @@ -99,8 +96,6 @@ public void testDestIndexDeletedIfExists() throws Exception { } public void testDestIndexNameSet_noDotPrefix() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); @@ -113,7 +108,6 @@ public void testDestIndexNameSet_noDotPrefix() throws Exception { } public void testDestIndexNameSet_withDotPrefix() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); var sourceIndex = "." + randomAlphaOfLength(20).toLowerCase(Locale.ROOT); indicesAdmin().create(new CreateIndexRequest(sourceIndex)).get(); @@ -127,8 +121,6 @@ public void testDestIndexNameSet_withDotPrefix() throws Exception { } public void testDestIndexContainsDocs() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - // source index with docs var numDocs = randomIntBetween(1, 100); var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); @@ -145,8 +137,6 @@ public void testDestIndexContainsDocs() throws Exception { } public void testSetSourceToBlockWrites() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var settings = randomBoolean() ? 
Settings.builder().put(IndexMetadata.SETTING_BLOCKS_WRITE, true).build() : Settings.EMPTY; // empty source index @@ -163,8 +153,6 @@ public void testSetSourceToBlockWrites() throws Exception { } public void testSettingsAddedBeforeReindex() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - // start with a static setting var numShards = randomIntBetween(1, 10); var staticSettings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards).build(); @@ -193,8 +181,6 @@ public void testSettingsAddedBeforeReindex() throws Exception { } public void testMappingsAddedToDestIndex() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(MAPPING)).actionGet(); @@ -215,8 +201,6 @@ public void testMappingsAddedToDestIndex() throws Exception { } public void testFailIfMetadataBlockSet() { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); var settings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_METADATA, true).build(); indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).actionGet(); @@ -231,8 +215,6 @@ public void testFailIfMetadataBlockSet() { } public void testFailIfReadBlockSet() { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); var settings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_READ, true).build(); indicesAdmin().create(new CreateIndexRequest(sourceIndex, settings)).actionGet(); @@ -247,8 +229,6 @@ public void testFailIfReadBlockSet() { } public void testReadOnlyBlocksNotAddedBack() { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var sourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); var settings = Settings.builder() .put(IndexMetadata.SETTING_READ_ONLY, randomBoolean()) @@ -272,8 +252,6 @@ public void testReadOnlyBlocksNotAddedBack() { } public void testUpdateSettingsDefaultsRestored() { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - // ESIntegTestCase creates a template random_index_template which contains a value for number_of_replicas. // Since this test checks the behavior of default settings, there cannot be a value for number_of_replicas, // so we delete the template within this method. 
This has no effect on other tests which will still @@ -304,8 +282,6 @@ public void testUpdateSettingsDefaultsRestored() { } public void testSettingsAndMappingsFromTemplate() throws IOException { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var numShards = randomIntBetween(1, 10); var numReplicas = randomIntBetween(0, 10); @@ -393,8 +369,6 @@ public void testSettingsAndMappingsFromTemplate() throws IOException { """; public void testTsdbStartEndSet() throws Exception { - assumeTrue("requires the migration reindex feature flag", REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()); - var templateSettings = Settings.builder().put("index.mode", "time_series"); if (randomBoolean()) { templateSettings.put("index.routing_path", "metricset"); diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java index 10cf498c85bf0..f5f8beba26d8f 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java @@ -60,7 +60,6 @@ import java.util.function.Supplier; import static org.elasticsearch.xpack.core.ClientHelper.REINDEX_DATA_STREAM_ORIGIN; -import static org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.REINDEX_DATA_STREAM_FEATURE_FLAG; import static org.elasticsearch.xpack.migrate.action.ReindexDataStreamIndexTransportAction.REINDEX_MAX_REQUESTS_PER_SECOND_SETTING; import static org.elasticsearch.xpack.migrate.task.ReindexDataStreamPersistentTaskExecutor.MAX_CONCURRENT_INDICES_REINDEXED_PER_DATA_STREAM_SETTING; @@ -79,67 +78,55 @@ public List getRestHandlers( Predicate clusterSupportsFeature ) { List handlers = new ArrayList<>(); - if (REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()) { - handlers.add(new RestMigrationReindexAction()); - handlers.add(new RestGetMigrationReindexStatusAction()); - handlers.add(new RestCancelReindexDataStreamAction()); - handlers.add(new RestCreateIndexFromSourceAction()); - } + handlers.add(new RestMigrationReindexAction()); + handlers.add(new RestGetMigrationReindexStatusAction()); + handlers.add(new RestCancelReindexDataStreamAction()); + handlers.add(new RestCreateIndexFromSourceAction()); return handlers; } @Override public List> getActions() { List> actions = new ArrayList<>(); - if (REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()) { - actions.add(new ActionHandler<>(ReindexDataStreamAction.INSTANCE, ReindexDataStreamTransportAction.class)); - actions.add(new ActionHandler<>(GetMigrationReindexStatusAction.INSTANCE, GetMigrationReindexStatusTransportAction.class)); - actions.add(new ActionHandler<>(CancelReindexDataStreamAction.INSTANCE, CancelReindexDataStreamTransportAction.class)); - actions.add(new ActionHandler<>(ReindexDataStreamIndexAction.INSTANCE, ReindexDataStreamIndexTransportAction.class)); - actions.add(new ActionHandler<>(CreateIndexFromSourceAction.INSTANCE, CreateIndexFromSourceTransportAction.class)); - } + actions.add(new ActionHandler<>(ReindexDataStreamAction.INSTANCE, ReindexDataStreamTransportAction.class)); + actions.add(new ActionHandler<>(GetMigrationReindexStatusAction.INSTANCE, GetMigrationReindexStatusTransportAction.class)); + actions.add(new ActionHandler<>(CancelReindexDataStreamAction.INSTANCE, CancelReindexDataStreamTransportAction.class)); + actions.add(new ActionHandler<>(ReindexDataStreamIndexAction.INSTANCE, 
ReindexDataStreamIndexTransportAction.class)); + actions.add(new ActionHandler<>(CreateIndexFromSourceAction.INSTANCE, CreateIndexFromSourceTransportAction.class)); return actions; } @Override public List getNamedXContent() { - if (REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()) { - return List.of( - new NamedXContentRegistry.Entry( - PersistentTaskState.class, - new ParseField(ReindexDataStreamPersistentTaskState.NAME), - ReindexDataStreamPersistentTaskState::fromXContent - ), - new NamedXContentRegistry.Entry( - PersistentTaskParams.class, - new ParseField(ReindexDataStreamTaskParams.NAME), - ReindexDataStreamTaskParams::fromXContent - ) - ); - } else { - return List.of(); - } + return List.of( + new NamedXContentRegistry.Entry( + PersistentTaskState.class, + new ParseField(ReindexDataStreamPersistentTaskState.NAME), + ReindexDataStreamPersistentTaskState::fromXContent + ), + new NamedXContentRegistry.Entry( + PersistentTaskParams.class, + new ParseField(ReindexDataStreamTaskParams.NAME), + ReindexDataStreamTaskParams::fromXContent + ) + ); } @Override public List getNamedWriteables() { - if (REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()) { - return List.of( - new NamedWriteableRegistry.Entry( - PersistentTaskState.class, - ReindexDataStreamPersistentTaskState.NAME, - ReindexDataStreamPersistentTaskState::new - ), - new NamedWriteableRegistry.Entry( - PersistentTaskParams.class, - ReindexDataStreamTaskParams.NAME, - ReindexDataStreamTaskParams::new - ), - new NamedWriteableRegistry.Entry(Task.Status.class, ReindexDataStreamStatus.NAME, ReindexDataStreamStatus::new) - ); - } else { - return List.of(); - } + return List.of( + new NamedWriteableRegistry.Entry( + PersistentTaskState.class, + ReindexDataStreamPersistentTaskState.NAME, + ReindexDataStreamPersistentTaskState::new + ), + new NamedWriteableRegistry.Entry( + PersistentTaskParams.class, + ReindexDataStreamTaskParams.NAME, + ReindexDataStreamTaskParams::new + ), + new NamedWriteableRegistry.Entry(Task.Status.class, ReindexDataStreamStatus.NAME, ReindexDataStreamStatus::new) + ); } @Override @@ -150,18 +137,14 @@ public List> getPersistentTasksExecutor( SettingsModule settingsModule, IndexNameExpressionResolver expressionResolver ) { - if (REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()) { - return List.of( - new ReindexDataStreamPersistentTaskExecutor( - new OriginSettingClient(client, REINDEX_DATA_STREAM_ORIGIN), - clusterService, - ReindexDataStreamTask.TASK_NAME, - threadPool - ) - ); - } else { - return List.of(); - } + return List.of( + new ReindexDataStreamPersistentTaskExecutor( + new OriginSettingClient(client, REINDEX_DATA_STREAM_ORIGIN), + clusterService, + ReindexDataStreamTask.TASK_NAME, + threadPool + ) + ); } @Override diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java index 17925eb04851b..faf8982b79bf0 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.features.NodeFeature; import 
org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -29,7 +28,6 @@ import java.util.function.Predicate; public class ReindexDataStreamAction extends ActionType { - public static final FeatureFlag REINDEX_DATA_STREAM_FEATURE_FLAG = new FeatureFlag("reindex_data_stream"); public static final String TASK_ID_PREFIX = "reindex-data-stream-"; public static final ReindexDataStreamAction INSTANCE = new ReindexDataStreamAction(); diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestMigrationReindexAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestMigrationReindexAction.java index a89f056477d2c..1578228ea1b63 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestMigrationReindexAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestMigrationReindexAction.java @@ -20,13 +20,10 @@ import org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction; import java.io.IOException; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.REINDEX_DATA_STREAM_FEATURE_FLAG; public class RestMigrationReindexAction extends BaseRestHandler { public static final String MIGRATION_REINDEX_CAPABILITY = "migration_reindex"; @@ -56,11 +53,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli @Override public Set supportedCapabilities() { - Set capabilities = new HashSet<>(); - if (REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()) { - capabilities.add(MIGRATION_REINDEX_CAPABILITY); - } - return Collections.unmodifiableSet(capabilities); + return Set.of(MIGRATION_REINDEX_CAPABILITY); } static class ReindexDataStreamRestToXContentListener extends RestBuilderListener { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/10_reindex.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/10_reindex.yml index 247f49efb5404..df6feb1502dc4 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/10_reindex.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/10_reindex.yml @@ -7,7 +7,7 @@ setup: --- "Test Reindex With Unsupported Mode": - requires: - reason: "migration reindex is behind a feature flag" + reason: "requires a cluster with the migration reindex feature" test_runner_features: [capabilities] capabilities: - method: POST @@ -27,7 +27,7 @@ setup: --- "Test Reindex With Nonexistent Data Stream": - requires: - reason: "migration reindex is behind a feature flag" + reason: "requires a cluster with the migration reindex feature" test_runner_features: [capabilities] capabilities: - method: POST @@ -59,7 +59,7 @@ setup: --- "Test Reindex With Bad Data Stream Name": - requires: - reason: "migration reindex is behind a feature flag" + reason: "requires a cluster with the migration reindex feature" test_runner_features: [capabilities] capabilities: - method: POST @@ -79,7 +79,7 @@ setup: --- "Test Reindex With Existing Data Stream": - requires: - reason: "migration reindex is behind a feature flag" + reason: "requires a cluster with the migration reindex feature" test_runner_features: [capabilities] capabilities: - method: POST diff --git 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/20_reindex_status.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/20_reindex_status.yml index c65786a188687..616d320ae7ffe 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/20_reindex_status.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/20_reindex_status.yml @@ -7,7 +7,7 @@ setup: --- "Test get reindex status with nonexistent task id": - requires: - reason: "migration reindex is behind a feature flag" + reason: "requires a cluster with the migration reindex feature" test_runner_features: [capabilities] capabilities: - method: POST @@ -21,7 +21,7 @@ setup: --- "Test Reindex With Existing Data Stream": - requires: - reason: "migration reindex is behind a feature flag" + reason: "requires a cluster with the migration reindex feature" test_runner_features: [capabilities] capabilities: - method: POST diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/30_create_from.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/30_create_from.yml index 269d66474b1d7..cb9b0d3fe896a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/30_create_from.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/30_create_from.yml @@ -18,7 +18,7 @@ teardown: --- "Test create from with nonexistent source index": - requires: - reason: "migration reindex is behind a feature flag" + reason: "requires a cluster with the migration reindex feature" test_runner_features: [capabilities] capabilities: - method: POST @@ -33,7 +33,7 @@ teardown: --- "Test create_from with existing source index": - requires: - reason: "migration reindex is behind a feature flag" + reason: "requires a cluster with the migration reindex feature" test_runner_features: [capabilities] capabilities: - method: POST @@ -69,7 +69,7 @@ teardown: --- "Test create_from with existing source index and overrides": - requires: - reason: "migration reindex is behind a feature flag" + reason: "requires a cluster with the migration reindex feature" test_runner_features: [capabilities] capabilities: - method: POST @@ -122,7 +122,7 @@ teardown: --- "Test create_from with remove_index_blocks set to false": - requires: - reason: "migration reindex is behind a feature flag" + reason: "requires a cluster with the migration reindex feature" test_runner_features: [capabilities] capabilities: - method: POST @@ -155,7 +155,7 @@ teardown: --- "Test create_from with remove_index_blocks default of true": - requires: - reason: "migration reindex is behind a feature flag" + reason: "requires a cluster with the migration reindex feature" test_runner_features: [capabilities] capabilities: - method: POST From 75b772d66c31eacbfeb27c15d5c888e4789d6383 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 24 Jan 2025 15:22:32 -0800 Subject: [PATCH 039/383] Share more entitlement IT setup (#120846) This commit adds an AbstractEntitlementsIT and moves the entitlement cluster setup into a bespoke EntitlementTestRule. That allows most of the common code to be deduplicated. This change also automatically creates a temp dir which the test passes along into the test cluster. 
--- .../qa/AbstractEntitlementsIT.java | 63 ++++++++++++++ .../entitlement/qa/EntitlementsAllowedIT.java | 31 +------ .../qa/EntitlementsAllowedNonModularIT.java | 31 +------ .../entitlement/qa/EntitlementsDeniedIT.java | 37 ++------ .../qa/EntitlementsDeniedNonModularIT.java | 37 ++------ .../entitlement/qa/EntitlementsTestRule.java | 87 +++++++++++++++++++ .../entitlement/qa/EntitlementsUtil.java | 72 --------------- 7 files changed, 170 insertions(+), 188 deletions(-) create mode 100644 libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java create mode 100644 libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java delete mode 100644 libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsUtil.java diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java new file mode 100644 index 0000000000000..b770b4915a317 --- /dev/null +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.test.rest.ESRestTestCase; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public abstract class AbstractEntitlementsIT extends ESRestTestCase { + + static final EntitlementsTestRule.PolicyBuilder ALLOWED_TEST_ENTITLEMENTS = (builder, tempDir) -> { + builder.value("create_class_loader"); + builder.value("set_https_connection_properties"); + builder.value("inbound_network"); + builder.value("outbound_network"); + builder.value("load_native_libraries"); + builder.value( + Map.of( + "write_system_properties", + Map.of("properties", List.of("es.entitlements.checkSetSystemProperty", "es.entitlements.checkClearSystemProperty")) + ) + ); + }; + + private final String actionName; + private final boolean expectAllowed; + + AbstractEntitlementsIT(String actionName, boolean expectAllowed) { + this.actionName = actionName; + this.expectAllowed = expectAllowed; + } + + private Response executeCheck() throws IOException { + var request = new Request("GET", "/_entitlement_check"); + request.addParameter("action", actionName); + return client().performRequest(request); + } + + public void testAction() throws IOException { + logger.info("Executing Entitlement test for [{}]", actionName); + if (expectAllowed) { + Response result = executeCheck(); + assertThat(result.getStatusLine().getStatusCode(), equalTo(200)); + } else { + var exception = expectThrows(IOException.class, this::executeCheck); + assertThat(exception.getMessage(), containsString("not_entitled_exception")); + } + } +} diff --git 
a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedIT.java index 54628fc674d75..159083c2dd6ba 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedIT.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedIT.java @@ -12,31 +12,16 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; import org.elasticsearch.entitlement.qa.test.RestEntitlementsCheckAction; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.rest.ESRestTestCase; import org.junit.ClassRule; -import java.io.IOException; - -import static org.elasticsearch.entitlement.qa.EntitlementsUtil.ALLOWED_ENTITLEMENTS; -import static org.hamcrest.Matchers.equalTo; - -public class EntitlementsAllowedIT extends ESRestTestCase { +public class EntitlementsAllowedIT extends AbstractEntitlementsIT { @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .module("entitlement-test-plugin", spec -> EntitlementsUtil.setupEntitlements(spec, true, ALLOWED_ENTITLEMENTS)) - .systemProperty("es.entitlements.enabled", "true") - .setting("xpack.security.enabled", "false") - .build(); - - private final String actionName; + public static EntitlementsTestRule testRule = new EntitlementsTestRule(true, ALLOWED_TEST_ENTITLEMENTS); public EntitlementsAllowedIT(@Name("actionName") String actionName) { - this.actionName = actionName; + super(actionName, true); } @ParametersFactory @@ -46,14 +31,6 @@ public static Iterable data() { @Override protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - public void testCheckActionWithPolicyPass() throws IOException { - logger.info("Executing Entitlement test for [{}]", actionName); - var request = new Request("GET", "/_entitlement_check"); - request.addParameter("action", actionName); - Response result = client().performRequest(request); - assertThat(result.getStatusLine().getStatusCode(), equalTo(200)); + return testRule.cluster.getHttpAddresses(); } } diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedNonModularIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedNonModularIT.java index 8390f0e5fd115..c99a05ff57ece 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedNonModularIT.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedNonModularIT.java @@ -12,31 +12,16 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; import org.elasticsearch.entitlement.qa.test.RestEntitlementsCheckAction; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.rest.ESRestTestCase; import org.junit.ClassRule; -import java.io.IOException; - -import static org.elasticsearch.entitlement.qa.EntitlementsUtil.ALLOWED_ENTITLEMENTS; -import static org.hamcrest.Matchers.equalTo; - -public class 
EntitlementsAllowedNonModularIT extends ESRestTestCase { +public class EntitlementsAllowedNonModularIT extends AbstractEntitlementsIT { @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .module("entitlement-test-plugin", spec -> EntitlementsUtil.setupEntitlements(spec, false, ALLOWED_ENTITLEMENTS)) - .systemProperty("es.entitlements.enabled", "true") - .setting("xpack.security.enabled", "false") - .build(); - - private final String actionName; + public static EntitlementsTestRule testRule = new EntitlementsTestRule(false, ALLOWED_TEST_ENTITLEMENTS); public EntitlementsAllowedNonModularIT(@Name("actionName") String actionName) { - this.actionName = actionName; + super(actionName, true); } @ParametersFactory @@ -46,14 +31,6 @@ public static Iterable data() { @Override protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - public void testCheckActionWithPolicyPass() throws IOException { - logger.info("Executing Entitlement test for [{}]", actionName); - var request = new Request("GET", "/_entitlement_check"); - request.addParameter("action", actionName); - Response result = client().performRequest(request); - assertThat(result.getStatusLine().getStatusCode(), equalTo(200)); + return testRule.cluster.getHttpAddresses(); } } diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedIT.java index 3405e41897cc1..6f348d38d8e53 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedIT.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedIT.java @@ -12,36 +12,16 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.client.Request; import org.elasticsearch.entitlement.qa.test.RestEntitlementsCheckAction; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.rest.ESRestTestCase; import org.junit.ClassRule; -import java.io.IOException; - -import static org.hamcrest.Matchers.containsString; - -public class EntitlementsDeniedIT extends ESRestTestCase { +public class EntitlementsDeniedIT extends AbstractEntitlementsIT { @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .module("entitlement-test-plugin", spec -> EntitlementsUtil.setupEntitlements(spec, true, null)) - .systemProperty("es.entitlements.enabled", "true") - .setting("xpack.security.enabled", "false") - // Logs in libs/entitlement/qa/build/test-results/javaRestTest/TEST-org.elasticsearch.entitlement.qa.EntitlementsDeniedIT.xml - // .setting("logger.org.elasticsearch.entitlement", "DEBUG") - .build(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - private final String actionName; + public static EntitlementsTestRule testRule = new EntitlementsTestRule(true, null); public EntitlementsDeniedIT(@Name("actionName") String actionName) { - this.actionName = actionName; + super(actionName, false); } @ParametersFactory @@ -49,13 +29,8 @@ public static Iterable data() { return RestEntitlementsCheckAction.getAllCheckActions().stream().map(action -> new Object[] { action }).toList(); } - public void testCheckThrows() { - logger.info("Executing Entitlement test for [{}]", actionName); - var exception = 
expectThrows(IOException.class, () -> { - var request = new Request("GET", "/_entitlement_check"); - request.addParameter("action", actionName); - client().performRequest(request); - }); - assertThat(exception.getMessage(), containsString("not_entitled_exception")); + @Override + protected String getTestRestCluster() { + return testRule.cluster.getHttpAddresses(); } } diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedNonModularIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedNonModularIT.java index a2a4773bf7523..6f2003f7275d4 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedNonModularIT.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsDeniedNonModularIT.java @@ -12,36 +12,16 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.client.Request; import org.elasticsearch.entitlement.qa.test.RestEntitlementsCheckAction; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.rest.ESRestTestCase; import org.junit.ClassRule; -import java.io.IOException; - -import static org.hamcrest.Matchers.containsString; - -public class EntitlementsDeniedNonModularIT extends ESRestTestCase { +public class EntitlementsDeniedNonModularIT extends AbstractEntitlementsIT { @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .module("entitlement-test-plugin", spec -> EntitlementsUtil.setupEntitlements(spec, false, null)) - .systemProperty("es.entitlements.enabled", "true") - .setting("xpack.security.enabled", "false") - // Logs in libs/entitlement/qa/build/test-results/javaRestTest/TEST-org.elasticsearch.entitlement.qa.EntitlementsDeniedIT.xml - // .setting("logger.org.elasticsearch.entitlement", "DEBUG") - .build(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - private final String actionName; + public static EntitlementsTestRule testRule = new EntitlementsTestRule(false, null); public EntitlementsDeniedNonModularIT(@Name("actionName") String actionName) { - this.actionName = actionName; + super(actionName, false); } @ParametersFactory @@ -49,13 +29,8 @@ public static Iterable data() { return RestEntitlementsCheckAction.getAllCheckActions().stream().map(action -> new Object[] { action }).toList(); } - public void testCheckThrows() { - logger.info("Executing Entitlement test for [{}]", actionName); - var exception = expectThrows(IOException.class, () -> { - var request = new Request("GET", "/_entitlement_check"); - request.addParameter("action", actionName); - client().performRequest(request); - }); - assertThat(exception.getMessage(), containsString("not_entitled_exception")); + @Override + protected String getTestRestCluster() { + return testRule.cluster.getHttpAddresses(); } } diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java new file mode 100644 index 0000000000000..1a0a75588f02c --- /dev/null +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.PluginInstallSpec; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.yaml.YamlXContent; +import org.junit.rules.RuleChain; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Path; + +class EntitlementsTestRule implements TestRule { + + interface PolicyBuilder { + void build(XContentBuilder builder, Path tempDir) throws IOException; + } + + final TemporaryFolder testDir; + final ElasticsearchCluster cluster; + final TestRule ruleChain; + + @SuppressWarnings("this-escape") + EntitlementsTestRule(boolean modular, PolicyBuilder policyBuilder) { + testDir = new TemporaryFolder(); + cluster = ElasticsearchCluster.local() + .module("entitlement-test-plugin", spec -> setupEntitlements(spec, modular, policyBuilder)) + .systemProperty("es.entitlements.enabled", "true") + .systemProperty("es.entitlements.testdir", () -> testDir.getRoot().getAbsolutePath()) + .setting("xpack.security.enabled", "false") + .build(); + ruleChain = RuleChain.outerRule(testDir).around(cluster); + } + + @Override + public Statement apply(Statement statement, Description description) { + return ruleChain.apply(statement, description); + } + + private void setupEntitlements(PluginInstallSpec spec, boolean modular, PolicyBuilder policyBuilder) { + String moduleName = modular ? "org.elasticsearch.entitlement.qa.test" : "ALL-UNNAMED"; + if (policyBuilder != null) { + spec.withEntitlementsOverride(old -> { + try { + try (var builder = YamlXContent.contentBuilder()) { + builder.startObject(); + builder.field(moduleName); + builder.startArray(); + policyBuilder.build(builder, testDir.getRoot().toPath()); + builder.endArray(); + builder.endObject(); + + String policy = Strings.toString(builder); + System.out.println("Using entitlement policy:\n" + policy); + return Resource.fromString(policy); + } + + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }); + } + + if (modular == false) { + spec.withPropertiesOverride(old -> { + String props = old.replace("modulename=org.elasticsearch.entitlement.qa.test", ""); + System.out.println("Using plugin properties:\n" + props); + return Resource.fromString(props); + }); + } + } +} diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsUtil.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsUtil.java deleted file mode 100644 index 46d411baf1aa0..0000000000000 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsUtil.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.entitlement.qa; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.test.cluster.local.PluginInstallSpec; -import org.elasticsearch.test.cluster.util.resource.Resource; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.yaml.YamlXContent; - -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.List; -import java.util.Map; - -class EntitlementsUtil { - - static final CheckedConsumer ALLOWED_ENTITLEMENTS = builder -> { - builder.value("create_class_loader"); - builder.value("set_https_connection_properties"); - builder.value("inbound_network"); - builder.value("outbound_network"); - builder.value("load_native_libraries"); - builder.value( - Map.of( - "write_system_properties", - Map.of("properties", List.of("es.entitlements.checkSetSystemProperty", "es.entitlements.checkClearSystemProperty")) - ) - ); - }; - - static void setupEntitlements(PluginInstallSpec spec, boolean modular, CheckedConsumer policyBuilder) { - String moduleName = modular ? "org.elasticsearch.entitlement.qa.test" : "ALL-UNNAMED"; - if (policyBuilder != null) { - try { - try (var builder = YamlXContent.contentBuilder()) { - builder.startObject(); - builder.field(moduleName); - builder.startArray(); - policyBuilder.accept(builder); - builder.endArray(); - builder.endObject(); - - String policy = Strings.toString(builder); - System.out.println("Using entitlement policy:\n" + policy); - spec.withEntitlementsOverride(old -> Resource.fromString(policy)); - } - - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - if (modular == false) { - spec.withPropertiesOverride(old -> { - String props = old.replace("modulename=org.elasticsearch.entitlement.qa.test", ""); - System.out.println("Using plugin properties:\n" + props); - return Resource.fromString(props); - }); - } - } - - private EntitlementsUtil() {} -} From 1cb2a65e19f87e13738d36f8ea2999c1019fa2ec Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Sat, 25 Jan 2025 11:59:31 +0200 Subject: [PATCH 040/383] Skip flaky configuration in randomized testing for logsdb (#120859) --- muted-tests.yml | 11 ----------- .../logsdb/datageneration/FieldType.java | 14 +------------- .../DefaultMappingParametersHandler.java | 7 ++++--- 3 files changed, 5 insertions(+), 27 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 34b63e8b063fc..f547aadc66830 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -243,17 +243,6 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/3rd_party_deployment/Test start deployment fails while model download in progress} issue: https://github.com/elastic/elasticsearch/issues/120810 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - issue: https://github.com/elastic/elasticsearch/issues/120816 -- class: org.elasticsearch.xpack.logsdb.qa.StandardVersusStandardReindexedIntoLogsDbChallengeRestIT - method: testEsqlSource - issue: https://github.com/elastic/elasticsearch/issues/120830 -- 
class: org.elasticsearch.xpack.logsdb.qa.StandardVersusLogsIndexModeRandomDataChallengeRestIT - method: testEsqlSource - issue: https://github.com/elastic/elasticsearch/issues/120831 -- class: org.elasticsearch.xpack.logsdb.qa.StoredSourceLogsDbVersusReindexedLogsDbChallengeRestIT - method: testEsqlSource - issue: https://github.com/elastic/elasticsearch/issues/120832 # Examples: # diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java index 96b75f29382e2..13c802fcd5809 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java @@ -11,13 +11,9 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.fields.leaf.ByteFieldDataGenerator; -import org.elasticsearch.logsdb.datageneration.fields.leaf.DoubleFieldDataGenerator; -import org.elasticsearch.logsdb.datageneration.fields.leaf.FloatFieldDataGenerator; -import org.elasticsearch.logsdb.datageneration.fields.leaf.HalfFloatFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.IntegerFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.KeywordFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.LongFieldDataGenerator; -import org.elasticsearch.logsdb.datageneration.fields.leaf.ScaledFloatFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.ShortFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.UnsignedLongFieldDataGenerator; @@ -30,11 +26,7 @@ public enum FieldType { UNSIGNED_LONG("unsigned_long"), INTEGER("integer"), SHORT("short"), - BYTE("byte"), - DOUBLE("double"), - FLOAT("float"), - HALF_FLOAT("half_float"), - SCALED_FLOAT("scaled_float"); + BYTE("byte"); private final String name; @@ -50,10 +42,6 @@ public FieldDataGenerator generator(String fieldName, DataSource dataSource) { case INTEGER -> new IntegerFieldDataGenerator(fieldName, dataSource); case SHORT -> new ShortFieldDataGenerator(fieldName, dataSource); case BYTE -> new ByteFieldDataGenerator(fieldName, dataSource); - case DOUBLE -> new DoubleFieldDataGenerator(fieldName, dataSource); - case FLOAT -> new FloatFieldDataGenerator(fieldName, dataSource); - case HALF_FLOAT -> new HalfFloatFieldDataGenerator(fieldName, dataSource); - case SCALED_FLOAT -> new ScaledFloatFieldDataGenerator(fieldName, dataSource); }; } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java index b639108ea6ad2..832a3205cfcb9 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java @@ -32,8 +32,8 @@ public DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceReques return new DataSourceResponse.LeafMappingParametersGenerator(switch (request.fieldType()) { case KEYWORD -> keywordMapping(request, map); - case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, HALF_FLOAT, UNSIGNED_LONG -> plain(map); - case SCALED_FLOAT -> scaledFloatMapping(map); + case LONG, INTEGER, SHORT, 
BYTE, UNSIGNED_LONG -> plain(map); + }); } @@ -61,7 +61,8 @@ private Supplier> keywordMapping( .collect(Collectors.toSet()); if (options.isEmpty() == false) { - injected.put("copy_to", ESTestCase.randomFrom(options)); + // TODO: re-enable once #120831 is resolved + // injected.put("copy_to", ESTestCase.randomFrom(options)); } } From c9cce2cfaa3cd2e1fc01f895881605a20bde4367 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Sat, 25 Jan 2025 13:49:29 +0200 Subject: [PATCH 041/383] Restore single-element array removal in synthetic source (#120844) --- .../index/mapper/DocumentParser.java | 43 +++++++++++++------ .../mapper/IgnoredSourceFieldMapperTests.java | 21 ++++++++- 2 files changed, 49 insertions(+), 15 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 5a417c541d716..ac3f019636b66 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -57,6 +57,7 @@ public final class DocumentParser { static final NodeFeature FIX_PARSING_SUBOBJECTS_FALSE_DYNAMIC_FALSE = new NodeFeature( "mapper.fix_parsing_subobjects_false_dynamic_false" ); + private static final String NOOP_FIELD_MAPPER_NAME = "no-op"; private final XContentParserConfiguration parserConfiguration; private final MappingParserContext mappingParserContext; @@ -706,6 +707,8 @@ private static void parseNonDynamicArray( canRemoveSingleLeafElement = mapper instanceof FieldMapper && mode == Mapper.SourceKeepMode.ARRAYS + && context.inArrayScope() == false + && mapper.leafName().equals(NOOP_FIELD_MAPPER_NAME) == false && fieldWithFallbackSyntheticSource == false && copyToFieldHasValuesInDocument == false; @@ -729,20 +732,28 @@ private static void parseNonDynamicArray( XContentParser parser = context.parser(); XContentParser.Token token; + int elements = 0; while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.START_OBJECT) { + elements = 2; parseObject(context, lastFieldName); } else if (token == XContentParser.Token.START_ARRAY) { + elements = 2; parseArray(context, lastFieldName); } else if (token == XContentParser.Token.VALUE_NULL) { + elements++; parseNullValue(context, lastFieldName); } else if (token == null) { throwEOFOnParseArray(arrayFieldName, context); } else { assert token.isValue(); + elements++; parseValue(context, lastFieldName); } } + if (elements <= 1 && canRemoveSingleLeafElement) { + context.removeLastIgnoredField(fullPath); + } postProcessDynamicArrayMapping(context, lastFieldName); } @@ -917,22 +928,26 @@ private static Mapper getLeafMapper(final DocumentParserContext context, String } private static FieldMapper noopFieldMapper(String path) { - return new FieldMapper("no-op", new MappedFieldType("no-op", false, false, false, TextSearchInfo.NONE, Collections.emptyMap()) { - @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - throw new UnsupportedOperationException(); - } + return new FieldMapper( + NOOP_FIELD_MAPPER_NAME, + new MappedFieldType(NOOP_FIELD_MAPPER_NAME, false, false, false, TextSearchInfo.NONE, Collections.emptyMap()) { + @Override + public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + throw new UnsupportedOperationException(); + } - @Override - public String typeName() { - throw new 
UnsupportedOperationException(); - } + @Override + public String typeName() { + throw new UnsupportedOperationException(); + } - @Override - public Query termQuery(Object value, SearchExecutionContext context) { - throw new UnsupportedOperationException(); - } - }, FieldMapper.BuilderParams.empty()) { + @Override + public Query termQuery(Object value, SearchExecutionContext context) { + throw new UnsupportedOperationException(); + } + }, + FieldMapper.BuilderParams.empty() + ) { @Override protected void parseCreateField(DocumentParserContext context) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java index 2b36c0ce0b5a4..d12bf5dc2e34c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java @@ -743,7 +743,9 @@ public void testIndexStoredArraySourceSingleLeafElement() throws IOException { b.startObject("int_value").field("type", "integer").endObject(); })).documentMapper(); var syntheticSource = syntheticSource(documentMapper, b -> b.array("int_value", new int[] { 10 })); - assertEquals("{\"int_value\":[10]}", syntheticSource); + assertEquals("{\"int_value\":10}", syntheticSource); + ParsedDocument doc = documentMapper.parse(source(syntheticSource)); + assertNull(doc.rootDoc().getField("_ignored_source")); } public void testIndexStoredArraySourceSingleLeafElementAndNull() throws IOException { @@ -754,6 +756,23 @@ public void testIndexStoredArraySourceSingleLeafElementAndNull() throws IOExcept assertEquals("{\"value\":[\"foo\",null]}", syntheticSource); } + public void testIndexStoredArraySourceSingleLeafElementInObjectArray() throws IOException { + DocumentMapper documentMapper = createMapperServiceWithStoredArraySource(mapping(b -> { + b.startObject("path").field("synthetic_source_keep", "none").startObject("properties"); + { + b.startObject("int_value").field("type", "integer").endObject(); + } + b.endObject().endObject(); + })).documentMapper(); + var syntheticSource = syntheticSource(documentMapper, b -> { + b.startArray("path"); + b.startObject().field("int_value", 10).endObject(); + b.startObject().array("int_value", new int[] { 20 }).endObject(); + b.endArray(); + }); + assertEquals("{\"path\":{\"int_value\":[10,20]}}", syntheticSource); + } + public void testIndexStoredArraySourceSingleObjectElement() throws IOException { DocumentMapper documentMapper = createMapperServiceWithStoredArraySource(mapping(b -> { b.startObject("path").startObject("properties"); From 7dfede2fc667b6bcfb807155aa5a9538d3e6641f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 26 Jan 2025 08:35:25 +1100 Subject: [PATCH 042/383] Mute org.elasticsearch.indices.mapping.UpdateMappingIntegrationIT org.elasticsearch.indices.mapping.UpdateMappingIntegrationIT #116126 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f547aadc66830..01d935b7340b2 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -243,6 +243,8 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/3rd_party_deployment/Test start deployment fails while model download in progress} issue: https://github.com/elastic/elasticsearch/issues/120810 +- class: org.elasticsearch.indices.mapping.UpdateMappingIntegrationIT + issue: 
https://github.com/elastic/elasticsearch/issues/116126 # Examples: # From fb3ba877468aa0bc71ad37955876ec1f2fe59845 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 27 Jan 2025 08:23:27 +0000 Subject: [PATCH 043/383] Remove trappy timeouts from `ClusterInfoRequest` (#120707) These constructors are not used any more, so this commit removes them. Relates #107984 --- .../mapping/get/GetMappingsRequest.java | 3 ++- .../master/info/ClusterInfoRequest.java | 22 +++---------------- .../core/ilm/ExplainLifecycleRequest.java | 3 ++- 3 files changed, 7 insertions(+), 21 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequest.java index 84789d8a2acfb..80f0fcfdd8b41 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequest.java @@ -10,6 +10,7 @@ package org.elasticsearch.action.admin.indices.mapping.get; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.info.ClusterInfoRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.TimeValue; @@ -23,7 +24,7 @@ public class GetMappingsRequest extends ClusterInfoRequest { public GetMappingsRequest(TimeValue masterTimeout) { - super(masterTimeout); + super(masterTimeout, IndicesOptions.strictExpandOpen()); } public GetMappingsRequest(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java index 634a103e9754a..2f3e48cbca0d9 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java @@ -26,30 +26,14 @@ public abstract class ClusterInfoRequest Date: Mon, 27 Jan 2025 09:44:57 +0100 Subject: [PATCH 044/383] [Entitlements] Deny setting global defaults for Locale / TimeZone (#120804) Part of #ES-10359 --- .../entitlement/bridge/EntitlementChecker.java | 8 ++++++++ .../qa/test/RestEntitlementsCheckAction.java | 4 ++++ .../qa/test/WritePropertiesCheckActions.java | 15 +++++++++++++++ .../api/ElasticsearchEntitlementChecker.java | 17 +++++++++++++++++ 4 files changed, 44 insertions(+) diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index 4a61b51614108..d2c9541742d0a 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -42,7 +42,9 @@ import java.nio.channels.SocketChannel; import java.security.cert.CertStoreParameters; import java.util.List; +import java.util.Locale; import java.util.Properties; +import java.util.TimeZone; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HttpsURLConnection; @@ -188,6 +190,12 @@ public interface EntitlementChecker { void check$java_util_logging_LogManager$(Class callerClass); + void check$java_util_Locale$$setDefault(Class 
callerClass, Locale locale); + + void check$java_util_Locale$$setDefault(Class callerClass, Locale.Category category, Locale locale); + + void check$java_util_TimeZone$$setDefault(Class callerClass, TimeZone zone); + void check$java_net_DatagramSocket$$setDatagramSocketImplFactory(Class callerClass, DatagramSocketImplFactory fac); void check$java_net_HttpURLConnection$$setFollowRedirects(Class callerClass, boolean set); diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java index 712502b38fe90..8672620dbbd58 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java @@ -123,6 +123,10 @@ static CheckAction alwaysDenied(CheckedRunnable action) { entry("timeZoneNameProvider", alwaysDenied(RestEntitlementsCheckAction::timeZoneNameProvider$)), entry("logManager", alwaysDenied(RestEntitlementsCheckAction::logManager$)), + entry("locale_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultLocale)), + entry("locale_setDefaultForCategory", alwaysDenied(WritePropertiesCheckActions::setDefaultLocaleForCategory)), + entry("timeZone_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultTimeZone)), + entry("system_setProperty", forPlugins(WritePropertiesCheckActions::setSystemProperty)), entry("system_clearProperty", forPlugins(WritePropertiesCheckActions::clearSystemProperty)), entry("system_setSystemProperties", alwaysDenied(WritePropertiesCheckActions::setSystemProperties)), diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/WritePropertiesCheckActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/WritePropertiesCheckActions.java index 6ddb3e54a62cf..7d7fcd2175fed 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/WritePropertiesCheckActions.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/WritePropertiesCheckActions.java @@ -11,6 +11,9 @@ import org.elasticsearch.core.SuppressForbidden; +import java.util.Locale; +import java.util.TimeZone; + @SuppressForbidden(reason = "testing entitlements") class WritePropertiesCheckActions { private WritePropertiesCheckActions() {} @@ -32,4 +35,16 @@ static void clearSystemProperty() { static void setSystemProperties() { System.setProperties(System.getProperties()); // no side effect in case if allowed (but shouldn't) } + + static void setDefaultLocale() { + Locale.setDefault(Locale.getDefault()); + } + + static void setDefaultLocaleForCategory() { + Locale.setDefault(Locale.Category.DISPLAY, Locale.getDefault(Locale.Category.DISPLAY)); + } + + static void setDefaultTimeZone() { + TimeZone.setDefault(TimeZone.getDefault()); + } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 1e921e64a5ae7..55adbf45699ab 100644 --- 
a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -46,7 +46,9 @@ import java.nio.channels.SocketChannel; import java.security.cert.CertStoreParameters; import java.util.List; +import java.util.Locale; import java.util.Properties; +import java.util.TimeZone; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HttpsURLConnection; @@ -292,6 +294,21 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { policyManager.checkChangeJVMGlobalState(callerClass); } + @Override + public void check$java_util_Locale$$setDefault(Class callerClass, Locale.Category category, Locale locale) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + @Override + public void check$java_util_Locale$$setDefault(Class callerClass, Locale locale) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + + @Override + public void check$java_util_TimeZone$$setDefault(Class callerClass, TimeZone zone) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + @Override public void check$java_net_DatagramSocket$$setDatagramSocketImplFactory(Class callerClass, DatagramSocketImplFactory fac) { policyManager.checkChangeJVMGlobalState(callerClass); From 52e0f21bdd0e6446c8de380b58d13d880b430dbd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Mon, 27 Jan 2025 10:25:54 +0100 Subject: [PATCH 045/383] Enable queryable built-in roles feature by default (#120323) Making the `es.queryable_built_in_roles_enabled` feature flag enabled by default. This feature makes the built-in roles automatically indexed in `.security` index and available for querying via Query Role API. The consequence of this is that `.security` index is now created eagerly (if it's not existing) on cluster formation. In order to keep the scope of this PR small, the feature is disabled for some of the tests, because they are either non-trivial to adjust or the gain is not worthy the effort to do it now. The tests will be adjusted in a follow-up PR and later the flag will be removed completely. 
Relates to #117581 --- docs/build.gradle | 2 + .../data_stream/140_data_stream_aliases.yml | 6 +- modules/dot-prefix-validation/build.gradle | 1 + .../DotPrefixClientYamlTestSuiteIT.java | 6 ++ .../test/dot_prefix/10_basic.yml | 2 +- .../packaging/test/PasswordToolsTests.java | 67 ++++++++++++++++--- .../packaging/util/ServerUtils.java | 6 +- .../test/InternalTestCluster.java | 36 +++++++--- x-pack/plugin/build.gradle | 3 + x-pack/plugin/core/build.gradle | 1 + x-pack/plugin/fleet/build.gradle | 1 + .../xpack/fleet/FleetDataStreamIT.java | 5 ++ .../RemoteClusterSecuritySpecialUserIT.java | 2 +- .../xpack/security/LicenseDLSFLSRoleIT.java | 6 +- .../xpack/security/QueryRoleIT.java | 26 ++++--- .../PermissionPrecedenceTests.java | 22 +++++- ...ervedRealmElasticAutoconfigIntegTests.java | 37 +++++----- .../security/authz/ReadActionsTests.java | 8 ++- .../security/authz/WriteActionsTests.java | 8 ++- .../profile/AbstractProfileIntegTestCase.java | 3 +- ...eanupRoleMappingDuplicatesMigrationIT.java | 2 + .../filter/IpFilteringIntegrationTests.java | 12 ++++ .../QueryableBuiltInRolesSynchronizer.java | 8 ++- .../test/SecurityIntegTestCase.java | 7 +- .../xpack/test/rest/XPackRestIT.java | 6 ++ .../test/roles/50_remote_only.yml | 12 +++- .../test/roles/60_bulk_roles.yml | 11 ++- 27 files changed, 241 insertions(+), 65 deletions(-) diff --git a/docs/build.gradle b/docs/build.gradle index 43b61ea97c089..3a1070b3fc356 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -120,6 +120,8 @@ testClusters.matching { it.name == "yamlRestTest"}.configureEach { // TODO: remove this once cname is prepended to transport.publish_address by default in 8.0 systemProperty 'es.transport.cname_in_publish_address', 'true' + systemProperty 'es.queryable_built_in_roles_enabled', 'false' + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") requiresFeature 'es.failure_store_feature_flag_enabled', Version.fromString("8.12.0") diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml index b9042d51deb70..4b2fbad7d6a5c 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml @@ -240,7 +240,8 @@ test: {} - do: - indices.get_alias: { } + indices.get_alias: + index: test* - match: { test1.aliases.test: { } } - match: { test2.aliases.test: { } } - match: { test3.aliases.test: { } } @@ -255,7 +256,8 @@ - is_true: acknowledged - do: - indices.get_alias: {} + indices.get_alias: + index: test* - match: {test1.aliases: {}} - match: {test2.aliases: {}} - match: {test3.aliases: {}} diff --git a/modules/dot-prefix-validation/build.gradle b/modules/dot-prefix-validation/build.gradle index bbbbbb5609f1e..a4ddd92d982fa 100644 --- a/modules/dot-prefix-validation/build.gradle +++ b/modules/dot-prefix-validation/build.gradle @@ -26,4 +26,5 @@ tasks.named('yamlRestTest') { tasks.named('yamlRestCompatTest') { usesDefaultDistribution() + systemProperty 'es.queryable_built_in_roles_enabled', 'false' } diff --git a/modules/dot-prefix-validation/src/yamlRestTest/java/org/elasticsearch/validation/DotPrefixClientYamlTestSuiteIT.java b/modules/dot-prefix-validation/src/yamlRestTest/java/org/elasticsearch/validation/DotPrefixClientYamlTestSuiteIT.java index 
9ecf6b9ba7d83..27315e1a99a32 100644 --- a/modules/dot-prefix-validation/src/yamlRestTest/java/org/elasticsearch/validation/DotPrefixClientYamlTestSuiteIT.java +++ b/modules/dot-prefix-validation/src/yamlRestTest/java/org/elasticsearch/validation/DotPrefixClientYamlTestSuiteIT.java @@ -21,6 +21,8 @@ import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.junit.ClassRule; +import java.util.Objects; + import static org.elasticsearch.test.cluster.FeatureFlag.FAILURE_STORE_ENABLED; public class DotPrefixClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -55,6 +57,10 @@ private static ElasticsearchCluster createCluster() { if (setNodes) { clusterBuilder.nodes(2); } + clusterBuilder.systemProperty("es.queryable_built_in_roles_enabled", () -> { + final String enabled = System.getProperty("es.queryable_built_in_roles_enabled"); + return Objects.requireNonNullElse(enabled, ""); + }); return clusterBuilder.build(); } diff --git a/modules/dot-prefix-validation/src/yamlRestTest/resources/rest-api-spec/test/dot_prefix/10_basic.yml b/modules/dot-prefix-validation/src/yamlRestTest/resources/rest-api-spec/test/dot_prefix/10_basic.yml index ae256daeb8abb..3ad7438b16b62 100644 --- a/modules/dot-prefix-validation/src/yamlRestTest/resources/rest-api-spec/test/dot_prefix/10_basic.yml +++ b/modules/dot-prefix-validation/src/yamlRestTest/resources/rest-api-spec/test/dot_prefix/10_basic.yml @@ -2,7 +2,7 @@ teardown: - do: indices.delete: - index: .* + index: .*,-.security-* --- "Index creation with a dot-prefix is deprecated unless x-elastic-product-origin set": diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PasswordToolsTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PasswordToolsTests.java index 16795abf1b931..5fb4131c80913 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PasswordToolsTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PasswordToolsTests.java @@ -20,6 +20,7 @@ import java.nio.file.Path; import java.util.HashMap; import java.util.Map; +import java.util.concurrent.Callable; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Stream; @@ -47,7 +48,9 @@ public void test010Install() throws Exception { public void test20GeneratePasswords() throws Exception { assertWhileRunning(() -> { ServerUtils.waitForElasticsearch(installation); - Shell.Result result = installation.executables().setupPasswordsTool.run("auto --batch", null); + Shell.Result result = retryOnAuthenticationErrors( + () -> installation.executables().setupPasswordsTool.run("auto --batch", null) + ); Map userpasses = parseUsersAndPasswords(result.stdout()); for (Map.Entry userpass : userpasses.entrySet()) { String response = ServerUtils.makeRequest( @@ -102,20 +105,26 @@ public void test30AddBootstrapPassword() throws Exception { installation.executables().keystoreTool.run("add --stdin bootstrap.password", BOOTSTRAP_PASSWORD); assertWhileRunning(() -> { - String response = ServerUtils.makeRequest( - Request.Get("http://localhost:9200/_cluster/health?wait_for_status=green&timeout=180s"), - "elastic", - BOOTSTRAP_PASSWORD, - null + ServerUtils.waitForElasticsearch("green", null, installation, "elastic", BOOTSTRAP_PASSWORD, null); + final String response = retryOnAuthenticationErrors( + () -> ServerUtils.makeRequest( + Request.Get("http://localhost:9200/_cluster/health?wait_for_status=green&timeout=180s"), + "elastic", + BOOTSTRAP_PASSWORD, + null + ) ); assertThat(response, 
containsString("\"status\":\"green\"")); }); + } public void test40GeneratePasswordsBootstrapAlreadySet() throws Exception { assertWhileRunning(() -> { - - Shell.Result result = installation.executables().setupPasswordsTool.run("auto --batch", null); + ServerUtils.waitForElasticsearch("green", null, installation, "elastic", BOOTSTRAP_PASSWORD, null); + Shell.Result result = retryOnAuthenticationErrors( + () -> installation.executables().setupPasswordsTool.run("auto --batch", null) + ); Map userpasses = parseUsersAndPasswords(result.stdout()); assertThat(userpasses, hasKey("elastic")); for (Map.Entry userpass : userpasses.entrySet()) { @@ -130,6 +139,48 @@ public void test40GeneratePasswordsBootstrapAlreadySet() throws Exception { }); } + /** + * The security index is created on startup. + * It can happen that even when the security index exists, we get an authentication failure as `elastic` + * user because the reserved realm checks the security index first. + * This is because we check the security index too early (just after the creation) when all shards did not get allocated yet. + * Hence, the call can result in an `UnavailableShardsException` and cause the authentication to fail. + * We retry here on authentication errors for a couple of seconds just to verify that this is not the case. + */ + private R retryOnAuthenticationErrors(final Callable callable) throws Exception { + Exception failure = null; + int retries = 5; + while (retries-- > 0) { + try { + return callable.call(); + } catch (Exception e) { + if (e.getMessage() != null + && (e.getMessage().contains("401 Unauthorized") || e.getMessage().contains("Failed to authenticate user"))) { + logger.info( + "Authentication failed (possibly due to UnavailableShardsException for the security index), retrying [{}].", + retries, + e + ); + if (failure == null) { + failure = e; + } else { + failure.addSuppressed(e); + } + try { + Thread.sleep(1000); + } catch (InterruptedException interrupted) { + Thread.currentThread().interrupt(); + failure.addSuppressed(interrupted); + throw failure; + } + } else { + throw e; + } + } + } + throw failure; + } + private Map parseUsersAndPasswords(String output) { Matcher matcher = USERPASS_REGEX.matcher(output); assertNotNull(matcher); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java index f03e8ed205b48..ea71308b11940 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java @@ -66,7 +66,7 @@ public class ServerUtils { private static final long waitTime = TimeUnit.MINUTES.toMillis(3); private static final long timeoutLength = TimeUnit.SECONDS.toMillis(30); private static final long requestInterval = TimeUnit.SECONDS.toMillis(5); - private static final long dockerWaitForSecurityIndex = TimeUnit.SECONDS.toMillis(25); + private static final long dockerWaitForSecurityIndex = TimeUnit.SECONDS.toMillis(60); public static void waitForElasticsearch(Installation installation) throws Exception { final boolean securityEnabled; @@ -260,9 +260,7 @@ public static void waitForElasticsearch( // `elastic` , the reserved realm checks the security index first. It can happen that we check the security index // too early after the security index creation in DockerTests causing an UnavailableShardsException. 
We retry // authentication errors for a couple of seconds just to verify this is not the case. - if (installation.distribution.isDocker() - && timeElapsed < dockerWaitForSecurityIndex - && response.getStatusLine().getStatusCode() == 401) { + if (timeElapsed < dockerWaitForSecurityIndex && response.getStatusLine().getStatusCode() == 401) { logger.info( "Authentication against docker failed (possibly due to UnavailableShardsException for the security index)" + ", retrying..." diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index b11d96cb3fa24..ba3d801bf9d13 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -18,6 +18,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.store.AlreadyClosedException; +import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsRequest; import org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction; @@ -146,6 +147,8 @@ import static org.elasticsearch.node.Node.INITIAL_STATE_TIMEOUT_SETTING; import static org.elasticsearch.test.ESTestCase.TEST_REQUEST_TIMEOUT; import static org.elasticsearch.test.ESTestCase.assertBusy; +import static org.elasticsearch.test.ESTestCase.assertFalse; +import static org.elasticsearch.test.ESTestCase.assertTrue; import static org.elasticsearch.test.ESTestCase.randomFrom; import static org.elasticsearch.test.ESTestCase.runInParallel; import static org.elasticsearch.test.ESTestCase.safeAwait; @@ -160,9 +163,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** @@ -1240,16 +1241,29 @@ public synchronized void validateClusterFormed() { } logger.trace("validating cluster formed, expecting {}", expectedNodes); - assertFalse( - client().admin() - .cluster() - .prepareHealth(TEST_REQUEST_TIMEOUT) - .setWaitForEvents(Priority.LANGUID) - .setWaitForNodes(Integer.toString(expectedNodes.size())) - .get(TimeValue.timeValueSeconds(40)) - .isTimedOut() - ); try { + assertBusy(() -> { + try { + final boolean timeout = client().admin() + .cluster() + .prepareHealth(TEST_REQUEST_TIMEOUT) + .setWaitForEvents(Priority.LANGUID) + .setWaitForNodes(Integer.toString(expectedNodes.size())) + .get(TimeValue.timeValueSeconds(40)) + .isTimedOut(); + if (timeout) { + throw new IllegalStateException("timed out waiting for cluster to form"); + } + } catch (UnavailableShardsException e) { + if (e.getMessage() != null && e.getMessage().contains(".security")) { + // security index may not be ready yet, throwing assertion error to retry + throw new AssertionError(e); + } else { + throw e; + } + } + }, 30, TimeUnit.SECONDS); + final Object[] previousStates = new Object[1]; assertBusy(() -> { final List states = nodes.values() diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 5987f75f4f198..7054a71c8c614 100644 --- a/x-pack/plugin/build.gradle +++ 
b/x-pack/plugin/build.gradle @@ -104,3 +104,6 @@ tasks.named("yamlRestCompatTestTransform").configure({ task -> }) +tasks.named('yamlRestCompatTest').configure { + systemProperty 'es.queryable_built_in_roles_enabled', 'false' +} diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 04458c5ecaa57..df830eb9462b6 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -158,6 +158,7 @@ testClusters.configureEach { keystore 'bootstrap.password', 'x-pack-test-password' user username: "x_pack_rest_user", password: "x-pack-test-password" requiresFeature 'es.failure_store_feature_flag_enabled', Version.fromString("8.15.0") + systemProperty 'es.queryable_built_in_roles_enabled', 'false' } if (buildParams.isSnapshotBuild() == false) { diff --git a/x-pack/plugin/fleet/build.gradle b/x-pack/plugin/fleet/build.gradle index c00a2af430f4a..013d0acb4123b 100644 --- a/x-pack/plugin/fleet/build.gradle +++ b/x-pack/plugin/fleet/build.gradle @@ -29,4 +29,5 @@ testClusters.configureEach { setting 'xpack.security.enabled', 'true' setting 'xpack.security.autoconfiguration.enabled', 'false' user username: 'x_pack_rest_user', password: 'x-pack-test-password' + systemProperty 'es.queryable_built_in_roles_enabled', 'false' } diff --git a/x-pack/plugin/fleet/src/javaRestTest/java/org/elasticsearch/xpack/fleet/FleetDataStreamIT.java b/x-pack/plugin/fleet/src/javaRestTest/java/org/elasticsearch/xpack/fleet/FleetDataStreamIT.java index 98d8059382d6a..c32b2119f0056 100644 --- a/x-pack/plugin/fleet/src/javaRestTest/java/org/elasticsearch/xpack/fleet/FleetDataStreamIT.java +++ b/x-pack/plugin/fleet/src/javaRestTest/java/org/elasticsearch/xpack/fleet/FleetDataStreamIT.java @@ -48,6 +48,11 @@ protected Settings restAdminSettings() { .build(); } + @Override + protected boolean preserveSecurityIndicesUponCompletion() { + return true; + } + public void testAliasWithSystemDataStream() throws Exception { // Create a system data stream Request initialDocResponse = new Request("POST", ".fleet-actions-results/_doc"); diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java index 53c622898476a..5513724632389 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java @@ -218,7 +218,7 @@ public void testAnonymousUserFromQueryClusterWorks() throws Exception { { "password": "%s" }""", PASS)); assertOK(client().performRequest(changePasswordRequest)); - final Request elasticUserSearchRequest = new Request("GET", "/*:.security*/_search"); + final Request elasticUserSearchRequest = new Request("GET", "/*:.security*/_search?size=1"); elasticUserSearchRequest.setOptions( RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", basicAuthHeaderValue("elastic", PASS)) ); diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/LicenseDLSFLSRoleIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/LicenseDLSFLSRoleIT.java index 552e9f5cba578..e22e3b94ce85d 100644 --- 
a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/LicenseDLSFLSRoleIT.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/LicenseDLSFLSRoleIT.java @@ -132,7 +132,8 @@ public void testQueryDLSFLSRolesShowAsDisabled() throws Exception { .build() }; createRoleWithIndicesPrivileges(adminClient(), "role_with_FLS_and_DLS", indicesPrivileges); } - assertQuery(client(), "", 4, roles -> { + assertQuery(client(), """ + {"query":{"bool":{"must_not":{"term":{"metadata._reserved":true}}}}}""", 4, roles -> { roles.sort(Comparator.comparing(o -> ((String) o.get("name")))); assertThat(roles, iterableWithSize(4)); assertThat(roles.get(0).get("name"), equalTo("role_with_DLS")); @@ -152,7 +153,8 @@ public void testQueryDLSFLSRolesShowAsDisabled() throws Exception { assertTrue(((Boolean) responseMap.get("basic_was_started"))); assertTrue(((Boolean) responseMap.get("acknowledged"))); // now the same roles show up as disabled ("enabled" is "false") - assertQuery(client(), "", 4, roles -> { + assertQuery(client(), """ + {"query":{"bool":{"must_not":{"term":{"metadata._reserved":true}}}}}""", 4, roles -> { roles.sort(Comparator.comparing(o -> ((String) o.get("name")))); assertThat(roles, iterableWithSize(4)); assertThat(roles.get(0).get("name"), equalTo("role_with_DLS")); diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryRoleIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryRoleIT.java index 311510352d805..4d719cf2f5f02 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryRoleIT.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryRoleIT.java @@ -16,8 +16,10 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; +import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.security.support.SecurityMigrations; import org.hamcrest.Matchers; +import org.junit.Before; import java.io.IOException; import java.util.ArrayList; @@ -49,15 +51,23 @@ public final class QueryRoleIT extends SecurityInBasicRestTestCase { private static final String READ_SECURITY_USER_AUTH_HEADER = "Basic cmVhZF9zZWN1cml0eV91c2VyOnJlYWQtc2VjdXJpdHktcGFzc3dvcmQ="; - public void testSimpleQueryAllRoles() throws IOException { - assertQuery("", 0, roles -> assertThat(roles, emptyIterable())); - RoleDescriptor createdRole = createRandomRole(); - assertQuery("", 1, roles -> { - assertThat(roles, iterableWithSize(1)); - assertRoleMap(roles.get(0), createdRole); + @Before + public void initialize() { + new ReservedRolesStore(); + } + + public void testSimpleQueryAllRoles() throws Exception { + createRandomRole(); + assertQuery("", 1 + ReservedRolesStore.names().size(), roles -> { + // default size is 10 + assertThat(roles, iterableWithSize(10)); }); - assertQuery(""" - {"query":{"match_all":{}},"from":1}""", 1, roles -> assertThat(roles, emptyIterable())); + assertQuery( + Strings.format(""" + {"query":{"match_all":{}},"from":%d}""", 1 + ReservedRolesStore.names().size()), + 1 + ReservedRolesStore.names().size(), + roles -> assertThat(roles, emptyIterable()) + ); } public void testDisallowedFields() 
throws Exception { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/PermissionPrecedenceTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/PermissionPrecedenceTests.java index 60923ac023474..804b16f307329 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/PermissionPrecedenceTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/PermissionPrecedenceTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; +import org.junit.After; import java.util.Collections; import java.util.List; @@ -35,9 +36,14 @@ */ public class PermissionPrecedenceTests extends SecurityIntegTestCase { + @After + public void cleanupSecurityIndex() { + super.deleteSecurityIndex(); + } + @Override protected String configRoles() { - return """ + return super.configRoles() + "\n" + """ admin: cluster: [ all ]\s indices: @@ -54,12 +60,22 @@ protected String configUsers() { final String usersPasswdHashed = new String( getFastStoredHashAlgoForTests().hash(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING) ); - return "admin:" + usersPasswdHashed + "\n" + "client:" + usersPasswdHashed + "\n" + "user:" + usersPasswdHashed + "\n"; + return super.configUsers() + + "\n" + + "admin:" + + usersPasswdHashed + + "\n" + + "client:" + + usersPasswdHashed + + "\n" + + "user:" + + usersPasswdHashed + + "\n"; } @Override protected String configUsersRoles() { - return """ + return super.configUsersRoles() + "\n" + """ admin:admin transport_client:client user:user diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmElasticAutoconfigIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmElasticAutoconfigIntegTests.java index 8148c5021e9b9..1d7e1da66a91f 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmElasticAutoconfigIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmElasticAutoconfigIntegTests.java @@ -72,7 +72,7 @@ protected SecureString getBootstrapPassword() { private boolean isMigrationComplete(ClusterState state) { IndexMetadata indexMetadata = state.metadata().getIndices().get(TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7); - return indexMetadata.getCustomData(MIGRATION_VERSION_CUSTOM_KEY) != null; + return indexMetadata != null && indexMetadata.getCustomData(MIGRATION_VERSION_CUSTOM_KEY) != null; } private void awaitSecurityMigrationRanOnce() { @@ -89,8 +89,27 @@ private void awaitSecurityMigrationRanOnce() { safeAwait(latch); } - public void testAutoconfigFailedPasswordPromotion() { + private void deleteSecurityIndex() { + // delete the security index, if it exist + GetIndexRequest getIndexRequest = new GetIndexRequest(TEST_REQUEST_TIMEOUT); + getIndexRequest.indices(SECURITY_MAIN_ALIAS); + getIndexRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); + GetIndexResponse getIndexResponse = client().admin().indices().getIndex(getIndexRequest).actionGet(); + if (getIndexResponse.getIndices().length > 0) { + assertThat(getIndexResponse.getIndices().length, is(1)); + 
assertThat(getIndexResponse.getIndices()[0], is(TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7)); + + // Security migration needs to finish before deleting the index + awaitSecurityMigrationRanOnce(); + DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(getIndexResponse.getIndices()); + assertAcked(client().admin().indices().delete(deleteIndexRequest).actionGet()); + } + } + + public void testAutoconfigFailedPasswordPromotion() throws Exception { try { + // .security index is created automatically on node startup so delete the security index first + deleteSecurityIndex(); // prevents the .security index from being created automatically (after elastic user authentication) ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest( TEST_REQUEST_TIMEOUT, @@ -99,20 +118,6 @@ public void testAutoconfigFailedPasswordPromotion() { updateSettingsRequest.transientSettings(Settings.builder().put(Metadata.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), true)); assertAcked(clusterAdmin().updateSettings(updateSettingsRequest).actionGet()); - // delete the security index, if it exist - GetIndexRequest getIndexRequest = new GetIndexRequest(TEST_REQUEST_TIMEOUT); - getIndexRequest.indices(SECURITY_MAIN_ALIAS); - getIndexRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); - GetIndexResponse getIndexResponse = client().admin().indices().getIndex(getIndexRequest).actionGet(); - if (getIndexResponse.getIndices().length > 0) { - assertThat(getIndexResponse.getIndices().length, is(1)); - assertThat(getIndexResponse.getIndices()[0], is(TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7)); - // Security migration needs to finish before deleting the index - awaitSecurityMigrationRanOnce(); - DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(getIndexResponse.getIndices()); - assertAcked(client().admin().indices().delete(deleteIndexRequest).actionGet()); - } - // elastic user gets 503 for the good password Request restRequest = randomFrom( new Request("GET", "/_security/_authenticate"), diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java index 0acc281dd8440..7836c0e8d507d 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; +import org.junit.After; import java.util.ArrayList; import java.util.List; @@ -38,9 +39,14 @@ public class ReadActionsTests extends SecurityIntegTestCase { + @After + public void cleanupSecurityIndex() { + super.deleteSecurityIndex(); + } + @Override protected String configRoles() { - return Strings.format(""" + return super.configRoles() + "\n" + Strings.format(""" %s: cluster: [ ALL ] indices: diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/WriteActionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/WriteActionsTests.java index a3a4f1b074232..75a8476e56245 100644 --- 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/WriteActionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/WriteActionsTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; +import org.junit.After; import static org.elasticsearch.test.SecurityTestsUtils.assertAuthorizationExceptionDefaultUsers; import static org.elasticsearch.test.SecurityTestsUtils.assertThrowsAuthorizationExceptionDefaultUsers; @@ -32,9 +33,14 @@ public class WriteActionsTests extends SecurityIntegTestCase { + @After + public void cleanupSecurityIndex() { + super.deleteSecurityIndex(); + } + @Override protected String configRoles() { - return Strings.format(""" + return super.configRoles() + "\n" + Strings.format(""" %s: cluster: [ ALL ] indices: diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/AbstractProfileIntegTestCase.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/AbstractProfileIntegTestCase.java index e5b709759971a..a7309bc905ba3 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/AbstractProfileIntegTestCase.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/AbstractProfileIntegTestCase.java @@ -49,7 +49,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { } @Before - public void createNativeUsers() { + public void createNativeUsers() throws Exception { final PutUserRequest putUserRequest1 = new PutUserRequest(); putUserRequest1.username(RAC_USER_NAME); putUserRequest1.roles(RAC_ROLE, NATIVE_RAC_ROLE); @@ -57,6 +57,7 @@ public void createNativeUsers() { putUserRequest1.passwordHash(nativeRacUserPasswordHash.toCharArray()); putUserRequest1.email(RAC_USER_NAME + "@example.com"); assertThat(client().execute(PutUserAction.INSTANCE, putUserRequest1).actionGet().created(), is(true)); + assertSecurityIndexActive(); } @Override diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/CleanupRoleMappingDuplicatesMigrationIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/CleanupRoleMappingDuplicatesMigrationIT.java index e7f544399bdf0..571a1d3270166 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/CleanupRoleMappingDuplicatesMigrationIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/CleanupRoleMappingDuplicatesMigrationIT.java @@ -305,6 +305,8 @@ public void testNewIndexSkipMigration() { internalCluster().setBootstrapMasterNodeIndex(0); final String masterNode = internalCluster().getMasterName(); ensureGreen(); + deleteSecurityIndex(); // hack to force a new security index to be created + ensureGreen(); CountDownLatch awaitMigrations = awaitMigrationVersionUpdates( masterNode, SecurityMigrations.CLEANUP_ROLE_MAPPING_DUPLICATES_MIGRATION_VERSION diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/filter/IpFilteringIntegrationTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/filter/IpFilteringIntegrationTests.java index 
f479c4703194b..1f052da89d245 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/filter/IpFilteringIntegrationTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/filter/IpFilteringIntegrationTests.java @@ -16,6 +16,8 @@ import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.transport.Transport; import org.elasticsearch.xpack.core.common.socket.SocketAccess; +import org.junit.After; +import org.junit.Before; import org.junit.BeforeClass; import java.io.IOException; @@ -57,6 +59,16 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { .build(); } + @Before + public void waitForSecurityIndex() throws Exception { + assertSecurityIndexActive(); + } + + @After + public void cleanupSecurityIndex() throws Exception { + super.deleteSecurityIndex(); + } + public void testThatIpFilteringIsIntegratedIntoNettyPipelineViaHttp() throws Exception { TransportAddress transportAddress = randomFrom( internalCluster().getDataNodeInstance(HttpServerTransport.class).boundAddress().boundAddresses() diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/QueryableBuiltInRolesSynchronizer.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/QueryableBuiltInRolesSynchronizer.java index 61178ed93a4a4..578ab7ce5c16a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/QueryableBuiltInRolesSynchronizer.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/QueryableBuiltInRolesSynchronizer.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.SimpleBatchedExecutor; +import org.elasticsearch.cluster.coordination.FailedToCommitClusterStateException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; @@ -85,9 +86,9 @@ public final class QueryableBuiltInRolesSynchronizer implements ClusterStateList public static final boolean QUERYABLE_BUILT_IN_ROLES_ENABLED; static { final var propertyValue = System.getProperty("es.queryable_built_in_roles_enabled"); - if (propertyValue == null || propertyValue.isEmpty() || "false".equals(propertyValue)) { + if ("false".equals(propertyValue)) { QUERYABLE_BUILT_IN_ROLES_ENABLED = false; - } else if ("true".equals(propertyValue)) { + } else if (propertyValue == null || propertyValue.isEmpty() || "true".equals(propertyValue)) { QUERYABLE_BUILT_IN_ROLES_ENABLED = true; } else { throw new IllegalStateException( @@ -307,7 +308,8 @@ private static boolean isExpectedFailure(final Exception e) { || cause instanceof ResourceAlreadyExistsException || cause instanceof VersionConflictEngineException || cause instanceof DocumentMissingException - || cause instanceof FailedToMarkBuiltInRolesAsSyncedException; + || cause instanceof FailedToMarkBuiltInRolesAsSyncedException + || (e instanceof FailedToCommitClusterStateException && "node closed".equals(cause.getMessage())); } private boolean shouldSyncBuiltInRoles(final ClusterState state) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java index 0c5e66344e2a2..f2dfa0eae9973 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.test; +import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; @@ -439,7 +440,11 @@ protected void createSecurityIndexWithWaitForActiveShards() { ); CreateIndexRequest createIndexRequest = new CreateIndexRequest(SECURITY_MAIN_ALIAS).waitForActiveShards(ActiveShardCount.ALL) .masterNodeTimeout(TEST_REQUEST_TIMEOUT); - client.admin().indices().create(createIndexRequest).actionGet(); + try { + client.admin().indices().create(createIndexRequest).actionGet(); + } catch (ResourceAlreadyExistsException e) { + logger.info("Security index already exists, ignoring.", e); + } } protected static Index resolveSecurityIndex(Metadata metadata) { diff --git a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 988ee93bda6b4..2e7e8a6d31881 100644 --- a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -17,6 +17,8 @@ import org.junit.Before; import org.junit.ClassRule; +import java.util.Objects; + public class XPackRestIT extends AbstractXPackRestTest { @ClassRule @@ -47,6 +49,10 @@ public class XPackRestIT extends AbstractXPackRestTest { .configFile("testnode.pem", Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) .configFile("testnode.crt", Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) .configFile("service_tokens", Resource.fromClasspath("service_tokens")) + .systemProperty("es.queryable_built_in_roles_enabled", () -> { + final String enabled = System.getProperty("es.queryable_built_in_roles_enabled"); + return Objects.requireNonNullElse(enabled, ""); + }) .build(); public XPackRestIT(ClientYamlTestCandidate testCandidate) { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/50_remote_only.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/50_remote_only.yml index 1b5ce381319d2..cac31381eefb8 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/50_remote_only.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/50_remote_only.yml @@ -75,7 +75,17 @@ teardown: - do: security.query_role: body: > - {} + { + "query": { + "bool": { + "must_not": { + "term": { + "metadata._reserved": true + } + } + } + } + } - match: { total: 1 } - match: { count: 1 } - match: { roles.0.name: "remote_role" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml index c7a707f437e0c..d00e34d51d241 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml @@ -81,7 +81,16 @@ teardown: security.query_role: body: > { - "query": { "match_all": {} }, "sort": ["name"] + "query": { + "bool": { 
+ "must_not": { + "term": { + "metadata._reserved": true + } + } + } + }, + "sort": ["name"] } - match: { total: 2 } - match: { count: 2 } From 242b841402813e0905797c97965f2c21760861d0 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Mon, 27 Jan 2025 10:36:36 +0100 Subject: [PATCH 046/383] Disallow CCS with lookup join (#120277) --- .../xpack/esql/parser/LogicalPlanBuilder.java | 25 ++++++- .../esql/parser/StatementParserTests.java | 75 +++++++++++++------ 2 files changed, 76 insertions(+), 24 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index ba74bf467f2aa..7ddd3dafd2784 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -11,10 +11,12 @@ import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.tree.ParseTree; import org.elasticsearch.Build; +import org.elasticsearch.common.Strings; import org.elasticsearch.core.Tuple; import org.elasticsearch.dissect.DissectException; import org.elasticsearch.dissect.DissectParser; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -527,6 +529,13 @@ public PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { if (rightPattern.contains(WILDCARD)) { throw new ParsingException(source(target), "invalid index pattern [{}], * is not allowed in LOOKUP JOIN", rightPattern); } + if (RemoteClusterAware.isRemoteIndexName(rightPattern)) { + throw new ParsingException( + source(target), + "invalid index pattern [{}], remote clusters are not supported in LOOKUP JOIN", + rightPattern + ); + } UnresolvedRelation right = new UnresolvedRelation( source(target), @@ -557,6 +566,20 @@ public PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { throw new ParsingException(source, "JOIN ON clause only supports one field at the moment, found [{}]", matchFieldsCount); } - return p -> new LookupJoin(source, p, right, joinFields); + return p -> { + p.forEachUp(UnresolvedRelation.class, r -> { + for (var leftPattern : Strings.splitStringByCommaToArray(r.indexPattern().indexPattern())) { + if (RemoteClusterAware.isRemoteIndexName(leftPattern)) { + throw new ParsingException( + source(target), + "invalid index pattern [{}], remote clusters are not supported in LOOKUP JOIN", + r.indexPattern().indexPattern() + ); + } + } + }); + + return new LookupJoin(source, p, right, joinFields); + }; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 792b43433e1ee..cb2df8dec9a6f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -78,6 +78,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.paramAsIdentifier; import static org.elasticsearch.xpack.esql.EsqlTestUtils.paramAsPattern; import static org.elasticsearch.xpack.esql.EsqlTestUtils.referenceAttribute; +import static 
org.elasticsearch.xpack.esql.IdentifierGenerator.Features.CROSS_CLUSTER; import static org.elasticsearch.xpack.esql.IdentifierGenerator.Features.WILDCARD_PATTERN; import static org.elasticsearch.xpack.esql.IdentifierGenerator.randomIndexPattern; import static org.elasticsearch.xpack.esql.IdentifierGenerator.randomIndexPatterns; @@ -307,18 +308,18 @@ public void testStatsWithoutGroups() { ); } - public void testStatsWithoutAggs() throws Exception { + public void testStatsWithoutAggs() { assertEquals( new Aggregate(EMPTY, PROCESSING_CMD_INPUT, Aggregate.AggregateType.STANDARD, List.of(attribute("a")), List.of(attribute("a"))), processingCommand("stats by a") ); } - public void testStatsWithoutAggsOrGroup() throws Exception { + public void testStatsWithoutAggsOrGroup() { expectError("from text | stats", "At least one aggregation or grouping expression required in [stats]"); } - public void testAggsWithGroupKeyAsAgg() throws Exception { + public void testAggsWithGroupKeyAsAgg() { var queries = new String[] { """ row a = 1, b = 2 | stats a by a @@ -339,7 +340,7 @@ public void testAggsWithGroupKeyAsAgg() throws Exception { } } - public void testStatsWithGroupKeyAndAggFilter() throws Exception { + public void testStatsWithGroupKeyAndAggFilter() { var a = attribute("a"); var f = new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(a)); var filter = new Alias(EMPTY, "min(a) where a > 1", new FilteredExpression(EMPTY, f, new GreaterThan(EMPTY, a, integer(1)))); @@ -349,7 +350,7 @@ public void testStatsWithGroupKeyAndAggFilter() throws Exception { ); } - public void testStatsWithGroupKeyAndMixedAggAndFilter() throws Exception { + public void testStatsWithGroupKeyAndMixedAggAndFilter() { var a = attribute("a"); var min = new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(a)); var max = new UnresolvedFunction(EMPTY, "max", DEFAULT, List.of(a)); @@ -384,7 +385,7 @@ public void testStatsWithGroupKeyAndMixedAggAndFilter() throws Exception { ); } - public void testStatsWithoutGroupKeyMixedAggAndFilter() throws Exception { + public void testStatsWithoutGroupKeyMixedAggAndFilter() { var a = attribute("a"); var f = new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(a)); var filter = new Alias(EMPTY, "min(a) where a > 1", new FilteredExpression(EMPTY, f, new GreaterThan(EMPTY, a, integer(1)))); @@ -2067,41 +2068,41 @@ private void assertStringAsLookupIndexPattern(String string, String statement) { assertThat(tableName.fold(FoldContext.small()), equalTo(string)); } - public void testIdPatternUnquoted() throws Exception { + public void testIdPatternUnquoted() { var string = "regularString"; assertThat(breakIntoFragments(string), contains(string)); } - public void testIdPatternQuoted() throws Exception { + public void testIdPatternQuoted() { var string = "`escaped string`"; assertThat(breakIntoFragments(string), contains(string)); } - public void testIdPatternQuotedWithDoubleBackticks() throws Exception { + public void testIdPatternQuotedWithDoubleBackticks() { var string = "`escaped``string`"; assertThat(breakIntoFragments(string), contains(string)); } - public void testIdPatternUnquotedAndQuoted() throws Exception { + public void testIdPatternUnquotedAndQuoted() { var string = "this`is`a`mix`of`ids`"; assertThat(breakIntoFragments(string), contains("this", "`is`", "a", "`mix`", "of", "`ids`")); } - public void testIdPatternQuotedTraling() throws Exception { + public void testIdPatternQuotedTraling() { var string = "`foo`*"; assertThat(breakIntoFragments(string), contains("`foo`", "*")); } - public void 
testIdPatternWithDoubleQuotedStrings() throws Exception { + public void testIdPatternWithDoubleQuotedStrings() { var string = "`this``is`a`quoted `` string``with`backticks"; assertThat(breakIntoFragments(string), contains("`this``is`", "a", "`quoted `` string``with`", "backticks")); } - public void testSpaceNotAllowedInIdPattern() throws Exception { + public void testSpaceNotAllowedInIdPattern() { expectError("ROW a = 1| RENAME a AS this is `not okay`", "mismatched input 'is' expecting {, '|', ',', '.'}"); } - public void testSpaceNotAllowedInIdPatternKeep() throws Exception { + public void testSpaceNotAllowedInIdPatternKeep() { expectError("ROW a = 1, b = 1| KEEP a b", "extraneous input 'b'"); } @@ -2939,13 +2940,20 @@ public void testNamedFunctionArgumentWithUnsupportedNamedParameterTypes() { } } - public void testValidJoinPattern() { + public void testValidFromPattern() { var basePattern = randomIndexPatterns(); - var joinPattern = randomIndexPattern(without(WILDCARD_PATTERN)); + + var plan = statement("FROM " + basePattern); + + assertThat(as(plan, UnresolvedRelation.class).indexPattern().indexPattern(), equalTo(unquoteIndexPattern(basePattern))); + } + + public void testValidJoinPattern() { + var basePattern = randomIndexPatterns(without(CROSS_CLUSTER)); + var joinPattern = randomIndexPattern(without(WILDCARD_PATTERN), without(CROSS_CLUSTER)); var onField = randomIdentifier(); - var type = randomFrom("", "LOOKUP "); - var plan = statement("FROM " + basePattern + " | " + type + " JOIN " + joinPattern + " ON " + onField); + var plan = statement("FROM " + basePattern + " | LOOKUP JOIN " + joinPattern + " ON " + onField); var join = as(plan, LookupJoin.class); assertThat(as(join.left(), UnresolvedRelation.class).indexPattern().indexPattern(), equalTo(unquoteIndexPattern(basePattern))); @@ -2958,10 +2966,31 @@ public void testValidJoinPattern() { } public void testInvalidJoinPatterns() { - var joinPattern = randomIndexPattern(WILDCARD_PATTERN); - expectError( - "FROM " + randomIndexPatterns() + " | JOIN " + joinPattern + " ON " + randomIdentifier(), - "invalid index pattern [" + unquoteIndexPattern(joinPattern) + "], * is not allowed in LOOKUP JOIN" - ); + { + // wildcard + var joinPattern = randomIndexPattern(WILDCARD_PATTERN, without(CROSS_CLUSTER)); + expectError( + "FROM " + randomIndexPatterns() + " | LOOKUP JOIN " + joinPattern + " ON " + randomIdentifier(), + "invalid index pattern [" + unquoteIndexPattern(joinPattern) + "], * is not allowed in LOOKUP JOIN" + ); + } + { + // remote cluster on the right + var fromPatterns = randomIndexPatterns(without(CROSS_CLUSTER)); + var joinPattern = randomIndexPattern(CROSS_CLUSTER, without(WILDCARD_PATTERN)); + expectError( + "FROM " + fromPatterns + " | LOOKUP JOIN " + joinPattern + " ON " + randomIdentifier(), + "invalid index pattern [" + unquoteIndexPattern(joinPattern) + "], remote clusters are not supported in LOOKUP JOIN" + ); + } + { + // remote cluster on the left + var fromPatterns = randomIndexPatterns(CROSS_CLUSTER); + var joinPattern = randomIndexPattern(without(CROSS_CLUSTER), without(WILDCARD_PATTERN)); + expectError( + "FROM " + fromPatterns + " | LOOKUP JOIN " + joinPattern + " ON " + randomIdentifier(), + "invalid index pattern [" + unquoteIndexPattern(fromPatterns) + "], remote clusters are not supported in LOOKUP JOIN" + ); + } } } From a0bb46d95ffd6f776dafe292eff03121f547fda3 Mon Sep 17 00:00:00 2001 From: Dimitris Rempapis Date: Mon, 27 Jan 2025 12:11:44 +0200 Subject: [PATCH 047/383] Remove duplicate code in 
ESIntegTestCase (#120799) Remote duplicated code --- docs/changelog/120799.yaml | 5 +++++ .../main/java/org/elasticsearch/test/ESIntegTestCase.java | 4 ---- 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/120799.yaml diff --git a/docs/changelog/120799.yaml b/docs/changelog/120799.yaml new file mode 100644 index 0000000000000..b36b07a5d758a --- /dev/null +++ b/docs/changelog/120799.yaml @@ -0,0 +1,5 @@ +pr: 120799 +summary: Remove duplicate code in ESIntegTestCase +area: Search +type: bug +issues: [] diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index bb259cb9b9788..caa66e928827a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1265,10 +1265,6 @@ protected final void doEnsureClusterStateConsistency(NamedWriteableRegistry name masterClusterState.stateUUID(), localClusterState.stateUUID() ); - - // Compare the stateMaps for equality. - assertNull(XContentTestUtils.differenceBetweenMapsIgnoringArrayOrder(masterStateMap, localStateMap)); - // Compare JSON serialization assertNull( "cluster state JSON serialization does not match", From a0840a046335296751e481ef9d303187c1b395a7 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Mon, 27 Jan 2025 11:23:34 +0100 Subject: [PATCH 048/383] EQL: set allow_partial_search_results=true by default (#120267) --- docs/changelog/120267.yaml | 16 ++ docs/reference/eql/eql-search-api.asciidoc | 4 +- .../rest-api-spec/api/eql.search.json | 2 +- .../test/eql/BaseEqlSpecTestCase.java | 54 +++-- .../main/resources/test_failing_shards.toml | 49 ++++- .../xpack/eql/qa/mixed_node/EqlSearchIT.java | 4 +- .../rest-api-spec/test/eql/10_basic.yml | 27 ++- .../xpack/eql/action/CCSPartialResultsIT.java | 185 +++++++++--------- .../eql/action/PartialSearchResultsIT.java | 164 +++++++--------- .../xpack/eql/plugin/EqlPlugin.java | 2 +- 10 files changed, 274 insertions(+), 233 deletions(-) create mode 100644 docs/changelog/120267.yaml diff --git a/docs/changelog/120267.yaml b/docs/changelog/120267.yaml new file mode 100644 index 0000000000000..66a30717429c8 --- /dev/null +++ b/docs/changelog/120267.yaml @@ -0,0 +1,16 @@ +pr: 120267 +summary: Set allow_partial_search_results=true by default +area: EQL +type: breaking +issues: [] +breaking: + title: Set allow_partial_search_results=true by default + area: REST API + details: + Before this change, in case of shard failures, EQL queries always returned an error. + With this change, they will keep running and will return partial results. + impact: + EQL queries that would previously fail due to shard failures, will now succeed and return partial results. + The previous defaults can be restored by setting `xpack.eql.default_allow_partial_results` cluster setting to `false` + or setting with `allow_partial_search_results` to `false` in the query request. + notable: false diff --git a/docs/reference/eql/eql-search-api.asciidoc b/docs/reference/eql/eql-search-api.asciidoc index 544e4d7325c5b..e3203163aa818 100644 --- a/docs/reference/eql/eql-search-api.asciidoc +++ b/docs/reference/eql/eql-search-api.asciidoc @@ -102,10 +102,10 @@ If `false`, the request returns an error if one or more shards involved in the q If `true`, the query is executed only on the available shards, ignoring shard request timeouts and <>. + -Defaults to `false`. +Defaults to `true`. 
+ To override the default for this field, set the -`xpack.eql.default_allow_partial_results` cluster setting to `true`. +`xpack.eql.default_allow_partial_results` cluster setting to `false`. [IMPORTANT] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/eql.search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/eql.search.json index 0f9af508f4c16..0b1a7ad5a38d3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/eql.search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/eql.search.json @@ -45,7 +45,7 @@ "allow_partial_search_results": { "type":"boolean", "description":"Control whether the query should keep running in case of shard failures, and return partial results", - "default":false + "default":true }, "allow_partial_sequence_results": { "type":"boolean", diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/BaseEqlSpecTestCase.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/BaseEqlSpecTestCase.java index 3557114e2f4c7..05ba6762ec3ad 100644 --- a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/BaseEqlSpecTestCase.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/BaseEqlSpecTestCase.java @@ -187,29 +187,29 @@ protected ObjectPath runQuery(String index, String query) throws Exception { builder.field("max_samples_per_key", maxSamplesPerKey); } boolean allowPartialResultsInBody = randomBoolean(); - if (allowPartialSearchResults != null) { - if (allowPartialResultsInBody) { + + if (allowPartialResultsInBody) { + if (allowPartialSearchResults != null) { builder.field("allow_partial_search_results", String.valueOf(allowPartialSearchResults)); - if (allowPartialSequenceResults != null) { - builder.field("allow_partial_sequence_results", String.valueOf(allowPartialSequenceResults)); - } - } else { - // these will be overwritten by the path params, that have higher priority than the query (JSON body) params - if (allowPartialSearchResults != null) { - builder.field("allow_partial_search_results", randomBoolean()); - } - if (allowPartialSequenceResults != null) { - builder.field("allow_partial_sequence_results", randomBoolean()); - } + } else if (randomBoolean()) { + builder.field("allow_partial_search_results", true); + } + if (allowPartialSequenceResults != null) { + builder.field("allow_partial_sequence_results", String.valueOf(allowPartialSequenceResults)); + } else if (randomBoolean()) { + builder.field("allow_partial_sequence_results", false); } } else { - // Tests that don't specify a setting for these parameters should always pass. - // These params should be irrelevant. 
- if (randomBoolean()) { + // these will be overwritten by the path params, that have higher priority than the query (JSON body) params + if (allowPartialSearchResults != null) { builder.field("allow_partial_search_results", randomBoolean()); + } else if (randomBoolean()) { + builder.field("allow_partial_search_results", true); } - if (randomBoolean()) { + if (allowPartialSequenceResults != null) { builder.field("allow_partial_sequence_results", randomBoolean()); + } else if (randomBoolean()) { + builder.field("allow_partial_sequence_results", false); } } builder.endObject(); @@ -219,23 +219,17 @@ protected ObjectPath runQuery(String index, String query) throws Exception { if (ccsMinimizeRoundtrips != null) { request.addParameter("ccs_minimize_roundtrips", ccsMinimizeRoundtrips.toString()); } - if (allowPartialSearchResults != null) { - if (allowPartialResultsInBody == false) { + if (allowPartialResultsInBody == false) { + if (allowPartialSearchResults != null) { request.addParameter("allow_partial_search_results", String.valueOf(allowPartialSearchResults)); - if (allowPartialSequenceResults != null) { - request.addParameter("allow_partial_sequence_results", String.valueOf(allowPartialSequenceResults)); - } + } else if (randomBoolean()) { + request.addParameter("allow_partial_search_results", String.valueOf(true)); } - } else { - // Tests that don't specify a setting for these parameters should always pass. - // These params should be irrelevant. - if (randomBoolean()) { - request.addParameter("allow_partial_search_results", String.valueOf(randomBoolean())); - } - if (randomBoolean()) { - request.addParameter("allow_partial_sequence_results", String.valueOf(randomBoolean())); + if (allowPartialSequenceResults != null) { + request.addParameter("allow_partial_sequence_results", String.valueOf(allowPartialSequenceResults)); } } + int timeout = Math.toIntExact(timeout().millis()); RequestConfig config = RequestConfig.copy(RequestConfig.DEFAULT) .setConnectionRequestTimeout(timeout) diff --git a/x-pack/plugin/eql/qa/common/src/main/resources/test_failing_shards.toml b/x-pack/plugin/eql/qa/common/src/main/resources/test_failing_shards.toml index a551c66fd48bd..dd0638a7e6e79 100644 --- a/x-pack/plugin/eql/qa/common/src/main/resources/test_failing_shards.toml +++ b/x-pack/plugin/eql/qa/common/src/main/resources/test_failing_shards.toml @@ -10,6 +10,13 @@ expect_shard_failures = false [[queries]] name = "eventQueryShardFailures" query = 'process where serial_event_id == 1 or broken == 1' +expected_event_ids = [1] +expect_shard_failures = true + + +[[queries]] +name = "eventQueryShardFailuresTrue" +query = 'process where serial_event_id == 1 or broken == 1' allow_partial_search_results = true expected_event_ids = [1] expect_shard_failures = true @@ -18,7 +25,6 @@ expect_shard_failures = true [[queries]] name = "eventQueryShardFailuresOptionalField" query = 'process where serial_event_id == 1 and ?optional_field_default_null == null or broken == 1' -allow_partial_search_results = true expected_event_ids = [1] expect_shard_failures = true @@ -26,7 +32,6 @@ expect_shard_failures = true [[queries]] name = "eventQueryShardFailuresOptionalFieldMatching" query = 'process where serial_event_id == 2 and ?subtype == "create" or broken == 1' -allow_partial_search_results = true expected_event_ids = [2] expect_shard_failures = true @@ -64,7 +69,6 @@ sequence [process where serial_event_id == 1] [process where serial_event_id == 2] ''' -allow_partial_search_results = true expected_event_ids = [1, 2] 
expect_shard_failures = false @@ -76,6 +80,17 @@ sequence [process where serial_event_id == 1 or broken == 1] [process where serial_event_id == 2] ''' +expected_event_ids = [] +expect_shard_failures = true + + +[[queries]] +name = "sequenceQueryMissingShardsTrue" +query = ''' +sequence + [process where serial_event_id == 1 or broken == 1] + [process where serial_event_id == 2] +''' allow_partial_search_results = true expected_event_ids = [] expect_shard_failures = true @@ -88,6 +103,18 @@ sequence [process where serial_event_id == 1 or broken == 1] [process where serial_event_id == 2] ''' +allow_partial_sequence_results = true +expected_event_ids = [1, 2] +expect_shard_failures = true + + +[[queries]] +name = "sequenceQueryMissingShardsPartialResultsTrue" +query = ''' +sequence + [process where serial_event_id == 1 or broken == 1] + [process where serial_event_id == 2] +''' allow_partial_search_results = true allow_partial_sequence_results = true expected_event_ids = [1, 2] @@ -101,7 +128,6 @@ sequence [process where ?serial_event_id == 1 or broken == 1] [process where serial_event_id == 2] ''' -allow_partial_search_results = true allow_partial_sequence_results = true expected_event_ids = [1, 2] expect_shard_failures = true @@ -114,7 +140,6 @@ sequence with maxspan=100000d [process where serial_event_id == 1 and ?subtype == "create" or broken == 1] [process where serial_event_id == 2] ''' -allow_partial_search_results = true allow_partial_sequence_results = true expected_event_ids = [1, 2] expect_shard_failures = true @@ -128,7 +153,6 @@ sequence with maxspan=100000d ![process where broken == 1] [process where serial_event_id == 2] ''' -allow_partial_search_results = true allow_partial_sequence_results = true expected_event_ids = [1, -1, 2] expect_shard_failures = true @@ -142,7 +166,6 @@ sequence with maxspan=100000d ![process where broken == 1] [process where serial_event_id == 2] ''' -allow_partial_search_results = true allow_partial_sequence_results = true expected_event_ids = [1, -1, 2] expect_shard_failures = true @@ -155,6 +178,17 @@ sample by event_subtype_full [process where serial_event_id == 1 or broken == 1] [process where serial_event_id == 2] ''' +expected_event_ids = [1, 2] +expect_shard_failures = true + + +[[queries]] +name = "sampleQueryMissingShardsPartialResultsTrue" +query = ''' +sample by event_subtype_full + [process where serial_event_id == 1 or broken == 1] + [process where serial_event_id == 2] +''' allow_partial_search_results = true expected_event_ids = [1, 2] expect_shard_failures = true @@ -167,7 +201,6 @@ sample by event_subtype_full [process where serial_event_id == 1 and ?subtype == "create" or broken == 1] [process where serial_event_id == 2] ''' -allow_partial_search_results = true expected_event_ids = [1, 2] expect_shard_failures = true diff --git a/x-pack/plugin/eql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/eql/qa/mixed_node/EqlSearchIT.java b/x-pack/plugin/eql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/eql/qa/mixed_node/EqlSearchIT.java index 60c7fb1c7ad25..9ad69a9a8eabe 100644 --- a/x-pack/plugin/eql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/eql/qa/mixed_node/EqlSearchIT.java +++ b/x-pack/plugin/eql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/eql/qa/mixed_node/EqlSearchIT.java @@ -410,10 +410,10 @@ private void assertMultiValueFunctionQuery( StringBuilder payload = new StringBuilder("{\"query\":\"" + query + "\""); if (randomBoolean()) { - payload.append(", 
\"allow_partial_search_results\": true"); + payload.append(", \"allow_partial_search_results\": " + randomBoolean()); } if (randomBoolean()) { - payload.append(", \"allow_partial_sequence_results\": true"); + payload.append(", \"allow_partial_sequence_results\": " + randomBoolean()); } payload.append("}"); request.setJsonEntity(payload.toString()); diff --git a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml index c7974f3b584b4..17ca924c009ac 100644 --- a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml +++ b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml @@ -509,6 +509,30 @@ setup: --- + +"Execute query shard failures": + - do: + eql.search: + index: eql_test* + body: + query: 'process where user == "SYSTEM" and day_of_week == "Monday"' + fields: [{"field":"@timestamp","format":"epoch_millis"},"id","valid","day_of_week"] + allow_partial_search_results: true + + - match: {timed_out: false} + - match: {hits.total.value: 1} + - match: {hits.total.relation: "eq"} + - match: {hits.events.0._source.user: "SYSTEM"} + - match: {hits.events.0._id: "1"} + - match: {hits.events.0.fields.@timestamp: ["1580733296000"]} + - match: {hits.events.0.fields.id: [123]} + - match: {hits.events.0.fields.valid: [false]} + - match: {hits.events.0.fields.day_of_week: ["Monday"]} + - match: {shard_failures.0.index: "eql_test_rebel"} + + +--- + "Execute query shard failures and with allow_partial_search_results": - do: eql.search: @@ -535,7 +559,6 @@ setup: - do: eql.search: index: eql_test* - allow_partial_search_results: true body: query: 'process where user == "SYSTEM" and day_of_week == "Monday"' fields: [{"field":"@timestamp","format":"epoch_millis"},"id","valid","day_of_week"] @@ -575,7 +598,6 @@ setup: body: query: 'sequence [process where user == "SYSTEM" and day_of_week == "Monday"] [process where user == "SYSTEM" and day_of_week == "Tuesday"]' fields: [{"field":"@timestamp","format":"epoch_millis"},"id","valid","day_of_week"] - allow_partial_search_results: true allow_partial_sequence_results: true - match: {timed_out: false} @@ -600,7 +622,6 @@ setup: - do: eql.search: index: eql_test* - allow_partial_search_results: true allow_partial_sequence_results: true body: query: 'sequence [process where user == "SYSTEM" and day_of_week == "Monday"] [process where user == "SYSTEM" and day_of_week == "Tuesday"]' diff --git a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/CCSPartialResultsIT.java b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/CCSPartialResultsIT.java index da6bb6180428b..6cf4bf54f1f23 100644 --- a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/CCSPartialResultsIT.java +++ b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/CCSPartialResultsIT.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; @@ -26,6 +27,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; 
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class CCSPartialResultsIT extends AbstractMultiClustersTestCase { @@ -222,9 +224,10 @@ public void testAllowPartialSearchAndSequence_event() throws ExecutionException, cluster(REMOTE_CLUSTER).stopNode(remoteNode); // event query - var request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("process where true") - .allowPartialSearchResults(true); + var request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*").query("process where true"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } var response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().events().size(), equalTo(5)); for (int i = 0; i < 5; i++) { @@ -244,8 +247,10 @@ public void testAllowPartialSearchAndSequence_sequence() throws ExecutionExcepti // sequence query on both shards var request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") .query("sequence [process where value == 1] [process where value == 2]") - .allowPartialSearchResults(true) .allowPartialSequenceResults(true); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } var response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -255,8 +260,10 @@ public void testAllowPartialSearchAndSequence_sequence() throws ExecutionExcepti // sequence query on the available shard only request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") .query("sequence [process where value == 1] [process where value == 3]") - .allowPartialSearchResults(true) .allowPartialSequenceResults(true); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(1)); var sequence = response.hits().sequences().get(0); @@ -269,8 +276,10 @@ public void testAllowPartialSearchAndSequence_sequence() throws ExecutionExcepti // sequence query on the unavailable shard only request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") .query("sequence [process where value == 0] [process where value == 2]") - .allowPartialSearchResults(true) .allowPartialSequenceResults(true); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -280,8 +289,10 @@ public void testAllowPartialSearchAndSequence_sequence() throws ExecutionExcepti // sequence query with missing event on unavailable shard. 
THIS IS A FALSE POSITIVE request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") .query("sequence with maxspan=10s [process where value == 1] ![process where value == 2] [process where value == 3]") - .allowPartialSearchResults(true) .allowPartialSequenceResults(true); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(1)); sequence = response.hits().sequences().get(0); @@ -303,9 +314,10 @@ public void testAllowPartialSearchAndSequence_sample() throws ExecutionException // sample query on both shards var request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sample by key [process where value == 2] [process where value == 1]") - .allowPartialSearchResults(true) - .allowPartialSequenceResults(true); + .query("sample by key [process where value == 2] [process where value == 1]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } var response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -314,9 +326,10 @@ public void testAllowPartialSearchAndSequence_sample() throws ExecutionException // sample query on the available shard only request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sample by key [process where value == 3] [process where value == 1]") - .allowPartialSearchResults(true) - .allowPartialSequenceResults(true); + .query("sample by key [process where value == 3] [process where value == 1]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(1)); var sample = response.hits().sequences().get(0); @@ -328,9 +341,10 @@ public void testAllowPartialSearchAndSequence_sample() throws ExecutionException // sample query on the unavailable shard only request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sample by key [process where value == 2] [process where value == 0]") - .allowPartialSearchResults(true) - .allowPartialSequenceResults(true); + .query("sample by key [process where value == 2] [process where value == 0]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -353,9 +367,10 @@ public void testAllowPartialSearch_event() throws ExecutionException, Interrupte cluster(REMOTE_CLUSTER).stopNode(remoteNode); // event query - var request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("process where true") - .allowPartialSearchResults(true); + var request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*").query("process where true"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } var response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().events().size(), equalTo(5)); for (int i = 0; i < 5; i++) { @@ -375,8 +390,10 @@ public void testAllowPartialSearch_sequence() throws ExecutionException, Interru // sequence query on both shards var request = new EqlSearchRequest().indices(REMOTE_CLUSTER + 
":test-*") - .query("sequence [process where value == 1] [process where value == 2]") - .allowPartialSearchResults(true); + .query("sequence [process where value == 1] [process where value == 2]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } var response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -385,8 +402,10 @@ public void testAllowPartialSearch_sequence() throws ExecutionException, Interru // sequence query on the available shard only request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sequence [process where value == 1] [process where value == 3]") - .allowPartialSearchResults(true); + .query("sequence [process where value == 1] [process where value == 3]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -395,8 +414,10 @@ public void testAllowPartialSearch_sequence() throws ExecutionException, Interru // sequence query on the unavailable shard only request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sequence [process where value == 0] [process where value == 2]") - .allowPartialSearchResults(true); + .query("sequence [process where value == 0] [process where value == 2]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -405,8 +426,10 @@ public void testAllowPartialSearch_sequence() throws ExecutionException, Interru // sequence query with missing event on unavailable shard. 
THIS IS A FALSE POSITIVE request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sequence with maxspan=10s [process where value == 1] ![process where value == 2] [process where value == 3]") - .allowPartialSearchResults(true); + .query("sequence with maxspan=10s [process where value == 1] ![process where value == 2] [process where value == 3]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -425,8 +448,10 @@ public void testAllowPartialSearch_sample() throws ExecutionException, Interrupt // sample query on both shards var request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sample by key [process where value == 2] [process where value == 1]") - .allowPartialSearchResults(true); + .query("sample by key [process where value == 2] [process where value == 1]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } var response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -435,8 +460,10 @@ public void testAllowPartialSearch_sample() throws ExecutionException, Interrupt // sample query on the available shard only request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sample by key [process where value == 3] [process where value == 1]") - .allowPartialSearchResults(true); + .query("sample by key [process where value == 3] [process where value == 1]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(1)); var sample = response.hits().sequences().get(0); @@ -448,8 +475,10 @@ public void testAllowPartialSearch_sample() throws ExecutionException, Interrupt // sample query on the unavailable shard only request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sample by key [process where value == 2] [process where value == 0]") - .allowPartialSearchResults(true); + .query("sample by key [process where value == 2] [process where value == 0]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -459,7 +488,7 @@ public void testAllowPartialSearch_sample() throws ExecutionException, Interrupt } // ------------------------------------------------------------------------ - // same queries, with missing shards and with default xpack.eql.default_allow_partial_results=true + // same queries, with missing shards and with default xpack.eql.default_allow_partial_results=false // ------------------------------------------------------------------------ public void testClusterSetting_event() throws ExecutionException, InterruptedException, IOException { @@ -474,19 +503,13 @@ public void testClusterSetting_event() throws ExecutionException, InterruptedExc .execute( ClusterUpdateSettingsAction.INSTANCE, new ClusterUpdateSettingsRequest(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS).persistentSettings( - 
Settings.builder().put(EqlPlugin.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.getKey(), true) + Settings.builder().put(EqlPlugin.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.getKey(), false) ) ) .get(); // event query - var request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*").query("process where true"); - var response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().events().size(), equalTo(5)); - for (int i = 0; i < 5; i++) { - assertThat(response.hits().events().get(i).toString(), containsString("\"value\" : " + (i * 2 + 1))); - } - assertThat(response.shardFailures().length, is(1)); + shouldFailWithDefaults("process where true"); localClient().execute( ClusterUpdateSettingsAction.INSTANCE, @@ -508,45 +531,23 @@ public void testClusterSetting_sequence() throws ExecutionException, Interrupted .execute( ClusterUpdateSettingsAction.INSTANCE, new ClusterUpdateSettingsRequest(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS).persistentSettings( - Settings.builder().put(EqlPlugin.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.getKey(), true) + Settings.builder().put(EqlPlugin.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.getKey(), false) ) ) .get(); // sequence query on both shards - var request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sequence [process where value == 1] [process where value == 2]"); - var response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(0)); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1-remote")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFailWithDefaults("sequence [process where value == 1] [process where value == 2]"); // sequence query on the available shard only - request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sequence [process where value == 1] [process where value == 3]"); - response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(0)); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1-remote")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFailWithDefaults("sequence [process where value == 1] [process where value == 3]"); // sequence query on the unavailable shard only - request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sequence [process where value == 0] [process where value == 2]"); - response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(0)); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1-remote")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFailWithDefaults("sequence [process where value == 0] [process where value == 2]"); // sequence query with missing event on unavailable shard. 
THIS IS A FALSE POSITIVE - request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sequence with maxspan=10s [process where value == 1] ![process where value == 2] [process where value == 3]"); - response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(0)); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1-remote")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFailWithDefaults( + "sequence with maxspan=10s [process where value == 1] ![process where value == 2] [process where value == 3]" + ); localClient().execute( ClusterUpdateSettingsAction.INSTANCE, @@ -568,40 +569,19 @@ public void testClusterSetting_sample() throws ExecutionException, InterruptedEx .execute( ClusterUpdateSettingsAction.INSTANCE, new ClusterUpdateSettingsRequest(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS).persistentSettings( - Settings.builder().put(EqlPlugin.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.getKey(), true) + Settings.builder().put(EqlPlugin.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.getKey(), false) ) ) .get(); // sample query on both shards - var request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sample by key [process where value == 2] [process where value == 1]"); - var response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(0)); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1-remote")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFailWithDefaults("sample by key [process where value == 2] [process where value == 1]"); // sample query on the available shard only - request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sample by key [process where value == 3] [process where value == 1]"); - response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(1)); - var sample = response.hits().sequences().get(0); - assertThat(sample.events().get(0).toString(), containsString("\"value\" : 3")); - assertThat(sample.events().get(1).toString(), containsString("\"value\" : 1")); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1-remote")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFailWithDefaults("sample by key [process where value == 3] [process where value == 1]"); // sample query on the unavailable shard only - request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*") - .query("sample by key [process where value == 2] [process where value == 0]"); - response = localClient().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(0)); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1-remote")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFailWithDefaults("sample by key [process where value == 2] [process where value == 0]"); localClient().execute( ClusterUpdateSettingsAction.INSTANCE, @@ -610,4 +590,17 @@ public void testClusterSetting_sample() 
throws ExecutionException, InterruptedEx ) ).get(); } + + private void shouldFailWithDefaults(String query) throws InterruptedException { + EqlSearchRequest request = new EqlSearchRequest().indices(REMOTE_CLUSTER + ":test-*").query(query); + if (randomBoolean()) { + request = request.allowPartialSequenceResults(randomBoolean()); + } + try { + localClient().execute(EqlSearchAction.INSTANCE, request).get(); + fail(); + } catch (ExecutionException e) { + assertThat(e.getCause().getCause(), instanceOf(SearchPhaseExecutionException.class)); + } + } } diff --git a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/PartialSearchResultsIT.java b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/PartialSearchResultsIT.java index 9048d11f4eddf..712695fa1b9ce 100644 --- a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/PartialSearchResultsIT.java +++ b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/PartialSearchResultsIT.java @@ -274,7 +274,10 @@ public void testAllowPartialSearchAndSequenceResults_event() throws Exception { internalCluster().stopNode(assignedNodeForIndex1); // event query - var request = new EqlSearchRequest().indices("test-*").query("process where true").allowPartialSearchResults(true); + var request = new EqlSearchRequest().indices("test-*").query("process where true"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } var response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().events().size(), equalTo(5)); for (int i = 0; i < 5; i++) { @@ -295,8 +298,11 @@ public void testAllowPartialSearchAndSequenceResults_sequence() throws Exception // sequence query on both shards var request = new EqlSearchRequest().indices("test-*") .query("sequence [process where value == 1] [process where value == 2]") - .allowPartialSearchResults(true) .allowPartialSequenceResults(true); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } + var response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -306,8 +312,10 @@ public void testAllowPartialSearchAndSequenceResults_sequence() throws Exception // sequence query on the available shard only request = new EqlSearchRequest().indices("test-*") .query("sequence [process where value == 1] [process where value == 3]") - .allowPartialSearchResults(true) .allowPartialSequenceResults(true); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(1)); var sequence = response.hits().sequences().get(0); @@ -320,8 +328,10 @@ public void testAllowPartialSearchAndSequenceResults_sequence() throws Exception // sequence query on the unavailable shard only request = new EqlSearchRequest().indices("test-*") .query("sequence [process where value == 0] [process where value == 2]") - .allowPartialSearchResults(true) .allowPartialSequenceResults(true); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -331,8 +341,10 @@ public void 
testAllowPartialSearchAndSequenceResults_sequence() throws Exception // sequence query with missing event on unavailable shard. THIS IS A FALSE POSITIVE request = new EqlSearchRequest().indices("test-*") .query("sequence with maxspan=10s [process where value == 1] ![process where value == 2] [process where value == 3]") - .allowPartialSearchResults(true) .allowPartialSequenceResults(true); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(1)); sequence = response.hits().sequences().get(0); @@ -355,8 +367,10 @@ public void testAllowPartialSearchAndSequenceResults_sample() throws Exception { // sample query on both shards var request = new EqlSearchRequest().indices("test-*") .query("sample by key [process where value == 2] [process where value == 1]") - .allowPartialSearchResults(true) .allowPartialSequenceResults(true); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } var response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -366,8 +380,10 @@ public void testAllowPartialSearchAndSequenceResults_sample() throws Exception { // sample query on the available shard only request = new EqlSearchRequest().indices("test-*") .query("sample by key [process where value == 3] [process where value == 1]") - .allowPartialSearchResults(true) .allowPartialSequenceResults(true); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(1)); var sample = response.hits().sequences().get(0); @@ -380,8 +396,10 @@ public void testAllowPartialSearchAndSequenceResults_sample() throws Exception { // sample query on the unavailable shard only request = new EqlSearchRequest().indices("test-*") .query("sample by key [process where value == 2] [process where value == 0]") - .allowPartialSearchResults(true) .allowPartialSequenceResults(true); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -404,7 +422,10 @@ public void testAllowPartialSearchResults_event() throws Exception { internalCluster().stopNode(assignedNodeForIndex1); // event query - var request = new EqlSearchRequest().indices("test-*").query("process where true").allowPartialSearchResults(true); + var request = new EqlSearchRequest().indices("test-*").query("process where true"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } var response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().events().size(), equalTo(5)); for (int i = 0; i < 5; i++) { @@ -423,9 +444,10 @@ public void testAllowPartialSearchResults_sequence() throws Exception { internalCluster().stopNode(assignedNodeForIndex1); // sequence query on both shards - var request = new EqlSearchRequest().indices("test-*") - .query("sequence [process where value == 1] [process where value == 2]") - .allowPartialSearchResults(true); + var request = new EqlSearchRequest().indices("test-*").query("sequence [process where value == 1] [process where value == 2]"); 
+ if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } var response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -433,9 +455,10 @@ public void testAllowPartialSearchResults_sequence() throws Exception { assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); // sequence query on the available shard only - request = new EqlSearchRequest().indices("test-*") - .query("sequence [process where value == 1] [process where value == 3]") - .allowPartialSearchResults(true); + request = new EqlSearchRequest().indices("test-*").query("sequence [process where value == 1] [process where value == 3]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -443,9 +466,10 @@ public void testAllowPartialSearchResults_sequence() throws Exception { assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); // sequence query on the unavailable shard only - request = new EqlSearchRequest().indices("test-*") - .query("sequence [process where value == 0] [process where value == 2]") - .allowPartialSearchResults(true); + request = new EqlSearchRequest().indices("test-*").query("sequence [process where value == 0] [process where value == 2]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -454,8 +478,10 @@ public void testAllowPartialSearchResults_sequence() throws Exception { // sequence query with missing event on unavailable shard. 
THIS IS A FALSE POSITIVE request = new EqlSearchRequest().indices("test-*") - .query("sequence with maxspan=10s [process where value == 1] ![process where value == 2] [process where value == 3]") - .allowPartialSearchResults(true); + .query("sequence with maxspan=10s [process where value == 1] ![process where value == 2] [process where value == 3]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -473,9 +499,10 @@ public void testAllowPartialSearchResults_sample() throws Exception { internalCluster().stopNode(assignedNodeForIndex1); // sample query on both shards - var request = new EqlSearchRequest().indices("test-*") - .query("sample by key [process where value == 2] [process where value == 1]") - .allowPartialSearchResults(true); + var request = new EqlSearchRequest().indices("test-*").query("sample by key [process where value == 2] [process where value == 1]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } var response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -483,9 +510,10 @@ public void testAllowPartialSearchResults_sample() throws Exception { assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); // sample query on the available shard only - request = new EqlSearchRequest().indices("test-*") - .query("sample by key [process where value == 3] [process where value == 1]") - .allowPartialSearchResults(true); + request = new EqlSearchRequest().indices("test-*").query("sample by key [process where value == 3] [process where value == 1]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(1)); var sample = response.hits().sequences().get(0); @@ -496,9 +524,10 @@ public void testAllowPartialSearchResults_sample() throws Exception { assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); // sample query on the unavailable shard only - request = new EqlSearchRequest().indices("test-*") - .query("sample by key [process where value == 2] [process where value == 0]") - .allowPartialSearchResults(true); + request = new EqlSearchRequest().indices("test-*").query("sample by key [process where value == 2] [process where value == 0]"); + if (randomBoolean()) { + request = request.allowPartialSearchResults(true); + } response = client().execute(EqlSearchAction.INSTANCE, request).get(); assertThat(response.hits().sequences().size(), equalTo(0)); assertThat(response.shardFailures().length, is(1)); @@ -605,7 +634,7 @@ public void testAsyncAllowPartialSearchResults_sample() throws Exception { } // ------------------------------------------------------------------------ - // same queries, with missing shards and with default xpack.eql.default_allow_partial_results=true + // same queries, with missing shards and with default xpack.eql.default_allow_partial_results=false // ------------------------------------------------------------------------ public void testClusterSetting_event() throws Exception { @@ -619,18 +648,12 @@ public void testClusterSetting_event() throws Exception { 
client().execute( ClusterUpdateSettingsAction.INSTANCE, new ClusterUpdateSettingsRequest(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS).persistentSettings( - Settings.builder().put(EqlPlugin.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.getKey(), true) + Settings.builder().put(EqlPlugin.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.getKey(), false) ) ).get(); // event query - var request = new EqlSearchRequest().indices("test-*").query("process where true"); - var response = client().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().events().size(), equalTo(5)); - for (int i = 0; i < 5; i++) { - assertThat(response.hits().events().get(i).toString(), containsString("\"value\" : " + (i * 2 + 1))); - } - assertThat(response.shardFailures().length, is(1)); + shouldFail("process where true"); client().execute( ClusterUpdateSettingsAction.INSTANCE, @@ -651,41 +674,20 @@ public void testClusterSetting_sequence() throws Exception { client().execute( ClusterUpdateSettingsAction.INSTANCE, new ClusterUpdateSettingsRequest(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS).persistentSettings( - Settings.builder().put(EqlPlugin.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.getKey(), true) + Settings.builder().put(EqlPlugin.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.getKey(), false) ) ).get(); // sequence query on both shards - var request = new EqlSearchRequest().indices("test-*").query("sequence [process where value == 1] [process where value == 2]"); - var response = client().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(0)); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFail("sequence [process where value == 1] [process where value == 2]"); // sequence query on the available shard only - request = new EqlSearchRequest().indices("test-*").query("sequence [process where value == 1] [process where value == 3]"); - response = client().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(0)); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFail("sequence [process where value == 1] [process where value == 3]"); // sequence query on the unavailable shard only - request = new EqlSearchRequest().indices("test-*").query("sequence [process where value == 0] [process where value == 2]"); - response = client().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(0)); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFail("sequence [process where value == 0] [process where value == 2]"); // sequence query with missing event on unavailable shard. 
THIS IS A FALSE POSITIVE - request = new EqlSearchRequest().indices("test-*") - .query("sequence with maxspan=10s [process where value == 1] ![process where value == 2] [process where value == 3]"); - response = client().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(0)); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFail("sequence with maxspan=10s [process where value == 1] ![process where value == 2] [process where value == 3]"); client().execute( ClusterUpdateSettingsAction.INSTANCE, @@ -706,36 +708,18 @@ public void testClusterSetting_sample() throws Exception { client().execute( ClusterUpdateSettingsAction.INSTANCE, new ClusterUpdateSettingsRequest(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS).persistentSettings( - Settings.builder().put(EqlPlugin.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.getKey(), true) + Settings.builder().put(EqlPlugin.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.getKey(), false) ) ).get(); // sample query on both shards - var request = new EqlSearchRequest().indices("test-*").query("sample by key [process where value == 2] [process where value == 1]"); - var response = client().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(0)); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFail("sample by key [process where value == 2] [process where value == 1]"); // sample query on the available shard only - request = new EqlSearchRequest().indices("test-*").query("sample by key [process where value == 3] [process where value == 1]"); - response = client().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(1)); - var sample = response.hits().sequences().get(0); - assertThat(sample.events().get(0).toString(), containsString("\"value\" : 3")); - assertThat(sample.events().get(1).toString(), containsString("\"value\" : 1")); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFail("sample by key [process where value == 3] [process where value == 1]"); // sample query on the unavailable shard only - request = new EqlSearchRequest().indices("test-*").query("sample by key [process where value == 2] [process where value == 0]"); - response = client().execute(EqlSearchAction.INSTANCE, request).get(); - assertThat(response.hits().sequences().size(), equalTo(0)); - assertThat(response.shardFailures().length, is(1)); - assertThat(response.shardFailures()[0].index(), is("test-1")); - assertThat(response.shardFailures()[0].reason(), containsString("NoShardAvailableActionException")); + shouldFail("sample by key [process where value == 2] [process where value == 0]"); client().execute( ClusterUpdateSettingsAction.INSTANCE, @@ -751,7 +735,9 @@ private static EqlSearchResponse runAsync(String query, Boolean allowPartialSear EqlSearchResponse response; request = new EqlSearchRequest().indices("test-*").query(query).waitForCompletionTimeout(TimeValue.ZERO); if (allowPartialSearchResults != 
null) { - request = request.allowPartialSearchResults(allowPartialSearchResults); + if (allowPartialSearchResults == false || randomBoolean()) request = request.allowPartialSearchResults( + allowPartialSearchResults + ); } response = client().execute(EqlSearchAction.INSTANCE, request).get(); while (response.isRunning()) { @@ -764,9 +750,7 @@ private static EqlSearchResponse runAsync(String query, Boolean allowPartialSear private static void shouldFail(String query) throws InterruptedException { EqlSearchRequest request = new EqlSearchRequest().indices("test-*").query(query); - if (randomBoolean()) { - request = request.allowPartialSearchResults(false); - } + request = request.allowPartialSearchResults(false); if (randomBoolean()) { request = request.allowPartialSequenceResults(randomBoolean()); } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java index 210f88c991539..62f4110f9f457 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java @@ -62,7 +62,7 @@ public class EqlPlugin extends Plugin implements ActionPlugin, CircuitBreakerPlu public static final Setting DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS = Setting.boolSetting( "xpack.eql.default_allow_partial_results", - false, + true, Setting.Property.NodeScope, Setting.Property.Dynamic ); From 4821552a95d8e0582077dc238d6aa412bcec2a8f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 27 Jan 2025 21:32:22 +1100 Subject: [PATCH 049/383] Mute org.elasticsearch.xpack.esql.ccq.RequestIndexFilteringIT testIndicesDontExist #120889 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 01d935b7340b2..9310f6a171281 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -245,6 +245,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/120810 - class: org.elasticsearch.indices.mapping.UpdateMappingIntegrationIT issue: https://github.com/elastic/elasticsearch/issues/116126 +- class: org.elasticsearch.xpack.esql.ccq.RequestIndexFilteringIT + method: testIndicesDontExist + issue: https://github.com/elastic/elasticsearch/issues/120889 # Examples: # From 516542a8479a38ebc83e9d91d80c4df597de697e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 27 Jan 2025 21:46:24 +1100 Subject: [PATCH 050/383] Mute org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT test {p0=data_stream/140_data_stream_aliases/Create data stream aliases using wildcard expression} #120890 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 9310f6a171281..ade5a6207d981 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -248,6 +248,9 @@ tests: - class: org.elasticsearch.xpack.esql.ccq.RequestIndexFilteringIT method: testIndicesDontExist issue: https://github.com/elastic/elasticsearch/issues/120889 +- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT + method: test {p0=data_stream/140_data_stream_aliases/Create data stream aliases using wildcard expression} + issue: https://github.com/elastic/elasticsearch/issues/120890 # Examples: # From 49275d51ba3bb2aa9a920737263906777e019c30 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine 
<58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 27 Jan 2025 22:29:19 +1100 Subject: [PATCH 051/383] Mute org.elasticsearch.xpack.test.rest.XPackRestIT org.elasticsearch.xpack.test.rest.XPackRestIT #120816 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index ade5a6207d981..e403da00ddd6b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -251,6 +251,8 @@ tests: - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT method: test {p0=data_stream/140_data_stream_aliases/Create data stream aliases using wildcard expression} issue: https://github.com/elastic/elasticsearch/issues/120890 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + issue: https://github.com/elastic/elasticsearch/issues/120816 # Examples: # From 5f9168fc394a448430a2a06219be58c182f8ca55 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Mon, 27 Jan 2025 14:14:24 +0100 Subject: [PATCH 052/383] Unmute LearningToRankExplainIT since it is fixed by #120809 (#120876) --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index e403da00ddd6b..2c0adf048038d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -237,9 +237,6 @@ tests: - class: org.elasticsearch.action.search.SearchProgressActionListenerIT method: testSearchProgressWithHits issue: https://github.com/elastic/elasticsearch/issues/120671 -- class: org.elasticsearch.xpack.ml.integration.LearningToRankExplainIT - method: testLtrExplainWithMultipleShardsAndReplicas - issue: https://github.com/elastic/elasticsearch/issues/120805 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/3rd_party_deployment/Test start deployment fails while model download in progress} issue: https://github.com/elastic/elasticsearch/issues/120810 From 9d86341f7b3537382cbaf18474fd6662bc3c38d9 Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Mon, 27 Jan 2025 08:41:13 -0500 Subject: [PATCH 053/383] Improve error messages to help debug flaky tests (#120838) Recent test failure in CrossClusterEsqlRCS2EnrichUnavailableRemotesIT is undecipherable because of the structure of these qa tests, so adding more context to the error message to help debug them if/when they next fail. 
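In sketch form, the change below composes a small message-prefixing helper into each Hamcrest assertion so a flaky failure reports both the test and the specific check that tripped. The following is a minimal, self-contained illustration only (assuming Hamcrest on the classpath; the test name and the plain Map standing in for the parsed ES|QL response are placeholders, not the patch's actual test code):

import java.util.Map;
import java.util.function.Function;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;

// Illustrative sketch of the assertion-message pattern used in the diff below.
public class AssertionContextSketch {
    public static void main(String[] args) {
        // Stand-in for the "_clusters" section of a parsed ES|QL response.
        Map<String, Object> clusters = Map.of("total", 2, "successful", 1, "skipped", 1);

        // Prefix every assertion message with the test name so the failure
        // report identifies which test and which check failed.
        Function<String, String> info = msg -> "test: esqlEnrichWithSkipUnavailableTrue: " + msg;

        assertThat(info.apply("total clusters"), (int) clusters.get("total"), is(2));
        assertThat(info.apply("successful clusters"), (int) clusters.get("successful"), is(1));
        assertThat(info.apply("skipped clusters"), (int) clusters.get("skipped"), is(1));
    }
}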
--- ...terEsqlRCS1EnrichUnavailableRemotesIT.java | 72 +++++++++-------- ...terEsqlRCS2EnrichUnavailableRemotesIT.java | 81 +++++-------------- .../RemoteClusterSecurityEsqlIT.java | 4 +- 3 files changed, 65 insertions(+), 92 deletions(-) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1EnrichUnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1EnrichUnavailableRemotesIT.java index 0ceffa984a979..c1cecff28789e 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1EnrichUnavailableRemotesIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1EnrichUnavailableRemotesIT.java @@ -23,6 +23,7 @@ import java.util.ArrayList; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; @@ -108,32 +109,34 @@ private void esqlEnrichWithRandomSkipUnavailable() throws Exception { Map localClusterDetails = (Map) clusterDetails.get("(local)"); Map remoteClusterDetails = (Map) clusterDetails.get("my_remote_cluster"); + Function info = (msg) -> "test: esqlEnrichWithRandomSkipUnavailable: " + msg; + assertOK(response); - assertThat((int) map.get("took"), greaterThan(0)); - assertThat(values.size(), is(6)); + assertThat(info.apply("overall took"), (int) map.get("took"), greaterThan(0)); + assertThat(info.apply("overall num values"), values.size(), is(6)); for (int i = 0; i < 6; i++) { ArrayList value = (ArrayList) values.get(i); // Size is 3: ID, Email, Designation. 
- assertThat(value.size(), is(3)); + assertThat(info.apply("should be id, email, designation, so size 3"), value.size(), is(3)); // Email - assertThat((String) value.get(0), endsWith("@corp.co")); + assertThat(info.apply("email was: " + value.get(0)), (String) value.get(0), endsWith("@corp.co")); // ID - assertThat(value.get(1), is(i + 1)); + assertThat(info.apply("id"), value.get(1), is(i + 1)); } - assertThat((int) clusters.get("total"), is(2)); - assertThat((int) clusters.get("successful"), is(2)); - assertThat((int) clusters.get("running"), is(0)); - assertThat((int) clusters.get("skipped"), is(0)); - assertThat((int) clusters.get("partial"), is(0)); - assertThat((int) clusters.get("failed"), is(0)); + assertThat(info.apply("total clusters"), (int) clusters.get("total"), is(2)); + assertThat(info.apply("successful clusters"), (int) clusters.get("successful"), is(2)); + assertThat(info.apply("running clusters"), (int) clusters.get("running"), is(0)); + assertThat(info.apply("skipped clusters"), (int) clusters.get("skipped"), is(0)); + assertThat(info.apply("partial clusters"), (int) clusters.get("partial"), is(0)); + assertThat(info.apply("failed clusters"), (int) clusters.get("failed"), is(0)); assertThat(clusterDetails.size(), is(2)); - assertThat((int) localClusterDetails.get("took"), greaterThan(0)); - assertThat(localClusterDetails.get("status"), is("successful")); + assertThat(info.apply("local cluster took"), (int) localClusterDetails.get("took"), greaterThan(0)); + assertThat(info.apply("local cluster status"), localClusterDetails.get("status"), is("successful")); - assertThat((int) remoteClusterDetails.get("took"), greaterThan(0)); - assertThat(remoteClusterDetails.get("status"), is("successful")); + assertThat(info.apply("remote cluster took"), (int) remoteClusterDetails.get("took"), greaterThan(0)); + assertThat(info.apply("remote cluster status"), remoteClusterDetails.get("status"), is("successful")); } @SuppressWarnings("unchecked") @@ -153,44 +156,48 @@ private void esqlEnrichWithSkipUnavailableTrue() throws Exception { Map localClusterDetails = (Map) clusterDetails.get("(local)"); Map remoteClusterDetails = (Map) clusterDetails.get("my_remote_cluster"); + Function info = (msg) -> "test: esqlEnrichWithSkipUnavailableTrue: " + msg; + assertOK(response); - assertThat((int) map.get("took"), greaterThan(0)); - assertThat(values.size(), is(3)); + assertThat(info.apply("overall took"), (int) map.get("took"), greaterThan(0)); + assertThat(info.apply("overall values.size"), values.size(), is(3)); // We only have 3 values since the remote cluster is turned off. for (int i = 0; i < 3; i++) { ArrayList value = (ArrayList) values.get(i); // Size is 3: ID, Email, Designation. 
- assertThat(value.size(), is(3)); + assertThat(info.apply("should be id, email, designation but had size: "), value.size(), is(3)); // Email - assertThat((String) value.get(0), endsWith("@corp.co")); + assertThat(info.apply("email was: " + value.get(0)), (String) value.get(0), endsWith("@corp.co")); // ID - assertThat(value.get(1), is(i + 1)); + assertThat(info.apply("id"), value.get(1), is(i + 1)); } - assertThat((int) clusters.get("total"), is(2)); - assertThat((int) clusters.get("successful"), is(1)); - assertThat((int) clusters.get("running"), is(0)); - assertThat((int) clusters.get("skipped"), is(1)); - assertThat((int) clusters.get("partial"), is(0)); - assertThat((int) clusters.get("failed"), is(0)); + assertThat(info.apply("total clusters"), (int) clusters.get("total"), is(2)); + assertThat(info.apply("successful clusters"), (int) clusters.get("successful"), is(1)); + assertThat(info.apply("running clusters"), (int) clusters.get("running"), is(0)); + assertThat(info.apply("skipped clusters"), (int) clusters.get("skipped"), is(1)); + assertThat(info.apply("partial clusters"), (int) clusters.get("partial"), is(0)); + assertThat(info.apply("failed clusters"), (int) clusters.get("failed"), is(0)); - assertThat(clusterDetails.size(), is(2)); - assertThat((int) localClusterDetails.get("took"), greaterThan(0)); - assertThat(localClusterDetails.get("status"), is("successful")); + assertThat(info.apply("cluster details size"), clusterDetails.size(), is(2)); + assertThat(info.apply("local cluster took"), (int) localClusterDetails.get("took"), greaterThan(0)); + assertThat(info.apply("local cluster status"), localClusterDetails.get("status"), is("successful")); - assertThat((int) remoteClusterDetails.get("took"), greaterThan(0)); - assertThat(remoteClusterDetails.get("status"), is("skipped")); + assertThat(info.apply("remote cluster took"), (int) remoteClusterDetails.get("took"), greaterThan(0)); + assertThat(info.apply("remote cluster status"), remoteClusterDetails.get("status"), is("skipped")); ArrayList remoteClusterFailures = (ArrayList) remoteClusterDetails.get("failures"); - assertThat(remoteClusterFailures.size(), equalTo(1)); + assertThat(info.apply("remote cluster failure count"), remoteClusterFailures.size(), equalTo(1)); Map failuresMap = (Map) remoteClusterFailures.get(0); Map reason = (Map) failuresMap.get("reason"); assertThat( + info.apply("unexpected failure reason: " + reason), reason.get("type").toString(), oneOf("node_disconnected_exception", "connect_transport_exception", "node_not_connected_exception") ); + } finally { fulfillingCluster.start(); closeFulfillingClusterClient(); @@ -208,6 +215,7 @@ private void esqlEnrichWithSkipUnavailableFalse() throws Exception { ResponseException ex = expectThrows(ResponseException.class, () -> client().performRequest(esqlRequest(query))); assertThat( + "esqlEnrichWithSkipUnavailableFalse failure", ex.getMessage(), anyOf( containsString("connect_transport_exception"), diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2EnrichUnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2EnrichUnavailableRemotesIT.java index 075ea86c22e98..fdc9f29620c8c 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2EnrichUnavailableRemotesIT.java +++ 
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2EnrichUnavailableRemotesIT.java @@ -25,6 +25,7 @@ import java.util.ArrayList; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; @@ -110,56 +111,16 @@ public void setupPreRequisites() throws IOException { } public void testEsqlEnrichWithSkipUnavailable() throws Exception { - esqlEnrichWithRandomSkipUnavailable(); esqlEnrichWithSkipUnavailableTrue(); esqlEnrichWithSkipUnavailableFalse(); } - private void esqlEnrichWithRandomSkipUnavailable() throws Exception { - configureRemoteCluster("my_remote_cluster", fulfillingCluster, false, randomBoolean(), randomBoolean()); - - String query = "FROM to-be-enr*,my_remote_cluster:to-be-enr* | ENRICH " + randomFrom(modes) + ":employees-policy | LIMIT 10"; - Response response = performRequestWithRemoteSearchUser(esqlRequest(query)); - - Map map = responseAsMap(response); - ArrayList values = (ArrayList) map.get("values"); - Map clusters = (Map) map.get("_clusters"); - Map clusterDetails = (Map) clusters.get("details"); - Map localClusterDetails = (Map) clusterDetails.get("(local)"); - Map remoteClusterDetails = (Map) clusterDetails.get("my_remote_cluster"); - - assertOK(response); - assertThat((int) map.get("took"), greaterThan(0)); - assertThat(values.size(), is(6)); - for (int i = 0; i < 6; i++) { - ArrayList value = (ArrayList) values.get(i); - // Size is 3: ID, Email, Designation. - assertThat(value.size(), is(3)); - // Email - assertThat((String) value.get(0), endsWith("@corp.co")); - // ID - assertThat(value.get(1), is(i + 1)); - } - - assertThat((int) clusters.get("total"), is(2)); - assertThat((int) clusters.get("successful"), is(2)); - assertThat((int) clusters.get("running"), is(0)); - assertThat((int) clusters.get("skipped"), is(0)); - assertThat((int) clusters.get("partial"), is(0)); - assertThat((int) clusters.get("failed"), is(0)); - - assertThat(clusterDetails.size(), is(2)); - assertThat((int) localClusterDetails.get("took"), greaterThan(0)); - assertThat(localClusterDetails.get("status"), is("successful")); - - assertThat((int) remoteClusterDetails.get("took"), greaterThan(0)); - assertThat(remoteClusterDetails.get("status"), is("successful")); - } - @SuppressWarnings("unchecked") private void esqlEnrichWithSkipUnavailableTrue() throws Exception { configureRemoteCluster("my_remote_cluster", fulfillingCluster, false, randomBoolean(), true); + Function info = (msg) -> "test: esqlEnrichWithSkipUnavailableTrue: " + msg; + try { fulfillingCluster.stop(true); @@ -174,40 +135,41 @@ private void esqlEnrichWithSkipUnavailableTrue() throws Exception { Map remoteClusterDetails = (Map) clusterDetails.get("my_remote_cluster"); assertOK(response); - assertThat((int) map.get("took"), greaterThan(0)); - assertThat(values.size(), is(3)); + assertThat(info.apply("overall took"), (int) map.get("took"), greaterThan(0)); + assertThat(info.apply("overall values.size"), values.size(), is(3)); // We only have 3 values since the remote cluster is turned off. for (int i = 0; i < 3; i++) { ArrayList value = (ArrayList) values.get(i); // Size is 3: ID, Email, Designation. 
- assertThat(value.size(), is(3)); + assertThat(info.apply("should be id, email, designation: "), value.size(), is(3)); // Email - assertThat((String) value.get(0), endsWith("@corp.co")); + assertThat(info.apply("email was: " + value.get(0)), (String) value.get(0), endsWith("@corp.co")); // ID - assertThat(value.get(1), is(i + 1)); + assertThat(info.apply("id"), value.get(1), is(i + 1)); } - assertThat((int) clusters.get("total"), is(2)); - assertThat((int) clusters.get("successful"), is(1)); - assertThat((int) clusters.get("running"), is(0)); - assertThat((int) clusters.get("skipped"), is(1)); - assertThat((int) clusters.get("partial"), is(0)); - assertThat((int) clusters.get("failed"), is(0)); + assertThat(info.apply("total clusters"), (int) clusters.get("total"), is(2)); + assertThat(info.apply("successful clusters"), (int) clusters.get("successful"), is(1)); + assertThat(info.apply("running clusters"), (int) clusters.get("running"), is(0)); + assertThat(info.apply("skipped clusters"), (int) clusters.get("skipped"), is(1)); + assertThat(info.apply("partial clusters"), (int) clusters.get("partial"), is(0)); + assertThat(info.apply("failed clusters"), (int) clusters.get("failed"), is(0)); - assertThat(clusterDetails.size(), is(2)); - assertThat((int) localClusterDetails.get("took"), greaterThan(0)); - assertThat(localClusterDetails.get("status"), is("successful")); + assertThat(info.apply("cluster details size"), clusterDetails.size(), is(2)); + assertThat(info.apply("local cluster took"), (int) localClusterDetails.get("took"), greaterThan(0)); + assertThat(info.apply("local cluster status"), localClusterDetails.get("status"), is("successful")); - assertThat((int) remoteClusterDetails.get("took"), greaterThan(0)); - assertThat(remoteClusterDetails.get("status"), is("skipped")); + assertThat(info.apply("remote cluster took"), (int) remoteClusterDetails.get("took"), greaterThan(0)); + assertThat(info.apply("remote cluster status"), remoteClusterDetails.get("status"), is("skipped")); ArrayList remoteClusterFailures = (ArrayList) remoteClusterDetails.get("failures"); - assertThat(remoteClusterFailures.size(), equalTo(1)); + assertThat(info.apply("remote cluster failure count"), remoteClusterFailures.size(), equalTo(1)); Map failuresMap = (Map) remoteClusterFailures.get(0); Map reason = (Map) failuresMap.get("reason"); assertThat( + info.apply("unexpected failure reason: " + reason), reason.get("type").toString(), oneOf("node_disconnected_exception", "connect_transport_exception", "node_not_connected_exception") ); @@ -227,6 +189,7 @@ private void esqlEnrichWithSkipUnavailableFalse() throws Exception { String query = "FROM to-be-enr*,my_remote_cluster:to-be-enr* | ENRICH " + randomFrom(modes) + ":employees-policy | LIMIT 10"; ResponseException ex = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(esqlRequest(query))); assertThat( + "esqlEnrichWithSkipUnavailableFalse failure", ex.getMessage(), anyOf( containsString("connect_transport_exception"), diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java index 42d03838ed8d6..09dda0f708a86 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java +++ 
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java @@ -729,7 +729,9 @@ public void testCrossClusterQueryWithOnlyRemotePrivs() throws Exception { @SuppressWarnings("unchecked") public void testCrossClusterEnrich() throws Exception { - configureRemoteCluster(); + boolean isProxyMode = randomBoolean(); + boolean skipUnavailable = randomBoolean(); + configureRemoteCluster(REMOTE_CLUSTER_ALIAS, fulfillingCluster, false, isProxyMode, skipUnavailable); populateData(); // Query cluster { From ec69df0f6174fe97feb73c0bb064e899bf1406f6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 00:48:18 +1100 Subject: [PATCH 054/383] Mute org.elasticsearch.xpack.security.authc.service.ServiceAccountIT testAuthenticateShouldNotFallThroughInCaseOfFailure #120902 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2c0adf048038d..933890cee5e71 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -250,6 +250,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/120890 - class: org.elasticsearch.xpack.test.rest.XPackRestIT issue: https://github.com/elastic/elasticsearch/issues/120816 +- class: org.elasticsearch.xpack.security.authc.service.ServiceAccountIT + method: testAuthenticateShouldNotFallThroughInCaseOfFailure + issue: https://github.com/elastic/elasticsearch/issues/120902 # Examples: # From 3532d0bb1074b5b1b250d9e28d145c1e67c726fd Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 27 Jan 2025 16:21:28 +0200 Subject: [PATCH 055/383] [DOCS] Update documentation for index sorting and routing for logsdb (#120721) * [DOCS] Update documentation for index sorting and routing for logsdb * update * Apply suggestions from code review Co-authored-by: Marci W <333176+marciw@users.noreply.github.com> * Update logs.asciidoc * Update docs/reference/data-streams/logs.asciidoc Co-authored-by: Marci W <333176+marciw@users.noreply.github.com> * Update logs.asciidoc --------- Co-authored-by: Marci W <333176+marciw@users.noreply.github.com> --- docs/reference/data-streams/logs.asciidoc | 90 +++++++++++++---------- 1 file changed, 52 insertions(+), 38 deletions(-) diff --git a/docs/reference/data-streams/logs.asciidoc b/docs/reference/data-streams/logs.asciidoc index 3af5e09889a89..7058cfe51496f 100644 --- a/docs/reference/data-streams/logs.asciidoc +++ b/docs/reference/data-streams/logs.asciidoc @@ -1,9 +1,9 @@ [[logs-data-stream]] == Logs data stream -IMPORTANT: The {es} `logsdb` index mode is generally available in Elastic Cloud Hosted -and self-managed Elasticsearch as of version 8.17, and is enabled by default for -logs in https://www.elastic.co/elasticsearch/serverless[{serverless-full}]. +IMPORTANT: The {es} `logsdb` index mode is generally available in Elastic Cloud Hosted +and self-managed Elasticsearch as of version 8.17, and is enabled by default for +logs in https://www.elastic.co/elasticsearch/serverless[{serverless-full}]. A logs data stream is a data stream type that stores log data more efficiently. @@ -54,57 +54,49 @@ DELETE _index_template/my-index-template === Synthetic source If you have the required https://www.elastic.co/subscriptions[subscription], `logsdb` index mode uses <>, which omits storing the original `_source` -field. 
Instead, the document source is synthesized from doc values or stored fields upon document retrieval. +field. Instead, the document source is synthesized from doc values or stored fields upon document retrieval. If you don't have the required https://www.elastic.co/subscriptions[subscription], `logsdb` mode uses the original `_source` field. -Before using synthetic source, make sure to review the <>. +Before using synthetic source, make sure to review the <>. When working with multi-value fields, the `index.mapping.synthetic_source_keep` setting controls how field values are preserved for <> reconstruction. In `logsdb`, the default value is `arrays`, which retains both duplicate values and the order of entries. However, the exact structure of -array elements and objects is not necessarily retained. Preserving duplicates and ordering can be critical for some -log fields, such as DNS A records, HTTP headers, and log entries that represent sequential or repeated events. +array elements and objects is not necessarily retained. Preserving duplicates and ordering can be critical for some +log fields, such as DNS A records, HTTP headers, and log entries that represent sequential or repeated events. [discrete] [[logsdb-sort-settings]] === Index sort settings -In `logsdb` index mode, the following sort settings are applied by default: +In `logsdb` index mode, indices are sorted by the fields `host.name` and `@timestamp` by default. -`index.sort.field`: `["host.name", "@timestamp"]`:: -Indices are sorted by `host.name` and `@timestamp` by default. The `@timestamp` field is automatically injected if it is not present. - -`index.sort.order`: `["desc", "desc"]`:: -Both `host.name` and `@timestamp` are sorted in descending (`desc`) order, prioritizing the latest data. - -`index.sort.mode`: `["min", "min"]`:: -The `min` mode sorts indices by the minimum value of multi-value fields. - -`index.sort.missing`: `["_first", "_first"]`:: -Missing values are sorted to appear `_first`. - -You can override these default sort settings. For example, to sort on different fields -and change the order, manually configure `index.sort.field` and `index.sort.order`. For more details, see -<>. - -When using the default sort settings, the `host.name` field is automatically injected into the index mappings as a `keyword` field to ensure that sorting can be applied. This guarantees that logs are efficiently sorted and retrieved based on the `host.name` and `@timestamp` fields. - -NOTE: If `subobjects` is set to `true` (default), the `host` field is mapped as an object field -named `host` with a `name` child field of type `keyword`. If `subobjects` is set to `false`, +* If the `@timestamp` field is not present, it is automatically injected. +* If the `host.name` field is not present, it is automatically injected as a `keyword` field, if possible. +** If `host.name` can't be injected (for example, `host` is a keyword field) or can't be used for sorting +(for example, its value is an IP address), only the `@timestamp` is used for sorting. +** If `host.name` is injected and `subobjects` is set to `true` (default), the `host` field is mapped as +an object field named `host` with a `name` child field of type `keyword`. If `subobjects` is set to `false`, a single `host.name` field is mapped as a `keyword` field. +* To prioritize the latest data, `host.name` is sorted in ascending order and `@timestamp` is sorted in +descending order. 
+ +You can override the default sort settings by manually configuring `index.sort.field` +and `index.sort.order`. For more details, see <>. -To apply different sort settings to an existing data stream, update the data stream's component templates, and then -perform or wait for a <>. +To modify the sort configuration of an existing data stream, update the data stream's +component templates, and then perform or wait for a <>. -NOTE: In `logsdb` mode, the `@timestamp` field is automatically injected if it's not already present. If you apply custom sort settings, the `@timestamp` field is injected into the mappings but is not -automatically added to the list of sort fields. +NOTE: If you apply custom sort settings, the `@timestamp` field is injected into the mappings but is not +automatically added to the list of sort fields. For best results, include it manually as the last sort +field, with `desc` ordering. [discrete] [[logsdb-host-name]] ==== Existing data streams -If you're enabling `logsdb` index mode on a data stream that already exists, make sure to check mappings and sorting. The `logsdb` mode automatically maps `host.name` as a keyword if it's included in the sort settings. If a `host.name` field already exists but has a different type, mapping errors might occur, preventing `logsdb` mode from being fully applied. +If you're enabling `logsdb` index mode on a data stream that already exists, make sure to check mappings and sorting. The `logsdb` mode automatically maps `host.name` as a keyword if it's included in the sort settings. If a `host.name` field already exists but has a different type, mapping errors might occur, preventing `logsdb` mode from being fully applied. To avoid mapping conflicts, consider these options: @@ -114,7 +106,29 @@ To avoid mapping conflicts, consider these options: * **Switch to a different <>**: If resolving `host.name` mapping conflicts is not feasible, you can choose not to use `logsdb` mode. -IMPORTANT: On existing data streams, `logsdb` mode is applied on <> (automatic or manual). +IMPORTANT: On existing data streams, `logsdb` mode is applied on <> (automatic or manual). + +[discrete] +[[logsdb-sort-routing]] +==== Optimized routing on sort fields + +To reduce the storage footprint of `logsdb` indexes, you can enable routing optimizations. A routing optimization uses the fields in the sort configuration (except for `@timestamp`) to route documents to shards. + +In benchmarks, +routing optimizations reduced storage requirements by 20% compared to the default `logsdb` configuration, with a negligible penalty to ingestion +performance (1-4%). Routing optimizations can benefit data streams that are expected to grow substantially over +time. Exact results depend on the sort configuration and the nature of the logged data. + +To configure a routing optimization: + + * Include the index setting `[index.logsdb.route_on_sort_fields:true]` in the data stream configuration. + * <> with two or more fields, in addition to `@timestamp`. + * Make sure the <> field is not populated in ingested documents. It should be + auto-generated instead. + +A custom sort configuration is required, to improve storage efficiency and to minimize hotspots +from logging spikes that may route documents to a single shard. For best results, use a few sort fields +that have a relatively low cardinality and don't co-vary (for example, `host.name` and `host.id` are not optimal). 
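As a concrete illustration of the routing configuration described above, the following sketch creates an index template that enables `logsdb` mode, a custom sort, and routing on the sort fields through the low-level Java REST client. This is a minimal sketch under stated assumptions, not documentation-approved example code: the cluster address, template name, index pattern, and the `service.name` sort field are placeholder choices, while `host.name` and `@timestamp` are injected automatically by `logsdb` as described above.

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

// Illustrative sketch: a data stream template with logsdb mode, a custom sort,
// and routing on the sort fields. Names and the cluster address are examples.
public class LogsdbRoutingTemplateSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
            Request request = new Request("PUT", "/_index_template/logs-routed");
            request.setJsonEntity("""
                {
                  "index_patterns": ["logs-routed-*"],
                  "data_stream": {},
                  "template": {
                    "settings": {
                      "index.mode": "logsdb",
                      "index.logsdb.route_on_sort_fields": true,
                      "index.sort.field": ["service.name", "host.name", "@timestamp"],
                      "index.sort.order": ["asc", "asc", "desc"]
                    },
                    "mappings": {
                      "properties": {
                        "service": { "properties": { "name": { "type": "keyword" } } }
                      }
                    }
                  }
                }""");
            client.performRequest(request);
        }
    }
}

The explicit `service` mapping is included because custom sort fields must be mapped when the backing index is created; `@timestamp` is kept as the last sort field with `desc` ordering, in line with the guidance above.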
[discrete] [[logsdb-specialized-codecs]] @@ -123,7 +137,7 @@ IMPORTANT: On existing data streams, `logsdb` mode is applied on <>, which applies {wikipedia}/Zstd[ZSTD] compression to stored fields. You can switch to the `default` codec for faster compression with a slightly larger storage footprint. -The `logsdb` index mode also automatically applies specialized codecs for numeric doc values, in order to optimize storage usage. Numeric fields are +The `logsdb` index mode also automatically applies specialized codecs for numeric doc values, in order to optimize storage usage. Numeric fields are encoded using the following sequence of codecs: * **Delta encoding**: @@ -173,9 +187,9 @@ _characters._ Using UTF-8 encoding, this results in a limit of 32764 bytes, depe The mapping-level `ignore_above` setting takes precedence. If a specific field has an `ignore_above` value defined in its mapping, that value overrides the index-level `index.mapping.ignore_above` value. This default -behavior helps to optimize indexing performance by preventing excessively large string values from being indexed. +behavior helps to optimize indexing performance by preventing excessively large string values from being indexed. -If you need to customize the limit, you can override it at the mapping level or change the index level default. +If you need to customize the limit, you can override it at the mapping level or change the index level default. [discrete] [[logs-db-ignore-limit]] @@ -202,7 +216,7 @@ reconstructing the original value. [[logsdb-settings-summary]] === Settings reference -The `logsdb` index mode uses the following settings: +The `logsdb` index mode uses the following settings: * **`index.mode`**: `"logsdb"` From df24b905db82b6d7d2312e4c76d8928d82b619fb Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 01:25:49 +1100 Subject: [PATCH 056/383] Mute org.elasticsearch.xpack.esql.parser.StatementParserTests testInvalidJoinPatterns #120849 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 933890cee5e71..8e6893646b0d6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -253,6 +253,9 @@ tests: - class: org.elasticsearch.xpack.security.authc.service.ServiceAccountIT method: testAuthenticateShouldNotFallThroughInCaseOfFailure issue: https://github.com/elastic/elasticsearch/issues/120902 +- class: org.elasticsearch.xpack.esql.parser.StatementParserTests + method: testInvalidJoinPatterns + issue: https://github.com/elastic/elasticsearch/issues/120849 # Examples: # From 55bb3ed97e5060457945cb810395fea59c64fb3e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 01:26:01 +1100 Subject: [PATCH 057/383] Mute org.elasticsearch.xpack.esql.parser.StatementParserTests testValidJoinPattern #120848 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8e6893646b0d6..7c1971bbfbfe5 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -256,6 +256,9 @@ tests: - class: org.elasticsearch.xpack.esql.parser.StatementParserTests method: testInvalidJoinPatterns issue: https://github.com/elastic/elasticsearch/issues/120849 +- class: org.elasticsearch.xpack.esql.parser.StatementParserTests + method: testValidJoinPattern + issue: https://github.com/elastic/elasticsearch/issues/120848 # Examples: # From 5b3436dce0b2f7d56979b397b77b6d002f5f0869 Mon Sep 17 
00:00:00 2001 From: Mark Tozzi Date: Mon, 27 Jan 2025 09:34:50 -0500 Subject: [PATCH 058/383] Esql - Support date nanos in date extract function (#120727) Resolves https://github.com/elastic/elasticsearch/issues/110000 Add support for running the date extract function on nanosecond dates. --- docs/changelog/120727.yaml | 6 + .../kibana/definition/date_extract.json | 36 ++++ .../functions/types/date_extract.asciidoc | 2 + .../src/main/resources/date_nanos.csv-spec | 21 +++ .../DateExtractConstantMillisEvaluator.java | 137 ++++++++++++++ .../DateExtractConstantNanosEvaluator.java | 137 ++++++++++++++ .../date/DateExtractMillisEvaluator.java | 169 ++++++++++++++++++ .../date/DateExtractNanosEvaluator.java | 169 ++++++++++++++++++ .../xpack/esql/action/EsqlCapabilities.java | 5 +- .../function/scalar/date/DateExtract.java | 66 +++++-- .../esql/type/EsqlDataTypeConverter.java | 25 ++- .../scalar/date/DateExtractErrorTests.java | 2 +- .../scalar/date/DateExtractTests.java | 108 ++++++----- 13 files changed, 825 insertions(+), 58 deletions(-) create mode 100644 docs/changelog/120727.yaml create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java diff --git a/docs/changelog/120727.yaml b/docs/changelog/120727.yaml new file mode 100644 index 0000000000000..4d0241e6baad1 --- /dev/null +++ b/docs/changelog/120727.yaml @@ -0,0 +1,6 @@ +pr: 120727 +summary: Esql - Support date nanos in date extract function +area: ES|QL +type: enhancement +issues: + - 110000 diff --git a/docs/reference/esql/functions/kibana/definition/date_extract.json b/docs/reference/esql/functions/kibana/definition/date_extract.json index c6dc6583f324d..0ababf80d9137 100644 --- a/docs/reference/esql/functions/kibana/definition/date_extract.json +++ b/docs/reference/esql/functions/kibana/definition/date_extract.json @@ -22,6 +22,24 @@ "variadic" : false, "returnType" : "long" }, + { + "params" : [ + { + "name" : "datePart", + "type" : "keyword", + "optional" : false, + "description" : "Part of the date to extract. Can be: `aligned_day_of_week_in_month`, `aligned_day_of_week_in_year`, `aligned_week_of_month`, `aligned_week_of_year`, `ampm_of_day`, `clock_hour_of_ampm`, `clock_hour_of_day`, `day_of_month`, `day_of_week`, `day_of_year`, `epoch_day`, `era`, `hour_of_ampm`, `hour_of_day`, `instant_seconds`, `micro_of_day`, `micro_of_second`, `milli_of_day`, `milli_of_second`, `minute_of_day`, `minute_of_hour`, `month_of_year`, `nano_of_day`, `nano_of_second`, `offset_seconds`, `proleptic_month`, `second_of_day`, `second_of_minute`, `year`, or `year_of_era`. Refer to https://docs.oracle.com/javase/8/docs/api/java/time/temporal/ChronoField.html[java.time.temporal.ChronoField] for a description of these values. If `null`, the function returns `null`." + }, + { + "name" : "date", + "type" : "date_nanos", + "optional" : false, + "description" : "Date expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "long" + }, { "params" : [ { @@ -39,6 +57,24 @@ ], "variadic" : false, "returnType" : "long" + }, + { + "params" : [ + { + "name" : "datePart", + "type" : "text", + "optional" : false, + "description" : "Part of the date to extract. Can be: `aligned_day_of_week_in_month`, `aligned_day_of_week_in_year`, `aligned_week_of_month`, `aligned_week_of_year`, `ampm_of_day`, `clock_hour_of_ampm`, `clock_hour_of_day`, `day_of_month`, `day_of_week`, `day_of_year`, `epoch_day`, `era`, `hour_of_ampm`, `hour_of_day`, `instant_seconds`, `micro_of_day`, `micro_of_second`, `milli_of_day`, `milli_of_second`, `minute_of_day`, `minute_of_hour`, `month_of_year`, `nano_of_day`, `nano_of_second`, `offset_seconds`, `proleptic_month`, `second_of_day`, `second_of_minute`, `year`, or `year_of_era`. Refer to https://docs.oracle.com/javase/8/docs/api/java/time/temporal/ChronoField.html[java.time.temporal.ChronoField] for a description of these values. If `null`, the function returns `null`." + }, + { + "name" : "date", + "type" : "date_nanos", + "optional" : false, + "description" : "Date expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "long" } ], "examples" : [ diff --git a/docs/reference/esql/functions/types/date_extract.asciidoc b/docs/reference/esql/functions/types/date_extract.asciidoc index ec9bf70c221cc..207e09b00f786 100644 --- a/docs/reference/esql/functions/types/date_extract.asciidoc +++ b/docs/reference/esql/functions/types/date_extract.asciidoc @@ -6,5 +6,7 @@ |=== datePart | date | result keyword | date | long +keyword | date_nanos | long text | date | long +text | date_nanos | long |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec index 1f4e555bd5d83..ec68e7eecc658 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec @@ -500,6 +500,27 @@ millis:date | nanos:date_nanos | num:long 2023-10-23T12:15:03.360Z | 2023-10-23T12:15:03.360103847Z | 1698063303360103847 ; +Date nanos date extract +required_capability: date_nanos_date_extract + +FROM date_nanos +| EVAL nn = MV_MAX(nanos) +| EVAL year = DATE_EXTRACT("year", nn), ns = DATE_EXTRACT("nano_of_second", nn) +| KEEP nn, year, ns; + +nn:date_nanos | year:long | ns:long +2023-10-23T13:55:01.543123456Z | 2023 | 543123456 +2023-10-23T13:53:55.832987654Z | 2023 | 832987654 +2023-10-23T13:52:55.015787878Z | 2023 | 015787878 +2023-10-23T13:51:54.732102837Z | 2023 | 732102837 +2023-10-23T13:33:34.937193000Z | 2023 | 937193000 +2023-10-23T12:27:28.948000000Z | 2023 | 948000000 +2023-10-23T12:15:03.360103847Z | 2023 | 360103847 +2023-10-23T12:15:03.360103847Z | 2023 | 360103847 +2023-03-23T12:15:03.360103847Z | 2023 | 360103847 +2023-03-23T12:15:03.360103847Z | 2023 | 360103847 +; + date nanos to long, index version required_capability: to_date_nanos diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java new file mode 100644 index 0000000000000..11da518a01ce1 --- /dev/null +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java @@ -0,0 +1,137 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.time.ZoneId; +import java.time.temporal.ChronoField; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. + * This class is generated. Do not edit it. + */ +public final class DateExtractConstantMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator value; + + private final ChronoField chronoField; + + private final ZoneId zone; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateExtractConstantMillisEvaluator(Source source, EvalOperator.ExpressionEvaluator value, + ChronoField chronoField, ZoneId zone, DriverContext driverContext) { + this.source = source; + this.value = value; + this.chronoField = chronoField; + this.zone = zone; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valueBlock = (LongBlock) value.eval(page)) { + LongVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + return eval(page.getPositionCount(), valueBlock); + } + return eval(page.getPositionCount(), valueVector).asBlock(); + } + } + + public LongBlock eval(int positionCount, LongBlock valueBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valueBlock.getValueCount(p) != 1) { + if (valueBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendLong(DateExtract.processMillis(valueBlock.getLong(valueBlock.getFirstValueIndex(p)), this.chronoField, this.zone)); + } + return result.build(); + } + } + + public LongVector eval(int positionCount, LongVector valueVector) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendLong(p, DateExtract.processMillis(valueVector.getLong(p), this.chronoField, this.zone)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateExtractConstantMillisEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; + } + + @Override + public void close() { + 
Releasables.closeExpectNoException(value); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory value; + + private final ChronoField chronoField; + + private final ZoneId zone; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value, + ChronoField chronoField, ZoneId zone) { + this.source = source; + this.value = value; + this.chronoField = chronoField; + this.zone = zone; + } + + @Override + public DateExtractConstantMillisEvaluator get(DriverContext context) { + return new DateExtractConstantMillisEvaluator(source, value.get(context), chronoField, zone, context); + } + + @Override + public String toString() { + return "DateExtractConstantMillisEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java new file mode 100644 index 0000000000000..bbd0a59c87ceb --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java @@ -0,0 +1,137 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.time.ZoneId; +import java.time.temporal.ChronoField; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. + * This class is generated. Do not edit it. 
+ */ +public final class DateExtractConstantNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator value; + + private final ChronoField chronoField; + + private final ZoneId zone; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateExtractConstantNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator value, + ChronoField chronoField, ZoneId zone, DriverContext driverContext) { + this.source = source; + this.value = value; + this.chronoField = chronoField; + this.zone = zone; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valueBlock = (LongBlock) value.eval(page)) { + LongVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + return eval(page.getPositionCount(), valueBlock); + } + return eval(page.getPositionCount(), valueVector).asBlock(); + } + } + + public LongBlock eval(int positionCount, LongBlock valueBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valueBlock.getValueCount(p) != 1) { + if (valueBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendLong(DateExtract.processNanos(valueBlock.getLong(valueBlock.getFirstValueIndex(p)), this.chronoField, this.zone)); + } + return result.build(); + } + } + + public LongVector eval(int positionCount, LongVector valueVector) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendLong(p, DateExtract.processNanos(valueVector.getLong(p), this.chronoField, this.zone)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateExtractConstantNanosEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(value); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory value; + + private final ChronoField chronoField; + + private final ZoneId zone; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value, + ChronoField chronoField, ZoneId zone) { + this.source = source; + this.value = value; + this.chronoField = chronoField; + this.zone = zone; + } + + @Override + public DateExtractConstantNanosEvaluator get(DriverContext context) { + return new DateExtractConstantNanosEvaluator(source, value.get(context), chronoField, zone, context); + } + + @Override + public String toString() { + return "DateExtractConstantNanosEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java new file mode 100644 index 0000000000000..edc0b2cb0f0ce --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java @@ -0,0 +1,169 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.time.ZoneId; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. + * This class is generated. Do not edit it. + */ +public final class DateExtractMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator value; + + private final EvalOperator.ExpressionEvaluator chronoField; + + private final ZoneId zone; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateExtractMillisEvaluator(Source source, EvalOperator.ExpressionEvaluator value, + EvalOperator.ExpressionEvaluator chronoField, ZoneId zone, DriverContext driverContext) { + this.source = source; + this.value = value; + this.chronoField = chronoField; + this.zone = zone; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valueBlock = (LongBlock) value.eval(page)) { + try (BytesRefBlock chronoFieldBlock = (BytesRefBlock) chronoField.eval(page)) { + LongVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + return eval(page.getPositionCount(), valueBlock, chronoFieldBlock); + } + BytesRefVector chronoFieldVector = chronoFieldBlock.asVector(); + if (chronoFieldVector == null) { + return eval(page.getPositionCount(), valueBlock, chronoFieldBlock); + } + return eval(page.getPositionCount(), valueVector, chronoFieldVector); + } + } + } + + public LongBlock eval(int positionCount, LongBlock valueBlock, BytesRefBlock chronoFieldBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + BytesRef chronoFieldScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (valueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valueBlock.getValueCount(p) != 1) { + if (valueBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + 
result.appendNull(); + continue position; + } + if (chronoFieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (chronoFieldBlock.getValueCount(p) != 1) { + if (chronoFieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendLong(DateExtract.processMillis(valueBlock.getLong(valueBlock.getFirstValueIndex(p)), chronoFieldBlock.getBytesRef(chronoFieldBlock.getFirstValueIndex(p), chronoFieldScratch), this.zone)); + } catch (IllegalArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public LongBlock eval(int positionCount, LongVector valueVector, + BytesRefVector chronoFieldVector) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + BytesRef chronoFieldScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendLong(DateExtract.processMillis(valueVector.getLong(p), chronoFieldVector.getBytesRef(p, chronoFieldScratch), this.zone)); + } catch (IllegalArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateExtractMillisEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(value, chronoField); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory value; + + private final EvalOperator.ExpressionEvaluator.Factory chronoField; + + private final ZoneId zone; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value, + EvalOperator.ExpressionEvaluator.Factory chronoField, ZoneId zone) { + this.source = source; + this.value = value; + this.chronoField = chronoField; + this.zone = zone; + } + + @Override + public DateExtractMillisEvaluator get(DriverContext context) { + return new DateExtractMillisEvaluator(source, value.get(context), chronoField.get(context), zone, context); + } + + @Override + public String toString() { + return "DateExtractMillisEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java new file mode 100644 index 0000000000000..97a04f0d06a74 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java @@ -0,0 +1,169 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.time.ZoneId; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. + * This class is generated. Do not edit it. + */ +public final class DateExtractNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator value; + + private final EvalOperator.ExpressionEvaluator chronoField; + + private final ZoneId zone; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateExtractNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator value, + EvalOperator.ExpressionEvaluator chronoField, ZoneId zone, DriverContext driverContext) { + this.source = source; + this.value = value; + this.chronoField = chronoField; + this.zone = zone; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valueBlock = (LongBlock) value.eval(page)) { + try (BytesRefBlock chronoFieldBlock = (BytesRefBlock) chronoField.eval(page)) { + LongVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + return eval(page.getPositionCount(), valueBlock, chronoFieldBlock); + } + BytesRefVector chronoFieldVector = chronoFieldBlock.asVector(); + if (chronoFieldVector == null) { + return eval(page.getPositionCount(), valueBlock, chronoFieldBlock); + } + return eval(page.getPositionCount(), valueVector, chronoFieldVector); + } + } + } + + public LongBlock eval(int positionCount, LongBlock valueBlock, BytesRefBlock chronoFieldBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + BytesRef chronoFieldScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (valueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valueBlock.getValueCount(p) != 1) { + if (valueBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (chronoFieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (chronoFieldBlock.getValueCount(p) != 1) { + if (chronoFieldBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendLong(DateExtract.processNanos(valueBlock.getLong(valueBlock.getFirstValueIndex(p)), chronoFieldBlock.getBytesRef(chronoFieldBlock.getFirstValueIndex(p), chronoFieldScratch), this.zone)); + } catch (IllegalArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + 
public LongBlock eval(int positionCount, LongVector valueVector, + BytesRefVector chronoFieldVector) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + BytesRef chronoFieldScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendLong(DateExtract.processNanos(valueVector.getLong(p), chronoFieldVector.getBytesRef(p, chronoFieldScratch), this.zone)); + } catch (IllegalArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateExtractNanosEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(value, chronoField); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory value; + + private final EvalOperator.ExpressionEvaluator.Factory chronoField; + + private final ZoneId zone; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value, + EvalOperator.ExpressionEvaluator.Factory chronoField, ZoneId zone) { + this.source = source; + this.value = value; + this.chronoField = chronoField; + this.zone = zone; + } + + @Override + public DateExtractNanosEvaluator get(DriverContext context) { + return new DateExtractNanosEvaluator(source, value.get(context), chronoField.get(context), zone, context); + } + + @Override + public String toString() { + return "DateExtractNanosEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 182328b54c4c5..ef46d71ac1de1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -475,7 +475,10 @@ public enum Cap { * Support Least and Greatest functions on Date Nanos type */ LEAST_GREATEST_FOR_DATENANOS(), - + /** + * support date extract function for date nanos + */ + DATE_NANOS_DATE_EXTRACT(), /** * Support add and subtract on date nanos */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index 7fc5d82441802..20ff398803854 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -33,10 +33,10 @@ import java.time.temporal.ChronoField; import java.util.List; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isDate; import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.EsqlConverter.STRING_TO_CHRONO_FIELD; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.chronoToLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.chronoToLongNanos; public class DateExtract extends EsqlConfigurationFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( @@ -72,7 +72,11 @@ public DateExtract( Refer to https://docs.oracle.com/javase/8/docs/api/java/time/temporal/ChronoField.html[java.time.temporal.ChronoField] for a description of these values.\n If `null`, the function returns `null`.""") Expression chronoFieldExp, - @Param(name = "date", type = "date", description = "Date expression. If `null`, the function returns `null`.") Expression field, + @Param( + name = "date", + type = { "date", "date_nanos" }, + description = "Date expression. If `null`, the function returns `null`." + ) Expression field, Configuration configuration ) { super(source, List.of(chronoFieldExp, field), configuration); @@ -109,17 +113,42 @@ public String getWriteableName() { @Override public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { - var fieldEvaluator = toEvaluator.apply(children().get(1)); + boolean isNanos = switch (field().dataType()) { + case DataType.DATETIME -> false; + case DataType.DATE_NANOS -> true; + default -> throw new UnsupportedOperationException( + "Unsupported field type [" + + field().dataType().name() + + "]. " + + "If you're seeing this, there's a bug in DateExtract.resolveType" + ); + }; + + ExpressionEvaluator.Factory fieldEvaluator = toEvaluator.apply(children().get(1)); + + // Constant chrono field if (children().get(0).foldable()) { ChronoField chrono = chronoField(toEvaluator.foldCtx()); if (chrono == null) { BytesRef field = (BytesRef) children().get(0).fold(toEvaluator.foldCtx()); throw new InvalidArgumentException("invalid date field for [{}]: {}", sourceText(), field.utf8ToString()); } - return new DateExtractConstantEvaluator.Factory(source(), fieldEvaluator, chrono, configuration().zoneId()); + + if (isNanos) { + return new DateExtractConstantNanosEvaluator.Factory(source(), fieldEvaluator, chrono, configuration().zoneId()); + } else { + return new DateExtractConstantMillisEvaluator.Factory(source(), fieldEvaluator, chrono, configuration().zoneId()); + } } + var chronoEvaluator = toEvaluator.apply(children().get(0)); - return new DateExtractEvaluator.Factory(source(), fieldEvaluator, chronoEvaluator, configuration().zoneId()); + + if (isNanos) { + return new DateExtractNanosEvaluator.Factory(source(), fieldEvaluator, chronoEvaluator, configuration().zoneId()); + } else { + return new DateExtractMillisEvaluator.Factory(source(), fieldEvaluator, chronoEvaluator, configuration().zoneId()); + } + } private ChronoField chronoField(FoldContext ctx) { @@ -138,16 +167,26 @@ private ChronoField chronoField(FoldContext ctx) { return chronoField; } - @Evaluator(warnExceptions = { IllegalArgumentException.class }) - static long process(long value, BytesRef chronoField, @Fixed ZoneId zone) { + @Evaluator(extraName = "Millis", warnExceptions = { IllegalArgumentException.class }) + static long processMillis(long value, BytesRef chronoField, @Fixed ZoneId zone) { return chronoToLong(value, chronoField, zone); } - @Evaluator(extraName = "Constant") - static long process(long value, @Fixed ChronoField chronoField, @Fixed ZoneId zone) { + @Evaluator(extraName = "ConstantMillis") + static long processMillis(long 
value, @Fixed ChronoField chronoField, @Fixed ZoneId zone) { return chronoToLong(value, chronoField, zone); } + @Evaluator(extraName = "Nanos", warnExceptions = { IllegalArgumentException.class }) + static long processNanos(long value, BytesRef chronoField, @Fixed ZoneId zone) { + return chronoToLongNanos(value, chronoField, zone); + } + + @Evaluator(extraName = "ConstantNanos") + static long processNanos(long value, @Fixed ChronoField chronoField, @Fixed ZoneId zone) { + return chronoToLongNanos(value, chronoField, zone); + } + @Override public Expression replaceChildren(List newChildren) { return new DateExtract(source(), newChildren.get(0), newChildren.get(1), configuration()); @@ -168,8 +207,15 @@ protected TypeResolution resolveType() { if (childrenResolved() == false) { return new TypeResolution("Unresolved children"); } + String operationName = sourceText(); return isStringAndExact(children().get(0), sourceText(), TypeResolutions.ParamOrdinal.FIRST).and( - isDate(children().get(1), sourceText(), TypeResolutions.ParamOrdinal.SECOND) + TypeResolutions.isType( + children().get(1), + DataType::isDate, + operationName, + TypeResolutions.ParamOrdinal.SECOND, + "datetime or date_nanos" + ) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index eef0df6b89dd3..4259de7347abd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -464,13 +464,36 @@ private static ChronoField stringToChrono(Object field) { public static long chronoToLong(long dateTime, BytesRef chronoField, ZoneId zone) { ChronoField chrono = ChronoField.valueOf(chronoField.utf8ToString().toUpperCase(Locale.ROOT)); - return Instant.ofEpochMilli(dateTime).atZone(zone).getLong(chrono); + return chronoToLong(dateTime, chrono, zone); } public static long chronoToLong(long dateTime, ChronoField chronoField, ZoneId zone) { return Instant.ofEpochMilli(dateTime).atZone(zone).getLong(chronoField); } + /** + * Extract the given {@link ChronoField} value from a date specified as a long number of nanoseconds since epoch + * @param dateNanos - long nanoseconds since epoch + * @param chronoField - The field to extract + * @param zone - Timezone for the given date + * @return - long representing the given ChronoField value + */ + public static long chronoToLongNanos(long dateNanos, BytesRef chronoField, ZoneId zone) { + ChronoField chrono = ChronoField.valueOf(chronoField.utf8ToString().toUpperCase(Locale.ROOT)); + return chronoToLongNanos(dateNanos, chrono, zone); + } + + /** + * Extract the given {@link ChronoField} value from a date specified as a long number of nanoseconds since epoch + * @param dateNanos - long nanoseconds since epoch + * @param chronoField - The field to extract + * @param zone - Timezone for the given date + * @return - long representing the given ChronoField value + */ + public static long chronoToLongNanos(long dateNanos, ChronoField chronoField, ZoneId zone) { + return DateUtils.toInstant(dateNanos).atZone(zone).getLong(chronoField); + } + /** * The following conversions are between String and other data types. 
*/ diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractErrorTests.java index d5b9a06c8738e..feee1dd06f30f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractErrorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractErrorTests.java @@ -35,7 +35,7 @@ protected Expression build(Source source, List args) { protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> switch (p) { case 0 -> "string"; - case 1 -> "datetime"; + case 1 -> "datetime or date_nanos"; default -> ""; })); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java index cd27ce511b317..01a84d7885ed3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java @@ -26,6 +26,7 @@ import java.time.Instant; import java.time.ZonedDateTime; import java.time.temporal.ChronoField; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -40,53 +41,70 @@ public DateExtractTests(@Name("TestCase") Supplier te @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( - true, - List.of( - new TestCaseSupplier( - List.of(DataType.KEYWORD, DataType.DATETIME), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("YeAr"), DataType.KEYWORD, "chrono"), - new TestCaseSupplier.TypedData(1687944333000L, DataType.DATETIME, "date") - ), - "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", - DataType.LONG, - equalTo(2023L) - ) - ), - new TestCaseSupplier( - List.of(DataType.TEXT, DataType.DATETIME), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("YeAr"), DataType.TEXT, "chrono"), - new TestCaseSupplier.TypedData(1687944333000L, DataType.DATETIME, "date") - ), - "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", - DataType.LONG, - equalTo(2023L) - ) - ), - new TestCaseSupplier( - List.of(DataType.KEYWORD, DataType.DATETIME), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("not a unit"), DataType.KEYWORD, "chrono"), - new TestCaseSupplier.TypedData(0L, DataType.DATETIME, "date") + var suppliers = new ArrayList(); - ), - "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", - DataType.LONG, - is(nullValue()) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") - .withWarning( - "Line -1:-1: java.lang.IllegalArgumentException: " - + "No enum constant java.time.temporal.ChronoField.NOT A UNIT" + for (var stringType : DataType.stringTypes()) { + suppliers.addAll( + List.of( + new TestCaseSupplier( + List.of(stringType, DataType.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("YeAr"), stringType, "chrono"), + new TestCaseSupplier.TypedData(1687944333000L, DataType.DATETIME, "date") + ), + "DateExtractMillisEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", + DataType.LONG, + equalTo(2023L) + ) + ), + new TestCaseSupplier( + List.of(stringType, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("YeAr"), stringType, "chrono"), + new TestCaseSupplier.TypedData(1687944333000000000L, DataType.DATE_NANOS, "date") + ), + "DateExtractNanosEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", + DataType.LONG, + equalTo(2023L) ) - .withFoldingException(InvalidArgumentException.class, "invalid date field for []: not a unit") + ), + new TestCaseSupplier( + List.of(stringType, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("nano_of_second"), stringType, "chrono"), + new TestCaseSupplier.TypedData(1687944333000123456L, DataType.DATE_NANOS, "date") + ), + "DateExtractNanosEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", + DataType.LONG, + equalTo(123456L) + ) + ), + new TestCaseSupplier( + List.of(stringType, DataType.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("not a unit"), stringType, "chrono"), + new TestCaseSupplier.TypedData(0L, DataType.DATETIME, "date") + + ), + "DateExtractMillisEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", + DataType.LONG, + is(nullValue()) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + .withWarning( + "Line -1:-1: java.lang.IllegalArgumentException: " + + "No enum constant java.time.temporal.ChronoField.NOT A UNIT" + ) + .withFoldingException(InvalidArgumentException.class, "invalid date field for []: not a unit") + ) ) - ) - ); + ); + } + + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } public void testAllChronoFields() { @@ -102,7 +120,7 @@ public void testAllChronoFields() { assertThat(instance.fold(FoldContext.small()), is(date.getLong(value))); assertThat( - DateExtract.process(epochMilli, new BytesRef(value.name()), EsqlTestUtils.TEST_CFG.zoneId()), + DateExtract.processMillis(epochMilli, new BytesRef(value.name()), EsqlTestUtils.TEST_CFG.zoneId()), is(date.getLong(value)) ); } From 084192c80fe2a793df51c1ea70fa533296894ebc Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Mon, 27 Jan 2025 15:37:28 +0100 Subject: [PATCH 059/383] Fix RequestIndexFilteringIT::testIndicesDontExist (#120899) Remote cluster is randomly picked by the test. The failure was happening as now we emit a different error when attempting a join with a remote cluster. 
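For illustration, the two error shapes the test now accepts look roughly like this (index names are the test's own fixtures; the `remote_cluster:` prefix is shown only as an example of a remote pattern):

  FROM test1 | LOOKUP JOIN foo ON id1
      -> verification_exception, "Unknown index [foo]"
  FROM remote_cluster:test1 | LOOKUP JOIN foo ON id1
      -> parsing_exception, "remote clusters are not supported in LOOKUP JOIN"

The remote case fails while parsing, before the actual indices are resolved, which is why its message differs from the local case.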
--- muted-tests.yml | 3 --- .../qa/rest/RequestIndexFilteringTestCase.java | 14 +++++++++++--- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 7c1971bbfbfe5..10307e0225b1d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -242,9 +242,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/120810 - class: org.elasticsearch.indices.mapping.UpdateMappingIntegrationIT issue: https://github.com/elastic/elasticsearch/issues/116126 -- class: org.elasticsearch.xpack.esql.ccq.RequestIndexFilteringIT - method: testIndicesDontExist - issue: https://github.com/elastic/elasticsearch/issues/120889 - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT method: test {p0=data_stream/140_data_stream_aliases/Create data stream aliases using wildcard expression} issue: https://github.com/elastic/elasticsearch/issues/120890 diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java index 94f6a3c65418d..ad61c52775eb9 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.AssertWarnings; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; @@ -212,13 +213,20 @@ public void testIndicesDontExist() throws IOException { assertThat(e.getMessage(), anyOf(containsString("no such index [foo]"), containsString("no such index [remote_cluster:foo]"))); if (EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()) { + var pattern = from("test1"); e = expectThrows( ResponseException.class, - () -> runEsql(timestampFilter("gte", "2020-01-01").query(from("test1") + " | LOOKUP JOIN foo ON id1")) + () -> runEsql(timestampFilter("gte", "2020-01-01").query(pattern + " | LOOKUP JOIN foo ON id1")) ); assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); - assertThat(e.getMessage(), containsString("verification_exception")); - assertThat(e.getMessage(), containsString("Unknown index [foo]")); + assertThat( + e.getMessage(), + // currently we don't support remote clusters in LOOKUP JOIN + // this check happens before resolving actual indices and results in a different error message + RemoteClusterAware.isRemoteIndexName(pattern) + ? 
allOf(containsString("parsing_exception"), containsString("remote clusters are not supported in LOOKUP JOIN")) + : allOf(containsString("verification_exception"), containsString("Unknown index [foo]")) + ); } } From 81d46a011829afb88f6f60afcef751b8d085bcf6 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 27 Jan 2025 14:56:41 +0000 Subject: [PATCH 060/383] [ML] Unmute XPackRestIT (#120897) Mute failing inference_crud yml tests and unmute the rest of XPackRestIT For #120816 --- muted-tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/muted-tests.yml b/muted-tests.yml index 10307e0225b1d..d84b193aa7dfc 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -246,6 +246,7 @@ tests: method: test {p0=data_stream/140_data_stream_aliases/Create data stream aliases using wildcard expression} issue: https://github.com/elastic/elasticsearch/issues/120890 - class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=ml/inference_crud/*} issue: https://github.com/elastic/elasticsearch/issues/120816 - class: org.elasticsearch.xpack.security.authc.service.ServiceAccountIT method: testAuthenticateShouldNotFallThroughInCaseOfFailure From bac3b14088f0b1fef2ad7b8042679eb1f699fe91 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 01:56:58 +1100 Subject: [PATCH 061/383] Mute org.elasticsearch.packaging.test.DockerTests test050BasicApiTests #120911 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index d84b193aa7dfc..5d490ec0bda2d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -257,6 +257,9 @@ tests: - class: org.elasticsearch.xpack.esql.parser.StatementParserTests method: testValidJoinPattern issue: https://github.com/elastic/elasticsearch/issues/120848 +- class: org.elasticsearch.packaging.test.DockerTests + method: test050BasicApiTests + issue: https://github.com/elastic/elasticsearch/issues/120911 # Examples: # From 529ad049a8106bdb1c8503022522266b67bd9856 Mon Sep 17 00:00:00 2001 From: Alexey Ivanov Date: Mon, 27 Jan 2025 15:03:06 +0000 Subject: [PATCH 062/383] Add test for system index migration using reindexing script (#120667) --- modules/reindex/build.gradle | 3 + .../AbstractFeatureMigrationIntegTest.java | 12 +- .../migration/FeatureMigrationIT.java | 130 ++++++++++++++---- .../plugin-metadata/entitlement-policy.yaml | 2 + .../plugin-metadata/plugin-security.policy | 3 + 5 files changed, 118 insertions(+), 32 deletions(-) diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 47a3f51115b1d..05cd906f61160 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -44,6 +44,9 @@ dependencies { clusterModules project(':modules:lang-painless') clusterModules project(':modules:parent-join') clusterModules project(":modules:rest-root") + + internalClusterTestImplementation project(':modules:lang-painless') + internalClusterTestImplementation project(':modules:lang-painless:spi') } restResources { diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java index 2a1401242f81c..860d63000f124 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java +++ 
b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java @@ -146,7 +146,7 @@ public T getPlugin(Class type) { return pluginsService.filterPlugins(type).findFirst().get(); } - public void createSystemIndexForDescriptor(SystemIndexDescriptor descriptor) throws InterruptedException { + protected void createSystemIndexForDescriptor(SystemIndexDescriptor descriptor) { assertThat( "the strategy used below to create index names for descriptors without a primary index name only works for simple patterns", descriptor.getIndexPattern(), @@ -180,9 +180,13 @@ public void createSystemIndexForDescriptor(SystemIndexDescriptor descriptor) thr CreateIndexResponse response = createRequest.get(); Assert.assertTrue(response.isShardsAcknowledged()); + indexDocs(indexName); + } + + protected void indexDocs(String indexName) { List docs = new ArrayList<>(INDEX_DOC_COUNT); for (int i = 0; i < INDEX_DOC_COUNT; i++) { - docs.add(ESIntegTestCase.prepareIndex(indexName).setId(Integer.toString(i)).setSource("some_field", "words words")); + docs.add(ESIntegTestCase.prepareIndex(indexName).setId(Integer.toString(i)).setSource(FIELD_NAME, "words words")); } indexRandom(true, docs); IndicesStatsResponse indexStats = ESIntegTestCase.indicesAdmin().prepareStats(indexName).setDocs(true).get(); @@ -207,7 +211,7 @@ static String createMapping(boolean descriptorManaged, boolean descriptorInterna builder.field("dynamic", "strict"); builder.startObject("properties"); { - builder.startObject("some_field"); + builder.startObject(FIELD_NAME); builder.field("type", "keyword"); builder.endObject(); } @@ -221,7 +225,7 @@ static String createMapping(boolean descriptorManaged, boolean descriptorInterna } } - public void assertIndexHasCorrectProperties( + protected void assertIndexHasCorrectProperties( Metadata metadata, String indexName, int settingsFlagValue, diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java index 06233b6147824..cdf817a6b17b8 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.put.PutComponentTemplateAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; +import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -33,8 +34,11 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.painless.PainlessPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.upgrades.FeatureMigrationResults; import org.elasticsearch.upgrades.SingleFeatureMigrationResult; @@ -51,6 +55,7 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; 
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.equalTo; @@ -62,6 +67,25 @@ import static org.hamcrest.Matchers.nullValue; public class FeatureMigrationIT extends AbstractFeatureMigrationIntegTest { + private static final String INTERNAL_MANAGED_WITH_SCRIPT_INDEX_NAME = ".int-mans-old"; + private static final String SCRIPTED_INDEX_FEATURE_NAME = "B-test-feature"; + private static final SystemIndexDescriptor INTERNAL_MANAGED_WITH_SCRIPT = SystemIndexDescriptor.builder() + .setIndexPattern(".int-mans-*") + .setAliasName(".internal-managed-with-script-alias") + .setPrimaryIndex(INTERNAL_MANAGED_WITH_SCRIPT_INDEX_NAME) + .setType(SystemIndexDescriptor.Type.INTERNAL_MANAGED) + .setSettings(createSettings(NEEDS_UPGRADE_INDEX_VERSION, INTERNAL_MANAGED_FLAG_VALUE)) + .setMappings(createMapping(true, true)) + .setOrigin(ORIGIN) + .setAllowedElasticProductOrigins(Collections.emptyList()) + .setPriorSystemIndexDescriptors(Collections.emptyList()) + .setMigrationScript(""" + if (ctx._source.some_field != null) { + ctx._source.some_field = 'migrated'; + } + """) + .build(); + @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)).build(); @@ -77,7 +101,9 @@ protected boolean forbidPrivateIndexSettings() { protected Collection> nodePlugins() { List> plugins = new ArrayList<>(super.nodePlugins()); plugins.add(TestPlugin.class); + plugins.add(SecondTestPlugin.class); plugins.add(ReindexPlugin.class); + plugins.add(PainlessPlugin.class); return plugins; } @@ -115,7 +141,7 @@ public void testStartMigrationAndImmediatelyCheckStatus() throws Exception { }); } - public void testMigrateInternalManagedSystemIndex() throws Exception { + public void testMigrateSystemIndex() throws Exception { createSystemIndexForDescriptor(INTERNAL_MANAGED); createSystemIndexForDescriptor(INTERNAL_UNMANAGED); createSystemIndexForDescriptor(EXTERNAL_MANAGED); @@ -171,25 +197,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { postUpgradeHookCalled.set(true); }); - PostFeatureUpgradeRequest migrationRequest = new PostFeatureUpgradeRequest(TEST_REQUEST_TIMEOUT); - PostFeatureUpgradeResponse migrationResponse = client().execute(PostFeatureUpgradeAction.INSTANCE, migrationRequest).get(); - assertThat(migrationResponse.getReason(), nullValue()); - assertThat(migrationResponse.getElasticsearchException(), nullValue()); - final Set migratingFeatures = migrationResponse.getFeatures() - .stream() - .map(PostFeatureUpgradeResponse.Feature::getFeatureName) - .collect(Collectors.toSet()); - assertThat(migratingFeatures, hasItem(FEATURE_NAME)); - - GetFeatureUpgradeStatusRequest getStatusRequest = new GetFeatureUpgradeStatusRequest(TEST_REQUEST_TIMEOUT); - // The feature upgrade may take longer than ten seconds when tests are running - // in parallel, so we give assertBusy a sixty-second timeout. 
- assertBusy(() -> { - GetFeatureUpgradeStatusResponse statusResponse = client().execute(GetFeatureUpgradeStatusAction.INSTANCE, getStatusRequest) - .get(); - logger.info(Strings.toString(statusResponse)); - assertThat(statusResponse.getUpgradeStatus(), equalTo(GetFeatureUpgradeStatusResponse.UpgradeStatus.NO_MIGRATION_NEEDED)); - }, 60, TimeUnit.SECONDS); + executeMigration(FEATURE_NAME); // Waiting for shards to stabilize if indices were moved around ensureGreen(); @@ -197,14 +205,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { assertTrue("the pre-migration hook wasn't actually called", preUpgradeHookCalled.get()); assertTrue("the post-migration hook wasn't actually called", postUpgradeHookCalled.get()); - Metadata finalMetadata = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState().metadata(); - // Check that the results metadata is what we expect. - FeatureMigrationResults currentResults = finalMetadata.custom(FeatureMigrationResults.TYPE); - assertThat(currentResults, notNullValue()); - assertThat(currentResults.getFeatureStatuses(), allOf(aMapWithSize(1), hasKey(FEATURE_NAME))); - assertThat(currentResults.getFeatureStatuses().get(FEATURE_NAME).succeeded(), is(true)); - assertThat(currentResults.getFeatureStatuses().get(FEATURE_NAME).getFailedIndexName(), nullValue()); - assertThat(currentResults.getFeatureStatuses().get(FEATURE_NAME).getException(), nullValue()); + Metadata finalMetadata = assertMetadataAfterMigration(FEATURE_NAME); assertIndexHasCorrectProperties( finalMetadata, @@ -240,6 +241,18 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { ); } + private static Metadata assertMetadataAfterMigration(String featureName) { + Metadata finalMetadata = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState().metadata(); + // Check that the results metadata is what we expect. + FeatureMigrationResults currentResults = finalMetadata.custom(FeatureMigrationResults.TYPE); + assertThat(currentResults, notNullValue()); + assertThat(currentResults.getFeatureStatuses(), allOf(aMapWithSize(1), hasKey(featureName))); + assertThat(currentResults.getFeatureStatuses().get(featureName).succeeded(), is(true)); + assertThat(currentResults.getFeatureStatuses().get(featureName).getFailedIndexName(), nullValue()); + assertThat(currentResults.getFeatureStatuses().get(featureName).getException(), nullValue()); + return finalMetadata; + } + public void testMigrateIndexWithWriteBlock() throws Exception { createSystemIndexForDescriptor(INTERNAL_UNMANAGED); @@ -317,6 +330,50 @@ public void onFailure(Exception e) { }); } + private void executeMigration(String featureName) throws Exception { + PostFeatureUpgradeRequest migrationRequest = new PostFeatureUpgradeRequest(TEST_REQUEST_TIMEOUT); + PostFeatureUpgradeResponse migrationResponse = client().execute(PostFeatureUpgradeAction.INSTANCE, migrationRequest).get(); + assertThat(migrationResponse.getReason(), nullValue()); + assertThat(migrationResponse.getElasticsearchException(), nullValue()); + final Set migratingFeatures = migrationResponse.getFeatures() + .stream() + .map(PostFeatureUpgradeResponse.Feature::getFeatureName) + .collect(Collectors.toSet()); + assertThat(migratingFeatures, hasItem(featureName)); + + GetFeatureUpgradeStatusRequest getStatusRequest = new GetFeatureUpgradeStatusRequest(TEST_REQUEST_TIMEOUT); + // The feature upgrade may take longer than ten seconds when tests are running + // in parallel, so we give assertBusy a sixty-second timeout. 
+ assertBusy(() -> { + GetFeatureUpgradeStatusResponse statusResponse = client().execute(GetFeatureUpgradeStatusAction.INSTANCE, getStatusRequest) + .get(); + logger.info(Strings.toString(statusResponse)); + assertThat(statusResponse.getUpgradeStatus(), equalTo(GetFeatureUpgradeStatusResponse.UpgradeStatus.NO_MIGRATION_NEEDED)); + }, 60, TimeUnit.SECONDS); + } + + public void testMigrateUsingScript() throws Exception { + createSystemIndexForDescriptor(INTERNAL_MANAGED_WITH_SCRIPT); + + executeMigration(SCRIPTED_INDEX_FEATURE_NAME); + ensureGreen(); + + Metadata metadata = assertMetadataAfterMigration(SCRIPTED_INDEX_FEATURE_NAME); + String newIndexName = ".int-mans-old-reindexed-for-" + UPGRADED_TO_VERSION; + assertIndexHasCorrectProperties( + metadata, + newIndexName, + INTERNAL_MANAGED_FLAG_VALUE, + true, + true, + Arrays.asList(".int-mans-old", ".internal-managed-with-script-alias") + ); + + SearchRequestBuilder searchRequestBuilder = prepareSearch(newIndexName).setQuery(QueryBuilders.termsQuery(FIELD_NAME, "migrated")) + .setSize(0); + assertHitCountAndNoFailures(searchRequestBuilder, INDEX_DOC_COUNT); + } + private String featureUpgradeErrorResponse(GetFeatureUpgradeStatusResponse statusResp) { return statusResp.getFeatureUpgradeStatuses() .stream() @@ -463,4 +520,21 @@ public void testMigrateWithTemplatesV2() throws Exception { assertThat(statusResp.getUpgradeStatus(), equalTo(GetFeatureUpgradeStatusResponse.UpgradeStatus.NO_MIGRATION_NEEDED)); }); } + + public static class SecondTestPlugin extends Plugin implements SystemIndexPlugin { + @Override + public String getFeatureName() { + return SCRIPTED_INDEX_FEATURE_NAME; + } + + @Override + public String getFeatureDescription() { + return "a plugin for testing system index migration"; + } + + @Override + public Collection getSystemIndexDescriptors(Settings settings) { + return Collections.singletonList(INTERNAL_MANAGED_WITH_SCRIPT); + } + } } diff --git a/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml b/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml index df557f9944253..e9c8a53ef24be 100644 --- a/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,2 +1,4 @@ ALL-UNNAMED: - outbound_network +org.elasticsearch.painless: + - create_class_loader diff --git a/modules/reindex/src/main/plugin-metadata/plugin-security.policy b/modules/reindex/src/main/plugin-metadata/plugin-security.policy index 016cc6365b6ee..2b6d821c4d1a8 100644 --- a/modules/reindex/src/main/plugin-metadata/plugin-security.policy +++ b/modules/reindex/src/main/plugin-metadata/plugin-security.policy @@ -10,6 +10,9 @@ grant { // reindex opens socket connections using the rest client permission java.net.SocketPermission "*", "connect"; + + // needed for Painless to generate runtime classes + permission java.lang.RuntimePermission "createClassLoader"; }; grant codeBase "${codebase.elasticsearch-rest-client}" { From 6648a03baba6fe067dd713f5d7dc2ed9504ea5b4 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Mon, 27 Jan 2025 16:04:36 +0100 Subject: [PATCH 063/383] Revert "Revert "Remove deprecated tracing.apm.* settings for v9 (#119926)"" (#120895) This reverts commit 5f44911ef83bbb101eab68902201938534129e6b. Reverts revert in #120268 after blockers have been resolved. 
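A minimal sketch of what remains supported after this change (values are placeholders taken from the tests; the `tracing.apm.*` equivalents are no longer read by the server CLI):

  # elasticsearch.yml
  telemetry.agent.server_url: https://myurl:443
  telemetry.agent.service_node_name: instance-0000000001
  telemetry.agent.global_labels.deployment_id: "123"

  # elasticsearch-keystore (secure settings)
  telemetry.secret_token   # or telemetry.api_key

With only one prefix left, the duplicate-setting checks and the merging of global labels across the `tracing.apm.agent.` and `telemetry.agent.` prefixes go away, and the "configured automatically by Elasticsearch" error now refers to `telemetry.agent.` keys.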
Relates to ES-10293 --- .../server/cli/APMJvmOptions.java | 50 +----- .../server/cli/APMJvmOptionsTests.java | 143 ++++++------------ docs/changelog/119926.yaml | 11 ++ .../org/elasticsearch/telemetry/apm/APM.java | 9 +- .../apm/internal/APMAgentSettings.java | 96 ++---------- .../apm/internal/APMAgentSettingsTests.java | 116 +------------- 6 files changed, 81 insertions(+), 344 deletions(-) create mode 100644 docs/changelog/119926.yaml diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java index c3b9768946767..1e57d9fab7cfd 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java @@ -187,20 +187,12 @@ static String agentCommandLineOption(Path agentJar, Path tmpPropertiesFile) { static void extractSecureSettings(SecureSettings secrets, Map propertiesMap) { final Set settingNames = secrets.getSettingNames(); for (String key : List.of("api_key", "secret_token")) { - for (String prefix : List.of("telemetry.", "tracing.apm.")) { - if (settingNames.contains(prefix + key)) { - if (propertiesMap.containsKey(key)) { - throw new IllegalStateException( - Strings.format("Duplicate telemetry setting: [telemetry.%s] and [tracing.apm.%s]", key, key) - ); - } - - try (SecureString token = secrets.getString(prefix + key)) { - propertiesMap.put(key, token.toString()); - } + String prefix = "telemetry."; + if (settingNames.contains(prefix + key)) { + try (SecureString token = secrets.getString(prefix + key)) { + propertiesMap.put(key, token.toString()); } } - } } @@ -227,44 +219,12 @@ private static Map extractDynamicSettings(Map pr static Map extractApmSettings(Settings settings) throws UserException { final Map propertiesMap = new HashMap<>(); - // tracing.apm.agent. is deprecated by telemetry.agent. 
final String telemetryAgentPrefix = "telemetry.agent."; - final String deprecatedTelemetryAgentPrefix = "tracing.apm.agent."; final Settings telemetryAgentSettings = settings.getByPrefix(telemetryAgentPrefix); telemetryAgentSettings.keySet().forEach(key -> propertiesMap.put(key, String.valueOf(telemetryAgentSettings.get(key)))); - final Settings apmAgentSettings = settings.getByPrefix(deprecatedTelemetryAgentPrefix); - for (String key : apmAgentSettings.keySet()) { - if (propertiesMap.containsKey(key)) { - throw new IllegalStateException( - Strings.format( - "Duplicate telemetry setting: [%s%s] and [%s%s]", - telemetryAgentPrefix, - key, - deprecatedTelemetryAgentPrefix, - key - ) - ); - } - propertiesMap.put(key, String.valueOf(apmAgentSettings.get(key))); - } - StringJoiner globalLabels = extractGlobalLabels(telemetryAgentPrefix, propertiesMap, settings); - if (globalLabels.length() == 0) { - globalLabels = extractGlobalLabels(deprecatedTelemetryAgentPrefix, propertiesMap, settings); - } else { - StringJoiner tracingGlobalLabels = extractGlobalLabels(deprecatedTelemetryAgentPrefix, propertiesMap, settings); - if (tracingGlobalLabels.length() != 0) { - throw new IllegalArgumentException( - "Cannot have global labels with tracing.agent prefix [" - + globalLabels - + "] and telemetry.apm.agent prefix [" - + tracingGlobalLabels - + "]" - ); - } - } if (globalLabels.length() > 0) { propertiesMap.put("global_labels", globalLabels.toString()); } @@ -274,7 +234,7 @@ static Map extractApmSettings(Settings settings) throws UserExce if (propertiesMap.containsKey(key)) { throw new UserException( ExitCodes.CONFIG, - "Do not set a value for [tracing.apm.agent." + key + "], as this is configured automatically by Elasticsearch" + "Do not set a value for [telemetry.agent." 
+ key + "], as this is configured automatically by Elasticsearch" ); } } diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java index a7ba8eb11fbcc..0e067afc1aa73 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/APMJvmOptionsTests.java @@ -25,18 +25,15 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Function; import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; @@ -82,109 +79,63 @@ public void testFileDeleteWorks() throws IOException { } public void testExtractSecureSettings() { - MockSecureSettings duplicateSecureSettings = new MockSecureSettings(); + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("telemetry.secret_token", "token"); + secureSettings.setString("telemetry.api_key", "key"); - for (String prefix : List.of("telemetry.", "tracing.apm.")) { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(prefix + "secret_token", "token"); - secureSettings.setString(prefix + "api_key", "key"); - - duplicateSecureSettings.setString(prefix + "api_key", "secret"); - - Map propertiesMap = new HashMap<>(); - APMJvmOptions.extractSecureSettings(secureSettings, propertiesMap); - - assertThat(propertiesMap, matchesMap(Map.of("secret_token", "token", "api_key", "key"))); - } - - Exception exception = expectThrows( - IllegalStateException.class, - () -> APMJvmOptions.extractSecureSettings(duplicateSecureSettings, new HashMap<>()) - ); - assertThat(exception.getMessage(), containsString("Duplicate telemetry setting")); - assertThat(exception.getMessage(), containsString("telemetry.api_key")); - assertThat(exception.getMessage(), containsString("tracing.apm.api_key")); + Map propertiesMap = new HashMap<>(); + APMJvmOptions.extractSecureSettings(secureSettings, propertiesMap); + assertThat(propertiesMap, matchesMap(Map.of("secret_token", "token", "api_key", "key"))); } public void testExtractSettings() throws UserException { - Function buildSettings = (prefix) -> Settings.builder() - .put(prefix + "server_url", "https://myurl:443") - .put(prefix + "service_node_name", "instance-0000000001"); - - for (String prefix : List.of("tracing.apm.agent.", "telemetry.agent.")) { - var name = "APM Tracing"; - var deploy = "123"; - var org = "456"; - var extracted = APMJvmOptions.extractApmSettings( - buildSettings.apply(prefix) - .put(prefix + "global_labels.deployment_name", name) - .put(prefix + "global_labels.deployment_id", deploy) - .put(prefix + "global_labels.organization_id", org) - .build() - ); - - assertThat( - extracted, - allOf( - hasEntry("server_url", "https://myurl:443"), - hasEntry("service_node_name", "instance-0000000001"), - 
hasEntry(equalTo("global_labels"), not(endsWith(","))), // test that we have collapsed all global labels into one - not(hasKey("global_labels.organization_id")) // tests that we strip out the top level label keys - ) - ); - - List labels = Arrays.stream(extracted.get("global_labels").split(",")).toList(); - assertThat(labels, hasSize(3)); - assertThat(labels, containsInAnyOrder("deployment_name=APM Tracing", "organization_id=" + org, "deployment_id=" + deploy)); - - // test replacing with underscores and skipping empty - name = "APM=Tracing"; - deploy = ""; - org = ",456"; - extracted = APMJvmOptions.extractApmSettings( - buildSettings.apply(prefix) - .put(prefix + "global_labels.deployment_name", name) - .put(prefix + "global_labels.deployment_id", deploy) - .put(prefix + "global_labels.organization_id", org) - .build() - ); - labels = Arrays.stream(extracted.get("global_labels").split(",")).toList(); - assertThat(labels, hasSize(2)); - assertThat(labels, containsInAnyOrder("deployment_name=APM_Tracing", "organization_id=_456")); - } - - IllegalStateException err = expectThrows( - IllegalStateException.class, - () -> APMJvmOptions.extractApmSettings( - Settings.builder() - .put("tracing.apm.agent.server_url", "https://myurl:443") - .put("telemetry.agent.server_url", "https://myurl-2:443") - .build() - ) - ); - assertThat(err.getMessage(), is("Duplicate telemetry setting: [telemetry.agent.server_url] and [tracing.apm.agent.server_url]")); - } - - public void testNoMixedLabels() { - String telemetryAgent = "telemetry.agent."; - String tracingAgent = "tracing.apm.agent."; - Settings settings = Settings.builder() - .put("tracing.apm.enabled", true) - .put(telemetryAgent + "server_url", "https://myurl:443") - .put(telemetryAgent + "service_node_name", "instance-0000000001") - .put(tracingAgent + "global_labels.deployment_id", "123") - .put(telemetryAgent + "global_labels.organization_id", "456") + Settings defaults = Settings.builder() + .put("telemetry.agent.server_url", "https://myurl:443") + .put("telemetry.agent.service_node_name", "instance-0000000001") .build(); - IllegalArgumentException err = assertThrows(IllegalArgumentException.class, () -> APMJvmOptions.extractApmSettings(settings)); + var name = "APM Tracing"; + var deploy = "123"; + var org = "456"; + var extracted = APMJvmOptions.extractApmSettings( + Settings.builder() + .put(defaults) + .put("telemetry.agent.global_labels.deployment_name", name) + .put("telemetry.agent.global_labels.deployment_id", deploy) + .put("telemetry.agent.global_labels.organization_id", org) + .build() + ); + assertThat( - err.getMessage(), - is( - "Cannot have global labels with tracing.agent prefix [organization_id=456] and" - + " telemetry.apm.agent prefix [deployment_id=123]" + extracted, + allOf( + hasEntry("server_url", "https://myurl:443"), + hasEntry("service_node_name", "instance-0000000001"), + hasEntry(equalTo("global_labels"), not(endsWith(","))), // test that we have collapsed all global labels into one + not(hasKey("global_labels.organization_id")) // tests that we strip out the top level label keys ) ); + + List labels = Arrays.stream(extracted.get("global_labels").split(",")).toList(); + assertThat(labels, hasSize(3)); + assertThat(labels, containsInAnyOrder("deployment_name=APM Tracing", "organization_id=" + org, "deployment_id=" + deploy)); + + // test replacing with underscores and skipping empty + name = "APM=Tracing"; + deploy = ""; + org = ",456"; + extracted = APMJvmOptions.extractApmSettings( + Settings.builder() + .put(defaults) 
+ .put("telemetry.agent.global_labels.deployment_name", name) + .put("telemetry.agent.global_labels.deployment_id", deploy) + .put("telemetry.agent.global_labels.organization_id", org) + .build() + ); + labels = Arrays.stream(extracted.get("global_labels").split(",")).toList(); + assertThat(labels, hasSize(2)); + assertThat(labels, containsInAnyOrder("deployment_name=APM_Tracing", "organization_id=_456")); } private Path makeFakeAgentJar() throws IOException { diff --git a/docs/changelog/119926.yaml b/docs/changelog/119926.yaml new file mode 100644 index 0000000000000..3afafd5b2117f --- /dev/null +++ b/docs/changelog/119926.yaml @@ -0,0 +1,11 @@ +pr: 119926 +summary: "Deprecated tracing.apm.* settings got removed." +area: Infra/Metrics +type: breaking +issues: [] +breaking: + title: "Deprecated tracing.apm.* settings got removed." + area: Cluster and node setting + details: Deprecated `tracing.apm.*` settings got removed, use respective `telemetry.*` / `telemetry.tracing.*` settings instead. + impact: 9.x nodes will refuse to start if any such setting (including secret settings) is still present. + notable: false diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java index 339a4ec24ca13..43447cfa21a62 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java @@ -92,14 +92,7 @@ public List> getSettings() { APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING, APMAgentSettings.TELEMETRY_TRACING_NAMES_INCLUDE_SETTING, APMAgentSettings.TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING, - APMAgentSettings.TELEMETRY_TRACING_SANITIZE_FIELD_NAMES, - // The settings below are deprecated and are currently kept as fallback. - APMAgentSettings.TRACING_APM_SECRET_TOKEN_SETTING, - APMAgentSettings.TRACING_APM_API_KEY_SETTING, - APMAgentSettings.TRACING_APM_ENABLED_SETTING, - APMAgentSettings.TRACING_APM_NAMES_INCLUDE_SETTING, - APMAgentSettings.TRACING_APM_NAMES_EXCLUDE_SETTING, - APMAgentSettings.TRACING_APM_SANITIZE_FIELD_NAMES + APMAgentSettings.TELEMETRY_TRACING_SANITIZE_FIELD_NAMES ); } } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java index 68adc97b74449..9c5552f9e03e1 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java @@ -25,9 +25,7 @@ import java.util.List; import java.util.Objects; import java.util.Set; -import java.util.function.Function; -import static org.elasticsearch.common.settings.Setting.Property.Deprecated; import static org.elasticsearch.common.settings.Setting.Property.NodeScope; import static org.elasticsearch.common.settings.Setting.Property.OperatorDynamic; @@ -110,9 +108,6 @@ public void setAgentSetting(String key, String value) { private static final String TELEMETRY_SETTING_PREFIX = "telemetry."; - // The old legacy prefix - private static final String LEGACY_TRACING_APM_SETTING_PREFIX = "tracing.apm."; - /** * Allow-list of APM agent config keys users are permitted to configure. *

WARNING: Make sure to update the module entitlements if permitting additional agent keys @@ -259,56 +254,24 @@ private static Setting concreteAgentSetting(String namespace, String qua public static final Setting.AffixSetting APM_AGENT_SETTINGS = Setting.prefixKeySetting( TELEMETRY_SETTING_PREFIX + "agent.", - LEGACY_TRACING_APM_SETTING_PREFIX + "agent.", - (namespace, qualifiedKey) -> qualifiedKey.startsWith(LEGACY_TRACING_APM_SETTING_PREFIX) - ? concreteAgentSetting(namespace, qualifiedKey, NodeScope, OperatorDynamic, Deprecated) - : concreteAgentSetting(namespace, qualifiedKey, NodeScope, OperatorDynamic) + null, // no fallback + (namespace, qualifiedKey) -> concreteAgentSetting(namespace, qualifiedKey, NodeScope, OperatorDynamic) ); - /** - * @deprecated in favor of TELEMETRY_TRACING_NAMES_INCLUDE_SETTING. - */ - @Deprecated - public static final Setting> TRACING_APM_NAMES_INCLUDE_SETTING = Setting.stringListSetting( - LEGACY_TRACING_APM_SETTING_PREFIX + "names.include", - OperatorDynamic, - NodeScope, - Deprecated - ); - - public static final Setting> TELEMETRY_TRACING_NAMES_INCLUDE_SETTING = Setting.listSetting( + public static final Setting> TELEMETRY_TRACING_NAMES_INCLUDE_SETTING = Setting.stringListSetting( TELEMETRY_SETTING_PREFIX + "tracing.names.include", - TRACING_APM_NAMES_INCLUDE_SETTING, - Function.identity(), OperatorDynamic, NodeScope ); - /** - * @deprecated in favor of TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING. - */ - @Deprecated - public static final Setting> TRACING_APM_NAMES_EXCLUDE_SETTING = Setting.stringListSetting( - LEGACY_TRACING_APM_SETTING_PREFIX + "names.exclude", - OperatorDynamic, - NodeScope, - Deprecated - ); - - public static final Setting> TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING = Setting.listSetting( + public static final Setting> TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING = Setting.stringListSetting( TELEMETRY_SETTING_PREFIX + "tracing.names.exclude", - TRACING_APM_NAMES_EXCLUDE_SETTING, - Function.identity(), OperatorDynamic, NodeScope ); - /** - * @deprecated in favor of TELEMETRY_TRACING_SANITIZE_FIELD_NAMES. - */ - @Deprecated - public static final Setting> TRACING_APM_SANITIZE_FIELD_NAMES = Setting.stringListSetting( - LEGACY_TRACING_APM_SETTING_PREFIX + "sanitize_field_names", + public static final Setting> TELEMETRY_TRACING_SANITIZE_FIELD_NAMES = Setting.stringListSetting( + TELEMETRY_SETTING_PREFIX + "tracing.sanitize_field_names", List.of( "password", "passwd", @@ -324,33 +287,12 @@ private static Setting concreteAgentSetting(String namespace, String qua "set-cookie" ), OperatorDynamic, - NodeScope, - Deprecated - ); - - public static final Setting> TELEMETRY_TRACING_SANITIZE_FIELD_NAMES = Setting.listSetting( - TELEMETRY_SETTING_PREFIX + "tracing.sanitize_field_names", - TRACING_APM_SANITIZE_FIELD_NAMES, - Function.identity(), - OperatorDynamic, NodeScope ); - /** - * @deprecated in favor of TELEMETRY_TRACING_ENABLED_SETTING. - */ - @Deprecated - public static final Setting TRACING_APM_ENABLED_SETTING = Setting.boolSetting( - LEGACY_TRACING_APM_SETTING_PREFIX + "enabled", - false, - OperatorDynamic, - NodeScope, - Deprecated - ); - public static final Setting TELEMETRY_TRACING_ENABLED_SETTING = Setting.boolSetting( TELEMETRY_SETTING_PREFIX + "tracing.enabled", - TRACING_APM_ENABLED_SETTING, + false, OperatorDynamic, NodeScope ); @@ -362,33 +304,13 @@ private static Setting concreteAgentSetting(String namespace, String qua NodeScope ); - /** - * @deprecated in favor of TELEMETRY_SECRET_TOKEN_SETTING. 
- */ - @Deprecated - public static final Setting TRACING_APM_SECRET_TOKEN_SETTING = SecureSetting.secureString( - LEGACY_TRACING_APM_SETTING_PREFIX + "secret_token", - null, - Deprecated - ); - public static final Setting TELEMETRY_SECRET_TOKEN_SETTING = SecureSetting.secureString( TELEMETRY_SETTING_PREFIX + "secret_token", - TRACING_APM_SECRET_TOKEN_SETTING - ); - - /** - * @deprecated in favor of TELEMETRY_API_KEY_SETTING. - */ - @Deprecated - public static final Setting TRACING_APM_API_KEY_SETTING = SecureSetting.secureString( - LEGACY_TRACING_APM_SETTING_PREFIX + "api_key", - null, - Deprecated + null ); public static final Setting TELEMETRY_API_KEY_SETTING = SecureSetting.secureString( TELEMETRY_SETTING_PREFIX + "api_key", - TRACING_APM_API_KEY_SETTING + null ); } diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java index a60048c82a3c9..5516672420924 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java @@ -11,8 +11,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.MockSecureSettings; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.mockito.Mockito; @@ -21,21 +19,13 @@ import java.util.Set; import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.APM_AGENT_SETTINGS; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_API_KEY_SETTING; import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_SECRET_TOKEN_SETTING; import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_TRACING_ENABLED_SETTING; import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING; import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_TRACING_NAMES_INCLUDE_SETTING; import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_TRACING_SANITIZE_FIELD_NAMES; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_API_KEY_SETTING; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_ENABLED_SETTING; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_NAMES_EXCLUDE_SETTING; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_NAMES_INCLUDE_SETTING; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_SANITIZE_FIELD_NAMES; -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TRACING_APM_SECRET_TOKEN_SETTING; -import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasItem; import static org.mockito.Mockito.clearInvocations; import static org.mockito.Mockito.mock; @@ -70,14 +60,6 @@ public void testEnableTracing() { } } - public void testEnableTracingUsingLegacySetting() { - Settings settings = 
Settings.builder().put(TRACING_APM_ENABLED_SETTING.getKey(), true).build(); - apmAgentSettings.initAgentSystemProperties(settings); - - verify(apmAgentSettings).setAgentSetting("recording", "true"); - assertWarnings("[tracing.apm.enabled] setting was deprecated in Elasticsearch and will be removed in a future release."); - } - public void testEnableMetrics() { for (boolean tracingEnabled : List.of(true, false)) { clearInvocations(apmAgentSettings, apmTelemetryProvider.getMeterService()); @@ -121,14 +103,6 @@ public void testDisableTracing() { } } - public void testDisableTracingUsingLegacySetting() { - Settings settings = Settings.builder().put(TRACING_APM_ENABLED_SETTING.getKey(), false).build(); - apmAgentSettings.initAgentSystemProperties(settings); - - verify(apmAgentSettings).setAgentSetting("recording", "false"); - assertWarnings("[tracing.apm.enabled] setting was deprecated in Elasticsearch and will be removed in a future release."); - } - public void testDisableMetrics() { for (boolean tracingEnabled : List.of(true, false)) { clearInvocations(apmAgentSettings, apmTelemetryProvider.getMeterService()); @@ -181,70 +155,18 @@ public void testSetAgentSettings() { verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true"); } - public void testSetAgentsSettingsWithLegacyPrefix() { - Settings settings = Settings.builder() - .put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true) - .put("tracing.apm.agent.span_compression_enabled", "true") - .build(); - apmAgentSettings.initAgentSystemProperties(settings); - - verify(apmAgentSettings).setAgentSetting("recording", "true"); - verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true"); - assertWarnings( - "[tracing.apm.agent.span_compression_enabled] setting was deprecated in Elasticsearch and will be removed in a future release." - ); - } - /** * Check that invalid or forbidden APM agent settings are rejected. */ public void testRejectForbiddenOrUnknownAgentSettings() { - List prefixes = List.of(APM_AGENT_SETTINGS.getKey(), "tracing.apm.agent."); - for (String prefix : prefixes) { - Settings settings = Settings.builder().put(prefix + "unknown", "true").build(); - Exception exception = expectThrows(IllegalArgumentException.class, () -> APM_AGENT_SETTINGS.getAsMap(settings)); - assertThat(exception.getMessage(), containsString("[" + prefix + "unknown]")); - } - // though, accept / ignore nested global_labels - for (String prefix : prefixes) { - Settings settings = Settings.builder().put(prefix + "global_labels.abc", "123").build(); - APMAgentSettings.APM_AGENT_SETTINGS.getAsMap(settings); - - if (prefix.startsWith("tracing.apm.agent.")) { - assertWarnings( - "[tracing.apm.agent.global_labels.abc] setting was deprecated in Elasticsearch and will be removed in a future release." 
- ); - } - } - } - - public void testTelemetryTracingNamesIncludeFallback() { - Settings settings = Settings.builder().put(TRACING_APM_NAMES_INCLUDE_SETTING.getKey(), "abc,xyz").build(); - - List included = TELEMETRY_TRACING_NAMES_INCLUDE_SETTING.get(settings); - - assertThat(included, containsInAnyOrder("abc", "xyz")); - assertWarnings("[tracing.apm.names.include] setting was deprecated in Elasticsearch and will be removed in a future release."); - } - - public void testTelemetryTracingNamesExcludeFallback() { - Settings settings = Settings.builder().put(TRACING_APM_NAMES_EXCLUDE_SETTING.getKey(), "abc,xyz").build(); - - List included = TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING.get(settings); - - assertThat(included, containsInAnyOrder("abc", "xyz")); - assertWarnings("[tracing.apm.names.exclude] setting was deprecated in Elasticsearch and will be removed in a future release."); - } - - public void testTelemetryTracingSanitizeFieldNamesFallback() { - Settings settings = Settings.builder().put(TRACING_APM_SANITIZE_FIELD_NAMES.getKey(), "abc,xyz").build(); - - List included = TELEMETRY_TRACING_SANITIZE_FIELD_NAMES.get(settings); + String prefix = APM_AGENT_SETTINGS.getKey(); + Settings settings = Settings.builder().put(prefix + "unknown", "true").build(); + Exception exception = expectThrows(IllegalArgumentException.class, () -> APM_AGENT_SETTINGS.getAsMap(settings)); + assertThat(exception.getMessage(), containsString("[" + prefix + "unknown]")); - assertThat(included, containsInAnyOrder("abc", "xyz")); - assertWarnings( - "[tracing.apm.sanitize_field_names] setting was deprecated in Elasticsearch and will be removed in a future release." - ); + // though, accept / ignore nested global_labels + var map = APMAgentSettings.APM_AGENT_SETTINGS.getAsMap(Settings.builder().put(prefix + "global_labels.abc", "123").build()); + assertThat(map, hasEntry("global_labels.abc", "123")); } public void testTelemetryTracingSanitizeFieldNamesFallbackDefault() { @@ -252,28 +174,6 @@ public void testTelemetryTracingSanitizeFieldNamesFallbackDefault() { assertThat(included, hasItem("password")); // and more defaults } - public void testTelemetrySecretTokenFallback() { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(TRACING_APM_SECRET_TOKEN_SETTING.getKey(), "verysecret"); - Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); - - try (SecureString secureString = TELEMETRY_SECRET_TOKEN_SETTING.get(settings)) { - assertEquals("verysecret", secureString.toString()); - } - assertWarnings("[tracing.apm.secret_token] setting was deprecated in Elasticsearch and will be removed in a future release."); - } - - public void testTelemetryApiKeyFallback() { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(TRACING_APM_API_KEY_SETTING.getKey(), "abc"); - Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); - - try (SecureString secureString = TELEMETRY_API_KEY_SETTING.get(settings)) { - assertEquals("abc", secureString.toString()); - } - assertWarnings("[tracing.apm.api_key] setting was deprecated in Elasticsearch and will be removed in a future release."); - } - /** * Check that invalid or forbidden APM agent settings are rejected if their last part resembles an allowed setting. 
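// A minimal sketch (not from the patch) of the secure settings that remain after this change, using the
// MockSecureSettings test helper seen above; the tracing.apm.* keystore entries named in the removed
// fallbacks are now rejected rather than honored as fallbacks.
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("telemetry.secret_token", "token"); // was tracing.apm.secret_token
secureSettings.setString("telemetry.api_key", "key");        // was tracing.apm.api_key
Settings settingsWithSecrets = Settings.builder().setSecureSettings(secureSettings).build();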
*/ From 3534ded9370af8887602eef1af189407cecdc3f7 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 27 Jan 2025 10:08:09 -0500 Subject: [PATCH 064/383] [ML] Adding dynamic filtering for EIS configuration (#120235) * Functionality for filtering task types based on acl info for EIS * Fixing compile and test errors * updating with chat_completion * Adding acl call * [CI] Auto commit changes from spotless * working run * Starting to rename * [CI] Auto commit changes from spotless * Writing some tests * rename authorizations endpoint and response fields * Fixing enabled task types bug * Adding timed listener tests * Fixing some test failures * Adding more tests and a mock gateway * Switch sparse embedding name from gateway * Adding supported streaming tasks tests * Trying to fix the javadoc * Removing commented code * Still fixing javadoc * Lets try a break this time * add AuthHandler, AuthRequest, and AuthResponseEntity tests * [CI] Auto commit changes from spotless * Adding tests * Speeding up test * Adding atomic ref * Refactoring * Addressing feedback * Forgot a fix * Removing todo --------- Co-authored-by: elasticsearchmachine Co-authored-by: Brendan Jugan --- .../inference/InferenceService.java | 4 +- .../InferenceServiceConfiguration.java | 25 +- .../InferenceServiceConfigurationTests.java | 41 +++ .../qa/inference-service-tests/build.gradle | 2 + .../inference/CreateFromDeploymentIT.java | 10 +- .../inference/InferenceBaseRestTest.java | 47 +-- .../xpack/inference/InferenceCrudIT.java | 176 ---------- .../inference/InferenceGetServicesIT.java | 269 ++++++++++++++++ ...icInferenceServiceAuthorizationServer.java | 83 +++++ .../xpack/inference/RetryRule.java | 81 +++++ .../xpack/inference/TextEmbeddingCrudIT.java | 4 +- .../xpack/inference/InferencePlugin.java | 14 +- .../AmazonBedrockRequestSender.java | 14 + .../http/retry/RetryingHttpSender.java | 2 +- .../http/sender/HttpRequestSender.java | 33 ++ .../external/http/sender/RequestTask.java | 49 +-- .../external/http/sender/Sender.java | 11 + .../external/http/sender/TimedListener.java | 74 +++++ ...cInferenceServiceAuthorizationRequest.java | 79 +++++ ...nceServiceAuthorizationResponseEntity.java | 179 +++++++++++ .../elastic/ElasticInferenceService.java | 135 ++++++-- .../ElasticInferenceServiceComponents.java | 4 +- .../ElasticInferenceServiceFeature.java | 1 + .../ElasticInferenceServiceSettings.java | 1 + .../ElasticInferenceServiceAuthorization.java | 74 +++++ ...cInferenceServiceAuthorizationHandler.java | 137 ++++++++ .../elser/HuggingFaceElserService.java | 4 +- .../telemetry/TraceContextHandler.java | 8 +- .../AmazonBedrockMockRequestSender.java | 14 + .../http/sender/HttpRequestSenderTests.java | 92 +++++- .../sender/RequestExecutorServiceTests.java | 5 +- .../http/sender/RequestTaskTests.java | 15 +- .../http/sender/TimedListenerTests.java | 152 +++++++++ ...renceServiceAuthorizationRequestTests.java | 39 +++ ...rviceAuthorizationResponseEntityTests.java | 64 ++++ .../elastic/ElasticInferenceServiceTests.java | 304 +++++++++++++++--- ...renceServiceAuthorizationHandlerTests.java | 265 +++++++++++++++ ...ticInferenceServiceAuthorizationTests.java | 61 ++++ 38 files changed, 2198 insertions(+), 374 deletions(-) create mode 100644 x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java create mode 100644 
x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockElasticInferenceServiceAuthorizationServer.java create mode 100644 x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/RetryRule.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/TimedListener.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceAuthorizationRequest.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceAuthorizationResponseEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorization.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandler.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/TimedListenerTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceAuthorizationRequestTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceAuthorizationResponseEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandlerTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationTests.java diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index c8ed9e6b230ce..bea7ee52ecfb0 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -78,8 +78,8 @@ default void init(Client client) {} * Whether this service should be hidden from the API. Should be used for services * that are not ready to be used. 
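// A minimal sketch (not from the patch) of how a caller might use hideFromConfigurationApi(), described
// above and switched to a primitive boolean just below, to filter hidden services out of a configuration
// listing; "services" is a hypothetical List<InferenceService> and is not part of this change.
List<InferenceService> visibleServices = services.stream()
    .filter(service -> service.hideFromConfigurationApi() == false)
    .toList();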
*/ - default Boolean hideFromConfigurationApi() { - return Boolean.FALSE; + default boolean hideFromConfigurationApi() { + return false; } /** diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceServiceConfiguration.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceConfiguration.java index 5004186d03848..e82c1f8442207 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceConfiguration.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceServiceConfiguration.java @@ -24,13 +24,11 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; -import java.util.ArrayList; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.stream.Collectors; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; @@ -80,14 +78,11 @@ public InferenceServiceConfiguration(StreamInput in) throws IOException { private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "inference_service_configuration", true, - args -> { - List taskTypes = (ArrayList) args[2]; - return new InferenceServiceConfiguration.Builder().setService((String) args[0]) - .setName((String) args[1]) - .setTaskTypes(EnumSet.copyOf(taskTypes.stream().map(TaskType::fromString).collect(Collectors.toList()))) - .setConfigurations((Map) args[3]) - .build(); - } + args -> new InferenceServiceConfiguration.Builder().setService((String) args[0]) + .setName((String) args[1]) + .setTaskTypes((List) args[2]) + .setConfigurations((Map) args[3]) + .build() ); static { @@ -195,6 +190,16 @@ public Builder setTaskTypes(EnumSet taskTypes) { return this; } + public Builder setTaskTypes(List taskTypes) { + var enumTaskTypes = EnumSet.noneOf(TaskType.class); + + for (var supportedTaskTypeString : taskTypes) { + enumTaskTypes.add(TaskType.fromStringOrStatusException(supportedTaskTypeString)); + } + this.taskTypes = enumTaskTypes; + return this; + } + public Builder setConfigurations(Map configurations) { this.configurations = configurations; return this; diff --git a/server/src/test/java/org/elasticsearch/inference/InferenceServiceConfigurationTests.java b/server/src/test/java/org/elasticsearch/inference/InferenceServiceConfigurationTests.java index 490ed68ab3e66..e4dfd43d43fbe 100644 --- a/server/src/test/java/org/elasticsearch/inference/InferenceServiceConfigurationTests.java +++ b/server/src/test/java/org/elasticsearch/inference/InferenceServiceConfigurationTests.java @@ -66,6 +66,47 @@ public void testToXContent() throws IOException { assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); } + public void testToXContent_EmptyTaskTypes() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "service": "some_provider", + "name": "Some Provider", + "task_types": [], + "configurations": { + "text_field_configuration": { + "description": "Wow, this tooltip is useful.", + "label": "Very important field", + "required": true, + "sensitive": true, + "updatable": false, + "type": "str" + }, + "numeric_field_configuration": { + "default_value": 3, + "description": "Wow, this tooltip is useful.", + "label": "Very important numeric field", + "required": true, + "sensitive": false, + "updatable": true, + "type": "int" + } + } + } + """); + + InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes( + new 
BytesArray(content), + XContentType.JSON + ); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + InferenceServiceConfiguration parsed; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { + parsed = InferenceServiceConfiguration.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + } + public void testToMap() { InferenceServiceConfiguration configField = InferenceServiceConfigurationTestUtils.getRandomServiceConfigurationField(); Map configFieldAsMap = configField.toMap(); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/build.gradle b/x-pack/plugin/inference/qa/inference-service-tests/build.gradle index 70149ec71e0e5..052cb8d7184f7 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/build.gradle +++ b/x-pack/plugin/inference/qa/inference-service-tests/build.gradle @@ -4,6 +4,8 @@ dependencies { javaRestTestImplementation project(path: xpackModule('core')) javaRestTestImplementation project(path: xpackModule('inference')) clusterPlugins project(':x-pack:plugin:inference:qa:test-service-plugin') + // Added this to have access to MockWebServer within the tests + javaRestTestImplementation(testArtifact(project(xpackModule('core')))) } tasks.named("javaRestTest").configure { diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java index 273b16d295a3d..0a2200ff912ac 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java @@ -29,7 +29,7 @@ public void testAttachToDeployment() throws IOException { CustomElandModelIT.createMlNodeTextExpansionModel(modelId, client()); var response = startMlNodeDeploymemnt(modelId, deploymentId); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); var inferenceId = "inference_on_existing_deployment"; var putModel = putModel(inferenceId, endpointConfig(deploymentId), TaskType.SPARSE_EMBEDDING); @@ -58,7 +58,7 @@ public void testAttachWithModelId() throws IOException { CustomElandModelIT.createMlNodeTextExpansionModel(modelId, client()); var response = startMlNodeDeploymemnt(modelId, deploymentId); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); var inferenceId = "inference_on_existing_deployment"; var putModel = putModel(inferenceId, endpointConfig(modelId, deploymentId), TaskType.SPARSE_EMBEDDING); @@ -93,7 +93,7 @@ public void testModelIdDoesNotMatch() throws IOException { CustomElandModelIT.createMlNodeTextExpansionModel(modelId, client()); var response = startMlNodeDeploymemnt(modelId, deploymentId); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); var inferenceId = "inference_on_existing_deployment"; var e = expectThrows( @@ -123,7 +123,7 @@ public void testNumAllocationsIsUpdated() throws IOException { CustomElandModelIT.createMlNodeTextExpansionModel(modelId, client()); var response = startMlNodeDeploymemnt(modelId, deploymentId); - assertOkOrCreated(response); + 
assertStatusOkOrCreated(response); var inferenceId = "test_num_allocations_updated"; var putModel = putModel(inferenceId, endpointConfig(deploymentId), TaskType.SPARSE_EMBEDDING); @@ -145,7 +145,7 @@ public void testNumAllocationsIsUpdated() throws IOException { ) ); - assertOkOrCreated(updateMlNodeDeploymemnt(deploymentId, 2)); + assertStatusOkOrCreated(updateMlNodeDeploymemnt(deploymentId, 2)); var updatedServiceSettings = getModel(inferenceId).get("service_settings"); assertThat( diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index ccf1ccc3da23f..49b2f5b041b9e 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -52,7 +52,6 @@ public class InferenceBaseRestTest extends ESRestTestCase { .user("x_pack_rest_user", "x-pack-test-password") .feature(FeatureFlag.INFERENCE_UNIFIED_API_ENABLED) .build(); - @ClassRule public static MlModelServer mlModelServer = new MlModelServer(); @@ -175,20 +174,20 @@ static String mockDenseServiceModelConfig() { protected void deleteModel(String modelId) throws IOException { var request = new Request("DELETE", "_inference/" + modelId); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); } protected Response deleteModel(String modelId, String queryParams) throws IOException { var request = new Request("DELETE", "_inference/" + modelId + "?" 
+ queryParams); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); return response; } protected void deleteModel(String modelId, TaskType taskType) throws IOException { var request = new Request("DELETE", Strings.format("_inference/%s/%s", taskType, modelId)); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); } protected void putSemanticText(String endpointId, String indexName) throws IOException { @@ -207,7 +206,7 @@ protected void putSemanticText(String endpointId, String indexName) throws IOExc """, endpointId); request.setJsonEntity(body); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); } protected void putSemanticText(String endpointId, String searchEndpointId, String indexName) throws IOException { @@ -227,7 +226,7 @@ protected void putSemanticText(String endpointId, String searchEndpointId, Strin """, endpointId, searchEndpointId); request.setJsonEntity(body); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); } protected Map putModel(String modelId, String modelConfig, TaskType taskType) throws IOException { @@ -260,7 +259,7 @@ protected Map putPipeline(String pipelineId, String modelId) thr protected void deletePipeline(String pipelineId) throws IOException { var request = new Request("DELETE", Strings.format("_ingest/pipeline/%s", pipelineId)); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); } /** @@ -275,7 +274,7 @@ Map putRequest(String endpoint, String body) throws IOException var request = new Request("PUT", endpoint); request.setJsonEntity(body); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); return entityAsMap(response); } @@ -283,7 +282,7 @@ Map postRequest(String endpoint, String body) throws IOException var request = new Request("POST", endpoint); request.setJsonEntity(body); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); return entityAsMap(response); } @@ -300,7 +299,7 @@ protected Map putE5TrainedModels() throws IOException { request.setJsonEntity(body); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); return entityAsMap(response); } @@ -308,7 +307,7 @@ protected Map deployE5TrainedModels() throws IOException { var request = new Request("POST", "_ml/trained_models/.multilingual-e5-small/deployment/_start?wait_for=fully_allocated"); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); return entityAsMap(response); } @@ -330,31 +329,13 @@ protected List> getAllModels() throws IOException { return (List>) getInternalAsMap("_inference/_all").get("endpoints"); } - protected List getAllServices() throws IOException { - var endpoint = Strings.format("_inference/_services"); - return getInternalAsList(endpoint); - } - - @SuppressWarnings("unchecked") - protected List getServices(TaskType taskType) throws IOException { - var endpoint = Strings.format("_inference/_services/%s", taskType); - return getInternalAsList(endpoint); - } - private Map getInternalAsMap(String endpoint) throws IOException { var request = new Request("GET", endpoint); var response = 
client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); return entityAsMap(response); } - private List getInternalAsList(String endpoint) throws IOException { - var request = new Request("GET", endpoint); - var response = client().performRequest(request); - assertOkOrCreated(response); - return entityAsList(response); - } - protected Map infer(String modelId, List input) throws IOException { var endpoint = Strings.format("_inference/%s", modelId); return inferInternal(endpoint, input, null, Map.of()); @@ -475,7 +456,7 @@ private Map inferInternal( ) throws IOException { var request = createInferenceRequest(endpoint, input, query, queryParameters); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); return entityAsMap(response); } @@ -511,7 +492,7 @@ protected void assertNonEmptyInferenceResults(Map resultMap, int } } - protected static void assertOkOrCreated(Response response) throws IOException { + static void assertStatusOkOrCreated(Response response) throws IOException { int statusCode = response.getStatusLine().getStatusCode(); // Once EntityUtils.toString(entity) is called the entity cannot be reused. // Avoid that call with check here. @@ -527,7 +508,7 @@ protected Map getTrainedModel(String inferenceEntityId) throws I var endpoint = Strings.format("_ml/trained_models/%s/_stats", inferenceEntityId); var request = new Request("GET", endpoint); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); return entityAsMap(response); } } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 610fafb8390da..b786cd1298495 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -18,11 +18,8 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Locale; @@ -145,179 +142,6 @@ public void testApisWithoutTaskType() throws IOException { deleteModel(modelId); } - @SuppressWarnings("unchecked") - public void testGetServicesWithoutTaskType() throws IOException { - List services = getAllServices(); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - assertThat(services.size(), equalTo(19)); - } else { - assertThat(services.size(), equalTo(18)); - } - - String[] providers = new String[services.size()]; - for (int i = 0; i < services.size(); i++) { - Map serviceConfig = (Map) services.get(i); - providers[i] = (String) serviceConfig.get("service"); - } - - var providerList = new ArrayList<>( - Arrays.asList( - "alibabacloud-ai-search", - "amazonbedrock", - "anthropic", - "azureaistudio", - "azureopenai", - "cohere", - 
"elasticsearch", - "googleaistudio", - "googlevertexai", - "hugging_face", - "jinaai", - "mistral", - "openai", - "streaming_completion_test_service", - "test_reranking_service", - "test_service", - "text_embedding_test_service", - "watsonxai" - ) - ); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - providerList.add(6, "elastic"); - } - assertArrayEquals(providerList.toArray(), providers); - } - - @SuppressWarnings("unchecked") - public void testGetServicesWithTextEmbeddingTaskType() throws IOException { - List services = getServices(TaskType.TEXT_EMBEDDING); - assertThat(services.size(), equalTo(14)); - - String[] providers = new String[services.size()]; - for (int i = 0; i < services.size(); i++) { - Map serviceConfig = (Map) services.get(i); - providers[i] = (String) serviceConfig.get("service"); - } - - assertArrayEquals( - List.of( - "alibabacloud-ai-search", - "amazonbedrock", - "azureaistudio", - "azureopenai", - "cohere", - "elasticsearch", - "googleaistudio", - "googlevertexai", - "hugging_face", - "jinaai", - "mistral", - "openai", - "text_embedding_test_service", - "watsonxai" - ).toArray(), - providers - ); - } - - @SuppressWarnings("unchecked") - public void testGetServicesWithRerankTaskType() throws IOException { - List services = getServices(TaskType.RERANK); - assertThat(services.size(), equalTo(6)); - - String[] providers = new String[services.size()]; - for (int i = 0; i < services.size(); i++) { - Map serviceConfig = (Map) services.get(i); - providers[i] = (String) serviceConfig.get("service"); - } - - assertArrayEquals( - List.of("alibabacloud-ai-search", "cohere", "elasticsearch", "googlevertexai", "jinaai", "test_reranking_service").toArray(), - providers - ); - } - - @SuppressWarnings("unchecked") - public void testGetServicesWithCompletionTaskType() throws IOException { - List services = getServices(TaskType.COMPLETION); - assertThat(services.size(), equalTo(9)); - - String[] providers = new String[services.size()]; - for (int i = 0; i < services.size(); i++) { - Map serviceConfig = (Map) services.get(i); - providers[i] = (String) serviceConfig.get("service"); - } - - var providerList = new ArrayList<>( - List.of( - "alibabacloud-ai-search", - "amazonbedrock", - "anthropic", - "azureaistudio", - "azureopenai", - "cohere", - "googleaistudio", - "openai", - "streaming_completion_test_service" - ) - ); - - assertArrayEquals(providers, providerList.toArray()); - } - - @SuppressWarnings("unchecked") - public void testGetServicesWithChatCompletionTaskType() throws IOException { - List services = getServices(TaskType.CHAT_COMPLETION); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - assertThat(services.size(), equalTo(3)); - } else { - assertThat(services.size(), equalTo(2)); - } - - String[] providers = new String[services.size()]; - for (int i = 0; i < services.size(); i++) { - Map serviceConfig = (Map) services.get(i); - providers[i] = (String) serviceConfig.get("service"); - } - - var providerList = new ArrayList<>(List.of("openai", "streaming_completion_test_service")); - - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - 
providerList.addFirst("elastic"); - } - - assertArrayEquals(providers, providerList.toArray()); - } - - @SuppressWarnings("unchecked") - public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { - List services = getServices(TaskType.SPARSE_EMBEDDING); - - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - assertThat(services.size(), equalTo(5)); - } else { - assertThat(services.size(), equalTo(4)); - } - - String[] providers = new String[services.size()]; - for (int i = 0; i < services.size(); i++) { - Map serviceConfig = (Map) services.get(i); - providers[i] = (String) serviceConfig.get("service"); - } - - var providerList = new ArrayList<>(Arrays.asList("alibabacloud-ai-search", "elasticsearch", "hugging_face", "test_service")); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - providerList.add(1, "elastic"); - } - assertArrayEquals(providers, providerList.toArray()); - } - public void testSkipValidationAndStart() throws IOException { String openAiConfigWithBadApiKey = """ { diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java new file mode 100644 index 0000000000000..b448acd5f4a74 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java @@ -0,0 +1,269 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + * + * this file has been contributed to by a Generative AI + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.client.Request; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.InferenceBaseRestTest.assertStatusOkOrCreated; +import static org.hamcrest.Matchers.equalTo; + +public class InferenceGetServicesIT extends ESRestTestCase { + + // The reason we're retrying is there's a race condition between the node retrieving the + // authorization response and running the test. 
Retrieving the authorization should be very fast since
+    // we're hosting a local mock server, but it's possible it could respond more slowly. So in the event of a test failure
+    // we'll automatically retry after waiting a second.
+    @Rule
+    public RetryRule retry = new RetryRule(3, TimeValue.timeValueSeconds(1));
+
+    private static final MockElasticInferenceServiceAuthorizationServer mockEISServer = MockElasticInferenceServiceAuthorizationServer
+        .enabledWithSparseEmbeddingsAndChatCompletion();
+
+    private static final ElasticsearchCluster cluster = ElasticsearchCluster.local()
+        .distribution(DistributionType.DEFAULT)
+        .setting("xpack.license.self_generated.type", "trial")
+        .setting("xpack.security.enabled", "true")
+        // Adding both settings in case one feature flag is disabled in a particular environment
+        .setting("xpack.inference.elastic.url", mockEISServer::getUrl)
+        // TODO remove this once we've removed DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG and EIS_GATEWAY_URL
+        .setting("xpack.inference.eis.gateway.url", mockEISServer::getUrl)
+        // This plugin is located in the inference/qa/test-service-plugin package; look for TestInferenceServicePlugin
+        .plugin("inference-service-test")
+        .user("x_pack_rest_user", "x-pack-test-password")
+        .feature(FeatureFlag.INFERENCE_UNIFIED_API_ENABLED)
+        .build();
+
+    // The reason we're doing this is to make sure the mock server is initialized first so we can get the address before communicating
+    // it to the cluster as a setting.
+    @ClassRule
+    public static TestRule ruleChain = RuleChain.outerRule(mockEISServer).around(cluster);
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+
+    @Override
+    protected Settings restClientSettings() {
+        String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray()));
+        return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testGetServicesWithoutTaskType() throws IOException {
+        List services = getAllServices();
+        if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()
+            || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) {
+            assertThat(services.size(), equalTo(19));
+        } else {
+            assertThat(services.size(), equalTo(18));
+        }
+
+        String[] providers = new String[services.size()];
+        for (int i = 0; i < services.size(); i++) {
+            Map serviceConfig = (Map) services.get(i);
+            providers[i] = (String) serviceConfig.get("service");
+        }
+
+        var providerList = new ArrayList<>(
+            Arrays.asList(
+                "alibabacloud-ai-search",
+                "amazonbedrock",
+                "anthropic",
+                "azureaistudio",
+                "azureopenai",
+                "cohere",
+                "elasticsearch",
+                "googleaistudio",
+                "googlevertexai",
+                "hugging_face",
+                "jinaai",
+                "mistral",
+                "openai",
+                "streaming_completion_test_service",
+                "test_reranking_service",
+                "test_service",
+                "text_embedding_test_service",
+                "watsonxai"
+            )
+        );
+        if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()
+            || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) {
+            providerList.add(6, "elastic");
+        }
+        assertArrayEquals(providerList.toArray(), providers);
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testGetServicesWithTextEmbeddingTaskType() throws IOException {
+        List services = getServices(TaskType.TEXT_EMBEDDING);
+        assertThat(services.size(), equalTo(14));
+
+        String[] providers = new
String[services.size()]; + for (int i = 0; i < services.size(); i++) { + Map serviceConfig = (Map) services.get(i); + providers[i] = (String) serviceConfig.get("service"); + } + + assertArrayEquals( + List.of( + "alibabacloud-ai-search", + "amazonbedrock", + "azureaistudio", + "azureopenai", + "cohere", + "elasticsearch", + "googleaistudio", + "googlevertexai", + "hugging_face", + "jinaai", + "mistral", + "openai", + "text_embedding_test_service", + "watsonxai" + ).toArray(), + providers + ); + } + + @SuppressWarnings("unchecked") + public void testGetServicesWithRerankTaskType() throws IOException { + List services = getServices(TaskType.RERANK); + assertThat(services.size(), equalTo(6)); + + String[] providers = new String[services.size()]; + for (int i = 0; i < services.size(); i++) { + Map serviceConfig = (Map) services.get(i); + providers[i] = (String) serviceConfig.get("service"); + } + + assertArrayEquals( + List.of("alibabacloud-ai-search", "cohere", "elasticsearch", "googlevertexai", "jinaai", "test_reranking_service").toArray(), + providers + ); + } + + @SuppressWarnings("unchecked") + public void testGetServicesWithCompletionTaskType() throws IOException { + List services = getServices(TaskType.COMPLETION); + assertThat(services.size(), equalTo(9)); + + String[] providers = new String[services.size()]; + for (int i = 0; i < services.size(); i++) { + Map serviceConfig = (Map) services.get(i); + providers[i] = (String) serviceConfig.get("service"); + } + + var providerList = new ArrayList<>( + List.of( + "alibabacloud-ai-search", + "amazonbedrock", + "anthropic", + "azureaistudio", + "azureopenai", + "cohere", + "googleaistudio", + "openai", + "streaming_completion_test_service" + ) + ); + + assertArrayEquals(providers, providerList.toArray()); + } + + @SuppressWarnings("unchecked") + public void testGetServicesWithChatCompletionTaskType() throws IOException { + List services = getServices(TaskType.CHAT_COMPLETION); + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { + assertThat(services.size(), equalTo(3)); + } else { + assertThat(services.size(), equalTo(2)); + } + + String[] providers = new String[services.size()]; + for (int i = 0; i < services.size(); i++) { + Map serviceConfig = (Map) services.get(i); + providers[i] = (String) serviceConfig.get("service"); + } + + var providerList = new ArrayList<>(List.of("openai", "streaming_completion_test_service")); + + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { + providerList.addFirst("elastic"); + } + + assertArrayEquals(providers, providerList.toArray()); + } + + @SuppressWarnings("unchecked") + public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { + List services = getServices(TaskType.SPARSE_EMBEDDING); + + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { + assertThat(services.size(), equalTo(5)); + } else { + assertThat(services.size(), equalTo(4)); + } + + String[] providers = new String[services.size()]; + for (int i = 0; i < services.size(); i++) { + Map serviceConfig = (Map) services.get(i); + providers[i] = (String) serviceConfig.get("service"); + } + + var providerList = new 
ArrayList<>(Arrays.asList("alibabacloud-ai-search", "elasticsearch", "hugging_face", "test_service")); + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { + providerList.add(1, "elastic"); + } + assertArrayEquals(providers, providerList.toArray()); + } + + private List getAllServices() throws IOException { + var endpoint = Strings.format("_inference/_services"); + return getInternalAsList(endpoint); + } + + private List getServices(TaskType taskType) throws IOException { + var endpoint = Strings.format("_inference/_services/%s", taskType); + return getInternalAsList(endpoint); + } + + private List getInternalAsList(String endpoint) throws IOException { + var request = new Request("GET", endpoint); + var response = client().performRequest(request); + assertStatusOkOrCreated(response); + return entityAsList(response); + } +} diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockElasticInferenceServiceAuthorizationServer.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockElasticInferenceServiceAuthorizationServer.java new file mode 100644 index 0000000000000..8960a7e1b0258 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockElasticInferenceServiceAuthorizationServer.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.Strings; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; + +import static org.elasticsearch.core.Strings.format; + +public class MockElasticInferenceServiceAuthorizationServer implements TestRule { + + private static final Logger logger = LogManager.getLogger(MockElasticInferenceServiceAuthorizationServer.class); + private final MockWebServer webServer = new MockWebServer(); + + public static MockElasticInferenceServiceAuthorizationServer enabledWithSparseEmbeddingsAndChatCompletion() { + var server = new MockElasticInferenceServiceAuthorizationServer(); + + String responseJson = """ + { + "models": [ + { + "model_name": "model-a", + "task_types": ["embed/text/sparse", "chat"] + } + ] + } + """; + + server.webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + return server; + } + + public static MockElasticInferenceServiceAuthorizationServer disabled() { + var server = new MockElasticInferenceServiceAuthorizationServer(); + + String responseJson = """ + { + "models": [] + } + """; + + server.webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + return server; + } + + public String getUrl() { + return format("http://%s:%s", webServer.getHostName(), webServer.getPort()); + } + + @Override + public Statement apply(Statement statement, Description description) { + return new Statement() { + @Override + public void evaluate() throws Throwable { + try { + logger.info("Starting mock EIS gateway"); + webServer.start(); + logger.info(Strings.format("Started mock EIS gateway with address: %s", getUrl())); + } catch (Exception e) { + logger.warn("Failed to start mock EIS gateway", e); + } + + try { + statement.evaluate(); + } finally { + logger.info(Strings.format("Stopping mock EIS gateway address: %s", getUrl())); + webServer.close(); + } + } + }; + } +} diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/RetryRule.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/RetryRule.java new file mode 100644 index 0000000000000..d8398718ef117 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/RetryRule.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.TimeValue; +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; + +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +/** + * Provides a way to retry a failed test. To use this functionality add something like the following to your test class: + *
+ * {@literal @}Rule
+ * public RetryRule retry = new RetryRule(3, TimeValue.timeValueSeconds(1));
+ * See {@link InferenceGetServicesIT#retry} for an example. + */ +public class RetryRule implements TestRule { + private static final Logger logger = LogManager.getLogger(RetryRule.class); + private final int maxAttempts; + private final TimeValue retryDelay; + + public RetryRule(int maxAttempts, TimeValue retryDelay) { + this.maxAttempts = maxAttempts; + this.retryDelay = Objects.requireNonNull(retryDelay); + } + + @Override + public Statement apply(Statement statement, Description description) { + return new Statement() { + @Override + public void evaluate() throws Throwable { + Throwable lastThrowable = null; + for (int i = 0; i < maxAttempts; i++) { + try { + logger.info(Strings.format("Running test [%s] attempt [%d/%d]", description.getMethodName(), i + 1, maxAttempts)); + statement.evaluate(); + logger.info( + Strings.format("Test [%s] succeeded on attempt [%d/%d]", description.getMethodName(), i + 1, maxAttempts) + ); + // Test succeeded so we'll return + return; + } catch (Throwable t) { + logger.info( + Strings.format( + "Test [%s] failed with exception: %s, attempt [%d/%d]", + description.getMethodName(), + t.getMessage(), + i + 1, + maxAttempts + ) + ); + lastThrowable = t; + // if this was the last iteration then let's skip sleeping + if (i < maxAttempts - 1) { + TimeUnit.MICROSECONDS.sleep(retryDelay.millis()); + } + } + } + + // if the test failed we should have the throwable, so let's bubble up that failure + if (lastThrowable != null) { + logger.info(Strings.format("Test [%s] failed and exceeded retry limit, failing test.", description.getMethodName())); + throw lastThrowable; + } + } + }; + } +} diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java index d8c2d678d0ef9..865a1a61fa783 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java @@ -92,7 +92,7 @@ private Map deleteTextEmbeddingModel(String inferenceEntityId) t var endpoint = Strings.format("_inference/%s/%s", "text_embedding", inferenceEntityId); var request = new Request("DELETE", endpoint); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); return entityAsMap(response); } @@ -102,7 +102,7 @@ private Map putTextEmbeddingModel(String inferenceEntityId, Stri request.setJsonEntity(jsonEntity); var response = client().performRequest(request); - assertOkOrCreated(response); + assertStatusOkOrCreated(response); return entityAsMap(response); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 830eab8c42366..b007aa8bfa1f8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -113,6 +113,7 @@ import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceComponents; import 
org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings; +import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorizationHandler; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; import org.elasticsearch.xpack.inference.services.googleaistudio.GoogleAiStudioService; import org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiService; @@ -282,14 +283,23 @@ public Collection createComponents(PluginServices services) { ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); String elasticInferenceUrl = this.getElasticInferenceServiceUrl(inferenceServiceSettings); - elasticInferenceServiceComponents.set(new ElasticInferenceServiceComponents(elasticInferenceUrl)); + + var elasticInferenceServiceComponentsInstance = new ElasticInferenceServiceComponents(elasticInferenceUrl); + elasticInferenceServiceComponents.set(elasticInferenceServiceComponentsInstance); + + var authorizationHandler = new ElasticInferenceServiceAuthorizationHandler( + elasticInferenceServiceComponentsInstance.elasticInferenceServiceUrl(), + services.threadPool() + ); inferenceServices.add( () -> List.of( context -> new ElasticInferenceService( elasicInferenceServiceFactory.get(), serviceComponents.get(), - elasticInferenceServiceComponents.get() + elasticInferenceServiceComponentsInstance, + modelRegistry, + authorizationHandler ) ) ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java index a8d85d896d684..ec4550b036d23 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.external.amazonbedrock; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.service.ClusterService; @@ -14,12 +15,14 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; import org.elasticsearch.xpack.inference.external.http.sender.AmazonBedrockRequestExecutorService; import org.elasticsearch.xpack.inference.external.http.sender.AmazonBedrockRequestManager; import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.RequestExecutorServiceSettings; import org.elasticsearch.xpack.inference.external.http.sender.RequestManager; import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.services.ServiceComponents; import java.io.IOException; @@ -123,6 +126,17 @@ public void send( listener.onFailure(new ElasticsearchException("Amazon Bedrock request sender did not receive a valid request request manager")); } + @Override + public void sendWithoutQueuing( + Logger logger, + Request request, + ResponseHandler 
responseHandler, + TimeValue timeout, + ActionListener listener + ) { + throw new UnsupportedOperationException("not implemented"); + } + @Override public void close() throws IOException { executorService.shutdown(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java index 36208c1b8cdcd..1c303f6e965cf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java @@ -35,7 +35,7 @@ public class RetryingHttpSender implements RequestSender { - static final int MAX_RETIES = 3; + public static final int MAX_RETIES = 3; private final HttpClient httpClient; private final ThrottlerManager throttlerManager; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java index d1e309a774ab7..42671b8166537 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java @@ -7,7 +7,9 @@ package org.elasticsearch.xpack.inference.external.http.sender; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; @@ -17,8 +19,10 @@ import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.RequestExecutor; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; import org.elasticsearch.xpack.inference.external.http.retry.RetryingHttpSender; +import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.services.ServiceComponents; import java.io.IOException; @@ -74,6 +78,7 @@ public Sender createSender() { private final RequestExecutor service; private final AtomicBoolean started = new AtomicBoolean(false); private final CountDownLatch startCompleted = new CountDownLatch(1); + private final RequestSender requestSender; private HttpRequestSender( ThreadPool threadPool, @@ -84,6 +89,7 @@ private HttpRequestSender( ) { this.threadPool = Objects.requireNonNull(threadPool); this.manager = Objects.requireNonNull(httpClientManager); + this.requestSender = Objects.requireNonNull(requestSender); service = new RequestExecutorService( threadPool, startCompleted, @@ -141,4 +147,31 @@ public void send( waitForStartToComplete(); service.execute(requestCreator, inferenceInputs, timeout, listener); } + + /** + * This method sends a request and parses the response. It does not leverage any queuing or + * rate limiting logic. This method should only be used for requests that are not sent often. 
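+     * <p>
+     * Illustrative call shape only; the argument names below are placeholders, not part of this API:
+     * {@code sender.sendWithoutQueuing(logger, request, responseHandler, TimeValue.timeValueSeconds(30), listener)}.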
+ * + * @param logger A logger to use for messages + * @param request A request to be sent + * @param responseHandler A handler for parsing the response + * @param timeout the maximum time the request should wait for a response before timing out. If null, the timeout is ignored + * @param listener a listener to handle the response + */ + public void sendWithoutQueuing( + Logger logger, + Request request, + ResponseHandler responseHandler, + @Nullable TimeValue timeout, + ActionListener listener + ) { + assert started.get() : "call start() before sending a request"; + waitForStartToComplete(); + + var preservedListener = ContextPreservingActionListener.wrapPreservingContext(listener, threadPool.getThreadContext()); + var timedListener = new TimedListener<>(timeout, preservedListener, threadPool); + + threadPool.executor(UTILITY_THREAD_POOL_NAME) + .execute(() -> requestSender.send(logger, request, timedListener::hasCompleted, responseHandler, timedListener.getListener())); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java index e5c29adeb9176..cba9bf73a9e99 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java @@ -7,28 +7,20 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.ListenerTimeouts; -import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import java.util.Objects; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; -import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; - class RequestTask implements RejectableTask { - private final AtomicBoolean finished = new AtomicBoolean(); private final RequestManager requestCreator; private final InferenceInputs inferenceInputs; - private final ActionListener listener; + private final TimedListener timedListener; RequestTask( RequestManager requestCreator, @@ -38,44 +30,13 @@ class RequestTask implements RejectableTask { ActionListener listener ) { this.requestCreator = Objects.requireNonNull(requestCreator); - this.listener = getListener(Objects.requireNonNull(listener), timeout, Objects.requireNonNull(threadPool)); + this.timedListener = new TimedListener<>(timeout, listener, threadPool); this.inferenceInputs = Objects.requireNonNull(inferenceInputs); } - private ActionListener getListener( - ActionListener origListener, - @Nullable TimeValue timeout, - ThreadPool threadPool - ) { - ActionListener notificationListener = ActionListener.wrap(result -> { - finished.set(true); - origListener.onResponse(result); - }, e -> { - finished.set(true); - origListener.onFailure(e); - }); - - if (timeout == null) { - return notificationListener; - } - - return ListenerTimeouts.wrapWithTimeout( - threadPool, - timeout, - threadPool.executor(UTILITY_THREAD_POOL_NAME), - notificationListener, - (ignored) -> notificationListener.onFailure( - new 
ElasticsearchStatusException( - Strings.format("Request timed out waiting to be sent after [%s]", timeout), - RestStatus.REQUEST_TIMEOUT - ) - ) - ); - } - @Override public boolean hasCompleted() { - return finished.get(); + return timedListener.hasCompleted(); } @Override @@ -90,12 +51,12 @@ public InferenceInputs getInferenceInputs() { @Override public ActionListener getListener() { - return listener; + return timedListener.getListener(); } @Override public void onRejection(Exception e) { - listener.onFailure(e); + timedListener.getListener().onFailure(e); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java index 5a3af3d4a377f..3975a554586b7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java @@ -7,10 +7,13 @@ package org.elasticsearch.xpack.inference.external.http.sender; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.request.Request; import java.io.Closeable; @@ -23,4 +26,12 @@ void send( @Nullable TimeValue timeout, ActionListener listener ); + + void sendWithoutQueuing( + Logger logger, + Request request, + ResponseHandler responseHandler, + @Nullable TimeValue timeout, + ActionListener listener + ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/TimedListener.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/TimedListener.java new file mode 100644 index 0000000000000..429fc357b9559 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/TimedListener.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ListenerTimeouts; +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.Objects; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; + +/** + * Provides a way to set a timeout on the listener. If the time expires, the original listener's + * {@link ActionListener#onFailure(Exception)} is called with an error indicating there was a timeout. 
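+ * <p>
+ * Rough usage sketch (the 30 second timeout is an illustrative value):
+ * {@code var timed = new TimedListener<>(TimeValue.timeValueSeconds(30), listener, threadPool);}
+ * then {@code timed.getListener()} is handed to the request path and {@code timed.hasCompleted()} reports whether
+ * the wrapped listener has already been notified.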
+ * + * @param the type of the value that is passed in {@link ActionListener#onResponse(Object)} + */ +public class TimedListener { + + private final ActionListener listenerWithTimeout; + private final AtomicBoolean completed = new AtomicBoolean(); + + public TimedListener(@Nullable TimeValue timeout, ActionListener listener, ThreadPool threadPool) { + listenerWithTimeout = getListener(Objects.requireNonNull(listener), timeout, Objects.requireNonNull(threadPool)); + } + + private ActionListener getListener( + ActionListener origListener, + @Nullable TimeValue timeout, + ThreadPool threadPool + ) { + ActionListener notificationListener = ActionListener.wrap(result -> { + completed.set(true); + origListener.onResponse(result); + }, e -> { + completed.set(true); + origListener.onFailure(e); + }); + + if (timeout == null) { + return notificationListener; + } + + return ListenerTimeouts.wrapWithTimeout( + threadPool, + timeout, + threadPool.executor(UTILITY_THREAD_POOL_NAME), + notificationListener, + (ignored) -> notificationListener.onFailure( + new ElasticsearchStatusException(Strings.format("Request timed out after [%s]", timeout), RestStatus.REQUEST_TIMEOUT) + ) + ); + } + + public boolean hasCompleted() { + return completed.get(); + } + + public ActionListener getListener() { + return listenerWithTimeout; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceAuthorizationRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceAuthorizationRequest.java new file mode 100644 index 0000000000000..d46313755be00 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceAuthorizationRequest.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.elastic; + +import org.apache.http.client.methods.HttpGet; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; +import org.elasticsearch.xpack.inference.telemetry.TraceContext; +import org.elasticsearch.xpack.inference.telemetry.TraceContextHandler; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Objects; + +public class ElasticInferenceServiceAuthorizationRequest implements ElasticInferenceServiceRequest { + + private final URI uri; + private final TraceContextHandler traceContextHandler; + + public ElasticInferenceServiceAuthorizationRequest(String url, TraceContext traceContext) { + this.uri = createUri(Objects.requireNonNull(url)); + this.traceContextHandler = new TraceContextHandler(traceContext); + } + + private URI createUri(String url) throws ElasticsearchStatusException { + try { + // TODO, consider transforming the base URL into a URI for better error handling. 
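+            // For example, a (hypothetical) base URL of "https://eis.example.com" resolves to
+            // "https://eis.example.com/api/v1/authorizations".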
+ return new URI(url + "/api/v1/authorizations"); + } catch (URISyntaxException e) { + throw new ElasticsearchStatusException( + "Failed to create URI for service [" + ElasticInferenceService.NAME + "]: " + e.getMessage(), + RestStatus.BAD_REQUEST, + e + ); + } + } + + @Override + public HttpRequest createHttpRequest() { + var httpGet = new HttpGet(uri); + traceContextHandler.propagateTraceContext(httpGet); + + return new HttpRequest(httpGet, getInferenceEntityId()); + } + + public TraceContext getTraceContext() { + return traceContextHandler.traceContext(); + } + + @Override + public String getInferenceEntityId() { + // TODO look into refactoring so we don't even need to return this, look at the RetryingHttpSender to fix this + return ""; + } + + @Override + public URI getURI() { + return this.uri; + } + + @Override + public Request truncate() { + return this; + } + + @Override + public boolean[] getTruncationInfo() { + return null; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceAuthorizationResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceAuthorizationResponseEntity.java new file mode 100644 index 0000000000000..5912deb006440 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceAuthorizationResponseEntity.java @@ -0,0 +1,179 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.elastic; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.util.EnumSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class ElasticInferenceServiceAuthorizationResponseEntity implements InferenceServiceResults { + + public static final String NAME = "elastic_inference_service_auth_results"; + private static final Map ELASTIC_INFERENCE_SERVICE_TASK_TYPE_MAPPING = Map.of( + "embed/text/sparse", + TaskType.SPARSE_EMBEDDING, + "chat", + TaskType.CHAT_COMPLETION + ); + + @SuppressWarnings("unchecked") + public static ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + ElasticInferenceServiceAuthorizationResponseEntity.class.getSimpleName(), + args -> new ElasticInferenceServiceAuthorizationResponseEntity((List) args[0]) + ); + + static { + PARSER.declareObjectArray(constructorArg(), AuthorizedModel.AUTHORIZED_MODEL_PARSER::apply, new ParseField("models")); + } + + public record AuthorizedModel(String modelName, EnumSet taskTypes) implements Writeable, ToXContentObject { + + @SuppressWarnings("unchecked") + public static ConstructingObjectParser AUTHORIZED_MODEL_PARSER = new ConstructingObjectParser<>( + AuthorizedModel.class.getSimpleName(), + args -> new AuthorizedModel((String) args[0], toTaskTypes((List) args[1])) + ); + + static { + AUTHORIZED_MODEL_PARSER.declareString(constructorArg(), new ParseField("model_name")); + AUTHORIZED_MODEL_PARSER.declareStringArray(constructorArg(), new ParseField("task_types")); + } + + private static EnumSet toTaskTypes(List stringTaskTypes) { + var taskTypes = EnumSet.noneOf(TaskType.class); + for (String taskType : stringTaskTypes) { + var mappedTaskType = ELASTIC_INFERENCE_SERVICE_TASK_TYPE_MAPPING.get(taskType); + if (mappedTaskType != null) { + taskTypes.add(mappedTaskType); + } + } + + return taskTypes; + } + + public AuthorizedModel(StreamInput in) throws IOException { + this(in.readString(), in.readEnumSet(TaskType.class)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(modelName); + out.writeEnumSet(taskTypes); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field("model_name", modelName); + builder.field("task_types", 
taskTypes.stream().map(TaskType::toString).collect(Collectors.toList())); + + builder.endObject(); + + return builder; + } + } + + private final List authorizedModels; + + public ElasticInferenceServiceAuthorizationResponseEntity(List authorizedModels) { + this.authorizedModels = Objects.requireNonNull(authorizedModels); + } + + /** + * Create an empty response + */ + public ElasticInferenceServiceAuthorizationResponseEntity() { + this(List.of()); + } + + public ElasticInferenceServiceAuthorizationResponseEntity(StreamInput in) throws IOException { + this(in.readCollectionAsList(AuthorizedModel::new)); + } + + public static ElasticInferenceServiceAuthorizationResponseEntity fromResponse(Request request, HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + return PARSER.apply(jsonParser, null); + } + } + + public List getAuthorizedModels() { + return authorizedModels; + } + + @Override + public Iterator toXContentChunked(ToXContent.Params params) { + throw new UnsupportedOperationException(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(authorizedModels); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public List transformToCoordinationFormat() { + throw new UnsupportedOperationException("Not implemented"); + } + + @Override + public List transformToLegacyFormat() { + throw new UnsupportedOperationException("not implemented"); + } + + @Override + public Map asMap() { + throw new UnsupportedOperationException("Not implemented"); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ElasticInferenceServiceAuthorizationResponseEntity that = (ElasticInferenceServiceAuthorizationResponseEntity) o; + return Objects.equals(authorizedModels, that.authorizedModels); + } + + @Override + public int hashCode() { + return Objects.hash(authorizedModels); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index a8a0053796e8c..29f1e7cf70e77 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -39,10 +39,13 @@ import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.UnifiedChatInput; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorization; +import 
org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorizationHandler; import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionModel; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.elasticsearch.xpack.inference.telemetry.TraceContext; @@ -52,7 +55,11 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.xpack.core.inference.results.ResultUtils.createInvalidChunkedResultException; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; @@ -70,31 +77,95 @@ public class ElasticInferenceService extends SenderService { public static final String NAME = "elastic"; public static final String ELASTIC_INFERENCE_SERVICE_IDENTIFIER = "Elastic Inference Service"; - private final ElasticInferenceServiceComponents elasticInferenceServiceComponents; - - // The task types exposed via the _inference/_services API - private static final EnumSet SUPPORTED_TASK_TYPES_FOR_SERVICES_API = EnumSet.of( - TaskType.SPARSE_EMBEDDING, - TaskType.CHAT_COMPLETION - ); + private static final EnumSet IMPLEMENTED_TASK_TYPES = EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION); private static final String SERVICE_NAME = "Elastic"; + /** * The task types that the {@link InferenceAction.Request} can accept. */ private static final EnumSet SUPPORTED_INFERENCE_ACTION_TASK_TYPES = EnumSet.of(TaskType.SPARSE_EMBEDDING); + private final ElasticInferenceServiceComponents elasticInferenceServiceComponents; + private Configuration configuration; + private final AtomicReference> enabledTaskTypesRef = new AtomicReference<>(EnumSet.noneOf(TaskType.class)); + private final ModelRegistry modelRegistry; + private final ElasticInferenceServiceAuthorizationHandler authorizationHandler; + private final CountDownLatch authorizationCompletedLatch = new CountDownLatch(1); + public ElasticInferenceService( HttpRequestSender.Factory factory, ServiceComponents serviceComponents, - ElasticInferenceServiceComponents elasticInferenceServiceComponents + ElasticInferenceServiceComponents elasticInferenceServiceComponents, + ModelRegistry modelRegistry, + ElasticInferenceServiceAuthorizationHandler authorizationHandler ) { super(factory, serviceComponents); - this.elasticInferenceServiceComponents = elasticInferenceServiceComponents; + this.elasticInferenceServiceComponents = Objects.requireNonNull(elasticInferenceServiceComponents); + this.modelRegistry = Objects.requireNonNull(modelRegistry); + this.authorizationHandler = Objects.requireNonNull(authorizationHandler); + + configuration = new Configuration(enabledTaskTypesRef.get()); + + getAuthorization(); + } + + private void getAuthorization() { + try { + ActionListener listener = ActionListener.wrap(result -> { + setEnabledTaskTypes(result); + authorizationCompletedLatch.countDown(); + }, e -> { + // we don't need to do anything if there was a failure, everything is disabled by default + authorizationCompletedLatch.countDown(); + }); + + authorizationHandler.getAuthorization(listener, getSender()); + } catch (Exception e) { + // we don't need to do anything if there was a failure, everything is disabled by default + authorizationCompletedLatch.countDown(); + } + } + + 
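+    // Invoked once the authorization response arrives: only task types that are both implemented here and
+    // authorized upstream remain enabled, and the services API configuration is rebuilt from that narrowed set.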
private synchronized void setEnabledTaskTypes(ElasticInferenceServiceAuthorization auth) { + enabledTaskTypesRef.set(filterTaskTypesByAuthorization(auth)); + configuration = new Configuration(enabledTaskTypesRef.get()); + + defaultConfigIds().forEach(modelRegistry::addDefaultIds); + } + + private static EnumSet filterTaskTypesByAuthorization(ElasticInferenceServiceAuthorization auth) { + var implementedTaskTypes = EnumSet.copyOf(IMPLEMENTED_TASK_TYPES); + implementedTaskTypes.retainAll(auth.enabledTaskTypes()); + return implementedTaskTypes; + } + + // Default for testing + void waitForAuthorizationToComplete(TimeValue waitTime) { + try { + if (authorizationCompletedLatch.await(waitTime.getSeconds(), TimeUnit.SECONDS) == false) { + throw new IllegalStateException("The wait time has expired for authorization to complete."); + } + } catch (InterruptedException e) { + throw new IllegalStateException("Waiting for authorization to complete was interrupted"); + } + } + + @Override + public synchronized Set supportedStreamingTasks() { + var enabledStreamingTaskTypes = EnumSet.of(TaskType.CHAT_COMPLETION); + enabledStreamingTaskTypes.retainAll(enabledTaskTypesRef.get()); + + if (enabledStreamingTaskTypes.isEmpty() == false) { + enabledStreamingTaskTypes.add(TaskType.ANY); + } + + return enabledStreamingTaskTypes; } @Override - public Set supportedStreamingTasks() { - return EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.ANY); + public synchronized List defaultConfigIds() { + // TODO once we have the enabledTaskTypes figure out which default endpoints we should expose + return List.of(); } @Override @@ -221,13 +292,18 @@ public void parseRequestConfig( } @Override - public InferenceServiceConfiguration getConfiguration() { - return Configuration.get(); + public synchronized InferenceServiceConfiguration getConfiguration() { + return configuration.get(); + } + + @Override + public synchronized EnumSet supportedTaskTypes() { + return enabledTaskTypesRef.get(); } @Override - public EnumSet supportedTaskTypes() { - return SUPPORTED_TASK_TYPES_FOR_SERVICES_API; + public synchronized boolean hideFromConfigurationApi() { + return enabledTaskTypesRef.get().isEmpty(); } private static ElasticInferenceServiceModel createModel( @@ -369,17 +445,22 @@ private TraceContext getCurrentTraceInfo() { } public static class Configuration { - public static InferenceServiceConfiguration get() { - return configuration.getOrCompute(); + + private final EnumSet enabledTaskTypes; + private final LazyInitializable configuration; + + public Configuration(EnumSet enabledTaskTypes) { + this.enabledTaskTypes = enabledTaskTypes; + configuration = initConfiguration(); } - private static final LazyInitializable configuration = new LazyInitializable<>( - () -> { + private LazyInitializable initConfiguration() { + return new LazyInitializable<>(() -> { var configurationMap = new HashMap(); configurationMap.put( MODEL_ID, - new SettingsConfiguration.Builder(SUPPORTED_TASK_TYPES_FOR_SERVICES_API).setDescription( + new SettingsConfiguration.Builder(EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION)).setDescription( "The name of the model to use for the inference task." 
) .setLabel("Model ID") @@ -403,14 +484,20 @@ public static InferenceServiceConfiguration get() { .build() ); - configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(SUPPORTED_TASK_TYPES_FOR_SERVICES_API)); + configurationMap.putAll( + RateLimitSettings.toSettingsConfiguration(EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION)) + ); return new InferenceServiceConfiguration.Builder().setService(NAME) .setName(SERVICE_NAME) - .setTaskTypes(SUPPORTED_TASK_TYPES_FOR_SERVICES_API) + .setTaskTypes(enabledTaskTypes) .setConfigurations(configurationMap) .build(); - } - ); + }); + } + + public InferenceServiceConfiguration get() { + return configuration.getOrCompute(); + } } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java index c5b2cb693df13..837581667882d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java @@ -7,4 +7,6 @@ package org.elasticsearch.xpack.inference.services.elastic; -public record ElasticInferenceServiceComponents(String elasticInferenceServiceUrl) {} +import org.elasticsearch.core.Nullable; + +public record ElasticInferenceServiceComponents(@Nullable String elasticInferenceServiceUrl) {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java index 530efee4a3d45..623c25222446c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java @@ -15,6 +15,7 @@ */ public class ElasticInferenceServiceFeature { + // TODO when we remove this also look in InferenceGetServicesIT and remove references to the deprecated URL setting @Deprecated public static final FeatureFlag DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("eis"); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java index 0c1a032dc8926..3b6b1088cc9cf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java @@ -20,6 +20,7 @@ */ public class ElasticInferenceServiceSettings { + // TODO when we remove this look at InferenceGetServicesIT and remove the setting there as well @Deprecated static final Setting EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorization.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorization.java new file mode 100644 index 0000000000000..eac64021ac85a --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorization.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elastic.authorization; + +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.response.elastic.ElasticInferenceServiceAuthorizationResponseEntity; + +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Provides a structure for governing which models (if any) a cluster has access to according to the upstream Elastic Inference Service. + * @param enabledModels a mapping of model ids to a set of {@link TaskType} to indicate which models are available and for which task types + */ +public record ElasticInferenceServiceAuthorization(Map> enabledModels) { + + /** + * Converts an authorization response from Elastic Inference Service into the {@link ElasticInferenceServiceAuthorization} format. + * + * @param responseEntity the {@link ElasticInferenceServiceAuthorizationResponseEntity} response from the upstream gateway. + * @return a new {@link ElasticInferenceServiceAuthorization} + */ + public static ElasticInferenceServiceAuthorization of(ElasticInferenceServiceAuthorizationResponseEntity responseEntity) { + var enabledModels = new HashMap>(); + + for (var model : responseEntity.getAuthorizedModels()) { + // if there are no task types we'll ignore the model because it's likely we didn't understand + // the task type and don't support it anyway + if (model.taskTypes().isEmpty() == false) { + enabledModels.put(model.modelName(), model.taskTypes()); + } + } + + return new ElasticInferenceServiceAuthorization(enabledModels); + } + + /** + * Returns an object indicating that the cluster has no access to Elastic Inference Service. 
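+ * In practice this is an authorization with an empty model map, so {@link #isEnabled()} returns {@code false}
+ * and {@link #enabledTaskTypes()} is empty.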
+ */ + public static ElasticInferenceServiceAuthorization newDisabledService() { + return new ElasticInferenceServiceAuthorization(); + } + + public ElasticInferenceServiceAuthorization { + Objects.requireNonNull(enabledModels); + + for (var taskTypes : enabledModels.values()) { + if (taskTypes.isEmpty()) { + throw new IllegalArgumentException("Authorization task types must not be empty"); + } + } + } + + private ElasticInferenceServiceAuthorization() { + this(Map.of()); + } + + public boolean isEnabled() { + return enabledModels.isEmpty() == false; + } + + public EnumSet enabledTaskTypes() { + return enabledModels.values().stream().flatMap(Set::stream).collect(Collectors.toCollection(() -> EnumSet.noneOf(TaskType.class))); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandler.java new file mode 100644 index 0000000000000..f78b5357caeb3 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandler.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elastic.authorization; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchWrapperException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.elastic.ElasticInferenceServiceResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.elastic.ElasticInferenceServiceAuthorizationRequest; +import org.elasticsearch.xpack.inference.external.response.elastic.ElasticInferenceServiceAuthorizationResponseEntity; +import org.elasticsearch.xpack.inference.telemetry.TraceContext; + +import java.util.Locale; +import java.util.Objects; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.core.inference.action.InferenceAction.Request.DEFAULT_TIMEOUT; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; + +/** + * Handles retrieving the authorization information from Elastic Inference Service. 
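+ * <p>
+ * Rough usage sketch, mirroring how {@code InferencePlugin} wires it up (variable names here are illustrative):
+ * {@code var handler = new ElasticInferenceServiceAuthorizationHandler(baseUrl, threadPool);}
+ * {@code handler.getAuthorization(listener, sender);} delivers an {@code ElasticInferenceServiceAuthorization}
+ * to the listener, falling back to {@code newDisabledService()} when the URL is missing or the request fails.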
+ */ +public class ElasticInferenceServiceAuthorizationHandler { + + private static final String FAILED_TO_RETRIEVE_MESSAGE = + "Failed to retrieve the authorization information from the Elastic Inference Service."; + private static final ResponseHandler AUTH_RESPONSE_HANDLER = createAuthResponseHandler(); + + private static ResponseHandler createAuthResponseHandler() { + return new ElasticInferenceServiceResponseHandler( + String.format(Locale.ROOT, "%s sparse embeddings", ELASTIC_INFERENCE_SERVICE_IDENTIFIER), + ElasticInferenceServiceAuthorizationResponseEntity::fromResponse + ); + } + + private final String baseUrl; + private final ThreadPool threadPool; + private final Logger logger; + private final CountDownLatch requestCompleteLatch = new CountDownLatch(1); + + public ElasticInferenceServiceAuthorizationHandler(@Nullable String baseUrl, ThreadPool threadPool) { + this.baseUrl = baseUrl; + this.threadPool = Objects.requireNonNull(threadPool); + logger = LogManager.getLogger(ElasticInferenceServiceAuthorizationHandler.class); + } + + // only use for testing + ElasticInferenceServiceAuthorizationHandler(@Nullable String baseUrl, ThreadPool threadPool, Logger logger) { + this.baseUrl = baseUrl; + this.threadPool = Objects.requireNonNull(threadPool); + this.logger = Objects.requireNonNull(logger); + } + + /** + * Retrieve the authorization information from Elastic Inference Service + * @param listener a listener to receive the response + * @param sender a {@link Sender} for making the request to the Elastic Inference Service + */ + public void getAuthorization(ActionListener listener, Sender sender) { + try { + logger.debug("Retrieving authorization information from the Elastic Inference Service."); + + if (Strings.isNullOrEmpty(baseUrl)) { + logger.warn("The base URL for the authorization service is not valid, rejecting authorization."); + listener.onResponse(ElasticInferenceServiceAuthorization.newDisabledService()); + return; + } + + // ensure that the sender is initialized + sender.start(); + + ActionListener newListener = ActionListener.wrap(results -> { + if (results instanceof ElasticInferenceServiceAuthorizationResponseEntity authResponseEntity) { + listener.onResponse(ElasticInferenceServiceAuthorization.of(authResponseEntity)); + } else { + logger.warn( + Strings.format( + FAILED_TO_RETRIEVE_MESSAGE + " Received an invalid response type: %s", + results.getClass().getSimpleName() + ) + ); + listener.onResponse(ElasticInferenceServiceAuthorization.newDisabledService()); + } + requestCompleteLatch.countDown(); + }, e -> { + Throwable exception = e; + if (e instanceof ElasticsearchWrapperException wrapperException) { + exception = wrapperException.getCause(); + } + + logger.warn(Strings.format(FAILED_TO_RETRIEVE_MESSAGE + " Encountered an exception: %s", exception)); + listener.onResponse(ElasticInferenceServiceAuthorization.newDisabledService()); + requestCompleteLatch.countDown(); + }); + + var request = new ElasticInferenceServiceAuthorizationRequest(baseUrl, getCurrentTraceInfo()); + + sender.sendWithoutQueuing(logger, request, AUTH_RESPONSE_HANDLER, DEFAULT_TIMEOUT, newListener); + } catch (Exception e) { + logger.warn(Strings.format("Retrieving the authorization information encountered an exception: %s", e)); + requestCompleteLatch.countDown(); + } + } + + private TraceContext getCurrentTraceInfo() { + var traceParent = threadPool.getThreadContext().getHeader(Task.TRACE_PARENT); + var traceState = threadPool.getThreadContext().getHeader(Task.TRACE_STATE); + + return new 
TraceContext(traceParent, traceState); + } + + // Default because should only be used for testing + void waitForAuthRequestCompletion(TimeValue timeValue) throws IllegalStateException { + try { + if (requestCompleteLatch.await(timeValue.getMillis(), TimeUnit.MILLISECONDS) == false) { + throw new IllegalStateException("The wait time has expired for authorization to complete."); + } + } catch (InterruptedException e) { + throw new IllegalStateException("Waiting for authorization to complete was interrupted"); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java index 9c76cc5c41fb1..79001f17a4e96 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java @@ -151,8 +151,8 @@ public InferenceServiceConfiguration getConfiguration() { } @Override - public Boolean hideFromConfigurationApi() { - return Boolean.TRUE; + public boolean hideFromConfigurationApi() { + return true; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/TraceContextHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/TraceContextHandler.java index 92fe214d821db..7452317189208 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/TraceContextHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/TraceContextHandler.java @@ -7,12 +7,12 @@ package org.elasticsearch.xpack.inference.telemetry; -import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpRequestBase; import org.elasticsearch.tasks.Task; public record TraceContextHandler(TraceContext traceContext) { - public void propagateTraceContext(HttpPost httpPost) { + public void propagateTraceContext(HttpRequestBase httpRequest) { if (traceContext == null) { return; } @@ -21,11 +21,11 @@ public void propagateTraceContext(HttpPost httpPost) { var traceState = traceContext.traceState(); if (traceParent != null) { - httpPost.setHeader(Task.TRACE_PARENT_HTTP_HEADER, traceParent); + httpRequest.setHeader(Task.TRACE_PARENT_HTTP_HEADER, traceParent); } if (traceState != null) { - httpPost.setHeader(Task.TRACE_STATE, traceState); + httpRequest.setHeader(Task.TRACE_STATE, traceState); } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java index 929aefeeef6b9..ed5aa5ba7bea9 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java @@ -7,16 +7,19 @@ package org.elasticsearch.xpack.inference.external.amazonbedrock; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; import org.elasticsearch.xpack.inference.external.http.sender.ChatCompletionInput; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.RequestManager; import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.services.ServiceComponents; import java.io.IOException; @@ -92,6 +95,17 @@ public void send( } } + @Override + public void sendWithoutQueuing( + Logger logger, + Request request, + ResponseHandler responseHandler, + TimeValue timeout, + ActionListener listener + ) { + throw new UnsupportedOperationException("not supported"); + } + @Override public void close() throws IOException { // do nothing diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java index b3e7db6009204..48df0d8ee2c4f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java @@ -8,27 +8,37 @@ package org.elasticsearch.xpack.inference.external.http.sender; import org.apache.http.HttpHeaders; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.elastic.ElasticInferenceServiceResponseHandler; import org.elasticsearch.xpack.inference.external.http.HttpClient; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.external.request.elastic.ElasticInferenceServiceAuthorizationRequest; +import org.elasticsearch.xpack.inference.external.response.elastic.ElasticInferenceServiceAuthorizationResponseEntity; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.services.ServiceComponentsTests; +import org.elasticsearch.xpack.inference.telemetry.TraceContext; import org.junit.After; import org.junit.Before; import java.io.IOException; +import java.util.EnumSet; import java.util.List; +import java.util.Locale; import java.util.concurrent.ExecutorService; import 
java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -40,6 +50,7 @@ import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; @@ -128,6 +139,50 @@ public void testCreateSender_SendsRequestAndReceivesResponse() throws Exception } } + public void testSendWithoutQueuing_SendsRequestAndReceivesResponse() throws Exception { + var senderFactory = createSenderFactory(clientManager, threadRef); + + try (var sender = createSender(senderFactory)) { + sender.start(); + + String responseJson = """ + { + "models": [ + { + "model_name": "model-a", + "task_types": ["embed/text/sparse", "chat"] + } + ] + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + PlainActionFuture listener = new PlainActionFuture<>(); + var request = new ElasticInferenceServiceAuthorizationRequest(getUrl(webServer), new TraceContext("", "")); + var responseHandler = new ElasticInferenceServiceResponseHandler( + String.format(Locale.ROOT, "%s sparse embeddings", ELASTIC_INFERENCE_SERVICE_IDENTIFIER), + ElasticInferenceServiceAuthorizationResponseEntity::fromResponse + ); + + sender.sendWithoutQueuing(mock(Logger.class), request, responseHandler, null, listener); + + var result = listener.actionGet(TIMEOUT); + assertThat(result, instanceOf(ElasticInferenceServiceAuthorizationResponseEntity.class)); + var authResponse = (ElasticInferenceServiceAuthorizationResponseEntity) result; + assertThat( + authResponse.getAuthorizedModels(), + is( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-a", + EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION) + ) + ) + ) + ); + } + } + public void testHttpRequestSender_Throws_WhenCallingSendBeforeStart() throws Exception { var senderFactory = new HttpRequestSender.Factory( ServiceComponentsTests.createWithEmptySettings(threadPool), @@ -164,10 +219,7 @@ public void testHttpRequestSender_Throws_WhenATimeoutOccurs() throws Exception { var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); - assertThat( - thrownException.getMessage(), - is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueNanos(1))) - ); + assertThat(thrownException.getMessage(), is(format("Request timed out after [%s]", TimeValue.timeValueNanos(1)))); assertThat(thrownException.status().getStatus(), is(408)); } } @@ -190,10 +242,36 @@ public void testHttpRequestSenderWithTimeout_Throws_WhenATimeoutOccurs() throws var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); - assertThat( - thrownException.getMessage(), - is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueNanos(1))) + assertThat(thrownException.getMessage(), is(format("Request timed out after [%s]", TimeValue.timeValueNanos(1)))); + assertThat(thrownException.status().getStatus(), is(408)); + } + } + + public void testSendWithoutQueuingWithTimeout_Throws_WhenATimeoutOccurs() throws Exception { + var 
mockManager = mock(HttpClientManager.class); + when(mockManager.getHttpClient()).thenReturn(mock(HttpClient.class)); + + var senderFactory = new HttpRequestSender.Factory( + ServiceComponentsTests.createWithEmptySettings(threadPool), + mockManager, + mockClusterServiceEmpty() + ); + + try (var sender = senderFactory.createSender()) { + sender.start(); + + PlainActionFuture listener = new PlainActionFuture<>(); + sender.sendWithoutQueuing( + mock(Logger.class), + mock(Request.class), + mock(ResponseHandler.class), + TimeValue.timeValueNanos(1), + listener ); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Request timed out after [%s]", TimeValue.timeValueNanos(1)))); assertThat(thrownException.status().getStatus(), is(408)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java index 7e29fad56812d..85bef9c1b39a4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java @@ -240,10 +240,7 @@ public void testExecute_CallsOnFailure_WhenRequestTimesOut() { var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); - assertThat( - thrownException.getMessage(), - is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueNanos(1))) - ); + assertThat(thrownException.getMessage(), is(format("Request timed out after [%s]", TimeValue.timeValueNanos(1)))); assertThat(thrownException.status().getStatus(), is(408)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java index e37a1a213569e..e9508b051241d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java @@ -87,10 +87,7 @@ public void testRequest_ReturnsTimeoutException() { ); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); - assertThat( - thrownException.getMessage(), - is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueMillis(1))) - ); + assertThat(thrownException.getMessage(), is(format("Request timed out after [%s]", TimeValue.timeValueMillis(1)))); assertTrue(requestTask.hasCompleted()); assertTrue(requestTask.getRequestCompletedFunction().get()); assertThat(thrownException.status().getStatus(), is(408)); @@ -117,10 +114,7 @@ public void testRequest_DoesNotCallOnFailureTwiceWhenTimingOut() throws Exceptio ArgumentCaptor argument = ArgumentCaptor.forClass(Exception.class); verify(listener, times(1)).onFailure(argument.capture()); - assertThat( - argument.getValue().getMessage(), - is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueMillis(1))) - ); + assertThat(argument.getValue().getMessage(), is(format("Request timed out after [%s]", 
TimeValue.timeValueMillis(1)))); assertTrue(requestTask.hasCompleted()); assertTrue(requestTask.getRequestCompletedFunction().get()); @@ -149,10 +143,7 @@ public void testRequest_DoesNotCallOnResponseAfterTimingOut() throws Exception { ArgumentCaptor argument = ArgumentCaptor.forClass(Exception.class); verify(listener, times(1)).onFailure(argument.capture()); - assertThat( - argument.getValue().getMessage(), - is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueMillis(1))) - ); + assertThat(argument.getValue().getMessage(), is(format("Request timed out after [%s]", TimeValue.timeValueMillis(1)))); assertTrue(requestTask.hasCompleted()); assertTrue(requestTask.getRequestCompletedFunction().get()); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/TimedListenerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/TimedListenerTests.java new file mode 100644 index 0000000000000..30ae7a7f65313 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/TimedListenerTests.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; +import org.mockito.ArgumentCaptor; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class TimedListenerTests extends ESTestCase { + + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private ThreadPool threadPool; + + @Before + public void init() throws Exception { + threadPool = createThreadPool(inferenceUtilityPool()); + } + + @After + public void shutdown() { + terminate(threadPool); + } + + public void testExecuting_DoesNotCallOnFailureForTimeout_AfterIllegalArgumentException() { + AtomicReference onTimeout = new AtomicReference<>(); + var mockThreadPool = mockThreadPoolForTimeout(onTimeout, threadPool); + + @SuppressWarnings("unchecked") + ActionListener listener = mock(ActionListener.class); + var timedListener = new TimedListener<>(TimeValue.timeValueMillis(1), listener, mockThreadPool); + + timedListener.getListener().onFailure(new IllegalArgumentException("failed")); + 
verify(listener, times(1)).onFailure(any()); + assertTrue(timedListener.hasCompleted()); + + onTimeout.get().run(); + verifyNoMoreInteractions(listener); + } + + public void testRequest_ReturnsTimeoutException() { + PlainActionFuture listener = new PlainActionFuture<>(); + var timedListener = new TimedListener<>(TimeValue.timeValueMillis(1), listener, threadPool); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat(thrownException.getMessage(), is(format("Request timed out after [%s]", TimeValue.timeValueMillis(1)))); + assertTrue(timedListener.hasCompleted()); + assertThat(thrownException.status().getStatus(), is(408)); + } + + public void testRequest_DoesNotCallOnFailureTwiceWhenTimingOut() throws Exception { + @SuppressWarnings("unchecked") + ActionListener listener = mock(ActionListener.class); + var calledOnFailureLatch = new CountDownLatch(1); + doAnswer(invocation -> { + calledOnFailureLatch.countDown(); + return Void.TYPE; + }).when(listener).onFailure(any()); + + var timedListener = new TimedListener<>(TimeValue.timeValueMillis(1), listener, threadPool); + + calledOnFailureLatch.await(TIMEOUT.millis(), TimeUnit.MILLISECONDS); + + ArgumentCaptor argument = ArgumentCaptor.forClass(Exception.class); + verify(listener, times(1)).onFailure(argument.capture()); + assertThat(argument.getValue().getMessage(), is(format("Request timed out after [%s]", TimeValue.timeValueMillis(1)))); + assertTrue(timedListener.hasCompleted()); + + timedListener.getListener().onFailure(new IllegalArgumentException("failed")); + verifyNoMoreInteractions(listener); + } + + public void testRequest_DoesNotCallOnResponseAfterTimingOut() throws Exception { + @SuppressWarnings("unchecked") + ActionListener listener = mock(ActionListener.class); + var calledOnFailureLatch = new CountDownLatch(1); + doAnswer(invocation -> { + calledOnFailureLatch.countDown(); + return Void.TYPE; + }).when(listener).onFailure(any()); + + var timedListener = new TimedListener<>(TimeValue.timeValueMillis(1), listener, threadPool); + + calledOnFailureLatch.await(TIMEOUT.millis(), TimeUnit.MILLISECONDS); + + ArgumentCaptor argument = ArgumentCaptor.forClass(Exception.class); + verify(listener, times(1)).onFailure(argument.capture()); + assertThat(argument.getValue().getMessage(), is(format("Request timed out after [%s]", TimeValue.timeValueMillis(1)))); + assertTrue(timedListener.hasCompleted()); + + timedListener.getListener().onResponse(mock(InferenceServiceResults.class)); + verifyNoMoreInteractions(listener); + } + + public void testRequest_DoesNotCallOnFailureForTimeout_AfterAlreadyCallingOnResponse() throws Exception { + AtomicReference onTimeout = new AtomicReference<>(); + var mockThreadPool = mockThreadPoolForTimeout(onTimeout, threadPool); + + @SuppressWarnings("unchecked") + ActionListener listener = mock(ActionListener.class); + var timedListener = new TimedListener<>(TimeValue.timeValueMillis(1), listener, mockThreadPool); + + timedListener.getListener().onResponse(mock(InferenceServiceResults.class)); + verify(listener, times(1)).onResponse(any()); + assertTrue(timedListener.hasCompleted()); + + onTimeout.get().run(); + verifyNoMoreInteractions(listener); + } + + public static ThreadPool mockThreadPoolForTimeout(AtomicReference onTimeoutRunnable, ThreadPool threadPool) { + var mockThreadPool = mock(ThreadPool.class); + when(mockThreadPool.executor(any())).thenReturn(mock(ExecutorService.class)); + 
when(mockThreadPool.getThreadContext()).thenReturn(threadPool.getThreadContext()); + + doAnswer(invocation -> { + Runnable runnable = (Runnable) invocation.getArguments()[0]; + onTimeoutRunnable.set(runnable); + return mock(Scheduler.ScheduledCancellable.class); + }).when(mockThreadPool).schedule(any(Runnable.class), any(), any()); + + return mockThreadPool; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceAuthorizationRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceAuthorizationRequestTests.java new file mode 100644 index 0000000000000..66819e10c55ba --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceAuthorizationRequestTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.elastic; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.telemetry.TraceContext; +import org.junit.Before; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class ElasticInferenceServiceAuthorizationRequestTests extends ESTestCase { + + private TraceContext traceContext; + + @Before + public void init() { + traceContext = new TraceContext("dummyTraceParent", "dummyTraceState"); + } + + public void testCreateUriThrowsForInvalidBaseUrl() { + String invalidUrl = "http://invalid-url^"; + + ElasticsearchStatusException exception = assertThrows( + ElasticsearchStatusException.class, + () -> new ElasticInferenceServiceAuthorizationRequest(invalidUrl, traceContext) + ); + + assertThat(exception.status(), is(RestStatus.BAD_REQUEST)); + assertThat(exception.getMessage(), containsString("Failed to create URI for service")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceAuthorizationResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceAuthorizationResponseEntityTests.java new file mode 100644 index 0000000000000..6948a1da47c4c --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceAuthorizationResponseEntityTests.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.elastic; + +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.EnumSet; +import java.util.List; + +import static org.hamcrest.Matchers.is; + +public class ElasticInferenceServiceAuthorizationResponseEntityTests extends ESTestCase { + + public void testParseAllFields() throws IOException { + String json = """ + { + "models": [ + { + "model_name": "test_model", + "task_types": ["embed/text/sparse", "chat"] + } + ] + } + """; + + try (var parser = createParser(JsonXContent.jsonXContent, json)) { + var entity = ElasticInferenceServiceAuthorizationResponseEntity.PARSER.apply(parser, null); + var expected = new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "test_model", + EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION) + ) + ) + ); + + assertThat(entity, is(expected)); + } + } + + public void testParsing_EmptyModels() throws IOException { + String json = """ + { + "models": [] + } + """; + + try (var parser = createParser(JsonXContent.jsonXContent, json)) { + var entity = ElasticInferenceServiceAuthorizationResponseEntity.PARSER.apply(parser, null); + var expected = new ElasticInferenceServiceAuthorizationResponseEntity(List.of()); + + assertThat(entity, is(expected)); + } + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index 5e7e93b1f5a75..098f69f80a8a2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; @@ -38,8 +39,12 @@ import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorization; +import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorizationHandler; +import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorizationTests; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; @@ -47,6 +52,7 @@ import org.junit.Before; import java.io.IOException; +import java.util.EnumSet; import java.util.HashMap; import java.util.List; import 
java.util.Map; @@ -67,6 +73,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -296,14 +304,7 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists public void testCheckModelConfig_ReturnsNewModelReference() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - - try ( - var service = new ElasticInferenceService( - senderFactory, - createWithEmptySettings(threadPool), - new ElasticInferenceServiceComponents(getUrl(webServer)) - ) - ) { + try (var service = createService(senderFactory, getUrl(webServer))) { var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)); PlainActionFuture listener = new PlainActionFuture<>(); service.checkModelConfig(model, listener); @@ -321,13 +322,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotAValidModel() throws IOException var mockModel = getInvalidModel("model_id", "service_name", TaskType.SPARSE_EMBEDDING); - try ( - var service = new ElasticInferenceService( - factory, - createWithEmptySettings(threadPool), - new ElasticInferenceServiceComponents(null) - ) - ) { + try (var service = createService(factory)) { PlainActionFuture listener = new PlainActionFuture<>(); service.infer( mockModel, @@ -355,6 +350,12 @@ public void testInfer_ThrowsErrorWhenModelIsNotAValidModel() throws IOException verifyNoMoreInteractions(sender); } + private ModelRegistry mockModelRegistry() { + var client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); + return new ModelRegistry(client); + } + public void testInfer_ThrowsErrorWhenTaskTypeIsNotValid() throws IOException { var sender = mock(Sender.class); @@ -363,13 +364,7 @@ public void testInfer_ThrowsErrorWhenTaskTypeIsNotValid() throws IOException { var mockModel = getInvalidModel("model_id", "service_name", TaskType.TEXT_EMBEDDING); - try ( - var service = new ElasticInferenceService( - factory, - createWithEmptySettings(threadPool), - new ElasticInferenceServiceComponents(null) - ) - ) { + try (var service = createService(factory)) { PlainActionFuture listener = new PlainActionFuture<>(); service.infer( mockModel, @@ -408,13 +403,7 @@ public void testInfer_ThrowsErrorWhenTaskTypeIsNotValid_ChatCompletion() throws var mockModel = getInvalidModel("model_id", "service_name", TaskType.CHAT_COMPLETION); - try ( - var service = new ElasticInferenceService( - factory, - createWithEmptySettings(threadPool), - new ElasticInferenceServiceComponents(null) - ) - ) { + try (var service = createService(factory)) { PlainActionFuture listener = new PlainActionFuture<>(); service.infer( mockModel, @@ -451,13 +440,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); var eisGatewayUrl = getUrl(webServer); - try ( - var service = new ElasticInferenceService( - senderFactory, - createWithEmptySettings(threadPool), - new ElasticInferenceServiceComponents(eisGatewayUrl) - ) - ) { + try (var service = createService(senderFactory, eisGatewayUrl)) { String responseJson = """ { "data": [ @@ -508,13 +491,7 @@ public void testChunkedInfer_PassesThrough() throws IOException { var 
senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); var eisGatewayUrl = getUrl(webServer); - try ( - var service = new ElasticInferenceService( - senderFactory, - createWithEmptySettings(threadPool), - new ElasticInferenceServiceComponents(eisGatewayUrl) - ) - ) { + try (var service = createService(senderFactory, eisGatewayUrl)) { String responseJson = """ { "data": [ @@ -568,8 +545,38 @@ public void testChunkedInfer_PassesThrough() throws IOException { } } + public void testHideFromConfigurationApi_ReturnsTrue_WithNoAvailableModels() throws Exception { + try (var service = createServiceWithMockSender(ElasticInferenceServiceAuthorization.newDisabledService())) { + assertTrue(service.hideFromConfigurationApi()); + } + } + + public void testHideFromConfigurationApi_ReturnsTrue_WithModelTaskTypesThatAreNotImplemented() throws Exception { + try ( + var service = createServiceWithMockSender( + new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.of(TaskType.TEXT_EMBEDDING))) + ) + ) { + assertTrue(service.hideFromConfigurationApi()); + } + } + + public void testHideFromConfigurationApi_ReturnsFalse_WithAvailableModels() throws Exception { + try ( + var service = createServiceWithMockSender( + new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.of(TaskType.CHAT_COMPLETION))) + ) + ) { + assertFalse(service.hideFromConfigurationApi()); + } + } + public void testGetConfiguration() throws Exception { - try (var service = createServiceWithMockSender()) { + try ( + var service = createServiceWithMockSender( + new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION))) + ) + ) { String content = XContentHelper.stripWhitespace(""" { "service": "elastic", @@ -621,11 +628,218 @@ public void testGetConfiguration() throws Exception { } } + public void testGetConfiguration_WithoutSupportedTaskTypes() throws Exception { + try (var service = createServiceWithMockSender(ElasticInferenceServiceAuthorization.newDisabledService())) { + String content = XContentHelper.stripWhitespace(""" + { + "service": "elastic", + "name": "Elastic", + "task_types": [], + "configurations": { + "rate_limit.requests_per_minute": { + "description": "Minimize the number of rate limit errors.", + "label": "Rate Limit", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["sparse_embedding" , "chat_completion"] + }, + "model_id": { + "description": "The name of the model to use for the inference task.", + "label": "Model ID", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["sparse_embedding" , "chat_completion"] + }, + "max_input_tokens": { + "description": "Allows you to specify the maximum number of tokens per input.", + "label": "Maximum Input Tokens", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["sparse_embedding"] + } + } + } + """); + InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes( + new BytesArray(content), + XContentType.JSON + ); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + InferenceServiceConfiguration serviceConfiguration = service.getConfiguration(); + assertToXContentEquivalent( + originalBytes, + toXContent(serviceConfiguration, XContentType.JSON, 
humanReadable), + XContentType.JSON + ); + } + } + + public void testGetConfiguration_WithoutSupportedTaskTypes_WhenModelsReturnTaskOutsideOfImplementation() throws Exception { + try ( + var service = createServiceWithMockSender( + // this service doesn't yet support text embedding so we should still have no task types + new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.of(TaskType.TEXT_EMBEDDING))) + ) + ) { + String content = XContentHelper.stripWhitespace(""" + { + "service": "elastic", + "name": "Elastic", + "task_types": [], + "configurations": { + "rate_limit.requests_per_minute": { + "description": "Minimize the number of rate limit errors.", + "label": "Rate Limit", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["sparse_embedding" , "chat_completion"] + }, + "model_id": { + "description": "The name of the model to use for the inference task.", + "label": "Model ID", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["sparse_embedding" , "chat_completion"] + }, + "max_input_tokens": { + "description": "Allows you to specify the maximum number of tokens per input.", + "label": "Maximum Input Tokens", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["sparse_embedding"] + } + } + } + """); + InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes( + new BytesArray(content), + XContentType.JSON + ); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + InferenceServiceConfiguration serviceConfiguration = service.getConfiguration(); + assertToXContentEquivalent( + originalBytes, + toXContent(serviceConfiguration, XContentType.JSON, humanReadable), + XContentType.JSON + ); + } + } + + public void testSupportedStreamingTasks_ReturnsChatCompletion_WhenAuthRespondsWithAValidModel() throws Exception { + String responseJson = """ + { + "models": [ + { + "model_name": "model-a", + "task_types": ["embed/text/sparse", "chat"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = createServiceWithAuthHandler(senderFactory, getUrl(webServer))) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.ANY))); + assertTrue(service.defaultConfigIds().isEmpty()); + } + } + + public void testSupportedStreamingTasks_ReturnsEmpty_WhenAuthRespondsWithoutChatCompletion() throws Exception { + String responseJson = """ + { + "models": [ + { + "model_name": "model-a", + "task_types": ["embed/text/sparse"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = createServiceWithAuthHandler(senderFactory, getUrl(webServer))) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.noneOf(TaskType.class))); + assertTrue(service.defaultConfigIds().isEmpty()); + } + } + private ElasticInferenceService createServiceWithMockSender() { + return 
createServiceWithMockSender(ElasticInferenceServiceAuthorizationTests.createEnabledAuth()); + } + + private ElasticInferenceService createServiceWithMockSender(ElasticInferenceServiceAuthorization auth) { + var mockAuthHandler = mock(ElasticInferenceServiceAuthorizationHandler.class); + doAnswer(invocation -> { + ActionListener listener = invocation.getArgument(0); + listener.onResponse(auth); + return Void.TYPE; + }).when(mockAuthHandler).getAuthorization(any(), any()); + return new ElasticInferenceService( mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool), - new ElasticInferenceServiceComponents(null) + new ElasticInferenceServiceComponents(null), + mockModelRegistry(), + mockAuthHandler + ); + } + + private ElasticInferenceService createService(HttpRequestSender.Factory senderFactory) { + return createService(senderFactory, ElasticInferenceServiceAuthorizationTests.createEnabledAuth(), null); + } + + private ElasticInferenceService createService(HttpRequestSender.Factory senderFactory, String gatewayUrl) { + return createService(senderFactory, ElasticInferenceServiceAuthorizationTests.createEnabledAuth(), gatewayUrl); + } + + private ElasticInferenceService createService( + HttpRequestSender.Factory senderFactory, + ElasticInferenceServiceAuthorization auth, + String gatewayUrl + ) { + var mockAuthHandler = mock(ElasticInferenceServiceAuthorizationHandler.class); + doAnswer(invocation -> { + ActionListener listener = invocation.getArgument(0); + listener.onResponse(auth); + return Void.TYPE; + }).when(mockAuthHandler).getAuthorization(any(), any()); + + return new ElasticInferenceService( + senderFactory, + createWithEmptySettings(threadPool), + new ElasticInferenceServiceComponents(gatewayUrl), + mockModelRegistry(), + mockAuthHandler + ); + } + + private ElasticInferenceService createServiceWithAuthHandler(HttpRequestSender.Factory senderFactory, String eisGatewayUrl) { + return new ElasticInferenceService( + senderFactory, + createWithEmptySettings(threadPool), + new ElasticInferenceServiceComponents(eisGatewayUrl), + mockModelRegistry(), + new ElasticInferenceServiceAuthorizationHandler(eisGatewayUrl, threadPool) ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandlerTests.java new file mode 100644 index 0000000000000..43cac4c54aa3c --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandlerTests.java @@ -0,0 +1,265 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.elastic.authorization; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.junit.After; +import org.junit.Before; +import org.mockito.ArgumentCaptor; + +import java.io.IOException; +import java.util.EnumSet; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.external.http.retry.RetryingHttpSender.MAX_RETIES; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class ElasticInferenceServiceAuthorizationHandlerTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testDoesNotAttempt_ToRetrieveAuthorization_IfBaseUrlIsNull() throws Exception { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + var logger = mock(Logger.class); + var authHandler = new ElasticInferenceServiceAuthorizationHandler(null, threadPool, logger); + + try (var sender = senderFactory.createSender()) { + PlainActionFuture listener = new PlainActionFuture<>(); + authHandler.getAuthorization(listener, sender); + + var authResponse = listener.actionGet(TIMEOUT); + assertTrue(authResponse.enabledTaskTypes().isEmpty()); + assertFalse(authResponse.isEnabled()); + + var loggerArgsCaptor = ArgumentCaptor.forClass(String.class); + verify(logger).warn(loggerArgsCaptor.capture()); + var message = loggerArgsCaptor.getValue(); + assertThat(message, is("The base URL for the authorization 
service is not valid, rejecting authorization.")); + } + } + + public void testDoesNotAttempt_ToRetrieveAuthorization_IfBaseUrlIsEmpty() throws Exception { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + var logger = mock(Logger.class); + var authHandler = new ElasticInferenceServiceAuthorizationHandler("", threadPool, logger); + + try (var sender = senderFactory.createSender()) { + PlainActionFuture listener = new PlainActionFuture<>(); + authHandler.getAuthorization(listener, sender); + + var authResponse = listener.actionGet(TIMEOUT); + assertTrue(authResponse.enabledTaskTypes().isEmpty()); + assertFalse(authResponse.isEnabled()); + + var loggerArgsCaptor = ArgumentCaptor.forClass(String.class); + verify(logger).warn(loggerArgsCaptor.capture()); + var message = loggerArgsCaptor.getValue(); + assertThat(message, is("The base URL for the authorization service is not valid, rejecting authorization.")); + } + } + + public void testGetAuthorization_FailsWhenAnInvalidFieldIsFound() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + var eisGatewayUrl = getUrl(webServer); + var logger = mock(Logger.class); + var authHandler = new ElasticInferenceServiceAuthorizationHandler(eisGatewayUrl, threadPool, logger); + + try (var sender = senderFactory.createSender()) { + String responseJson = """ + { + "models": [ + { + "invalid-field": "model-a", + "task-types": ["embed/text/sparse", "chat"] + } + ] + } + """; + + queueWebServerResponsesForRetries(responseJson); + + PlainActionFuture listener = new PlainActionFuture<>(); + authHandler.getAuthorization(listener, sender); + + var authResponse = listener.actionGet(TIMEOUT); + assertTrue(authResponse.enabledTaskTypes().isEmpty()); + assertFalse(authResponse.isEnabled()); + + var loggerArgsCaptor = ArgumentCaptor.forClass(String.class); + verify(logger).warn(loggerArgsCaptor.capture()); + var message = loggerArgsCaptor.getValue(); + assertThat( + message, + is( + "Failed to retrieve the authorization information from the Elastic Inference Service." + + " Encountered an exception: org.elasticsearch.xcontent.XContentParseException: [4:28] " + + "[ElasticInferenceServiceAuthorizationResponseEntity] failed to parse field [models]" + ) + ); + } + } + + /** + * Queues the required number of responses to handle the retries of the internal sender. 
+ */ + private void queueWebServerResponsesForRetries(String responseJson) { + for (int i = 0; i < MAX_RETIES; i++) { + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + } + } + + public void testGetAuthorization_ReturnsAValidResponse() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + var eisGatewayUrl = getUrl(webServer); + var logger = mock(Logger.class); + var authHandler = new ElasticInferenceServiceAuthorizationHandler(eisGatewayUrl, threadPool, logger); + + try (var sender = senderFactory.createSender()) { + String responseJson = """ + { + "models": [ + { + "model_name": "model-a", + "task_types": ["embed/text/sparse", "chat"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + PlainActionFuture listener = new PlainActionFuture<>(); + authHandler.getAuthorization(listener, sender); + + var authResponse = listener.actionGet(TIMEOUT); + assertThat(authResponse.enabledTaskTypes(), is(EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION))); + assertTrue(authResponse.isEnabled()); + + var loggerArgsCaptor = ArgumentCaptor.forClass(String.class); + verify(logger, times(1)).debug(loggerArgsCaptor.capture()); + + var message = loggerArgsCaptor.getValue(); + assertThat(message, is("Retrieving authorization information from the Elastic Inference Service.")); + verifyNoMoreInteractions(logger); + } + } + + @SuppressWarnings("unchecked") + public void testGetAuthorization_OnResponseCalledOnce() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + var eisGatewayUrl = getUrl(webServer); + var logger = mock(Logger.class); + var authHandler = new ElasticInferenceServiceAuthorizationHandler(eisGatewayUrl, threadPool, logger); + + ActionListener listener = mock(ActionListener.class); + String responseJson = """ + { + "models": [ + { + "model_name": "model-a", + "task_types": ["embed/text/sparse", "chat"] + } + ] + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + try (var sender = senderFactory.createSender()) { + authHandler.getAuthorization(listener, sender); + authHandler.waitForAuthRequestCompletion(TIMEOUT); + + verify(listener, times(1)).onResponse(any()); + var loggerArgsCaptor = ArgumentCaptor.forClass(String.class); + verify(logger, times(1)).debug(loggerArgsCaptor.capture()); + + var message = loggerArgsCaptor.getValue(); + assertThat(message, is("Retrieving authorization information from the Elastic Inference Service.")); + verifyNoMoreInteractions(logger); + } + } + + public void testGetAuthorization_InvalidResponse() throws IOException { + var senderMock = mock(Sender.class); + var senderFactory = mock(HttpRequestSender.Factory.class); + when(senderFactory.createSender()).thenReturn(senderMock); + + doAnswer(invocationOnMock -> { + ActionListener listener = invocationOnMock.getArgument(4); + listener.onResponse(new ChatCompletionResults(List.of(new ChatCompletionResults.Result("awesome")))); + return Void.TYPE; + }).when(senderMock).sendWithoutQueuing(any(), any(), any(), any(), any()); + + var logger = mock(Logger.class); + var authHandler = new ElasticInferenceServiceAuthorizationHandler("abc", threadPool, logger); + + try (var sender = senderFactory.createSender()) { + PlainActionFuture listener = new PlainActionFuture<>(); + + authHandler.getAuthorization(listener, sender); + var result = 
listener.actionGet(TIMEOUT); + + assertThat(result, is(ElasticInferenceServiceAuthorization.newDisabledService())); + + var loggerArgsCaptor = ArgumentCaptor.forClass(String.class); + verify(logger).warn(loggerArgsCaptor.capture()); + var message = loggerArgsCaptor.getValue(); + assertThat( + message, + is( + "Failed to retrieve the authorization information from the Elastic Inference Service." + + " Received an invalid response type: ChatCompletionResults" + ) + ); + } + + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationTests.java new file mode 100644 index 0000000000000..20b52cb7bb314 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elastic.authorization; + +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.response.elastic.ElasticInferenceServiceAuthorizationResponseEntity; + +import java.util.EnumSet; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class ElasticInferenceServiceAuthorizationTests extends ESTestCase { + public static ElasticInferenceServiceAuthorization createEnabledAuth() { + return new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.of(TaskType.TEXT_EMBEDDING))); + } + + public void testIsEnabled_ReturnsFalse_WithEmptyMap() { + assertFalse(ElasticInferenceServiceAuthorization.newDisabledService().isEnabled()); + } + + public void testExcludes_ModelsWithoutTaskTypes() { + var response = new ElasticInferenceServiceAuthorizationResponseEntity( + List.of(new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel("model-1", EnumSet.noneOf(TaskType.class))) + ); + var auth = ElasticInferenceServiceAuthorization.of(response); + assertTrue(auth.enabledTaskTypes().isEmpty()); + assertFalse(auth.isEnabled()); + } + + public void testConstructor_WithModelWithoutTaskTypes_ThrowsException() { + expectThrows( + IllegalArgumentException.class, + () -> new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.noneOf(TaskType.class))) + ); + } + + public void testEnabledTaskTypes_MergesFromSeparateModels() { + assertThat( + new ElasticInferenceServiceAuthorization( + Map.of("model-1", EnumSet.of(TaskType.TEXT_EMBEDDING), "model-2", EnumSet.of(TaskType.SPARSE_EMBEDDING)) + ).enabledTaskTypes(), + is(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING)) + ); + } + + public void testEnabledTaskTypes_FromSingleEntry() { + assertThat( + new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING))) + .enabledTaskTypes(), + is(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING)) + ); + } +} From 96baac881dfe5e60669b986104e8fe465901f83b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine 
<58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 02:09:15 +1100 Subject: [PATCH 065/383] Mute org.elasticsearch.packaging.test.DockerTests test140CgroupOsStatsAreAvailable #120914 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 5d490ec0bda2d..7347fb41989d8 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -260,6 +260,9 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test050BasicApiTests issue: https://github.com/elastic/elasticsearch/issues/120911 +- class: org.elasticsearch.packaging.test.DockerTests + method: test140CgroupOsStatsAreAvailable + issue: https://github.com/elastic/elasticsearch/issues/120914 # Examples: # From 8de9539e291de80977183231e3b9b820d27cd127 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 27 Jan 2025 17:12:42 +0200 Subject: [PATCH 066/383] Lazy initialization for `SyntheticSourceSupport.loader()` (#120896) * Lazy initialization for `SyntheticSourceSupport.loader()` * [CI] Auto commit changes from spotless * add missing --------- Co-authored-by: elasticsearchmachine --- .../extras/MatchOnlyTextFieldMapper.java | 14 ++++---- .../mapper/extras/ScaledFloatFieldMapper.java | 14 ++++---- .../AnnotatedTextFieldMapper.java | 8 ++--- .../index/mapper/BinaryFieldMapper.java | 6 ++-- .../index/mapper/BooleanFieldMapper.java | 14 ++++---- .../index/mapper/DateFieldMapper.java | 14 ++++---- .../index/mapper/DocCountFieldMapper.java | 2 +- .../index/mapper/DocumentParser.java | 2 +- .../index/mapper/FieldMapper.java | 18 +++++++++- .../index/mapper/GeoPointFieldMapper.java | 20 +++++------ .../mapper/IgnoredSourceFieldMapper.java | 2 +- .../index/mapper/IpFieldMapper.java | 35 ++++++++++--------- .../index/mapper/KeywordFieldMapper.java | 2 +- .../index/mapper/MetadataFieldMapper.java | 2 +- .../index/mapper/NumberFieldMapper.java | 2 +- .../index/mapper/RangeFieldMapper.java | 6 ++-- .../index/mapper/TextFieldMapper.java | 8 ++--- .../flattened/FlattenedFieldMapper.java | 14 ++++---- .../vectors/DenseVectorFieldMapper.java | 10 +++--- .../vectors/SparseVectorFieldMapper.java | 2 +- .../index/mapper/DocumentParserTests.java | 6 ++-- .../mapper/HistogramFieldMapper.java | 14 ++++---- .../AggregateDoubleMetricFieldMapper.java | 14 ++++---- .../mapper/ConstantKeywordFieldMapper.java | 8 ++--- .../CountedKeywordFieldMapper.java | 5 +-- .../unsignedlong/UnsignedLongFieldMapper.java | 14 ++++---- .../VersionStringFieldMapper.java | 26 +++++++------- .../mapper/RankVectorsFieldMapper.java | 2 +- .../wildcard/mapper/WildcardFieldMapper.java | 28 +++++++-------- 29 files changed, 159 insertions(+), 153 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java index 5904169308fab..b103a23772a47 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java @@ -463,13 +463,13 @@ public MatchOnlyTextFieldType fieldType() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - var loader = new StringStoredFieldFieldLoader(fieldType().storedFieldNameForSyntheticSource(), fieldType().name(), leafName()) { - @Override - protected void write(XContentBuilder b, 
Object value) throws IOException { - b.value((String) value); + return new SyntheticSourceSupport.Native( + () -> new StringStoredFieldFieldLoader(fieldType().storedFieldNameForSyntheticSource(), fieldType().name(), leafName()) { + @Override + protected void write(XContentBuilder b, Object value) throws IOException { + b.value((String) value); + } } - }; - - return new SyntheticSourceSupport.Native(loader); + ); } } diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java index b845545133e19..a91ca66faa405 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java @@ -707,14 +707,14 @@ public int docValueCount() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (hasDocValues) { - var loader = new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed.value()) { - @Override - protected void writeValue(XContentBuilder b, long value) throws IOException { - b.value(decodeForSyntheticSource(value, scalingFactor)); + return new SyntheticSourceSupport.Native( + () -> new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed.value()) { + @Override + protected void writeValue(XContentBuilder b, long value) throws IOException { + b.value(decodeForSyntheticSource(value, scalingFactor)); + } } - }; - - return new SyntheticSourceSupport.Native(loader); + ); } return super.syntheticSourceSupport(); diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java index 4b2006430b89e..8ee639ffc8431 100644 --- a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java @@ -560,19 +560,17 @@ public FieldMapper.Builder getMergeBuilder() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (fieldType.stored()) { - var loader = new StringStoredFieldFieldLoader(fullPath(), leafName()) { + return new SyntheticSourceSupport.Native(() -> new StringStoredFieldFieldLoader(fullPath(), leafName()) { @Override protected void write(XContentBuilder b, Object value) throws IOException { b.value((String) value); } - }; - - return new SyntheticSourceSupport.Native(loader); + }); } var kwd = TextFieldMapper.SyntheticSourceHelper.getKeywordFieldMapperForSyntheticSource(this); if (kwd != null) { - return new SyntheticSourceSupport.Native(kwd.syntheticFieldLoader(fullPath(), leafName())); + return new SyntheticSourceSupport.Native(() -> kwd.syntheticFieldLoader(fullPath(), leafName())); } return super.syntheticSourceSupport(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java index 87c123d71aae5..2093909876567 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java @@ -195,7 +195,7 @@ protected String contentType() 
{ @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (hasDocValues) { - var loader = new BinaryDocValuesSyntheticFieldLoader(fullPath()) { + return new SyntheticSourceSupport.Native(() -> new BinaryDocValuesSyntheticFieldLoader(fullPath()) { @Override protected void writeValue(XContentBuilder b, BytesRef value) throws IOException { var in = new ByteArrayStreamInput(); @@ -221,9 +221,7 @@ protected void writeValue(XContentBuilder b, BytesRef value) throws IOException b.endArray(); } } - }; - - return new SyntheticSourceSupport.Native(loader); + }); } return super.syntheticSourceSupport(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 77de1654cf4ba..8100b83463c93 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -546,14 +546,14 @@ protected String contentType() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (hasDocValues) { - var loader = new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed.value()) { - @Override - protected void writeValue(XContentBuilder b, long value) throws IOException { - b.value(value == 1); + return new SyntheticSourceSupport.Native( + () -> new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed.value()) { + @Override + protected void writeValue(XContentBuilder b, long value) throws IOException { + b.value(value == 1); + } } - }; - - return new SyntheticSourceSupport.Native(loader); + ); } return super.syntheticSourceSupport(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index fb17f1c7ebaf8..6d04926d865a1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -1011,14 +1011,14 @@ public Long getNullValue() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (hasDocValues) { - var loader = new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed) { - @Override - protected void writeValue(XContentBuilder b, long value) throws IOException { - b.value(fieldType().format(value, fieldType().dateTimeFormatter())); + return new SyntheticSourceSupport.Native( + () -> new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed) { + @Override + protected void writeValue(XContentBuilder b, long value) throws IOException { + b.value(fieldType().format(value, fieldType().dateTimeFormatter())); + } } - }; - - return new SyntheticSourceSupport.Native(loader); + ); } return super.syntheticSourceSupport(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java index d57564ca5d696..52828c5e430ba 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java @@ -128,7 +128,7 @@ public static IndexableField field(int count) { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - return new SyntheticSourceSupport.Native(new SyntheticFieldLoader()); + return new 
SyntheticSourceSupport.Native(SyntheticFieldLoader::new); } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index ac3f019636b66..d45cafd252428 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -1023,7 +1023,7 @@ protected String contentType() { protected SyntheticSourceSupport syntheticSourceSupport() { // Opt out of fallback synthetic source implementation // since there is custom logic in #parseCreateField(). - return new SyntheticSourceSupport.Native(SourceLoader.SyntheticFieldLoader.NOTHING); + return new SyntheticSourceSupport.Native(() -> SourceLoader.SyntheticFieldLoader.NOTHING); } }; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index c0c3c7193998a..7c1f3678a5dc9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -560,11 +560,27 @@ public SourceLoader.SyntheticFieldLoader loader() { SyntheticSourceSupport FALLBACK = new Fallback(); - record Native(SourceLoader.SyntheticFieldLoader loader) implements SyntheticSourceSupport { + final class Native implements SyntheticSourceSupport { + Supplier loaderSupplier; + private SourceLoader.SyntheticFieldLoader loader; + + @SuppressWarnings("checkstyle:RedundantModifier") + public Native(Supplier loaderSupplier) { + this.loaderSupplier = loaderSupplier; + } + @Override public SyntheticSourceMode mode() { return SyntheticSourceMode.NATIVE; } + + @Override + public SourceLoader.SyntheticFieldLoader loader() { + if (loader == null) { + loader = loaderSupplier.get(); + } + return loader; + } } SyntheticSourceMode mode(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index 83259d70ae278..b71a33976d72d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -619,17 +619,17 @@ protected void onMalformedValue(DocumentParserContext context, XContentBuilder m @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (fieldType().hasDocValues()) { - var loader = new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed()) { - final GeoPoint point = new GeoPoint(); - - @Override - protected void writeValue(XContentBuilder b, long value) throws IOException { - point.reset(GeoEncodingUtils.decodeLatitude((int) (value >>> 32)), GeoEncodingUtils.decodeLongitude((int) value)); - point.toXContent(b, ToXContent.EMPTY_PARAMS); + return new SyntheticSourceSupport.Native( + () -> new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed()) { + final GeoPoint point = new GeoPoint(); + + @Override + protected void writeValue(XContentBuilder b, long value) throws IOException { + point.reset(GeoEncodingUtils.decodeLatitude((int) (value >>> 32)), GeoEncodingUtils.decodeLongitude((int) value)); + point.toXContent(b, ToXContent.EMPTY_PARAMS); + } } - }; - - return new SyntheticSourceSupport.Native(loader); + ); } return super.syntheticSourceSupport(); diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java index bdb3d97d4c187..5f553ac8d2252 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java @@ -233,7 +233,7 @@ protected SyntheticSourceSupport syntheticSourceSupport() { // not being available. // We would like to have an option to lose some values in synthetic source // but have search not fail. - return new SyntheticSourceSupport.Native(new SourceLoader.SyntheticFieldLoader() { + return new SyntheticSourceSupport.Native(() -> new SourceLoader.SyntheticFieldLoader() { @Override public Stream> storedFieldLoaders() { if (indexSettings.getSkipIgnoredSourceRead()) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 0204a2b10013a..3ded3d2699b21 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -608,26 +608,27 @@ public void doValidate(MappingLookup lookup) { @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (hasDocValues) { - var layers = new ArrayList(); - layers.add(new SortedSetDocValuesSyntheticFieldLoaderLayer(fullPath()) { - @Override - protected BytesRef convert(BytesRef value) { - byte[] bytes = Arrays.copyOfRange(value.bytes, value.offset, value.offset + value.length); - return new BytesRef(NetworkAddress.format(InetAddressPoint.decode(bytes))); - } + return new SyntheticSourceSupport.Native(() -> { + var layers = new ArrayList(); + layers.add(new SortedSetDocValuesSyntheticFieldLoaderLayer(fullPath()) { + @Override + protected BytesRef convert(BytesRef value) { + byte[] bytes = Arrays.copyOfRange(value.bytes, value.offset, value.offset + value.length); + return new BytesRef(NetworkAddress.format(InetAddressPoint.decode(bytes))); + } - @Override - protected BytesRef preserve(BytesRef value) { - // No need to copy because convert has made a deep copy - return value; + @Override + protected BytesRef preserve(BytesRef value) { + // No need to copy because convert has made a deep copy + return value; + } + }); + + if (ignoreMalformed) { + layers.add(new CompositeSyntheticFieldLoader.MalformedValuesLayer(fullPath())); } + return new CompositeSyntheticFieldLoader(leafName(), fullPath(), layers); }); - - if (ignoreMalformed) { - layers.add(new CompositeSyntheticFieldLoader.MalformedValuesLayer(fullPath())); - } - - return new SyntheticSourceSupport.Native(new CompositeSyntheticFieldLoader(leafName(), fullPath(), layers)); } return super.syntheticSourceSupport(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index bdcf9bf98279f..7b14739d36246 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -1044,7 +1044,7 @@ protected SyntheticSourceSupport syntheticSourceSupport() { } if (fieldType.stored() || hasDocValues) { - return new SyntheticSourceSupport.Native(syntheticFieldLoader(fullPath(), leafName())); + return new SyntheticSourceSupport.Native(() -> syntheticFieldLoader(fullPath(), leafName())); } return 
super.syntheticSourceSupport(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java index 033742b3b57fc..aa74190604750 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java @@ -221,6 +221,6 @@ public void postParse(DocumentParserContext context) throws IOException { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - return new SyntheticSourceSupport.Native(SourceLoader.SyntheticFieldLoader.NOTHING); + return new SyntheticSourceSupport.Native(() -> SourceLoader.SyntheticFieldLoader.NOTHING); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 1b8fb3dfc101c..76528ccf0667e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -2030,7 +2030,7 @@ public void doValidate(MappingLookup lookup) { @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (hasDocValues) { - return new SyntheticSourceSupport.Native(type.syntheticFieldLoader(fullPath(), leafName(), ignoreMalformed.value())); + return new SyntheticSourceSupport.Native(() -> type.syntheticFieldLoader(fullPath(), leafName(), ignoreMalformed.value())); } return super.syntheticSourceSupport(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java index 461ad74a9434d..79a0e6b1fdbc4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java @@ -466,7 +466,7 @@ private static Range parseIpRangeFromCidr(final XContentParser parser) throws IO @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (hasDocValues) { - var loader = new BinaryDocValuesSyntheticFieldLoader(fullPath()) { + return new SyntheticSourceSupport.Native(() -> new BinaryDocValuesSyntheticFieldLoader(fullPath()) { @Override protected void writeValue(XContentBuilder b, BytesRef value) throws IOException { List ranges = type.decodeRanges(value); @@ -486,9 +486,7 @@ protected void writeValue(XContentBuilder b, BytesRef value) throws IOException b.endArray(); } } - }; - - return new SyntheticSourceSupport.Native(loader); + }); } return super.syntheticSourceSupport(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index b7323f2c0e3e4..01b275e0a382e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -1478,19 +1478,17 @@ protected void doXContentBody(XContentBuilder builder, Params params) throws IOE @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (store) { - var loader = new StringStoredFieldFieldLoader(fullPath(), leafName()) { + return new SyntheticSourceSupport.Native(() -> new StringStoredFieldFieldLoader(fullPath(), leafName()) { @Override protected void write(XContentBuilder b, Object value) throws IOException { b.value((String) value); } - }; - - return new 
SyntheticSourceSupport.Native(loader); + }); } var kwd = SyntheticSourceHelper.getKeywordFieldMapperForSyntheticSource(this); if (kwd != null) { - return new SyntheticSourceSupport.Native(kwd.syntheticFieldLoader(fullPath(), leafName())); + return new SyntheticSourceSupport.Native(() -> kwd.syntheticFieldLoader(fullPath(), leafName())); } return super.syntheticSourceSupport(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index 7ef12f6dd30d2..f70a47caa080c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -911,14 +911,14 @@ public FieldMapper.Builder getMergeBuilder() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (fieldType().hasDocValues()) { - var loader = new FlattenedSortedSetDocValuesSyntheticFieldLoader( - fullPath(), - fullPath() + KEYED_FIELD_SUFFIX, - ignoreAbove() < Integer.MAX_VALUE ? fullPath() + KEYED_IGNORED_VALUES_FIELD_SUFFIX : null, - leafName() + return new SyntheticSourceSupport.Native( + () -> new FlattenedSortedSetDocValuesSyntheticFieldLoader( + fullPath(), + fullPath() + KEYED_FIELD_SUFFIX, + ignoreAbove() < Integer.MAX_VALUE ? fullPath() + KEYED_IGNORED_VALUES_FIELD_SUFFIX : null, + leafName() + ) ); - - return new SyntheticSourceSupport.Native(loader); } return super.syntheticSourceSupport(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 5edff48577efc..0d514408c912f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -2372,11 +2372,11 @@ public String toString() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - var loader = fieldType().indexed - ? new IndexedSyntheticFieldLoader(indexCreatedVersion, fieldType().similarity) - : new DocValuesSyntheticFieldLoader(indexCreatedVersion); - - return new SyntheticSourceSupport.Native(loader); + return new SyntheticSourceSupport.Native( + () -> fieldType().indexed + ? 
new IndexedSyntheticFieldLoader(indexCreatedVersion, fieldType().similarity) + : new DocValuesSyntheticFieldLoader(indexCreatedVersion) + ); } private class IndexedSyntheticFieldLoader extends SourceLoader.DocValuesBasedSyntheticFieldLoader { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java index c73f4fef23361..fdbb03f51743e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java @@ -164,7 +164,7 @@ private SparseVectorFieldMapper(String simpleName, MappedFieldType mappedFieldTy @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (fieldType().isStored()) { - return new SyntheticSourceSupport.Native(new SparseVectorSyntheticFieldLoader(fullPath(), leafName())); + return new SyntheticSourceSupport.Native(() -> new SparseVectorSyntheticFieldLoader(fullPath(), leafName())); } return super.syntheticSourceSupport(); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index 5b32350cbf4b8..c394d1a82e28e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -3303,15 +3303,13 @@ protected String contentType() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - var loader = new StringStoredFieldFieldLoader(fullPath(), leafName()) { + return new SyntheticSourceSupport.Native(() -> new StringStoredFieldFieldLoader(fullPath(), leafName()) { @Override protected void write(XContentBuilder b, Object value) throws IOException { BytesRef ref = (BytesRef) value; b.utf8Value(ref.bytes, ref.offset, ref.length); } - }; - - return new SyntheticSourceSupport.Native(loader); + }); } private static final TypeParser PARSER = new FixedTypeParser(c -> new MockMetadataMapper()); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java index d597d7b59f240..fcf2d572f2d14 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java @@ -497,14 +497,14 @@ public long count() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - var loader = new CompositeSyntheticFieldLoader( - leafName(), - fullPath(), - new HistogramSyntheticFieldLoader(), - new CompositeSyntheticFieldLoader.MalformedValuesLayer(fullPath()) + return new SyntheticSourceSupport.Native( + () -> new CompositeSyntheticFieldLoader( + leafName(), + fullPath(), + new HistogramSyntheticFieldLoader(), + new CompositeSyntheticFieldLoader.MalformedValuesLayer(fullPath()) + ) ); - - return new SyntheticSourceSupport.Native(loader); } private class HistogramSyntheticFieldLoader implements CompositeSyntheticFieldLoader.DocValuesLayer { diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java 
b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java index 6944f91042311..df4a0aed01bc2 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java @@ -709,14 +709,14 @@ public FieldMapper.Builder getMergeBuilder() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - var loader = new CompositeSyntheticFieldLoader( - leafName(), - fullPath(), - new AggregateMetricSyntheticFieldLoader(fullPath(), metrics), - new CompositeSyntheticFieldLoader.MalformedValuesLayer(fullPath()) + return new SyntheticSourceSupport.Native( + () -> new CompositeSyntheticFieldLoader( + leafName(), + fullPath(), + new AggregateMetricSyntheticFieldLoader(fullPath(), metrics), + new CompositeSyntheticFieldLoader.MalformedValuesLayer(fullPath()) + ) ); - - return new SyntheticSourceSupport.Native(loader); } public static class AggregateMetricSyntheticFieldLoader implements CompositeSyntheticFieldLoader.DocValuesLayer { diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index fbc89fd563cc7..5a667df9ffbbf 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -359,16 +359,14 @@ protected SyntheticSourceSupport syntheticSourceSupport() { String const_value = fieldType().value(); if (const_value == null) { - return new SyntheticSourceSupport.Native(SourceLoader.SyntheticFieldLoader.NOTHING); + return new SyntheticSourceSupport.Native(() -> SourceLoader.SyntheticFieldLoader.NOTHING); } - var loader = new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), false) { + return new SyntheticSourceSupport.Native(() -> new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), false) { @Override protected void writeValue(XContentBuilder b, long ignored) throws IOException { b.value(const_value); } - }; - - return new SyntheticSourceSupport.Native(loader); + }); } } diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java index d38fa456582b3..76ea7cab59ffc 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java @@ -504,8 +504,9 @@ protected SyntheticSourceSupport syntheticSourceSupport() { return super.syntheticSourceSupport(); } - var loader = new CountedKeywordFieldSyntheticSourceLoader(fullPath(), countFieldMapper.fullPath(), leafName()); - return new SyntheticSourceSupport.Native(loader); + return new SyntheticSourceSupport.Native( + () -> new CountedKeywordFieldSyntheticSourceLoader(fullPath(), countFieldMapper.fullPath(), 
leafName()) + ); } } diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index ec04bfdd058f9..38afd235d6eae 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -754,14 +754,14 @@ public void doValidate(MappingLookup lookup) { @Override protected SyntheticSourceSupport syntheticSourceSupport() { if (hasDocValues) { - var loader = new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed()) { - @Override - protected void writeValue(XContentBuilder b, long value) throws IOException { - b.value(DocValueFormat.UNSIGNED_LONG_SHIFTED.format(value)); + return new SyntheticSourceSupport.Native( + () -> new SortedNumericDocValuesSyntheticFieldLoader(fullPath(), leafName(), ignoreMalformed()) { + @Override + protected void writeValue(XContentBuilder b, long value) throws IOException { + b.value(DocValueFormat.UNSIGNED_LONG_SHIFTED.format(value)); + } } - }; - - return new SyntheticSourceSupport.Native(loader); + ); } return super.syntheticSourceSupport(); diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java index 0bdb5fc0101a5..d0f2de17cadd1 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java @@ -436,19 +436,19 @@ public FieldMapper.Builder getMergeBuilder() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - var loader = new CompositeSyntheticFieldLoader(leafName(), fullPath(), new SortedSetDocValuesSyntheticFieldLoaderLayer(fullPath()) { - @Override - protected BytesRef convert(BytesRef value) { - return VersionEncoder.decodeVersion(value); - } - - @Override - protected BytesRef preserve(BytesRef value) { - // Convert copies the underlying bytes - return value; - } - }); + return new SyntheticSourceSupport.Native( + () -> new CompositeSyntheticFieldLoader(leafName(), fullPath(), new SortedSetDocValuesSyntheticFieldLoaderLayer(fullPath()) { + @Override + protected BytesRef convert(BytesRef value) { + return VersionEncoder.decodeVersion(value); + } - return new SyntheticSourceSupport.Native(loader); + @Override + protected BytesRef preserve(BytesRef value) { + // Convert copies the underlying bytes + return value; + } + }) + ); } } diff --git a/x-pack/plugin/rank-vectors/src/main/java/org/elasticsearch/xpack/rank/vectors/mapper/RankVectorsFieldMapper.java b/x-pack/plugin/rank-vectors/src/main/java/org/elasticsearch/xpack/rank/vectors/mapper/RankVectorsFieldMapper.java index 873d67e76b04a..a595eedaf4b8d 100644 --- a/x-pack/plugin/rank-vectors/src/main/java/org/elasticsearch/xpack/rank/vectors/mapper/RankVectorsFieldMapper.java +++ b/x-pack/plugin/rank-vectors/src/main/java/org/elasticsearch/xpack/rank/vectors/mapper/RankVectorsFieldMapper.java @@ -375,7 +375,7 @@ public FieldMapper.Builder getMergeBuilder() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - return new 
SyntheticSourceSupport.Native(new DocValuesSyntheticFieldLoader()); + return new SyntheticSourceSupport.Native(DocValuesSyntheticFieldLoader::new); } private class DocValuesSyntheticFieldLoader extends SourceLoader.DocValuesBasedSyntheticFieldLoader { diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index f87c73273a7ac..3e374cab327d3 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -990,20 +990,20 @@ public FieldMapper.Builder getMergeBuilder() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - var layers = new ArrayList(); - layers.add(new WildcardSyntheticFieldLoader()); - if (ignoreAbove != Integer.MAX_VALUE) { - layers.add(new CompositeSyntheticFieldLoader.StoredFieldLayer(originalName()) { - @Override - protected void writeValue(Object value, XContentBuilder b) throws IOException { - BytesRef r = (BytesRef) value; - b.utf8Value(r.bytes, r.offset, r.length); - } - }); - } - - var loader = new CompositeSyntheticFieldLoader(leafName(), fullPath(), layers); - return new SyntheticSourceSupport.Native(loader); + return new SyntheticSourceSupport.Native(() -> { + var layers = new ArrayList(); + layers.add(new WildcardSyntheticFieldLoader()); + if (ignoreAbove != Integer.MAX_VALUE) { + layers.add(new CompositeSyntheticFieldLoader.StoredFieldLayer(originalName()) { + @Override + protected void writeValue(Object value, XContentBuilder b) throws IOException { + BytesRef r = (BytesRef) value; + b.utf8Value(r.bytes, r.offset, r.length); + } + }); + } + return new CompositeSyntheticFieldLoader(leafName(), fullPath(), layers); + }); } private class WildcardSyntheticFieldLoader implements CompositeSyntheticFieldLoader.DocValuesLayer { From 1f2824c5fccedc71cec1162e95f50eb308119dee Mon Sep 17 00:00:00 2001 From: Dan Rubinstein Date: Mon, 27 Jan 2025 10:31:23 -0500 Subject: [PATCH 067/383] Removing default model ID from ElasticsearchInternalService configuration (#120818) * Removing default model ID from ElasticsearchInternalService configuration * Update docs/changelog/120818.yaml * Delete docs/changelog/120818.yaml --------- Co-authored-by: Elastic Machine --- .../services/elasticsearch/ElasticsearchInternalService.java | 5 +++-- .../elasticsearch/ElasticsearchInternalServiceTests.java | 1 - 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index c538b9acf1321..9807a079140db 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -1147,8 +1147,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( MODEL_ID, - new SettingsConfiguration.Builder(supportedTaskTypes).setDefaultValue(MULTILINGUAL_E5_SMALL_MODEL_ID) - .setDescription("The name of the model to use for the inference task.") + new 
SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "The name of the model to use for the inference task." + ) .setLabel("Model ID") .setRequired(true) .setSensitive(false) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 93b884a87fba2..3b634f45dc751 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -1578,7 +1578,6 @@ public void testGetConfiguration() throws Exception { "supported_task_types": ["text_embedding", "sparse_embedding", "rerank"] }, "model_id": { - "default_value": ".multilingual-e5-small", "description": "The name of the model to use for the inference task.", "label": "Model ID", "required": true, From 04c53e248679e8e4d08923c0e7a957ba92575de7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 02:35:14 +1100 Subject: [PATCH 068/383] Mute org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT test {p0=data_stream/140_data_stream_aliases/Create data stream alias} #120920 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 7347fb41989d8..8447f80c4b948 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -263,6 +263,9 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test140CgroupOsStatsAreAvailable issue: https://github.com/elastic/elasticsearch/issues/120914 +- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT + method: test {p0=data_stream/140_data_stream_aliases/Create data stream alias} + issue: https://github.com/elastic/elasticsearch/issues/120920 # Examples: # From 0fabaf7716a700c671355e4069366257524f0f26 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Mon, 27 Jan 2025 17:42:49 +0200 Subject: [PATCH 069/383] [Bug] Data stream stats fails when there is a concrete index in the cluster (#120901) --- .../DataStreamsStatsTransportAction.java | 10 ++++++++ .../datastreams/DataStreamsStatsTests.java | 24 +++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java index 4d6eead07b94f..3e207ab30ed69 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java @@ -132,6 +132,16 @@ protected void shardOperation( }); } + @Override + protected String[] resolveConcreteIndexNames(ClusterState clusterState, DataStreamsStatsAction.Request request) { + return DataStreamsActionUtil.resolveConcreteIndexNames( + indexNameExpressionResolver, + clusterState, + request.indices(), + request.indicesOptions() + ).toArray(String[]::new); + } + @Override protected DataStreamsStatsAction.DataStreamShardStats readShardResult(StreamInput in) throws IOException { return new DataStreamsStatsAction.DataStreamShardStats(in); diff --git 
a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamsStatsTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamsStatsTests.java index e32636fe40d84..e8fbb46b430ba 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamsStatsTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamsStatsTests.java @@ -11,6 +11,8 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.template.delete.TransportDeleteComposableIndexTemplateAction; @@ -56,6 +58,7 @@ protected Collection> getPlugins() { } private final Set createdDataStreams = new HashSet<>(); + private final Set createdStandAloneIndices = new HashSet<>(); @Override @After @@ -66,6 +69,12 @@ public void tearDown() throws Exception { } createdDataStreams.clear(); } + if (createdStandAloneIndices.isEmpty() == false) { + for (String indexName : createdStandAloneIndices) { + client().admin().indices().delete(new DeleteIndexRequest(indexName)); + } + createdStandAloneIndices.clear(); + } super.tearDown(); } @@ -80,6 +89,7 @@ public void testStatsNoDataStream() throws Exception { } public void testStatsEmptyDataStream() throws Exception { + maybeCreateCreatedStandAloneIndicesIndex(); String dataStreamName = createDataStream(); DataStreamsStatsAction.Response stats = getDataStreamsStats(); @@ -97,6 +107,7 @@ public void testStatsEmptyDataStream() throws Exception { } public void testStatsExistingDataStream() throws Exception { + maybeCreateCreatedStandAloneIndicesIndex(); String dataStreamName = createDataStream(); long timestamp = createDocument(dataStreamName); @@ -115,6 +126,7 @@ public void testStatsExistingDataStream() throws Exception { } public void testStatsExistingDataStreamWithFailureStores() throws Exception { + maybeCreateCreatedStandAloneIndicesIndex(); String dataStreamName = createDataStream(false, true); createFailedDocument(dataStreamName); @@ -137,6 +149,7 @@ public void testStatsExistingDataStreamWithFailureStores() throws Exception { } public void testStatsExistingHiddenDataStream() throws Exception { + maybeCreateCreatedStandAloneIndicesIndex(); String dataStreamName = createDataStream(true, false); long timestamp = createDocument(dataStreamName); @@ -155,6 +168,7 @@ public void testStatsExistingHiddenDataStream() throws Exception { } public void testStatsClosedBackingIndexDataStream() throws Exception { + maybeCreateCreatedStandAloneIndicesIndex(); String dataStreamName = createDataStream(); createDocument(dataStreamName); assertTrue(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).get().isAcknowledged()); @@ -198,6 +212,7 @@ public void testStatsClosedBackingIndexDataStream() throws Exception { } public void testStatsRolledDataStream() throws Exception { + maybeCreateCreatedStandAloneIndicesIndex(); String dataStreamName = createDataStream(); long timestamp = createDocument(dataStreamName); assertTrue(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).get().isAcknowledged()); @@ -218,6 +233,7 
@@ public void testStatsRolledDataStream() throws Exception { } public void testStatsMultipleDataStreams() throws Exception { + maybeCreateCreatedStandAloneIndicesIndex(); for (int dataStreamCount = 0; dataStreamCount < (2 + randomInt(3)); dataStreamCount++) { createDataStream(); } @@ -284,6 +300,14 @@ private String createDataStream(boolean hidden, boolean failureStore) throws Exc return dataStreamName; } + private void maybeCreateCreatedStandAloneIndicesIndex() { + if (randomBoolean()) { + String indexName = randomAlphaOfLength(10).toLowerCase(Locale.getDefault()); + assertAcked(client().admin().indices().create(new CreateIndexRequest(indexName))); + createdStandAloneIndices.add(indexName); + } + } + private long createDocument(String dataStreamName) throws Exception { // Get some randomized but reasonable timestamps on the data since not all of it is guaranteed to arrive in order. long timeSeed = System.currentTimeMillis(); From 22099bad4988a2725206a2ef4e5ea75ab1edf718 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 27 Jan 2025 17:47:40 +0200 Subject: [PATCH 070/383] [TEST] Restore copy_to, double and float randomized testing (#120906) Partial rollback of #120859, these data types seem fine. --- .../elasticsearch/logsdb/datageneration/FieldType.java | 8 +++++++- .../datasource/DefaultMappingParametersHandler.java | 6 ++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java index 13c802fcd5809..07744851aba3e 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java @@ -11,6 +11,8 @@ import org.elasticsearch.logsdb.datageneration.datasource.DataSource; import org.elasticsearch.logsdb.datageneration.fields.leaf.ByteFieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.fields.leaf.DoubleFieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.fields.leaf.FloatFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.IntegerFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.KeywordFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.LongFieldDataGenerator; @@ -26,7 +28,9 @@ public enum FieldType { UNSIGNED_LONG("unsigned_long"), INTEGER("integer"), SHORT("short"), - BYTE("byte"); + BYTE("byte"), + DOUBLE("double"), + FLOAT("float"); private final String name; @@ -42,6 +46,8 @@ public FieldDataGenerator generator(String fieldName, DataSource dataSource) { case INTEGER -> new IntegerFieldDataGenerator(fieldName, dataSource); case SHORT -> new ShortFieldDataGenerator(fieldName, dataSource); case BYTE -> new ByteFieldDataGenerator(fieldName, dataSource); + case DOUBLE -> new DoubleFieldDataGenerator(fieldName, dataSource); + case FLOAT -> new FloatFieldDataGenerator(fieldName, dataSource); }; } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java index 832a3205cfcb9..db13867fe71ad 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java +++ 
b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java @@ -32,8 +32,7 @@ public DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceReques return new DataSourceResponse.LeafMappingParametersGenerator(switch (request.fieldType()) { case KEYWORD -> keywordMapping(request, map); - case LONG, INTEGER, SHORT, BYTE, UNSIGNED_LONG -> plain(map); - + case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, UNSIGNED_LONG -> plain(map); }); } @@ -61,8 +60,7 @@ private Supplier> keywordMapping( .collect(Collectors.toSet()); if (options.isEmpty() == false) { - // TODO: re-enable once #120831 is resolved - // injected.put("copy_to", ESTestCase.randomFrom(options)); + injected.put("copy_to", ESTestCase.randomFrom(options)); } } From bc575453874e50500d2f8e65458a8d1dca836a0b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 02:49:39 +1100 Subject: [PATCH 071/383] Mute org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT testReservedStatePersistsOnRestart #120923 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8447f80c4b948..70c7a3051b56f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -266,6 +266,9 @@ tests: - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT method: test {p0=data_stream/140_data_stream_aliases/Create data stream alias} issue: https://github.com/elastic/elasticsearch/issues/120920 +- class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT + method: testReservedStatePersistsOnRestart + issue: https://github.com/elastic/elasticsearch/issues/120923 # Examples: # From 9d90d05a50501556842e5b256d3699c9738fc6b6 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 27 Jan 2025 07:54:23 -0800 Subject: [PATCH 072/383] Allow unmapped fields in MultiClustersIT tests (#120873) The message_id field may be unmapped if documents were indexed into some indices but not all. This change specifies the unmapped type for message_id, allowing it to be sorted in such cases. Closes #120796 --- .../java/org/elasticsearch/datastreams/MultiClustersIT.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/MultiClustersIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/MultiClustersIT.java index 2f80a230d937a..1c60f115520c1 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/MultiClustersIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/MultiClustersIT.java @@ -184,7 +184,11 @@ public void testSource() throws IOException { final boolean sorted = randomBoolean(); if (sorted) { searchSource.startArray("sort"); - searchSource.value("message_id"); + searchSource.startObject(); + searchSource.startObject("message_id"); + searchSource.field("unmapped_type", "long"); // message_id can be unmapped if no doc is indexed. 
+ searchSource.endObject(); + searchSource.endObject(); searchSource.endArray(); } final Predicate filterHost; From 022b841a452a622edc9c47f262bcd65913709004 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Mon, 27 Jan 2025 11:03:06 -0500 Subject: [PATCH 073/383] Optimize IngestCtxMap construction (#120833) --- docs/changelog/120833.yaml | 5 ++ .../ingest/common/ConvertProcessorTests.java | 14 +++--- .../ingest/common/DissectProcessorTests.java | 8 ++-- .../ingest/common/ForEachProcessorTests.java | 23 ++++++---- .../common/JsonProcessorFactoryTests.java | 2 +- .../ingest/common/JsonProcessorTests.java | 9 ++-- .../ingest/common/KeyValueProcessorTests.java | 4 +- .../ingest/common/SortProcessorTests.java | 7 ++- .../common/TerminateProcessorTests.java | 3 +- ...gDatabasesWhilePerformingGeoLookupsIT.java | 4 +- .../geoip/GeoIpProcessorFactoryTests.java | 17 ++++--- .../elasticsearch/ingest/IngestCtxMap.java | 6 ++- .../ingest/ConditionalProcessorTests.java | 9 ++-- .../ingest/IngestCtxMapTests.java | 12 +++++ .../ingest/IngestDocumentTests.java | 46 +++++++++++++++++++ .../xpack/enrich/GeoMatchProcessorTests.java | 3 +- .../xpack/enrich/MatchProcessorTests.java | 15 ++++-- .../xpack/redact/RedactProcessorTests.java | 4 +- 18 files changed, 133 insertions(+), 58 deletions(-) create mode 100644 docs/changelog/120833.yaml diff --git a/docs/changelog/120833.yaml b/docs/changelog/120833.yaml new file mode 100644 index 0000000000000..5d55f7e881449 --- /dev/null +++ b/docs/changelog/120833.yaml @@ -0,0 +1,5 @@ +pr: 120833 +summary: Optimize `IngestCtxMap` construction +area: Ingest Node +type: enhancement +issues: [] diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java index 1d116c14c8985..e3b7ecaa77422 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java @@ -527,7 +527,7 @@ public void testAutoConvertNotString() throws Exception { } default -> throw new UnsupportedOperationException(); } - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of("field", randomValue)); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(Map.of("field", randomValue))); Processor processor = new ConvertProcessor(randomAlphaOfLength(10), null, "field", "field", Type.AUTO, false); processor.execute(ingestDocument); Object convertedValue = ingestDocument.getFieldValue("field", Object.class); @@ -536,7 +536,7 @@ public void testAutoConvertNotString() throws Exception { public void testAutoConvertStringNotMatched() throws Exception { String value = "notAnIntFloatOrBool"; - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of("field", value)); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(Map.of("field", value))); Processor processor = new ConvertProcessor(randomAlphaOfLength(10), null, "field", "field", Type.AUTO, false); processor.execute(ingestDocument); Object convertedValue = ingestDocument.getFieldValue("field", Object.class); @@ -546,7 +546,7 @@ public void testAutoConvertStringNotMatched() throws Exception { public void testAutoConvertMatchBoolean() throws Exception { boolean randomBoolean = randomBoolean(); String booleanString = 
Boolean.toString(randomBoolean); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of("field", booleanString)); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(Map.of("field", booleanString))); Processor processor = new ConvertProcessor(randomAlphaOfLength(10), null, "field", "field", Type.AUTO, false); processor.execute(ingestDocument); Object convertedValue = ingestDocument.getFieldValue("field", Object.class); @@ -556,7 +556,7 @@ public void testAutoConvertMatchBoolean() throws Exception { public void testAutoConvertMatchInteger() throws Exception { int randomInt = randomInt(); String randomString = Integer.toString(randomInt); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of("field", randomString)); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(Map.of("field", randomString))); Processor processor = new ConvertProcessor(randomAlphaOfLength(10), null, "field", "field", Type.AUTO, false); processor.execute(ingestDocument); Object convertedValue = ingestDocument.getFieldValue("field", Object.class); @@ -566,7 +566,7 @@ public void testAutoConvertMatchInteger() throws Exception { public void testAutoConvertMatchLong() throws Exception { long randomLong = randomLong(); String randomString = Long.toString(randomLong); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of("field", randomString)); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(Map.of("field", randomString))); Processor processor = new ConvertProcessor(randomAlphaOfLength(10), null, "field", "field", Type.AUTO, false); processor.execute(ingestDocument); Object convertedValue = ingestDocument.getFieldValue("field", Object.class); @@ -577,7 +577,7 @@ public void testAutoConvertDoubleNotMatched() throws Exception { double randomDouble = randomDouble(); String randomString = Double.toString(randomDouble); float randomFloat = Float.parseFloat(randomString); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of("field", randomString)); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(Map.of("field", randomString))); Processor processor = new ConvertProcessor(randomAlphaOfLength(10), null, "field", "field", Type.AUTO, false); processor.execute(ingestDocument); Object convertedValue = ingestDocument.getFieldValue("field", Object.class); @@ -588,7 +588,7 @@ public void testAutoConvertDoubleNotMatched() throws Exception { public void testAutoConvertMatchFloat() throws Exception { float randomFloat = randomFloat(); String randomString = Float.toString(randomFloat); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of("field", randomString)); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(Map.of("field", randomString))); Processor processor = new ConvertProcessor(randomAlphaOfLength(10), null, "field", "field", Type.AUTO, false); processor.execute(ingestDocument); Object convertedValue = ingestDocument.getFieldValue("field", Object.class); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DissectProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DissectProcessorTests.java index 6bfaf46a746f4..65c1b60fb07c5 100644 --- 
a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DissectProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DissectProcessorTests.java @@ -30,7 +30,7 @@ public class DissectProcessorTests extends ESTestCase { public void testMatch() { - IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, Map.of("message", "foo,bar,baz")); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, new HashMap<>(Map.of("message", "foo,bar,baz"))); DissectProcessor dissectProcessor = new DissectProcessor("", null, "message", "%{a},%{b},%{c}", "", true); dissectProcessor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("a", String.class), equalTo("foo")); @@ -45,7 +45,7 @@ public void testMatchOverwrite() { 1, null, null, - Map.of("message", "foo,bar,baz", "a", "willgetstompped") + new HashMap<>(Map.of("message", "foo,bar,baz", "a", "willgetstompped")) ); assertThat(ingestDocument.getFieldValue("a", String.class), equalTo("willgetstompped")); DissectProcessor dissectProcessor = new DissectProcessor("", null, "message", "%{a},%{b},%{c}", "", true); @@ -62,7 +62,7 @@ public void testAdvancedMatch() { 1, null, null, - Map.of("message", "foo bar,,,,,,,baz nope:notagain 😊 🐇 🙃") + new HashMap<>(Map.of("message", "foo bar,,,,,,,baz nope:notagain 😊 🐇 🙃")) ); DissectProcessor dissectProcessor = new DissectProcessor( "", @@ -81,7 +81,7 @@ public void testAdvancedMatch() { } public void testMiss() { - IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, Map.of("message", "foo:bar,baz")); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, new HashMap<>(Map.of("message", "foo:bar,baz"))); DissectProcessor dissectProcessor = new DissectProcessor("", null, "message", "%{a},%{b},%{c}", "", true); DissectException e = expectThrows(DissectException.class, () -> dissectProcessor.execute(ingestDocument)); assertThat(e.getMessage(), containsString("Unable to find match for dissect pattern")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java index 04b60a6e25dd1..c293edc73de99 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java @@ -39,7 +39,7 @@ public void testExecuteWithAsyncProcessor() throws Exception { values.add("foo"); values.add("bar"); values.add("baz"); - IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, Map.of("values", values)); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, new HashMap<>(Map.of("values", values))); ForEachProcessor processor = new ForEachProcessor("_tag", null, "values", new AsyncUpperCaseProcessor("_ingest._value"), false); execProcessor(processor, ingestDocument, (result, e) -> {}); @@ -55,7 +55,14 @@ public void testExecuteWithAsyncProcessor() throws Exception { } public void testExecuteWithFailure() { - IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, Map.of("values", List.of("a", "b", "c"))); + IngestDocument ingestDocument = new IngestDocument( + "_index", + "_id", + 1, + null, + null, + new HashMap<>(Map.of("values", List.of("a", "b", "c"))) + ); TestProcessor testProcessor = new 
TestProcessor(id -> { if ("c".equals(id.getFieldValue("_ingest._value", String.class))) { @@ -173,7 +180,7 @@ public String getDescription() { int numValues = randomIntBetween(1, 10000); List values = IntStream.range(0, numValues).mapToObj(i -> "").toList(); - IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, Map.of("values", values)); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, new HashMap<>(Map.of("values", values))); ForEachProcessor processor = new ForEachProcessor("_tag", null, "values", innerProcessor, false); execProcessor(processor, ingestDocument, (result, e) -> {}); @@ -189,7 +196,7 @@ public void testModifyFieldsOutsideArray() { values.add("string"); values.add(1); values.add(null); - IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, Map.of("values", values)); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, new HashMap<>(Map.of("values", values))); TemplateScript.Factory template = new TestTemplateService.MockTemplateScript.Factory("errors"); @@ -282,7 +289,7 @@ public void testNestedForEachWithMapIteration() { Map innerMap3 = Map.of("foo3", 7, "bar3", 8, "baz3", 9, "otherKey", 42); Map outerMap = Map.of("foo", innerMap1, "bar", innerMap2, "baz", innerMap3); - IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, Map.of("field", outerMap)); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, new HashMap<>(Map.of("field", outerMap))); List visitedKeys = new ArrayList<>(); List visitedValues = new ArrayList<>(); @@ -361,7 +368,7 @@ public void testRemovingFromTheSameField() { public void testMapIteration() { Map mapValue = Map.of("foo", 1, "bar", 2, "baz", 3); - IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, Map.of("field", mapValue)); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, new HashMap<>(Map.of("field", mapValue))); List encounteredKeys = new ArrayList<>(); List encounteredValues = new ArrayList<>(); @@ -390,7 +397,7 @@ public void testMapIteration() { public void testRemovalOfMapKey() { Map mapValue = Map.of("foo", 1, "bar", 2, "baz", 3); - IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, Map.of("field", mapValue)); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, new HashMap<>(Map.of("field", mapValue))); List encounteredKeys = new ArrayList<>(); List encounteredValues = new ArrayList<>(); @@ -419,7 +426,7 @@ public void testMapIterationWithAsyncProcessor() throws Exception { Map innerMap3 = Map.of("foo3", 7, "bar3", 8, "baz3", 9, "otherKey", 42); Map outerMap = Map.of("foo", innerMap1, "bar", innerMap2, "baz", innerMap3); - IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, Map.of("field", outerMap)); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1, null, null, new HashMap<>(Map.of("field", outerMap))); List visitedKeys = new ArrayList<>(); List visitedValues = new ArrayList<>(); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorFactoryTests.java index 92474172a6612..6e9346f19f732 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorFactoryTests.java +++ 
b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorFactoryTests.java @@ -74,7 +74,7 @@ public void testCreateWithMissingField() throws Exception { public void testCreateWithStrictParsingParameter() throws Exception { String fieldName = randomAlphaOfLength(10); String processorTag = randomAlphaOfLength(10); - IngestDocument document = new IngestDocument("_index", "_id", 1, null, null, Map.of(fieldName, "123 \"foo\"")); + IngestDocument document = new IngestDocument("_index", "_id", 1, null, null, new HashMap<>(Map.of(fieldName, "123 \"foo\""))); { Map strictConfig = new HashMap<>(); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java index 42db55d14c2b7..ffdebed8e524a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java @@ -170,12 +170,9 @@ public void testDuplicateKeys() throws Exception { String processorTag = randomAlphaOfLength(3); JsonProcessor lenientJsonProcessor = new JsonProcessor(processorTag, null, "a", null, true, REPLACE, true); - Map document = new HashMap<>(); - String json = "{\"a\": 1, \"a\": 2}"; - document.put("a", json); - document.put("c", "see"); + Map document = Map.of("a", "{\"a\": 1, \"a\": 2}", "c", "see"); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(document)); lenientJsonProcessor.execute(ingestDocument); Map sourceAndMetadata = ingestDocument.getSourceAndMetadata(); @@ -185,7 +182,7 @@ public void testDuplicateKeys() throws Exception { JsonProcessor strictJsonProcessor = new JsonProcessor(processorTag, null, "a", null, true, REPLACE, false); Exception exception = expectThrows( IllegalArgumentException.class, - () -> strictJsonProcessor.execute(RandomDocumentPicks.randomIngestDocument(random(), document)) + () -> strictJsonProcessor.execute(RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(document))) ); assertThat(exception.getMessage(), containsString("Duplicate field 'a'")); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java index e8c554b3aea18..9ae4be1efce7e 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java @@ -40,7 +40,7 @@ public void test() throws Exception { } public void testRootTarget() throws Exception { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of()); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue("myField", "first=hello&second=world&second=universe"); Processor processor = createKvProcessor("myField", "&", "=", null, null, null, false); processor.execute(ingestDocument); @@ -49,7 +49,7 @@ public void testRootTarget() throws Exception { } public void testKeySameAsSourceField() throws Exception { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of()); + IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue("first", "first=hello"); Processor processor = createKvProcessor("first", "&", "=", null, null, null, false); processor.execute(ingestDocument); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SortProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SortProcessorTests.java index d6806afe9c45e..1e2b8c8d5a582 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SortProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SortProcessorTests.java @@ -19,7 +19,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; -import java.util.Map; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -266,7 +265,7 @@ public void testSortNullValue() throws Exception { } public void testDescendingSortWithTargetField() throws Exception { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of()); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); int numItems = randomIntBetween(1, 10); List fieldValue = new ArrayList<>(numItems); List expectedResult = new ArrayList<>(numItems); @@ -286,7 +285,7 @@ public void testDescendingSortWithTargetField() throws Exception { } public void testAscendingSortWithTargetField() throws Exception { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of()); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); int numItems = randomIntBetween(1, 10); List fieldValue = new ArrayList<>(numItems); List expectedResult = new ArrayList<>(numItems); @@ -306,7 +305,7 @@ public void testAscendingSortWithTargetField() throws Exception { } public void testSortWithTargetFieldLeavesOriginalUntouched() throws Exception { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of()); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); List fieldValue = List.of(1, 5, 4); List expectedResult = new ArrayList<>(fieldValue); Collections.sort(expectedResult); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TerminateProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TerminateProcessorTests.java index 1888f8366edd3..ada8d2bcab520 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TerminateProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TerminateProcessorTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.ingest.ValueSource; import org.elasticsearch.test.ESTestCase; +import java.util.HashMap; import java.util.Map; import static org.elasticsearch.ingest.RandomDocumentPicks.randomIngestDocument; @@ -48,7 +49,7 @@ public void testTerminateInPipeline() throws Exception { ) ) ); - IngestDocument input = randomIngestDocument(random(), Map.of("foo", "bar")); + IngestDocument input = randomIngestDocument(random(), new HashMap<>(Map.of("foo", "bar"))); PipelineOutput output = new PipelineOutput(); pipeline.execute(input, output::set); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java 
b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java index 0499b0f94106b..d41f4ac341724 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java @@ -99,7 +99,7 @@ public void test() throws Exception { 1L, "routing", VersionType.EXTERNAL, - Map.of("_field", "89.160.20.128") + new HashMap<>(Map.of("_field", "89.160.20.128")) ); processor1.execute(document1); assertThat(document1.getSourceAndMetadata().get("geoip"), notNullValue()); @@ -109,7 +109,7 @@ public void test() throws Exception { 1L, "routing", VersionType.EXTERNAL, - Map.of("_field", "89.160.20.128") + new HashMap<>(Map.of("_field", "89.160.20.128")) ); processor2.execute(document2); assertThat(document2.getSourceAndMetadata().get("geoip"), notNullValue()); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 34003b79fc18b..7908c5a6741bc 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -387,7 +387,7 @@ public void testLazyLoading() throws Exception { assertNull(lazyLoader.databaseReader.get()); } - final Map field = Map.of("_field", "1.1.1.1"); + final Map field = new HashMap<>(Map.of("_field", "1.1.1.1")); final IngestDocument document = new IngestDocument("index", "id", 1L, "routing", VersionType.EXTERNAL, field); Map config = new HashMap<>(); @@ -456,7 +456,7 @@ public void testLoadingCustomDatabase() throws IOException { assertNull(lazyLoader.databaseReader.get()); } - final Map field = Map.of("_field", "1.1.1.1"); + final Map field = new HashMap<>(Map.of("_field", "1.1.1.1")); final IngestDocument document = new IngestDocument("index", "id", 1L, "routing", VersionType.EXTERNAL, field); Map config = new HashMap<>(); @@ -500,7 +500,7 @@ public void testDefaultDatabaseWithTaskPresent() throws Exception { GeoIpProcessor processor = (GeoIpProcessor) factory.create(null, processorTag, null, config); - processor.execute(RandomDocumentPicks.randomIngestDocument(random(), Map.of("_field", "89.160.20.128"))); + processor.execute(RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(Map.of("_field", "89.160.20.128")))); } public void testUpdateDatabaseWhileIngesting() throws Exception { @@ -508,17 +508,16 @@ public void testUpdateDatabaseWhileIngesting() throws Exception { Map config = new HashMap<>(); config.put("field", "source_field"); GeoIpProcessor processor = (GeoIpProcessor) factory.create(null, null, null, config); - Map document = new HashMap<>(); - document.put("source_field", "89.160.20.128"); + Map document = Map.of("source_field", "89.160.20.128"); { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(document)); processor.execute(ingestDocument); Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("geoip"); assertThat(geoData.get("city_name"), equalTo("Tumba")); } { copyDatabase("GeoLite2-City-Test.mmdb", geoipTmpDir); - IngestDocument 
ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(document)); databaseNodeService.updateDatabase("GeoLite2-City.mmdb", "md5", geoipTmpDir.resolve("GeoLite2-City-Test.mmdb")); processor.execute(ingestDocument); Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("geoip"); @@ -526,7 +525,7 @@ public void testUpdateDatabaseWhileIngesting() throws Exception { } { // No databases are available, so assume that databases still need to be downloaded and therefore not fail: - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(document)); databaseNodeService.removeStaleEntries(List.of("GeoLite2-City.mmdb")); configDatabases.updateDatabase(geoIpConfigDir.resolve("GeoLite2-City.mmdb"), false); processor.execute(ingestDocument); @@ -536,7 +535,7 @@ public void testUpdateDatabaseWhileIngesting() throws Exception { { // There are databases available, but not the right one, so tag: databaseNodeService.updateDatabase("GeoLite2-City-Test.mmdb", "md5", geoipTmpDir.resolve("GeoLite2-City-Test.mmdb")); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>(document)); processor.execute(ingestDocument); assertThat(ingestDocument.getSourceAndMetadata(), hasEntry("tags", List.of("_geoip_database_unavailable_GeoLite2-City.mmdb"))); } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestCtxMap.java b/server/src/main/java/org/elasticsearch/ingest/IngestCtxMap.java index a5a1612246a29..6472f484fa690 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestCtxMap.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestCtxMap.java @@ -13,7 +13,6 @@ import org.elasticsearch.script.CtxMap; import java.time.ZonedDateTime; -import java.util.HashMap; import java.util.Map; /** @@ -33,6 +32,9 @@ final class IngestCtxMap extends CtxMap { /** * Create an IngestCtxMap with the given metadata, source and default validators + *

+ * The passed-in source map is used directly (that is, it's neither shallowly nor deeply copied). mutation-like methods (e.g. setters, + * put, etc.) may rely on the map being mutable, and will fail if the passed-in map isn't mutable. */ IngestCtxMap( String index, @@ -43,7 +45,7 @@ final class IngestCtxMap extends CtxMap { ZonedDateTime timestamp, Map source ) { - super(new HashMap<>(source), new IngestDocMetadata(index, id, version, routing, versionType, timestamp)); + super(source, new IngestDocMetadata(index, id, version, routing, versionType, timestamp)); } /** diff --git a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java index 698a310ea56a7..fe1300b1d2645 100644 --- a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java @@ -63,7 +63,6 @@ public void testChecksCondition() throws Exception { new HashMap<>(ScriptModule.CORE_CONTEXTS), () -> 1L ); - Map document = new HashMap<>(); LongSupplier relativeTimeProvider = mock(LongSupplier.class); when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(1), 0L, TimeUnit.MILLISECONDS.toNanos(2)); ConditionalProcessor processor = new ConditionalProcessor( @@ -102,7 +101,7 @@ public String getDescription() { // false, never call processor never increments metrics String falseValue = "falsy"; - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue(conditionalField, falseValue); execProcessor(processor, ingestDocument, (result, e) -> {}); assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(falseValue)); @@ -110,21 +109,21 @@ public String getDescription() { assertStats(processor, 0, 0, 0); assertEquals(scriptName, processor.getCondition()); - ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue(conditionalField, falseValue); ingestDocument.setFieldValue("error", true); execProcessor(processor, ingestDocument, (result, e) -> {}); assertStats(processor, 0, 0, 0); // true, always call processor and increments metrics - ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue(conditionalField, trueValue); execProcessor(processor, ingestDocument, (result, e) -> {}); assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(trueValue)); assertThat(ingestDocument.getSourceAndMetadata().get("foo"), is("bar")); assertStats(processor, 1, 0, 1); - ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue(conditionalField, trueValue); ingestDocument.setFieldValue("error", true); IngestDocument finalIngestDocument = ingestDocument; diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestCtxMapTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestCtxMapTests.java index 33c3ae889040b..2c439dd846da0 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestCtxMapTests.java +++ 
b/server/src/test/java/org/elasticsearch/ingest/IngestCtxMapTests.java @@ -21,6 +21,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.sameInstance; public class IngestCtxMapTests extends ESTestCase { @@ -341,6 +342,17 @@ public void testGetOrDefault() { assertThat(map.getOrDefault("baz", "quux"), equalTo("quux")); } + public void testSourceHashMapIsNotCopied() { + // a ctxMap will, as an optimization, just use the passed-in map reference + Map source = Map.of("index", "id"); + + map = new IngestCtxMap(source, new IngestDocMetadata(Map.of("_version", 5L), null)); + assertThat(map.getSource(), sameInstance(source)); + + map = new IngestCtxMap(null, null, 10L, null, null, null, source); + assertThat(map.getSource(), sameInstance(source)); + } + private static class TestEntry implements Map.Entry { String key; Object value; diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 985474b85143a..08105a3a3523f 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -9,7 +9,10 @@ package org.elasticsearch.ingest; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; import org.hamcrest.Matchers; import org.junit.Before; @@ -1128,4 +1131,47 @@ public void testIndexHistory() { assertFalse(ingestDocument.updateIndexHistory(index1)); assertThat(ingestDocument.getIndexHistory(), Matchers.contains(index1, index2)); } + + public void testSourceHashMapIsNotCopied() { + // an ingest document's ctxMap will, as an optimization, just use the passed-in map reference + { + Map source = new HashMap<>(Map.of("foo", 1)); + IngestDocument document = new IngestDocument("index", "id", 1, null, null, source); + assertThat(document.getSource(), sameInstance(source)); + assertThat(document.getCtxMap().getSource(), sameInstance(source)); + } + + { + Map source = XContentHelper.convertToMap(new BytesArray("{ \"foo\": 1 }"), false, XContentType.JSON).v2(); + IngestDocument document = new IngestDocument("index", "id", 1, null, null, source); + assertThat(document.getSource(), sameInstance(source)); + assertThat(document.getCtxMap().getSource(), sameInstance(source)); + } + + { + Map source = Map.of("foo", 1); + IngestDocument document = new IngestDocument("index", "id", 1, null, null, source); + assertThat(document.getSource(), sameInstance(source)); + assertThat(document.getCtxMap().getSource(), sameInstance(source)); + } + + // a cloned ingest document will copy the map, though + { + Map source = Map.of("foo", 1); + IngestDocument document1 = new IngestDocument("index", "id", 1, null, null, source); + document1.getIngestMetadata().put("bar", 2); + IngestDocument document2 = new IngestDocument(document1); + assertThat(document2.getCtxMap().getMetadata(), equalTo(document1.getCtxMap().getMetadata())); + assertThat(document2.getSource(), not(sameInstance(source))); + assertThat(document2.getCtxMap().getMetadata(), equalTo(document1.getCtxMap().getMetadata())); + assertThat(document2.getCtxMap().getSource(), not(sameInstance(source))); + + // it also copies these other nearby maps + assertThat(document2.getIngestMetadata(), equalTo(document1.getIngestMetadata())); + 
assertThat(document2.getIngestMetadata(), not(sameInstance(document1.getIngestMetadata()))); + + assertThat(document2.getCtxMap().getMetadata(), not(sameInstance(document1.getCtxMap().getMetadata()))); + assertThat(document2.getCtxMap().getMetadata(), not(sameInstance(document1.getCtxMap().getMetadata()))); + } + } } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java index fcf2bc3c14292..f122e34db5488 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.test.ESTestCase; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.BiConsumer; @@ -100,7 +101,7 @@ private void testBasicsForFieldValue(Object fieldValue, Geometry expectedGeometr 1L, "_routing", VersionType.INTERNAL, - Map.of("location", fieldValue) + new HashMap<>(Map.of("location", fieldValue)) ); // Run IngestDocument[] holder = new IngestDocument[1]; diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java index b4d3ec15d31d3..4e3496e1a5838 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java @@ -56,7 +56,7 @@ public void testBasics() throws Exception { 1L, "_routing", VersionType.INTERNAL, - Map.of("domain", "elastic.co") + new HashMap<>(Map.of("domain", "elastic.co")) ); // Run IngestDocument[] holder = new IngestDocument[1]; @@ -158,7 +158,7 @@ public void testSearchFailure() throws Exception { 1L, "_routing", VersionType.INTERNAL, - Map.of("domain", "elastic.com") + new HashMap<>(Map.of("domain", "elastic.com")) ); // Run IngestDocument[] resultHolder = new IngestDocument[1]; @@ -308,7 +308,14 @@ public void testNumericValue() { "domain", 1 ); - IngestDocument ingestDocument = new IngestDocument("_index", "_id", 1L, "_routing", VersionType.INTERNAL, Map.of("domain", 2)); + IngestDocument ingestDocument = new IngestDocument( + "_index", + "_id", + 1L, + "_routing", + VersionType.INTERNAL, + new HashMap<>(Map.of("domain", 2)) + ); // Execute IngestDocument[] holder = new IngestDocument[1]; @@ -351,7 +358,7 @@ public void testArray() { 1L, "_routing", VersionType.INTERNAL, - Map.of("domain", List.of("1", "2")) + new HashMap<>(Map.of("domain", List.of("1", "2"))) ); // Execute diff --git a/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorTests.java b/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorTests.java index 3f44957201ef0..76bf99d170a8f 100644 --- a/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorTests.java +++ b/x-pack/plugin/redact/src/test/java/org/elasticsearch/xpack/redact/RedactProcessorTests.java @@ -571,7 +571,7 @@ public void testMergeOverlappingReplacements_transitiveOverlaps() { } } - private IngestDocument createIngestDoc(Map source) { - return new IngestDocument("index", "id", 0L, "routing", VersionType.INTERNAL, source); + private static IngestDocument createIngestDoc(Map source) { + return new IngestDocument("index", 
"id", 0L, "routing", VersionType.INTERNAL, new HashMap<>(source)); } } From 2ca2bbfae42ee1296bfadb40aa9dd2aff667b35f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 03:03:42 +1100 Subject: [PATCH 074/383] Mute org.elasticsearch.packaging.test.DockerTests test070BindMountCustomPathConfAndJvmOptions #120910 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 70c7a3051b56f..986ddff789732 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -269,6 +269,9 @@ tests: - class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT method: testReservedStatePersistsOnRestart issue: https://github.com/elastic/elasticsearch/issues/120923 +- class: org.elasticsearch.packaging.test.DockerTests + method: test070BindMountCustomPathConfAndJvmOptions + issue: https://github.com/elastic/elasticsearch/issues/120910 # Examples: # From 695bf75272ff0491bced7c1d6733d8376c13c706 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 27 Jan 2025 18:43:51 +0200 Subject: [PATCH 075/383] [TEST] Cover custom sorting and routing in randomized testing (#120584) * Cover custom sorting and routing in randomized testing * [CI] Auto commit changes from spotless * fix reindex tests * fix reindex tests * refactor classes * comment * more refactoring * more refactoring * restore tests with static mappings * reduce diff * reduce diff * Restore single-element array removal in synthetic source * Revert "Restore single-element array removal in synthetic source" This reverts commit e8e99e1c662214c4f14e1037f37815e0666d322b. * [CI] Auto commit changes from spotless --------- Co-authored-by: elasticsearchmachine --- .../logsdb/qa/AbstractChallengeRestTest.java | 41 +---- .../xpack/logsdb/qa/BulkChallengeRestIT.java | 77 ++++++++ ...=> BulkDynamicMappingChallengeRestIT.java} | 5 +- .../qa/BulkStaticMappingChallengeRestIT.java | 121 +++++++++++++ ...a => BulkStoredSourceChallengeRestIT.java} | 2 +- .../logsdb/qa/ReindexChallengeRestIT.java | 28 ++- ...ardVersusLogsIndexModeChallengeRestIT.java | 167 +++++------------- ...ogsIndexModeRandomDataChallengeRestIT.java | 61 ------- 8 files changed, 279 insertions(+), 223 deletions(-) create mode 100644 x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkChallengeRestIT.java rename x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/{StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java => BulkDynamicMappingChallengeRestIT.java} (76%) create mode 100644 x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkStaticMappingChallengeRestIT.java rename x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/{StandardVersusLogsStoredSourceChallengeRestIT.java => BulkStoredSourceChallengeRestIT.java} (85%) delete mode 100644 x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/AbstractChallengeRestTest.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/AbstractChallengeRestTest.java index 60c7d07115ef2..4b22e518307d0 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/AbstractChallengeRestTest.java +++ 
b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/AbstractChallengeRestTest.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.core.Tuple; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -221,44 +220,16 @@ private XContentBuilder createContenderMappings() throws IOException { public abstract void contenderMappings(XContentBuilder builder) throws IOException; - public void baselineSettings(Settings.Builder builder) {} + public abstract void baselineSettings(Settings.Builder builder); - public void contenderSettings(Settings.Builder builder) {} + public abstract void contenderSettings(Settings.Builder builder); public void commonSettings(Settings.Builder builder) {} - private Response indexDocuments( - final String dataStreamName, - final CheckedSupplier, IOException> documentsSupplier - ) throws IOException { - final StringBuilder sb = new StringBuilder(); - int id = 0; - for (var document : documentsSupplier.get()) { - sb.append(Strings.format("{ \"create\": { \"_id\" : \"%d\" } }", id)).append("\n"); - sb.append(Strings.toString(document)).append("\n"); - id++; - } - var request = new Request("POST", "/" + dataStreamName + "/_bulk"); - request.setJsonEntity(sb.toString()); - request.addParameter("refresh", "true"); - return client.performRequest(request); - } - - public Response indexBaselineDocuments(final CheckedSupplier, IOException> documentsSupplier) throws IOException { - return indexDocuments(getBaselineDataStreamName(), documentsSupplier); - } - - public Response indexContenderDocuments(final CheckedSupplier, IOException> documentsSupplier) - throws IOException { - return indexDocuments(getContenderDataStreamName(), documentsSupplier); - } - - public Tuple indexDocuments( - final CheckedSupplier, IOException> baselineSupplier, - final CheckedSupplier, IOException> contenderSupplier - ) throws IOException { - return new Tuple<>(indexBaselineDocuments(baselineSupplier), indexContenderDocuments(contenderSupplier)); - } + public abstract void indexDocuments( + CheckedSupplier, IOException> baselineSupplier, + CheckedSupplier, IOException> contenderSupplier + ) throws IOException; public Response queryBaseline(final SearchSourceBuilder search) throws IOException { return query(search, this::getBaselineDataStreamName); diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkChallengeRestIT.java new file mode 100644 index 0000000000000..3f5b69c170f51 --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkChallengeRestIT.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.logsdb.qa; + +import org.elasticsearch.common.CheckedSupplier; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * Challenge test that uses bulk indexing for both baseline and contender sides. + * We index same documents into an index with standard index mode and an index with logsdb index mode. + * Then we verify that results of common operations are the same modulo knows differences like synthetic source + * modifications. + */ +public class BulkChallengeRestIT extends StandardVersusLogsIndexModeChallengeRestIT { + + public BulkChallengeRestIT() {} + + protected BulkChallengeRestIT(DataGenerationHelper dataGenerationHelper) { + super(dataGenerationHelper); + } + + @Override + public void indexDocuments( + final CheckedSupplier, IOException> baselineSupplier, + final CheckedSupplier, IOException> contenderSupplier + ) throws IOException { + var contenderResponseEntity = indexContenderDocuments(contenderSupplier); + indexBaselineDocuments(baselineSupplier, contenderResponseEntity); + } + + private Map indexContenderDocuments(final CheckedSupplier, IOException> documentsSupplier) + throws IOException { + final StringBuilder sb = new StringBuilder(); + int id = 0; + for (var document : documentsSupplier.get()) { + if (autoGenerateId()) { + sb.append("{ \"create\": { } }\n"); + } else { + sb.append(Strings.format("{ \"create\": { \"_id\" : \"%d\" } }\n", id)); + } + sb.append(Strings.toString(document)).append("\n"); + id++; + } + return performBulkRequest(sb.toString(), false); + } + + @SuppressWarnings("unchecked") + private void indexBaselineDocuments( + final CheckedSupplier, IOException> documentsSupplier, + final Map contenderResponseEntity + ) throws IOException { + final StringBuilder sb = new StringBuilder(); + int id = 0; + final List> items = (List>) contenderResponseEntity.get("items"); + for (var document : documentsSupplier.get()) { + if (autoGenerateId()) { + var contenderId = ((Map) items.get(id).get("create")).get("_id"); + sb.append(Strings.format("{ \"create\": { \"_id\" : \"%s\" } }\n", contenderId)); + } else { + sb.append(Strings.format("{ \"create\": { \"_id\" : \"%d\" } }\n", id)); + } + sb.append(Strings.toString(document)).append("\n"); + id++; + } + performBulkRequest(sb.toString(), true); + } +} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkDynamicMappingChallengeRestIT.java similarity index 76% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkDynamicMappingChallengeRestIT.java index c1f97823b963a..d9870ed054b28 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkDynamicMappingChallengeRestIT.java @@ -9,9 +9,8 @@ import org.elasticsearch.common.settings.Settings; -public class StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT extends - StandardVersusLogsIndexModeRandomDataChallengeRestIT { - 
public StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT() { +public class BulkDynamicMappingChallengeRestIT extends BulkChallengeRestIT { + public BulkDynamicMappingChallengeRestIT() { super(new DataGenerationHelper(builder -> builder.withFullyDynamicMapping(true))); } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkStaticMappingChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkStaticMappingChallengeRestIT.java new file mode 100644 index 0000000000000..20d2d91977080 --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkStaticMappingChallengeRestIT.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb.qa; + +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; + +import java.io.IOException; +import java.time.Instant; + +/** + * This test uses simple mapping and document structure in order to allow easier debugging of the test itself. + */ +public class BulkStaticMappingChallengeRestIT extends BulkChallengeRestIT { + public BulkStaticMappingChallengeRestIT() {} + + @Override + public void baselineMappings(XContentBuilder builder) throws IOException { + if (fullyDynamicMapping == false) { + builder.startObject() + .startObject("properties") + + .startObject("@timestamp") + .field("type", "date") + .endObject() + + .startObject("host.name") + .field("type", "keyword") + .field("ignore_above", randomIntBetween(1000, 1200)) + .endObject() + + .startObject("message") + .field("type", "keyword") + .field("ignore_above", randomIntBetween(1000, 1200)) + .endObject() + + .startObject("method") + .field("type", "keyword") + .field("ignore_above", randomIntBetween(1000, 1200)) + .endObject() + + .startObject("memory_usage_bytes") + .field("type", "long") + .field("ignore_malformed", randomBoolean()) + .endObject() + + .endObject() + + .endObject(); + } else { + // We want dynamic mapping, but we need host.name to be a keyword instead of text to support aggregations. 
+ builder.startObject() + .startObject("properties") + + .startObject("host.name") + .field("type", "keyword") + .field("ignore_above", randomIntBetween(1000, 1200)) + .endObject() + + .endObject() + .endObject(); + } + } + + @Override + public void contenderMappings(XContentBuilder builder) throws IOException { + builder.startObject(); + builder.field("subobjects", false); + + if (fullyDynamicMapping == false) { + builder.startObject("properties") + + .startObject("@timestamp") + .field("type", "date") + .endObject() + + .startObject("host.name") + .field("type", "keyword") + .field("ignore_above", randomIntBetween(1000, 1200)) + .endObject() + + .startObject("message") + .field("type", "keyword") + .field("ignore_above", randomIntBetween(1000, 1200)) + .endObject() + + .startObject("method") + .field("type", "keyword") + .field("ignore_above", randomIntBetween(1000, 1200)) + .endObject() + + .startObject("memory_usage_bytes") + .field("type", "long") + .field("ignore_malformed", randomBoolean()) + .endObject() + + .endObject(); + } + + builder.endObject(); + } + + @Override + protected XContentBuilder generateDocument(final Instant timestamp) throws IOException { + return XContentFactory.jsonBuilder() + .startObject() + .field("@timestamp", DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(timestamp)) + .field("host.name", randomFrom("foo", "bar", "baz")) + .field("message", randomFrom("a message", "another message", "still another message", "one more message")) + .field("method", randomFrom("put", "post", "get")) + .field("memory_usage_bytes", randomLongBetween(1000, 2000)) + .endObject(); + } +} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsStoredSourceChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkStoredSourceChallengeRestIT.java similarity index 85% rename from x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsStoredSourceChallengeRestIT.java rename to x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkStoredSourceChallengeRestIT.java index 2f018b7dc0b38..ab5be0fe5760c 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsStoredSourceChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkStoredSourceChallengeRestIT.java @@ -13,7 +13,7 @@ * This test compares behavior of a standard mode data stream and a logsdb data stream using stored source. * There should be no differences between such two data streams. 
*/ -public class StandardVersusLogsStoredSourceChallengeRestIT extends StandardVersusLogsIndexModeRandomDataChallengeRestIT { +public class BulkStoredSourceChallengeRestIT extends BulkChallengeRestIT { @Override public void contenderSettings(Settings.Builder builder) { super.contenderSettings(builder); diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/ReindexChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/ReindexChallengeRestIT.java index 83344b688ff8c..4833fbee42ddb 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/ReindexChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/ReindexChallengeRestIT.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.logsdb.qa; import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; import org.elasticsearch.common.CheckedSupplier; +import org.elasticsearch.common.Strings; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -18,9 +18,29 @@ import static org.hamcrest.Matchers.equalTo; -public abstract class ReindexChallengeRestIT extends StandardVersusLogsIndexModeRandomDataChallengeRestIT { +public abstract class ReindexChallengeRestIT extends StandardVersusLogsIndexModeChallengeRestIT { + @Override - public Response indexContenderDocuments(CheckedSupplier, IOException> documentsSupplier) throws IOException { + public void indexDocuments( + final CheckedSupplier, IOException> baselineSupplier, + final CheckedSupplier, IOException> contenderSupplier + ) throws IOException { + indexBaselineDocuments(baselineSupplier); + indexContenderDocuments(); + } + + private void indexBaselineDocuments(final CheckedSupplier, IOException> documentsSupplier) throws IOException { + final StringBuilder sb = new StringBuilder(); + int id = 0; + for (var document : documentsSupplier.get()) { + sb.append(Strings.format("{ \"create\": { \"_id\" : \"%d\" } }\n", id)); + sb.append(Strings.toString(document)).append("\n"); + id++; + } + performBulkRequest(sb.toString(), true); + } + + private void indexContenderDocuments() throws IOException { var reindexRequest = new Request("POST", "/_reindex?refresh=true"); reindexRequest.setJsonEntity(String.format(Locale.ROOT, """ { @@ -38,7 +58,5 @@ public Response indexContenderDocuments(CheckedSupplier, I var body = entityAsMap(response); assertThat("encountered failures when performing reindex:\n " + body, body.get("failures"), equalTo(List.of())); - - return response; } } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index f18e57c229345..2a8c85efc863b 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; -import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; @@ 
-19,7 +18,6 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.logsdb.datageneration.matchers.MatchResult; import org.elasticsearch.logsdb.datageneration.matchers.Matcher; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; @@ -29,7 +27,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; -import org.hamcrest.Matchers; import java.io.IOException; import java.time.Instant; @@ -46,103 +43,34 @@ import static org.hamcrest.Matchers.greaterThan; /** - * Basic challenge test - we index same documents into an index with standard index mode and an index with logsdb index mode. - * Then we verify that results of common operations are the same modulo knows differences like synthetic source modifications. - * This test uses simple mapping and document structure in order to allow easier debugging of the test itself. + * Challenge test (see {@link BulkStaticMappingChallengeRestIT}) that uses randomly generated + * mapping and documents in order to cover more code paths and permutations. */ -public class StandardVersusLogsIndexModeChallengeRestIT extends AbstractChallengeRestTest { +public abstract class StandardVersusLogsIndexModeChallengeRestIT extends AbstractChallengeRestTest { + protected final boolean fullyDynamicMapping = randomBoolean(); + private final boolean useCustomSortConfig = fullyDynamicMapping == false && randomBoolean(); + private final boolean routeOnSortFields = useCustomSortConfig && randomBoolean(); private final int numShards = randomBoolean() ? randomIntBetween(2, 4) : 0; private final int numReplicas = randomBoolean() ? randomIntBetween(1, 3) : 0; - private final boolean fullyDynamicMapping = randomBoolean(); + protected final DataGenerationHelper dataGenerationHelper; public StandardVersusLogsIndexModeChallengeRestIT() { + this(new DataGenerationHelper()); + } + + protected StandardVersusLogsIndexModeChallengeRestIT(DataGenerationHelper dataGenerationHelper) { super("standard-apache-baseline", "logs-apache-contender", "baseline-template", "contender-template", 101, 101); + this.dataGenerationHelper = dataGenerationHelper; } @Override public void baselineMappings(XContentBuilder builder) throws IOException { - if (fullyDynamicMapping == false) { - builder.startObject() - .startObject("properties") - - .startObject("@timestamp") - .field("type", "date") - .endObject() - - .startObject("host.name") - .field("type", "keyword") - .field("ignore_above", randomIntBetween(1000, 1200)) - .endObject() - - .startObject("message") - .field("type", "keyword") - .field("ignore_above", randomIntBetween(1000, 1200)) - .endObject() - - .startObject("method") - .field("type", "keyword") - .field("ignore_above", randomIntBetween(1000, 1200)) - .endObject() - - .startObject("memory_usage_bytes") - .field("type", "long") - .field("ignore_malformed", randomBoolean()) - .endObject() - - .endObject() - - .endObject(); - } else { - // We want dynamic mapping, but we need host.name to be a keyword instead of text to support aggregations. 
- builder.startObject() - .startObject("properties") - - .startObject("host.name") - .field("type", "keyword") - .field("ignore_above", randomIntBetween(1000, 1200)) - .endObject() - - .endObject() - .endObject(); - } + dataGenerationHelper.standardMapping(builder); } @Override public void contenderMappings(XContentBuilder builder) throws IOException { - builder.startObject(); - builder.field("subobjects", false); - - if (fullyDynamicMapping == false) { - builder.startObject("properties") - - .startObject("@timestamp") - .field("type", "date") - .endObject() - - .startObject("host.name") - .field("type", "keyword") - .field("ignore_above", randomIntBetween(1000, 1200)) - .endObject() - - .startObject("message") - .field("type", "keyword") - .field("ignore_above", randomIntBetween(1000, 1200)) - .endObject() - - .startObject("method") - .field("type", "keyword") - .field("ignore_above", randomIntBetween(1000, 1200)) - .endObject() - - .startObject("memory_usage_bytes") - .field("type", "long") - .field("ignore_malformed", randomBoolean()) - .endObject() - - .endObject(); - } - - builder.endObject(); + dataGenerationHelper.logsDbMapping(builder); } @Override @@ -159,6 +87,14 @@ public void commonSettings(Settings.Builder builder) { @Override public void contenderSettings(Settings.Builder builder) { builder.put("index.mode", "logsdb"); + if (useCustomSortConfig) { + builder.putList("index.sort.field", "host.name", "method", "@timestamp"); + builder.putList("index.sort.order", "asc", "asc", "desc"); + if (routeOnSortFields) { + builder.put("index.logsdb.route_on_sort_fields", true); + } + } + dataGenerationHelper.logsDbSettings(builder); } @Override @@ -169,6 +105,10 @@ public void beforeStart() throws Exception { waitForLogs(client()); } + protected boolean autoGenerateId() { + return routeOnSortFields; + } + protected static void waitForLogs(RestClient client) throws Exception { assertBusy(() -> { try { @@ -330,28 +270,6 @@ public void testFieldCaps() throws IOException { assertTrue(matchResult.getMessage(), matchResult.isMatch()); } - @Override - public Response indexBaselineDocuments(CheckedSupplier, IOException> documentsSupplier) throws IOException { - var response = super.indexBaselineDocuments(documentsSupplier); - - assertThat(response.getStatusLine().getStatusCode(), Matchers.equalTo(RestStatus.OK.getStatus())); - var baselineResponseBody = entityAsMap(response); - assertThat("errors in baseline bulk response:\n " + baselineResponseBody, baselineResponseBody.get("errors"), equalTo(false)); - - return response; - } - - @Override - public Response indexContenderDocuments(CheckedSupplier, IOException> documentsSupplier) throws IOException { - var response = super.indexContenderDocuments(documentsSupplier); - - assertThat(response.getStatusLine().getStatusCode(), Matchers.equalTo(RestStatus.OK.getStatus())); - var contenderResponseBody = entityAsMap(response); - assertThat("errors in contender bulk response:\n " + contenderResponseBody, contenderResponseBody.get("errors"), equalTo(false)); - - return response; - } - private List generateDocuments(int numberOfDocuments) throws IOException { final List documents = new ArrayList<>(); // This is static in order to be able to identify documents between test runs. 
@@ -364,14 +282,12 @@ private List generateDocuments(int numberOfDocuments) throws IO } protected XContentBuilder generateDocument(final Instant timestamp) throws IOException { - return XContentFactory.jsonBuilder() - .startObject() - .field("@timestamp", DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(timestamp)) - .field("host.name", randomFrom("foo", "bar", "baz")) - .field("message", randomFrom("a message", "another message", "still another message", "one more message")) - .field("method", randomFrom("put", "post", "get")) - .field("memory_usage_bytes", randomLongBetween(1000, 2000)) - .endObject(); + var document = XContentFactory.jsonBuilder(); + dataGenerationHelper.generateDocument( + document, + Map.of("@timestamp", DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(timestamp)) + ); + return document; } @SuppressWarnings("unchecked") @@ -383,7 +299,7 @@ private static List> getQueryHits(final Response response) t assertThat(hitsList.size(), greaterThan(0)); return hitsList.stream() - .sorted(Comparator.comparingInt((Map hit) -> Integer.parseInt((String) hit.get("_id")))) + .sorted(Comparator.comparing((Map hit) -> ((String) hit.get("_id")))) .map(hit -> (Map) hit.get("_source")) .toList(); } @@ -404,7 +320,7 @@ private static List> getEsqlSourceResults(final Response res // Results contain a list of [source, id] lists. return values.stream() - .sorted(Comparator.comparingInt((List value) -> Integer.parseInt((String) value.get(1)))) + .sorted(Comparator.comparing((List value) -> ((String) value.get(1)))) .map(value -> (Map) value.get(0)) .toList(); } @@ -437,4 +353,19 @@ private static List> getAggregationBuckets(final Response re private void indexDocuments(List documents) throws IOException { indexDocuments(() -> documents, () -> documents); } + + protected final Map performBulkRequest(String json, boolean isBaseline) throws IOException { + var request = new Request("POST", "/" + (isBaseline ? getBaselineDataStreamName() : getContenderDataStreamName()) + "/_bulk"); + request.setJsonEntity(json); + request.addParameter("refresh", "true"); + var response = client.performRequest(request); + assertOK(response); + var responseBody = entityAsMap(response); + assertThat( + "errors in " + (isBaseline ? "baseline" : "contender") + " bulk response:\n " + responseBody, + responseBody.get("errors"), + equalTo(false) + ); + return responseBody; + } } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java deleted file mode 100644 index 3b141908f45b1..0000000000000 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeRandomDataChallengeRestIT.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.logsdb.qa; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.time.FormatNames; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; - -import java.io.IOException; -import java.time.Instant; -import java.util.Map; - -/** - * Challenge test (see {@link StandardVersusLogsIndexModeChallengeRestIT}) that uses randomly generated - * mapping and documents in order to cover more code paths and permutations. - */ -public class StandardVersusLogsIndexModeRandomDataChallengeRestIT extends StandardVersusLogsIndexModeChallengeRestIT { - protected final DataGenerationHelper dataGenerationHelper; - - public StandardVersusLogsIndexModeRandomDataChallengeRestIT() { - this(new DataGenerationHelper()); - } - - protected StandardVersusLogsIndexModeRandomDataChallengeRestIT(DataGenerationHelper dataGenerationHelper) { - super(); - this.dataGenerationHelper = dataGenerationHelper; - } - - @Override - public void baselineMappings(XContentBuilder builder) throws IOException { - dataGenerationHelper.standardMapping(builder); - } - - @Override - public void contenderMappings(XContentBuilder builder) throws IOException { - dataGenerationHelper.logsDbMapping(builder); - } - - @Override - public void contenderSettings(Settings.Builder builder) { - super.contenderSettings(builder); - dataGenerationHelper.logsDbSettings(builder); - } - - @Override - protected XContentBuilder generateDocument(final Instant timestamp) throws IOException { - var document = XContentFactory.jsonBuilder(); - dataGenerationHelper.generateDocument( - document, - Map.of("@timestamp", DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(timestamp)) - ); - return document; - } -} From be8f51a3703c851b9e19d66ccabb2ffe2758a37f Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Mon, 27 Jan 2025 12:19:16 -0500 Subject: [PATCH 076/383] Use new semantic text format by default (#120813) --- docs/changelog/120813.yaml | 17 +++++++ .../test/nodes.stats/11_indices_metrics.yml | 16 +++---- .../mapper/InferenceMetadataFieldsMapper.java | 3 +- .../xpack/inference/InferenceFeatures.java | 3 +- ...SemanticInferenceMetadataFieldsMapper.java | 3 ++ .../inference/30_semantic_text_inference.yml | 48 +++++++++++++++++++ 6 files changed, 79 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/120813.yaml diff --git a/docs/changelog/120813.yaml b/docs/changelog/120813.yaml new file mode 100644 index 0000000000000..4e2fc496d08ca --- /dev/null +++ b/docs/changelog/120813.yaml @@ -0,0 +1,17 @@ +pr: 120813 +summary: Change Semantic Text To Act Like A Normal Text Field +area: Search +type: breaking +issues: [] +breaking: + title: Change Semantic Text To Act Like A Normal Text Field + area: Search + details: + The previous semantic_text format used a complex subfield structure in _source to store the embeddings. + This complicated interactions/integrations with semantic_text fields and _source in general. + This new semantic_text format treats it as a normal text field, where the field's value in _source is the value assigned by the user. + impact: + Users who parsed the subfield structure of the previous semantic_text format in _source will need to update their parsing logic. + The new format does not directly expose the chunks and embeddings generated from the input text. 
+ The new format will be applied to all new indices, any existing indices will continue to use the previous format. + notable: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml index 2a14c291d5d31..195da7b8e6854 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml @@ -570,14 +570,14 @@ nodes.stats: { metric: _all, level: "indices", human: true } - gte: { nodes.$node_id.indices.mappings.total_count: 28 } - - lte: { nodes.$node_id.indices.mappings.total_count: 29 } + - lte: { nodes.$node_id.indices.mappings.total_count: 30 } - gte: { nodes.$node_id.indices.mappings.total_estimated_overhead_in_bytes: 28672 } - - lte: { nodes.$node_id.indices.mappings.total_estimated_overhead_in_bytes: 29696 } + - lte: { nodes.$node_id.indices.mappings.total_estimated_overhead_in_bytes: 30720 } - match: { nodes.$node_id.indices.mappings.total_segments: 1 } - gte: { nodes.$node_id.indices.mappings.total_segment_fields: 28 } - - lte: { nodes.$node_id.indices.mappings.total_segment_fields: 29 } + - lte: { nodes.$node_id.indices.mappings.total_segment_fields: 30 } - gte: { nodes.$node_id.indices.mappings.average_fields_per_segment: 28 } - - lte: { nodes.$node_id.indices.mappings.average_fields_per_segment: 29 } + - lte: { nodes.$node_id.indices.mappings.average_fields_per_segment: 30 } - do: index: @@ -590,14 +590,14 @@ nodes.stats: { metric: _all, level: "indices", human: true } - gte: { nodes.$node_id.indices.mappings.total_count: 28 } - - lte: { nodes.$node_id.indices.mappings.total_count: 29 } + - lte: { nodes.$node_id.indices.mappings.total_count: 30 } - gte: { nodes.$node_id.indices.mappings.total_estimated_overhead_in_bytes: 28672 } - - lte: { nodes.$node_id.indices.mappings.total_estimated_overhead_in_bytes: 29696 } + - lte: { nodes.$node_id.indices.mappings.total_estimated_overhead_in_bytes: 30720 } - match: { nodes.$node_id.indices.mappings.total_segments: 2 } - gte: { nodes.$node_id.indices.mappings.total_segment_fields: 56 } - - lte: { nodes.$node_id.indices.mappings.total_segment_fields: 58 } + - lte: { nodes.$node_id.indices.mappings.total_segment_fields: 60 } - gte: { nodes.$node_id.indices.mappings.average_fields_per_segment: 28 } - - lte: { nodes.$node_id.indices.mappings.average_fields_per_segment: 29 } + - lte: { nodes.$node_id.indices.mappings.average_fields_per_segment: 30 } --- "indices mappings does not exist in shards level": diff --git a/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java index 80fee58e93110..be4237fec3303 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java @@ -35,8 +35,7 @@ public abstract class InferenceMetadataFieldsMapper extends MetadataFieldMapper */ public static final Setting USE_LEGACY_SEMANTIC_TEXT_FORMAT = Setting.boolSetting( "index.mapping.semantic_text.use_legacy_format", - // don't use the new format by default yet - true, + false, Setting.Property.Final, Setting.Property.IndexScope, Setting.Property.InternalIndex diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 7d6069572ba21..4707a7824fcd1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -39,7 +39,8 @@ public Set getTestFeatures() { SEMANTIC_SPARSE_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED, SemanticInferenceMetadataFieldsMapper.EXPLICIT_NULL_FIXES, SEMANTIC_KNN_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED, - TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_ALIAS_HANDLING_FIX + TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_ALIAS_HANDLING_FIX, + SemanticInferenceMetadataFieldsMapper.INFERENCE_METADATA_FIELDS_ENABLED_BY_DEFAULT ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapper.java index f20b202b2991f..607bc4f480ccd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticInferenceMetadataFieldsMapper.java @@ -40,6 +40,9 @@ public class SemanticInferenceMetadataFieldsMapper extends InferenceMetadataFiel private static final SemanticInferenceMetadataFieldsMapper INSTANCE = new SemanticInferenceMetadataFieldsMapper(); public static final NodeFeature EXPLICIT_NULL_FIXES = new NodeFeature("semantic_text.inference_metadata_fields.explicit_null_fixes"); + public static final NodeFeature INFERENCE_METADATA_FIELDS_ENABLED_BY_DEFAULT = new NodeFeature( + "semantic_text.inference_metadata_fields.enabled_by_default" + ); public static final TypeParser PARSER = new FixedTypeParser( c -> InferenceMetadataFieldsMapper.isEnabled(c.getSettings()) ? 
INSTANCE : null diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml index c67327d9447f9..5a03418f2042e 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml @@ -957,3 +957,51 @@ setup: - length: { hits.hits.1.highlight.field: 2 } - match: { hits.hits.1.highlight.field.0: "some more tests" } - match: { hits.hits.1.highlight.field.1: "that include chunks" } + +--- +"Inference metadata fields format is used by default": + - requires: + cluster_features: "semantic_text.inference_metadata_fields.enabled_by_default" + reason: Inference metadata fields format is used by default as of 8.18 + + - do: + indices.create: + index: default-index + body: + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + + - do: + index: + index: default-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + refresh: true + + - do: + search: + index: default-index + body: + query: + semantic: + field: sparse_field + query: "inference test" + fields: [ "_inference_fields" ] + + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source.sparse_field: "inference test" } + - match: { hits.hits.0._source.dense_field: "another inference test" } + - match: { hits.hits.0._source.non_inference_field: "non inference test" } + - exists: hits.hits.0._source._inference_fields From c971460faac752bf2e63792b83ba537f126d836a Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 27 Jan 2025 09:29:00 -0800 Subject: [PATCH 077/383] Track in/out pages in exchange (#120867) This is a spin-off of the "retry node requests on shard-level failures" work. Currently, a driver can execute against multiple shards simultaneously. If the execution fails and no pages are added to the sink, we can retry the failed shards on another node. In another scenario, if no pages are fetched or added to the exchange source and the entire data node request fails, we can also retry the entire request. This change adds callbacks to RemoteSink and ExchangeSink, allowing for tracking of in/out pages. 
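The callback added by this patch is just a Runnable that the sink and the source handler invoke once per page, so callers can count traffic and decide whether a failed request can safely be retried (a retry is only safe while the counter is still zero). Below is a minimal, self-contained Java sketch of that pattern; TrackedSink and PageTrackingSketch are illustrative stand-ins, not the actual ExchangeSink/RemoteSink classes, which receive the callback through createExchangeSink(Runnable) and addRemoteSink(..., Runnable onPageFetched, ...) as shown in the diff that follows.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

// Simplified stand-in for an exchange sink; not the real ExchangeSink/ExchangeSinkHandler API.
final class TrackedSink {
    private final List<String> buffer = new ArrayList<>();
    private final Runnable onPageAdded; // invoked once per page, mirroring the onPageFetched callback in the patch

    TrackedSink(Runnable onPageAdded) {
        this.onPageAdded = onPageAdded;
    }

    void addPage(String page) {
        onPageAdded.run(); // notify the tracker before buffering the page
        buffer.add(page);
    }

    int bufferedPages() {
        return buffer.size();
    }
}

public class PageTrackingSketch {
    public static void main(String[] args) {
        AtomicInteger pagesAddedToSink = new AtomicInteger();
        TrackedSink sink = new TrackedSink(pagesAddedToSink::incrementAndGet);

        sink.addPage("page-0");
        sink.addPage("page-1");

        // A blind retry of the failed work is only safe while the counter is still zero;
        // here two pages were emitted, so the request must not be replayed as-is.
        boolean retryIsSafe = pagesAddedToSink.get() == 0;
        System.out.println("pages seen by sink: " + pagesAddedToSink.get() + ", retry safe: " + retryIsSafe);
        System.out.println("buffered: " + sink.bufferedPages());
    }
}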
--- .../operator/exchange/ExchangeService.java | 3 +- .../exchange/ExchangeSinkHandler.java | 16 +++--- .../exchange/ExchangeSourceHandler.java | 40 +++++++++------ .../compute/operator/DriverTests.java | 4 +- .../operator/ForkingOperatorTestCase.java | 3 +- .../exchange/ExchangeServiceTests.java | 51 +++++++++++++++---- .../esql/planner/LocalExecutionPlanner.java | 25 ++++----- .../esql/plugin/ClusterComputeHandler.java | 5 +- .../xpack/esql/plugin/ComputeContext.java | 9 ++-- .../xpack/esql/plugin/ComputeService.java | 14 +++-- .../esql/plugin/DataNodeComputeHandler.java | 11 ++-- .../elasticsearch/xpack/esql/CsvTests.java | 5 +- .../optimizer/PhysicalPlanOptimizerTests.java | 5 +- 13 files changed, 125 insertions(+), 66 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index d1a5d1757bc90..ac02273a48ee4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -51,7 +51,8 @@ /** * {@link ExchangeService} is responsible for exchanging pages between exchange sinks and sources on the same or different nodes. * It holds a map of {@link ExchangeSinkHandler} instances for each node in the cluster to serve {@link ExchangeRequest}s - * To connect exchange sources to exchange sinks, use {@link ExchangeSourceHandler#addRemoteSink(RemoteSink, boolean, int, ActionListener)}. + * To connect exchange sources to exchange sinks, + * use {@link ExchangeSourceHandler#addRemoteSink(RemoteSink, boolean, Runnable, int, ActionListener)}. */ public final class ExchangeService extends AbstractLifecycleComponent { // TODO: Make this a child action of the data node transport to ensure that exchanges diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java index 21eb2ed565618..ef137f7306e67 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java @@ -22,10 +22,10 @@ /** * An {@link ExchangeSinkHandler} receives pages and status from its {@link ExchangeSink}s, which are created using - * {@link #createExchangeSink()}} method. Pages and status can then be retrieved asynchronously by {@link ExchangeSourceHandler}s + * {@link #createExchangeSink(Runnable)}} method. Pages and status can then be retrieved asynchronously by {@link ExchangeSourceHandler}s * using the {@link #fetchPageAsync(boolean, ActionListener)} method. 
* - * @see #createExchangeSink() + * @see #createExchangeSink(Runnable) * @see #fetchPageAsync(boolean, ActionListener) * @see ExchangeSourceHandler */ @@ -52,9 +52,11 @@ public ExchangeSinkHandler(BlockFactory blockFactory, int maxBufferSize, LongSup private class ExchangeSinkImpl implements ExchangeSink { boolean finished; + private final Runnable onPageFetched; private final SubscribableListener onFinished = new SubscribableListener<>(); - ExchangeSinkImpl() { + ExchangeSinkImpl(Runnable onPageFetched) { + this.onPageFetched = onPageFetched; onChanged(); buffer.addCompletionListener(onFinished); outstandingSinks.incrementAndGet(); @@ -62,6 +64,7 @@ private class ExchangeSinkImpl implements ExchangeSink { @Override public void addPage(Page page) { + onPageFetched.run(); buffer.addPage(page); notifyListeners(); } @@ -101,7 +104,7 @@ public IsBlockedResult waitForWriting() { * @param sourceFinished if true, then this handler can finish as sources have enough pages. * @param listener the listener that will be notified when pages are ready or this handler is finished * @see RemoteSink - * @see ExchangeSourceHandler#addRemoteSink(RemoteSink, boolean, int, ActionListener) + * @see ExchangeSourceHandler#addRemoteSink(RemoteSink, boolean, Runnable, int, ActionListener) */ public void fetchPageAsync(boolean sourceFinished, ActionListener listener) { if (sourceFinished) { @@ -161,10 +164,11 @@ private void notifyListeners() { /** * Create a new exchange sink for exchanging data * + * @param onPageFetched a {@link Runnable} that will be called when a page is fetched. * @see ExchangeSinkOperator */ - public ExchangeSink createExchangeSink() { - return new ExchangeSinkImpl(); + public ExchangeSink createExchangeSink(Runnable onPageFetched) { + return new ExchangeSinkImpl(onPageFetched); } /** diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index aa722695b841e..db9a62da5d9ea 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -27,10 +27,10 @@ /** * An {@link ExchangeSourceHandler} asynchronously fetches pages and status from multiple {@link RemoteSink}s * and feeds them to its {@link ExchangeSource}, which are created using the {@link #createExchangeSource()}) method. - * {@link RemoteSink}s are added using the {@link #addRemoteSink(RemoteSink, boolean, int, ActionListener)}) method. + * {@link RemoteSink}s are added using the {@link #addRemoteSink(RemoteSink, boolean, Runnable, int, ActionListener)}) method. 
* * @see #createExchangeSource() - * @see #addRemoteSink(RemoteSink, boolean, int, ActionListener) + * @see #addRemoteSink(RemoteSink, boolean, Runnable, int, ActionListener) */ public final class ExchangeSourceHandler { private final ExchangeBuffer buffer; @@ -185,11 +185,13 @@ private final class RemoteSinkFetcher { private volatile boolean finished = false; private final RemoteSink remoteSink; private final boolean failFast; + private final Runnable onPageFetched; private final ActionListener completionListener; - RemoteSinkFetcher(RemoteSink remoteSink, boolean failFast, ActionListener completionListener) { + RemoteSinkFetcher(RemoteSink remoteSink, boolean failFast, Runnable onPageFetched, ActionListener completionListener) { outstandingSinks.trackNewInstance(); this.remoteSink = remoteSink; + this.onPageFetched = onPageFetched; this.failFast = failFast; this.completionListener = completionListener; } @@ -203,6 +205,7 @@ void fetchPage() { remoteSink.fetchPageAsync(toFinishSinks, ActionListener.wrap(resp -> { Page page = resp.takePage(); if (page != null) { + onPageFetched.run(); buffer.addPage(page); } if (resp.finished()) { @@ -252,19 +255,26 @@ void onSinkComplete() { /** * Add a remote sink as a new data source of this handler. The handler will start fetching data from this remote sink intermediately. * - * @param remoteSink the remote sink - * @param failFast determines how failures in this remote sink are handled: - * - If {@code false}, failures from this remote sink will not cause the exchange source to abort. - * Callers must handle these failures notified via {@code listener}. - * - If {@code true}, failures from this remote sink will cause the exchange source to abort. - * Callers can safely ignore failures notified via this listener, as they are collected and - * reported by the exchange source. - * @param instances the number of concurrent ``clients`` that this handler should use to fetch pages. - * More clients reduce latency, but add overhead. - * @param listener a listener that will be notified when the sink fails or completes + * @param remoteSink the remote sink + * @param failFast determines how failures in this remote sink are handled: + * - If {@code false}, failures from this remote sink will not cause the exchange source to abort. + * Callers must handle these failures notified via {@code listener}. + * - If {@code true}, failures from this remote sink will cause the exchange source to abort. + * Callers can safely ignore failures notified via this listener, as they are collected and + * reported by the exchange source. + * @param onPageFetched a callback that will be called when a page is fetched from the remote sink + * @param instances the number of concurrent ``clients`` that this handler should use to fetch pages. + * More clients reduce latency, but add overhead. 
+ * @param listener a listener that will be notified when the sink fails or completes * @see ExchangeSinkHandler#fetchPageAsync(boolean, ActionListener) */ - public void addRemoteSink(RemoteSink remoteSink, boolean failFast, int instances, ActionListener listener) { + public void addRemoteSink( + RemoteSink remoteSink, + boolean failFast, + Runnable onPageFetched, + int instances, + ActionListener listener + ) { final int sinkId = nextSinkId.incrementAndGet(); remoteSinks.put(sinkId, remoteSink); final ActionListener sinkListener = ActionListener.assertAtLeastOnce( @@ -284,7 +294,7 @@ public void onFailure(Exception e) { protected void doRun() { try (EsqlRefCountingListener refs = new EsqlRefCountingListener(sinkListener)) { for (int i = 0; i < instances; i++) { - var fetcher = new RemoteSinkFetcher(remoteSink, failFast, refs.acquire()); + var fetcher = new RemoteSinkFetcher(remoteSink, failFast, onPageFetched, refs.acquire()); fetcher.fetchPage(); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java index cc983e6b83fbe..e715b94bc55e5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java @@ -297,7 +297,7 @@ public void testEarlyTermination() { final int maxAllowedRows = between(1, 100); final AtomicInteger processedRows = new AtomicInteger(0); var sinkHandler = new ExchangeSinkHandler(driverContext.blockFactory(), positions, System::currentTimeMillis); - var sinkOperator = new ExchangeSinkOperator(sinkHandler.createExchangeSink(), Function.identity()); + var sinkOperator = new ExchangeSinkOperator(sinkHandler.createExchangeSink(() -> {}), Function.identity()); final var delayOperator = new EvalOperator(driverContext.blockFactory(), new EvalOperator.ExpressionEvaluator() { @Override public Block eval(Page page) { @@ -335,7 +335,7 @@ public void testResumeOnEarlyFinish() throws Exception { var sourceHandler = new ExchangeSourceHandler(between(1, 5), threadPool.executor("esql"), sourceFuture); var sinkHandler = new ExchangeSinkHandler(driverContext.blockFactory(), between(1, 5), System::currentTimeMillis); var sourceOperator = new ExchangeSourceOperator(sourceHandler.createExchangeSource()); - var sinkOperator = new ExchangeSinkOperator(sinkHandler.createExchangeSink(), Function.identity()); + var sinkOperator = new ExchangeSinkOperator(sinkHandler.createExchangeSink(() -> {}), Function.identity()); Driver driver = new Driver(driverContext, sourceOperator, List.of(), sinkOperator, () -> {}); PlainActionFuture future = new PlainActionFuture<>(); Driver.start(threadPool.getThreadContext(), threadPool.executor("esql"), driver, between(1, 1000), future); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index 94a5299dd8216..744121a3807c3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -220,6 +220,7 @@ List createDriversForInput(List input, List results, boolean sourceExchanger.addRemoteSink( sinkExchanger::fetchPageAsync, randomBoolean(), + () -> 
{}, 1, ActionListener.noop().delegateResponse((l, e) -> { throw new AssertionError("unexpected failure", e); @@ -248,7 +249,7 @@ List createDriversForInput(List input, List results, boolean simpleWithMode(AggregatorMode.INTERMEDIATE).get(driver1Context), intermediateOperatorItr.next() ), - new ExchangeSinkOperator(sinkExchanger.createExchangeSink(), Function.identity()), + new ExchangeSinkOperator(sinkExchanger.createExchangeSink(() -> {}), Function.identity()), () -> {} ) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 363ad9c49ddfe..fffeeac4e4cc2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -97,35 +97,50 @@ public void testBasic() throws Exception { pages[i] = new Page(blockFactory.newConstantIntBlockWith(i, 2)); } ExchangeSinkHandler sinkExchanger = new ExchangeSinkHandler(blockFactory, 2, threadPool.relativeTimeInMillisSupplier()); - ExchangeSink sink1 = sinkExchanger.createExchangeSink(); - ExchangeSink sink2 = sinkExchanger.createExchangeSink(); + AtomicInteger pagesAddedToSink = new AtomicInteger(); + ExchangeSink sink1 = sinkExchanger.createExchangeSink(pagesAddedToSink::incrementAndGet); + ExchangeSink sink2 = sinkExchanger.createExchangeSink(pagesAddedToSink::incrementAndGet); PlainActionFuture sourceCompletion = new PlainActionFuture<>(); ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(3, threadPool.executor(ESQL_TEST_EXECUTOR), sourceCompletion); ExchangeSource source = sourceExchanger.createExchangeSource(); - sourceExchanger.addRemoteSink(sinkExchanger::fetchPageAsync, randomBoolean(), 1, ActionListener.noop()); + AtomicInteger pagesAddedToSource = new AtomicInteger(); + sourceExchanger.addRemoteSink( + sinkExchanger::fetchPageAsync, + randomBoolean(), + pagesAddedToSource::incrementAndGet, + 1, + ActionListener.noop() + ); SubscribableListener waitForReading = source.waitForReading().listener(); assertFalse(waitForReading.isDone()); assertNull(source.pollPage()); assertTrue(sink1.waitForWriting().listener().isDone()); randomFrom(sink1, sink2).addPage(pages[0]); + assertThat(pagesAddedToSink.get(), equalTo(1)); randomFrom(sink1, sink2).addPage(pages[1]); + assertThat(pagesAddedToSink.get(), equalTo(2)); + assertBusy(() -> assertThat(pagesAddedToSource.get(), equalTo(2))); // source and sink buffers can store 5 pages for (Page p : List.of(pages[2], pages[3], pages[4])) { ExchangeSink sink = randomFrom(sink1, sink2); assertBusy(() -> assertTrue(sink.waitForWriting().listener().isDone())); sink.addPage(p); } + assertThat(pagesAddedToSink.get(), equalTo(5)); + assertBusy(() -> assertThat(pagesAddedToSource.get(), equalTo(3))); // sink buffer is full assertFalse(randomFrom(sink1, sink2).waitForWriting().listener().isDone()); assertBusy(() -> assertTrue(source.waitForReading().listener().isDone())); assertEquals(pages[0], source.pollPage()); assertBusy(() -> assertTrue(source.waitForReading().listener().isDone())); assertEquals(pages[1], source.pollPage()); + assertBusy(() -> assertThat(pagesAddedToSource.get(), equalTo(5))); // sink can write again assertBusy(() -> assertTrue(randomFrom(sink1, sink2).waitForWriting().listener().isDone())); randomFrom(sink1, 
sink2).addPage(pages[5]); assertBusy(() -> assertTrue(randomFrom(sink1, sink2).waitForWriting().listener().isDone())); randomFrom(sink1, sink2).addPage(pages[6]); + assertThat(pagesAddedToSink.get(), equalTo(7)); // sink buffer is full assertFalse(randomFrom(sink1, sink2).waitForWriting().listener().isDone()); sink1.finish(); @@ -134,6 +149,7 @@ public void testBasic() throws Exception { assertBusy(() -> assertTrue(source.waitForReading().listener().isDone())); assertEquals(pages[2 + i], source.pollPage()); } + assertBusy(() -> assertThat(pagesAddedToSource.get(), equalTo(7))); // source buffer is empty assertFalse(source.waitForReading().listener().isDone()); assertBusy(() -> assertTrue(sink2.waitForWriting().listener().isDone())); @@ -340,10 +356,16 @@ public void testConcurrentWithHandlers() { sinkHandler = randomFrom(sinkHandlers); } else { sinkHandler = new ExchangeSinkHandler(blockFactory, randomExchangeBuffer(), threadPool.relativeTimeInMillisSupplier()); - sourceExchanger.addRemoteSink(sinkHandler::fetchPageAsync, randomBoolean(), randomIntBetween(1, 3), ActionListener.noop()); + sourceExchanger.addRemoteSink( + sinkHandler::fetchPageAsync, + randomBoolean(), + () -> {}, + randomIntBetween(1, 3), + ActionListener.noop() + ); sinkHandlers.add(sinkHandler); } - return sinkHandler.createExchangeSink(); + return sinkHandler.createExchangeSink(() -> {}); }; final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); @@ -398,14 +420,14 @@ public void testExchangeSourceContinueOnFailure() { l.onResponse(new ExchangeResponse(blockFactory, page, r.finished())); })); } - }, false, instance, ActionListener.wrap(r -> { + }, false, () -> {}, instance, ActionListener.wrap(r -> { assertFalse(sinkFailed.get()); completedSinks.incrementAndGet(); }, e -> { assertTrue(sinkFailed.get()); failedSinks.incrementAndGet(); })); - return sinkHandler.createExchangeSink(); + return sinkHandler.createExchangeSink(() -> {}); }; Set actualSeqNos = runConcurrentTest( maxInputSeqNo, @@ -430,7 +452,7 @@ public void testClosingSinks() { Page p1 = new Page(block1); Page p2 = new Page(block2); ExchangeSinkHandler sinkExchanger = new ExchangeSinkHandler(blockFactory, 2, threadPool.relativeTimeInMillisSupplier()); - ExchangeSink sink = sinkExchanger.createExchangeSink(); + ExchangeSink sink = sinkExchanger.createExchangeSink(() -> {}); sink.addPage(p1); sink.addPage(p2); assertFalse(sink.waitForWriting().listener().isDone()); @@ -475,7 +497,7 @@ public void testFinishEarly() throws Exception { throw new AssertionError(e); } } - }, false, between(1, 3), sinkCompleted); + }, false, () -> {}, between(1, 3), sinkCompleted); threadPool.schedule( () -> sourceHandler.finishEarly(randomBoolean(), ActionListener.noop()), TimeValue.timeValueMillis(between(0, 10)), @@ -526,6 +548,7 @@ public void testConcurrentWithTransportActions() { sourceHandler.addRemoteSink( exchange0.newRemoteSink(task, exchangeId, node0, connection), randomBoolean(), + () -> {}, randomIntBetween(1, 5), ActionListener.noop() ); @@ -535,7 +558,7 @@ public void testConcurrentWithTransportActions() { maxInputSeqNo, maxOutputSeqNo, sourceHandler::createExchangeSource, - sinkHandler::createExchangeSink + () -> sinkHandler.createExchangeSink(() -> {}) ); var expectedSeqNos = IntStream.range(0, Math.min(maxInputSeqNo, maxOutputSeqNo)).boxed().collect(Collectors.toSet()); assertThat(actualSeqNos, hasSize(expectedSeqNos.size())); @@ -601,12 +624,18 @@ public void 
sendResponse(TransportResponse transportResponse) { sourceHandler.addRemoteSink( exchange0.newRemoteSink(task, exchangeId, node0, connection), true, + () -> {}, randomIntBetween(1, 5), ActionListener.noop() ); Exception err = expectThrows( Exception.class, - () -> runConcurrentTest(maxSeqNo, maxSeqNo, sourceHandler::createExchangeSource, sinkHandler::createExchangeSink) + () -> runConcurrentTest( + maxSeqNo, + maxSeqNo, + sourceHandler::createExchangeSource, + () -> sinkHandler.createExchangeSink(() -> {}) + ) ); Throwable cause = ExceptionsHelper.unwrap(err, IOException.class); assertNotNull(cause); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index c17ff0475b945..5975af29f5d04 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -36,9 +36,9 @@ import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.operator.SourceOperator.SourceOperatorFactory; import org.elasticsearch.compute.operator.StringExtractOperator; -import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; +import org.elasticsearch.compute.operator.exchange.ExchangeSink; import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator.ExchangeSinkOperatorFactory; -import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; +import org.elasticsearch.compute.operator.exchange.ExchangeSource; import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator.ExchangeSourceOperatorFactory; import org.elasticsearch.compute.operator.topn.TopNEncoder; import org.elasticsearch.compute.operator.topn.TopNOperator; @@ -103,6 +103,7 @@ import java.util.Objects; import java.util.Optional; import java.util.function.Function; +import java.util.function.Supplier; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -126,8 +127,8 @@ public class LocalExecutionPlanner { private final BlockFactory blockFactory; private final Settings settings; private final Configuration configuration; - private final ExchangeSourceHandler exchangeSourceHandler; - private final ExchangeSinkHandler exchangeSinkHandler; + private final Supplier exchangeSourceSupplier; + private final Supplier exchangeSinkSupplier; private final EnrichLookupService enrichLookupService; private final LookupFromIndexService lookupFromIndexService; private final PhysicalOperationProviders physicalOperationProviders; @@ -140,8 +141,8 @@ public LocalExecutionPlanner( BlockFactory blockFactory, Settings settings, Configuration configuration, - ExchangeSourceHandler exchangeSourceHandler, - ExchangeSinkHandler exchangeSinkHandler, + Supplier exchangeSourceSupplier, + Supplier exchangeSinkSupplier, EnrichLookupService enrichLookupService, LookupFromIndexService lookupFromIndexService, PhysicalOperationProviders physicalOperationProviders @@ -152,8 +153,8 @@ public LocalExecutionPlanner( this.bigArrays = bigArrays; this.blockFactory = blockFactory; this.settings = settings; - this.exchangeSourceHandler = exchangeSourceHandler; - this.exchangeSinkHandler = exchangeSinkHandler; + this.exchangeSourceSupplier = exchangeSourceSupplier; + this.exchangeSinkSupplier = exchangeSinkSupplier; this.enrichLookupService = enrichLookupService; this.lookupFromIndexService = 
lookupFromIndexService; this.physicalOperationProviders = physicalOperationProviders; @@ -323,7 +324,7 @@ private PhysicalOperation planExchange(ExchangeExec exchangeExec, LocalExecution } private PhysicalOperation planExchangeSink(ExchangeSinkExec exchangeSink, LocalExecutionPlannerContext context) { - Objects.requireNonNull(exchangeSinkHandler, "ExchangeSinkHandler wasn't provided"); + Objects.requireNonNull(exchangeSinkSupplier, "ExchangeSinkHandler wasn't provided"); var child = exchangeSink.child(); PhysicalOperation source = plan(child, context); @@ -332,11 +333,11 @@ private PhysicalOperation planExchangeSink(ExchangeSinkExec exchangeSink, LocalE ? Function.identity() : alignPageToAttributes(exchangeSink.output(), source.layout); - return source.withSink(new ExchangeSinkOperatorFactory(exchangeSinkHandler::createExchangeSink, transformer), source.layout); + return source.withSink(new ExchangeSinkOperatorFactory(exchangeSinkSupplier, transformer), source.layout); } private PhysicalOperation planExchangeSource(ExchangeSourceExec exchangeSource, LocalExecutionPlannerContext context) { - Objects.requireNonNull(exchangeSourceHandler, "ExchangeSourceHandler wasn't provided"); + Objects.requireNonNull(exchangeSourceSupplier, "ExchangeSourceHandler wasn't provided"); var builder = new Layout.Builder(); builder.append(exchangeSource.output()); @@ -344,7 +345,7 @@ private PhysicalOperation planExchangeSource(ExchangeSourceExec exchangeSource, var l = builder.build(); var layout = exchangeSource.isIntermediateAgg() ? new ExchangeLayout(l) : l; - return PhysicalOperation.fromSource(new ExchangeSourceOperatorFactory(exchangeSourceHandler::createExchangeSource), layout); + return PhysicalOperation.fromSource(new ExchangeSourceOperatorFactory(exchangeSourceSupplier), layout); } private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerContext context) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java index 20211323b3afb..19ed77405daa2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java @@ -92,6 +92,7 @@ void startComputeOnRemoteCluster( exchangeSource.addRemoteSink( remoteSink, true, + () -> {}, queryPragmas.concurrentExchangeClients(), computeListener.acquireAvoid() ); @@ -209,8 +210,8 @@ void runComputeOnRemoteCluster( List.of(), configuration, configuration.newFoldContext(), - exchangeSource, - exchangeSink + exchangeSource::createExchangeSource, + () -> exchangeSink.createExchangeSink(() -> {}) ), coordinatorPlan, computeListener.acquireCompute() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java index 4e178bb740757..82943d23581fd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java @@ -7,14 +7,15 @@ package org.elasticsearch.xpack.esql.plugin; -import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; -import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; +import org.elasticsearch.compute.operator.exchange.ExchangeSink; +import 
org.elasticsearch.compute.operator.exchange.ExchangeSource; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.session.Configuration; import java.util.List; +import java.util.function.Supplier; record ComputeContext( String sessionId, @@ -22,8 +23,8 @@ record ComputeContext( List searchContexts, Configuration configuration, FoldContext foldCtx, - ExchangeSourceHandler exchangeSource, - ExchangeSinkHandler exchangeSink + Supplier exchangeSourceSupplier, + Supplier exchangeSinkSupplier ) { List searchExecutionContexts() { return searchContexts.stream().map(SearchContext::getSearchExecutionContext).toList(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 75619958c5228..de6fc082eb243 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -224,7 +224,15 @@ public void execute( ) { runCompute( rootTask, - new ComputeContext(sessionId, LOCAL_CLUSTER, List.of(), configuration, foldContext, exchangeSource, null), + new ComputeContext( + sessionId, + LOCAL_CLUSTER, + List.of(), + configuration, + foldContext, + exchangeSource::createExchangeSource, + null + ), coordinatorPlan, localListener.acquireCompute() ); @@ -372,8 +380,8 @@ public SourceProvider createSourceProvider() { blockFactory, clusterService.getSettings(), context.configuration(), - context.exchangeSource(), - context.exchangeSink(), + context.exchangeSourceSupplier(), + context.exchangeSinkSupplier(), enrichLookupService, lookupFromIndexService, new EsPhysicalOperationProviders(context.foldCtx(), contexts, searchService.getIndicesService().getAnalysis()) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java index 5b4f3e8cbffb1..7020932819421 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java @@ -130,6 +130,7 @@ void startComputeOnDataNodes( exchangeSource.addRemoteSink( remoteSink, true, + () -> {}, queryPragmas.concurrentExchangeClients(), computeListener.acquireAvoid() ); @@ -330,7 +331,7 @@ private class DataNodeRequestExecutor { this.exchangeSink = exchangeSink; this.computeListener = computeListener; this.maxConcurrentShards = maxConcurrentShards; - this.blockingSink = exchangeSink.createExchangeSink(); + this.blockingSink = exchangeSink.createExchangeSink(() -> {}); } void start() { @@ -376,7 +377,7 @@ public void onFailure(Exception e) { configuration, configuration.newFoldContext(), null, - exchangeSink + () -> exchangeSink.createExchangeSink(() -> {}) ); computeService.runCompute(parentTask, computeContext, request.plan(), batchListener); }, batchListener::onFailure)); @@ -428,7 +429,7 @@ private void runComputeOnDataNode( () -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled())) ); var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor, computeListener.acquireAvoid()); - 
exchangeSource.addRemoteSink(internalSink::fetchPageAsync, true, 1, ActionListener.noop()); + exchangeSource.addRemoteSink(internalSink::fetchPageAsync, true, () -> {}, 1, ActionListener.noop()); var reductionListener = computeListener.acquireCompute(); computeService.runCompute( task, @@ -438,8 +439,8 @@ private void runComputeOnDataNode( List.of(), request.configuration(), new FoldContext(request.pragmas().foldLimit().getBytes()), - exchangeSource, - externalSink + exchangeSource::createExchangeSource, + () -> externalSink.createExchangeSink(() -> {}) ), reducePlan, ActionListener.wrap(resp -> { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index ae9c12fd7c711..02e683542df7c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -626,8 +626,8 @@ void executeSubPlan( blockFactory, randomNodeSettings(), configuration, - exchangeSource, - exchangeSink, + exchangeSource::createExchangeSource, + () -> exchangeSink.createExchangeSink(() -> {}), Mockito.mock(EnrichLookupService.class), Mockito.mock(LookupFromIndexService.class), physicalOperationProviders @@ -653,6 +653,7 @@ void executeSubPlan( exchangeSource.addRemoteSink( exchangeSink::fetchPageAsync, Randomness.get().nextBoolean(), + () -> {}, randomIntBetween(1, 3), ActionListener.noop().delegateResponse((l, e) -> { throw new AssertionError("expected no failure", e); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index a51ad384d9488..23e0937380f34 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -7587,6 +7587,7 @@ private LocalExecutionPlanner.LocalExecutionPlan physicalOperationsFromPhysicalP var plans = PlannerUtils.breakPlanBetweenCoordinatorAndDataNode(EstimatesRowSize.estimateRowSize(0, plan), config); plan = useDataNodePlan ? 
plans.v2() : plans.v1(); plan = PlannerUtils.localPlan(List.of(), config, FoldContext.small(), plan); + ExchangeSinkHandler exchangeSinkHandler = new ExchangeSinkHandler(null, 10, () -> 10); LocalExecutionPlanner planner = new LocalExecutionPlanner( "test", "", @@ -7595,8 +7596,8 @@ private LocalExecutionPlanner.LocalExecutionPlan physicalOperationsFromPhysicalP TestBlockFactory.getNonBreakingInstance(), Settings.EMPTY, config, - new ExchangeSourceHandler(10, null, null), - new ExchangeSinkHandler(null, 10, () -> 10), + new ExchangeSourceHandler(10, null, null)::createExchangeSource, + () -> exchangeSinkHandler.createExchangeSink(() -> {}), null, null, new EsPhysicalOperationProviders(FoldContext.small(), List.of(), null) From b54296eb2fa5db1d1444190814424f4820bb294c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Mon, 27 Jan 2025 18:32:15 +0100 Subject: [PATCH 078/383] ESQL: Fix DateExtract with nanos tests (#120929) Fix switch failing in 8.x: https://buildkite.com/elastic/elasticsearch-pull-request/builds/52921#0194a834-0080-4d4e-906e-2bafbfd8aba8 Fix tests failing in main because of order: https://gradle-enterprise.elastic.co/s/pfbdstwc4qa4o/tests/overview?outcome=FAILED --- .../qa/testFixtures/src/main/resources/date_nanos.csv-spec | 3 ++- .../esql/expression/function/scalar/date/DateExtract.java | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec index ec68e7eecc658..a41c9b8cd0b16 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec @@ -506,7 +506,8 @@ required_capability: date_nanos_date_extract FROM date_nanos | EVAL nn = MV_MAX(nanos) | EVAL year = DATE_EXTRACT("year", nn), ns = DATE_EXTRACT("nano_of_second", nn) -| KEEP nn, year, ns; +| KEEP nn, year, ns +| SORT nn DESC; nn:date_nanos | year:long | ns:long 2023-10-23T13:55:01.543123456Z | 2023 | 543123456 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index 20ff398803854..7d8648a672ff8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -114,8 +114,8 @@ public String getWriteableName() { @Override public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { boolean isNanos = switch (field().dataType()) { - case DataType.DATETIME -> false; - case DataType.DATE_NANOS -> true; + case DATETIME -> false; + case DATE_NANOS -> true; default -> throw new UnsupportedOperationException( "Unsupported field type [" + field().dataType().name() From 10d09a1d717183c4364acb32787a63c89624d64c Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 27 Jan 2025 12:57:49 -0500 Subject: [PATCH 079/383] [CI] Bump memory on PR part-4 agents --- .buildkite/pipelines/pull-request/part-4-entitlements.yml | 2 +- .buildkite/pipelines/pull-request/part-4-fips.yml | 2 +- .buildkite/pipelines/pull-request/part-4.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.buildkite/pipelines/pull-request/part-4-entitlements.yml 
b/.buildkite/pipelines/pull-request/part-4-entitlements.yml index 5817dacbad80b..67172f891b4b6 100644 --- a/.buildkite/pipelines/pull-request/part-4-entitlements.yml +++ b/.buildkite/pipelines/pull-request/part-4-entitlements.yml @@ -7,5 +7,5 @@ steps: agents: provider: gcp image: family/elasticsearch-ubuntu-2004 - machineType: custom-32-98304 + machineType: n1-standard-32 buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/part-4-fips.yml b/.buildkite/pipelines/pull-request/part-4-fips.yml index 11a50456ca4c0..5c020117ff00f 100644 --- a/.buildkite/pipelines/pull-request/part-4-fips.yml +++ b/.buildkite/pipelines/pull-request/part-4-fips.yml @@ -7,5 +7,5 @@ steps: agents: provider: gcp image: family/elasticsearch-ubuntu-2004 - machineType: custom-32-98304 + machineType: n1-standard-32 buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/part-4.yml b/.buildkite/pipelines/pull-request/part-4.yml index af11f08953d07..8774389a43e26 100644 --- a/.buildkite/pipelines/pull-request/part-4.yml +++ b/.buildkite/pipelines/pull-request/part-4.yml @@ -7,5 +7,5 @@ steps: agents: provider: gcp image: family/elasticsearch-ubuntu-2004 - machineType: custom-32-98304 + machineType: n1-standard-32 buildDirectory: /dev/shm/bk From d28e9ed78ff3ccc63e0c13f9dbbe4de2f6c7b1d7 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Mon, 27 Jan 2025 17:59:45 +0000 Subject: [PATCH 080/383] Add Minimal Service Settings to the Model Registry (#120560) This commit introduces minimal service settings in the model registry, accessible without querying the inference index. These settings are now available for the default models exposed by the inference service. The ability to access settings without an inference index query is needed for the semantic text field, as it would benefit from eager validation of configuration during field creation. This is not feasible currently because retrieving service settings relies on an asynchronous call to the inference index. ### Follow-Up Plans: 1. Extend this capability to include minimal service settings for all newly added models, making them accessible via the cluster state. 2. Update the semantic text field to eagerly retrieve service settings directly from the model registry. 
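The calls the updated tests make against the new class further down in this patch give a compact picture of the intended API. The sketch below assumes the Elasticsearch server module from this patch is on the classpath and only exercises factory methods that appear in the diff (sparseEmbedding() and textEmbedding(int, SimilarityMeasure, ElementType)); it is an illustration of usage, not part of the change itself.

import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper;
import org.elasticsearch.inference.MinimalServiceSettings;
import org.elasticsearch.inference.SimilarityMeasure;

public class MinimalServiceSettingsSketch {
    public static void main(String[] args) {
        // Sparse embedding endpoints carry no dimensions/similarity/element type.
        MinimalServiceSettings sparse = MinimalServiceSettings.sparseEmbedding();

        // Text embedding endpoints must declare all three; the record's validation
        // throws IllegalArgumentException if any of them is missing.
        MinimalServiceSettings dense = MinimalServiceSettings.textEmbedding(
            384,
            SimilarityMeasure.COSINE,
            DenseVectorFieldMapper.ElementType.FLOAT
        );

        System.out.println("sparse: " + sparse);
        System.out.println("dense: " + dense);
    }
}

The same settings object backs the DefaultConfigId change in InferenceService, which now carries a MinimalServiceSettings instead of a bare TaskType, so default endpoints expose their embedding shape without an inference index lookup.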
--- .../inference/InferenceService.java | 2 +- .../inference/MinimalServiceSettings.java | 181 ++++++++++++++++++ .../MinimalServiceSettingsTests.java | 43 +++++ .../integration/ModelRegistryIT.java | 44 +++-- .../ShardBulkInferenceActionFilter.java | 3 +- .../inference/mapper/SemanticTextField.java | 141 +------------- .../mapper/SemanticTextFieldMapper.java | 47 +++-- .../inference/registry/ModelRegistry.java | 65 +++++-- .../ElasticsearchInternalService.java | 7 +- .../ElserInternalServiceSettings.java | 5 + ...lingualE5SmallInternalServiceSettings.java | 5 + .../mapper/SemanticTextFieldMapperTests.java | 63 +++--- .../mapper/SemanticTextFieldTests.java | 35 +--- .../queries/SemanticQueryBuilderTests.java | 7 +- .../registry/ModelRegistryTests.java | 45 ++++- 15 files changed, 439 insertions(+), 254 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/inference/MinimalServiceSettings.java create mode 100644 server/src/test/java/org/elasticsearch/inference/MinimalServiceSettingsTests.java diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index bea7ee52ecfb0..e1ebd8bb81ff4 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -219,7 +219,7 @@ default boolean canStream(TaskType taskType) { return supportedStreamingTasks().contains(taskType); } - record DefaultConfigId(String inferenceId, TaskType taskType, InferenceService service) {}; + record DefaultConfigId(String inferenceId, MinimalServiceSettings settings, InferenceService service) {}; /** * Get the Ids and task type of any default configurations provided by this service diff --git a/server/src/main/java/org/elasticsearch/inference/MinimalServiceSettings.java b/server/src/main/java/org/elasticsearch/inference/MinimalServiceSettings.java new file mode 100644 index 0000000000000..be380d74093af --- /dev/null +++ b/server/src/main/java/org/elasticsearch/inference/MinimalServiceSettings.java @@ -0,0 +1,181 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.inference; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; +import static org.elasticsearch.inference.TaskType.COMPLETION; +import static org.elasticsearch.inference.TaskType.RERANK; +import static org.elasticsearch.inference.TaskType.SPARSE_EMBEDDING; +import static org.elasticsearch.inference.TaskType.TEXT_EMBEDDING; + +/** + * Defines the base settings required to configure an inference endpoint. 
+ * + * These settings are immutable and describe the input and output types that the endpoint will handle. + * They capture the essential properties of an inference model, ensuring the endpoint is correctly configured. + * + * Key properties include: + *
+ * <ul>
+ *   <li>{@code taskType} - Specifies the type of task the model performs, such as classification or text embeddings.</li>
+ *   <li>{@code dimensions}, {@code similarity}, and {@code elementType} - These settings are applicable only when
+ *   the {@code taskType} is {@link TaskType#TEXT_EMBEDDING}. They define the structure and behavior of embeddings.</li>
+ * </ul>
+ * + * @param taskType the type of task the inference model performs. + * @param dimensions the number of dimensions for the embeddings, applicable only for {@link TaskType#TEXT_EMBEDDING} (nullable). + * @param similarity the similarity measure used for embeddings, applicable only for {@link TaskType#TEXT_EMBEDDING} (nullable). + * @param elementType the type of elements in the embeddings, applicable only for {@link TaskType#TEXT_EMBEDDING} (nullable). + */ +public record MinimalServiceSettings( + TaskType taskType, + @Nullable Integer dimensions, + @Nullable SimilarityMeasure similarity, + @Nullable ElementType elementType +) implements ToXContentObject { + + public static final String TASK_TYPE_FIELD = "task_type"; + static final String DIMENSIONS_FIELD = "dimensions"; + static final String SIMILARITY_FIELD = "similarity"; + static final String ELEMENT_TYPE_FIELD = "element_type"; + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "model_settings", + true, + args -> { + TaskType taskType = TaskType.fromString((String) args[0]); + Integer dimensions = (Integer) args[1]; + SimilarityMeasure similarity = args[2] == null ? null : SimilarityMeasure.fromString((String) args[2]); + DenseVectorFieldMapper.ElementType elementType = args[3] == null + ? null + : DenseVectorFieldMapper.ElementType.fromString((String) args[3]); + return new MinimalServiceSettings(taskType, dimensions, similarity, elementType); + } + ); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField(TASK_TYPE_FIELD)); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), new ParseField(DIMENSIONS_FIELD)); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField(SIMILARITY_FIELD)); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField(ELEMENT_TYPE_FIELD)); + } + + public static MinimalServiceSettings parse(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + public static MinimalServiceSettings textEmbedding(int dimensions, SimilarityMeasure similarity, ElementType elementType) { + return new MinimalServiceSettings(TEXT_EMBEDDING, dimensions, similarity, elementType); + } + + public static MinimalServiceSettings sparseEmbedding() { + return new MinimalServiceSettings(SPARSE_EMBEDDING, null, null, null); + } + + public static MinimalServiceSettings rerank() { + return new MinimalServiceSettings(RERANK, null, null, null); + } + + public static MinimalServiceSettings completion() { + return new MinimalServiceSettings(COMPLETION, null, null, null); + } + + public MinimalServiceSettings(Model model) { + this( + model.getTaskType(), + model.getServiceSettings().dimensions(), + model.getServiceSettings().similarity(), + model.getServiceSettings().elementType() + ); + } + + public MinimalServiceSettings( + TaskType taskType, + @Nullable Integer dimensions, + @Nullable SimilarityMeasure similarity, + @Nullable ElementType elementType + ) { + this.taskType = Objects.requireNonNull(taskType, "task type must not be null"); + this.dimensions = dimensions; + this.similarity = similarity; + this.elementType = elementType; + validate(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(TASK_TYPE_FIELD, taskType.toString()); + if (dimensions != null) { + builder.field(DIMENSIONS_FIELD, dimensions); + } + if (similarity != null) { + 
builder.field(SIMILARITY_FIELD, similarity); + } + if (elementType != null) { + builder.field(ELEMENT_TYPE_FIELD, elementType); + } + return builder.endObject(); + } + + @Override + public String toString() { + final StringBuilder sb = new StringBuilder(); + sb.append("task_type=").append(taskType); + if (dimensions != null) { + sb.append(", dimensions=").append(dimensions); + } + if (similarity != null) { + sb.append(", similarity=").append(similarity); + } + if (elementType != null) { + sb.append(", element_type=").append(elementType); + } + return sb.toString(); + } + + private void validate() { + switch (taskType) { + case TEXT_EMBEDDING: + validateFieldPresent(DIMENSIONS_FIELD, dimensions); + validateFieldPresent(SIMILARITY_FIELD, similarity); + validateFieldPresent(ELEMENT_TYPE_FIELD, elementType); + break; + + default: + validateFieldNotPresent(DIMENSIONS_FIELD, dimensions); + validateFieldNotPresent(SIMILARITY_FIELD, similarity); + validateFieldNotPresent(ELEMENT_TYPE_FIELD, elementType); + break; + } + } + + private void validateFieldPresent(String field, Object fieldValue) { + if (fieldValue == null) { + throw new IllegalArgumentException("required [" + field + "] field is missing for task_type [" + taskType.name() + "]"); + } + } + + private void validateFieldNotPresent(String field, Object fieldValue) { + if (fieldValue != null) { + throw new IllegalArgumentException("[" + field + "] is not allowed for task_type [" + taskType.name() + "]"); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/inference/MinimalServiceSettingsTests.java b/server/src/test/java/org/elasticsearch/inference/MinimalServiceSettingsTests.java new file mode 100644 index 0000000000000..55b1774fb7cde --- /dev/null +++ b/server/src/test/java/org/elasticsearch/inference/MinimalServiceSettingsTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.inference; + +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +public class MinimalServiceSettingsTests extends AbstractXContentTestCase { + @Override + protected MinimalServiceSettings createTestInstance() { + TaskType taskType = randomFrom(TaskType.values()); + Integer dimensions = null; + SimilarityMeasure similarity = null; + DenseVectorFieldMapper.ElementType elementType = null; + + if (taskType == TaskType.TEXT_EMBEDDING) { + dimensions = randomIntBetween(2, 1024); + similarity = randomFrom(SimilarityMeasure.values()); + elementType = randomFrom(DenseVectorFieldMapper.ElementType.values()); + } + return new MinimalServiceSettings(taskType, dimensions, similarity, elementType); + } + + @Override + protected MinimalServiceSettings doParseInstance(XContentParser parser) throws IOException { + return MinimalServiceSettings.parse(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index fd0480b141981..51ee42cf2f7f2 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -15,13 +15,16 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceExtension; +import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; @@ -34,6 +37,7 @@ import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests; import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.registry.ModelRegistryTests; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalModel; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserInternalServiceSettingsTests; @@ -305,9 +309,9 @@ public void testGetAllModels_WithDefaults() throws Exception { var defaultIds = new ArrayList(); for (int i = 0; i < defaultModelCount; i++) { var id = "default-" + i; - var taskType = randomFrom(TaskType.values()); - defaultConfigs.add(createModel(id, taskType, serviceName)); - defaultIds.add(new InferenceService.DefaultConfigId(id, taskType, service)); + var modelSettings = 
ModelRegistryTests.randomMinimalServiceSettings(); + defaultConfigs.add(createModel(id, modelSettings.taskType(), serviceName)); + defaultIds.add(new InferenceService.DefaultConfigId(id, modelSettings, service)); } doAnswer(invocation -> { @@ -371,9 +375,9 @@ public void testGetAllModels_OnlyDefaults() throws Exception { var defaultIds = new ArrayList(); for (int i = 0; i < defaultModelCount; i++) { var id = "default-" + i; - var taskType = randomFrom(TaskType.values()); - defaultConfigs.add(createModel(id, taskType, serviceName)); - defaultIds.add(new InferenceService.DefaultConfigId(id, taskType, service)); + var modelSettings = ModelRegistryTests.randomMinimalServiceSettings(); + defaultConfigs.add(createModel(id, modelSettings.taskType(), serviceName)); + defaultIds.add(new InferenceService.DefaultConfigId(id, modelSettings, service)); } doAnswer(invocation -> { @@ -414,9 +418,9 @@ public void testGetAllModels_withDoNotPersist() throws Exception { var defaultIds = new ArrayList(); for (int i = 0; i < defaultModelCount; i++) { var id = "default-" + i; - var taskType = randomFrom(TaskType.values()); - defaultConfigs.add(createModel(id, taskType, serviceName)); - defaultIds.add(new InferenceService.DefaultConfigId(id, taskType, service)); + var modelSettings = ModelRegistryTests.randomMinimalServiceSettings(); + defaultConfigs.add(createModel(id, modelSettings.taskType(), serviceName)); + defaultIds.add(new InferenceService.DefaultConfigId(id, modelSettings, service)); } doAnswer(invocation -> { @@ -455,8 +459,14 @@ public void testGet_WithDefaults() throws InterruptedException { defaultConfigs.add(createModel("default-sparse", TaskType.SPARSE_EMBEDDING, serviceName)); defaultConfigs.add(createModel("default-text", TaskType.TEXT_EMBEDDING, serviceName)); - defaultIds.add(new InferenceService.DefaultConfigId("default-sparse", TaskType.SPARSE_EMBEDDING, service)); - defaultIds.add(new InferenceService.DefaultConfigId("default-text", TaskType.TEXT_EMBEDDING, service)); + defaultIds.add(new InferenceService.DefaultConfigId("default-sparse", MinimalServiceSettings.sparseEmbedding(), service)); + defaultIds.add( + new InferenceService.DefaultConfigId( + "default-text", + MinimalServiceSettings.textEmbedding(384, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT), + service + ) + ); doAnswer(invocation -> { @SuppressWarnings("unchecked") @@ -502,9 +512,15 @@ public void testGetByTaskType_WithDefaults() throws Exception { var service = mock(InferenceService.class); var defaultIds = new ArrayList(); - defaultIds.add(new InferenceService.DefaultConfigId("default-sparse", TaskType.SPARSE_EMBEDDING, service)); - defaultIds.add(new InferenceService.DefaultConfigId("default-text", TaskType.TEXT_EMBEDDING, service)); - defaultIds.add(new InferenceService.DefaultConfigId("default-chat", TaskType.COMPLETION, service)); + defaultIds.add(new InferenceService.DefaultConfigId("default-sparse", MinimalServiceSettings.sparseEmbedding(), service)); + defaultIds.add( + new InferenceService.DefaultConfigId( + "default-text", + MinimalServiceSettings.textEmbedding(384, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT), + service + ) + ); + defaultIds.add(new InferenceService.DefaultConfigId("default-chat", MinimalServiceSettings.completion(), service)); doAnswer(invocation -> { @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java index f4aa49bad1648..3933260664b7c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -35,6 +35,7 @@ import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.rest.RestStatus; @@ -438,7 +439,7 @@ private void applyInferenceResponses(BulkItemRequest item, FieldInferenceRespons useLegacyFormat ? inputs : null, new SemanticTextField.InferenceResult( inferenceFieldMetadata.getInferenceId(), - model != null ? new SemanticTextField.ModelSettings(model) : null, + model != null ? new MinimalServiceSettings(model) : null, chunkMap ), indexRequest.getContentType() diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java index fddff17dab4cf..489951a206149 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java @@ -14,11 +14,8 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersions; -import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ChunkedInference; -import org.elasticsearch.inference.Model; -import org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -38,10 +35,7 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Objects; -import static org.elasticsearch.inference.TaskType.SPARSE_EMBEDDING; -import static org.elasticsearch.inference.TaskType.TEXT_EMBEDDING; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -75,117 +69,13 @@ public record SemanticTextField( static final String CHUNKED_START_OFFSET_FIELD = "start_offset"; static final String CHUNKED_END_OFFSET_FIELD = "end_offset"; static final String MODEL_SETTINGS_FIELD = "model_settings"; - static final String TASK_TYPE_FIELD = "task_type"; - static final String DIMENSIONS_FIELD = "dimensions"; - static final String SIMILARITY_FIELD = "similarity"; - static final String ELEMENT_TYPE_FIELD = "element_type"; - public record InferenceResult(String inferenceId, ModelSettings modelSettings, Map> chunks) {} + public record InferenceResult(String inferenceId, MinimalServiceSettings modelSettings, Map> chunks) {} public record Chunk(@Nullable String text, int startOffset, int endOffset, BytesReference rawEmbeddings) {} public record 
Offset(String sourceFieldName, int startOffset, int endOffset) {} - public record ModelSettings( - TaskType taskType, - Integer dimensions, - SimilarityMeasure similarity, - DenseVectorFieldMapper.ElementType elementType - ) implements ToXContentObject { - public ModelSettings(Model model) { - this( - model.getTaskType(), - model.getServiceSettings().dimensions(), - model.getServiceSettings().similarity(), - model.getServiceSettings().elementType() - ); - } - - public ModelSettings( - TaskType taskType, - Integer dimensions, - SimilarityMeasure similarity, - DenseVectorFieldMapper.ElementType elementType - ) { - this.taskType = Objects.requireNonNull(taskType, "task type must not be null"); - this.dimensions = dimensions; - this.similarity = similarity; - this.elementType = elementType; - validate(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(TASK_TYPE_FIELD, taskType.toString()); - if (dimensions != null) { - builder.field(DIMENSIONS_FIELD, dimensions); - } - if (similarity != null) { - builder.field(SIMILARITY_FIELD, similarity); - } - if (elementType != null) { - builder.field(ELEMENT_TYPE_FIELD, elementType); - } - return builder.endObject(); - } - - @Override - public String toString() { - final StringBuilder sb = new StringBuilder(); - sb.append("task_type=").append(taskType); - if (dimensions != null) { - sb.append(", dimensions=").append(dimensions); - } - if (similarity != null) { - sb.append(", similarity=").append(similarity); - } - if (elementType != null) { - sb.append(", element_type=").append(elementType); - } - return sb.toString(); - } - - private void validate() { - switch (taskType) { - case TEXT_EMBEDDING: - validateFieldPresent(DIMENSIONS_FIELD, dimensions); - validateFieldPresent(SIMILARITY_FIELD, similarity); - validateFieldPresent(ELEMENT_TYPE_FIELD, elementType); - break; - case SPARSE_EMBEDDING: - validateFieldNotPresent(DIMENSIONS_FIELD, dimensions); - validateFieldNotPresent(SIMILARITY_FIELD, similarity); - validateFieldNotPresent(ELEMENT_TYPE_FIELD, elementType); - break; - - default: - throw new IllegalArgumentException( - "Wrong [" - + TASK_TYPE_FIELD - + "], expected " - + TEXT_EMBEDDING - + " or " - + SPARSE_EMBEDDING - + ", got " - + taskType.name() - ); - } - } - - private void validateFieldPresent(String field, Object fieldValue) { - if (fieldValue == null) { - throw new IllegalArgumentException("required [" + field + "] field is missing for task_type [" + taskType.name() + "]"); - } - } - - private void validateFieldNotPresent(String field, Object fieldValue) { - if (fieldValue != null) { - throw new IllegalArgumentException("[" + field + "] is not allowed for task_type [" + taskType.name() + "]"); - } - } - } - public static String getOriginalTextFieldName(String fieldName) { return fieldName + "." 
+ TEXT_FIELD; } @@ -212,7 +102,7 @@ static SemanticTextField parse(XContentParser parser, ParserContext context) thr return SEMANTIC_TEXT_FIELD_PARSER.parse(parser, context); } - static ModelSettings parseModelSettingsFromMap(Object node) { + static MinimalServiceSettings parseModelSettingsFromMap(Object node) { if (node == null) { return null; } @@ -224,7 +114,7 @@ static ModelSettings parseModelSettingsFromMap(Object node) { map, XContentType.JSON ); - return MODEL_SETTINGS_PARSER.parse(parser, null); + return MinimalServiceSettings.parse(parser); } catch (Exception exc) { throw new ElasticsearchException(exc); } @@ -307,7 +197,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws private static final ConstructingObjectParser INFERENCE_RESULT_PARSER = new ConstructingObjectParser<>( INFERENCE_FIELD, true, - args -> new InferenceResult((String) args[0], (ModelSettings) args[1], (Map>) args[2]) + args -> new InferenceResult((String) args[0], (MinimalServiceSettings) args[1], (Map>) args[2]) ); private static final ConstructingObjectParser CHUNKS_PARSER = new ConstructingObjectParser<>( @@ -322,20 +212,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } ); - private static final ConstructingObjectParser MODEL_SETTINGS_PARSER = new ConstructingObjectParser<>( - MODEL_SETTINGS_FIELD, - true, - args -> { - TaskType taskType = TaskType.fromString((String) args[0]); - Integer dimensions = (Integer) args[1]; - SimilarityMeasure similarity = args[2] == null ? null : SimilarityMeasure.fromString((String) args[2]); - DenseVectorFieldMapper.ElementType elementType = args[3] == null - ? null - : DenseVectorFieldMapper.ElementType.fromString((String) args[3]); - return new ModelSettings(taskType, dimensions, similarity, elementType); - } - ); - static { SEMANTIC_TEXT_FIELD_PARSER.declareStringArray(optionalConstructorArg(), new ParseField(TEXT_FIELD)); SEMANTIC_TEXT_FIELD_PARSER.declareObject(constructorArg(), INFERENCE_RESULT_PARSER, new ParseField(INFERENCE_FIELD)); @@ -343,7 +219,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws INFERENCE_RESULT_PARSER.declareString(constructorArg(), new ParseField(INFERENCE_ID_FIELD)); INFERENCE_RESULT_PARSER.declareObjectOrNull( constructorArg(), - (p, c) -> MODEL_SETTINGS_PARSER.parse(p, null), + (p, c) -> MinimalServiceSettings.parse(p), null, new ParseField(MODEL_SETTINGS_FIELD) ); @@ -362,11 +238,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws b.copyCurrentStructure(p); return BytesReference.bytes(b); }, new ParseField(CHUNKED_EMBEDDINGS_FIELD), ObjectParser.ValueType.OBJECT_ARRAY); - - MODEL_SETTINGS_PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField(TASK_TYPE_FIELD)); - MODEL_SETTINGS_PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), new ParseField(DIMENSIONS_FIELD)); - MODEL_SETTINGS_PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField(SIMILARITY_FIELD)); - MODEL_SETTINGS_PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField(ELEMENT_TYPE_FIELD)); } private static Map> parseChunksMap(XContentParser parser, ParserContext context) throws IOException { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 
b00e37ba612ca..1acdff7a751ae 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -59,6 +59,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.search.fetch.StoredFieldsSpec; import org.elasticsearch.search.lookup.Source; @@ -87,6 +88,8 @@ import java.util.function.BiConsumer; import java.util.function.Function; +import static org.elasticsearch.inference.TaskType.SPARSE_EMBEDDING; +import static org.elasticsearch.inference.TaskType.TEXT_EMBEDDING; import static org.elasticsearch.search.SearchService.DEFAULT_SIZE; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_EMBEDDINGS_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_OFFSET_FIELD; @@ -160,7 +163,7 @@ public static class Builder extends FieldMapper.Builder { } }); - private final Parameter modelSettings = new Parameter<>( + private final Parameter modelSettings = new Parameter<>( MODEL_SETTINGS_FIELD, true, () -> null, @@ -207,7 +210,7 @@ public Builder setSearchInferenceId(String id) { return this; } - public Builder setModelSettings(SemanticTextField.ModelSettings value) { + public Builder setModelSettings(MinimalServiceSettings value) { this.modelSettings.setValue(value); return this; } @@ -238,6 +241,9 @@ public SemanticTextFieldMapper build(MapperBuilderContext context) { if (useLegacyFormat && multiFieldsBuilder.hasMultiFields()) { throw new IllegalArgumentException(CONTENT_TYPE + " field [" + leafName() + "] does not support multi-fields"); } + if (modelSettings.get() != null) { + validateServiceSettings(modelSettings.get()); + } final String fullName = context.buildFullName(leafName()); if (context.isInNestedContext()) { @@ -261,9 +267,26 @@ public SemanticTextFieldMapper build(MapperBuilderContext context) { ); } + private void validateServiceSettings(MinimalServiceSettings settings) { + switch (settings.taskType()) { + case SPARSE_EMBEDDING, TEXT_EMBEDDING -> { + } + default -> throw new IllegalArgumentException( + "Wrong [" + + MinimalServiceSettings.TASK_TYPE_FIELD + + "], expected " + + TEXT_EMBEDDING + + " or " + + SPARSE_EMBEDDING + + ", got " + + settings.taskType().name() + ); + } + } + /** * As necessary, copy settings from this builder to the passed-in mapper. - * Used to preserve {@link SemanticTextField.ModelSettings} when updating a semantic text mapping to one where the model settings + * Used to preserve {@link MinimalServiceSettings} when updating a semantic text mapping to one where the model settings * are not specified. 
* * @param mapper The mapper @@ -522,7 +545,7 @@ protected void doValidate(MappingLookup mappers) { public static class SemanticTextFieldType extends SimpleMappedFieldType { private final String inferenceId; private final String searchInferenceId; - private final SemanticTextField.ModelSettings modelSettings; + private final MinimalServiceSettings modelSettings; private final ObjectMapper inferenceField; private final boolean useLegacyFormat; @@ -530,7 +553,7 @@ public SemanticTextFieldType( String name, String inferenceId, String searchInferenceId, - SemanticTextField.ModelSettings modelSettings, + MinimalServiceSettings modelSettings, ObjectMapper inferenceField, boolean useLegacyFormat, Map meta @@ -565,7 +588,7 @@ public String getSearchInferenceId() { return searchInferenceId == null ? inferenceId : searchInferenceId; } - public SemanticTextField.ModelSettings getModelSettings() { + public MinimalServiceSettings getModelSettings() { return modelSettings; } @@ -879,7 +902,7 @@ private static ObjectMapper createInferenceField( MapperBuilderContext context, IndexVersion indexVersionCreated, boolean useLegacyFormat, - @Nullable SemanticTextField.ModelSettings modelSettings, + @Nullable MinimalServiceSettings modelSettings, Function bitSetProducer, IndexSettings indexSettings ) { @@ -891,7 +914,7 @@ private static ObjectMapper createInferenceField( private static NestedObjectMapper.Builder createChunksField( IndexVersion indexVersionCreated, boolean useLegacyFormat, - @Nullable SemanticTextField.ModelSettings modelSettings, + @Nullable MinimalServiceSettings modelSettings, Function bitSetProducer, IndexSettings indexSettings ) { @@ -916,7 +939,7 @@ private static NestedObjectMapper.Builder createChunksField( private static Mapper.Builder createEmbeddingsField( IndexVersion indexVersionCreated, - SemanticTextField.ModelSettings modelSettings, + MinimalServiceSettings modelSettings, boolean useLegacyFormat ) { return switch (modelSettings.taskType()) { @@ -947,11 +970,7 @@ private static Mapper.Builder createEmbeddingsField( }; } - private static boolean canMergeModelSettings( - SemanticTextField.ModelSettings previous, - SemanticTextField.ModelSettings current, - Conflicts conflicts - ) { + private static boolean canMergeModelSettings(MinimalServiceSettings previous, MinimalServiceSettings current, Conflicts conflicts) { if (Objects.equals(previous, current)) { return true; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 4506a05d58054..1369ebf7dd87b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; @@ -56,6 +57,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -103,33 +105,33 @@ public static UnparsedModel 
unparsedModelFromMap(ModelConfigMap modelConfigMap) private static final Logger logger = LogManager.getLogger(ModelRegistry.class); private final OriginSettingClient client; - private final List defaultConfigIds; + private final Map defaultConfigIds; private final Set preventDeletionLock = Collections.newSetFromMap(new ConcurrentHashMap<>()); public ModelRegistry(Client client) { this.client = new OriginSettingClient(client, ClientHelper.INFERENCE_ORIGIN); - defaultConfigIds = new ArrayList<>(); + defaultConfigIds = new HashMap<>(); } /** * Set the default inference ids provided by the services - * @param defaultConfigIds The defaults + * @param defaultConfigId The default */ - public void addDefaultIds(InferenceService.DefaultConfigId defaultConfigIds) { - var matched = idMatchedDefault(defaultConfigIds.inferenceId(), this.defaultConfigIds); - if (matched.isPresent()) { + public synchronized void addDefaultIds(InferenceService.DefaultConfigId defaultConfigId) { + var config = defaultConfigIds.get(defaultConfigId.inferenceId()); + if (config != null) { throw new IllegalStateException( "Cannot add default endpoint to the inference endpoint registry with duplicate inference id [" - + defaultConfigIds.inferenceId() + + defaultConfigId.inferenceId() + "] declared by service [" - + defaultConfigIds.service().name() + + defaultConfigId.service().name() + "]. The inference Id is already use by [" - + matched.get().service().name() + + config.service().name() + "] service." ); } - this.defaultConfigIds.add(defaultConfigIds); + defaultConfigIds.put(defaultConfigId.inferenceId(), defaultConfigId); } /** @@ -141,9 +143,9 @@ public void getModelWithSecrets(String inferenceEntityId, ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // There should be a hit for the configurations if (searchResponse.getHits().getHits().length == 0) { - var maybeDefault = idMatchedDefault(inferenceEntityId, defaultConfigIds); - if (maybeDefault.isPresent()) { - getDefaultConfig(true, maybeDefault.get(), listener); + var maybeDefault = defaultConfigIds.get(inferenceEntityId); + if (maybeDefault != null) { + getDefaultConfig(true, maybeDefault, listener); } else { delegate.onFailure(inferenceNotFoundException(inferenceEntityId)); } @@ -172,9 +174,9 @@ public void getModel(String inferenceEntityId, ActionListener lis ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // There should be a hit for the configurations if (searchResponse.getHits().getHits().length == 0) { - var maybeDefault = idMatchedDefault(inferenceEntityId, defaultConfigIds); - if (maybeDefault.isPresent()) { - getDefaultConfig(true, maybeDefault.get(), listener); + var maybeDefault = defaultConfigIds.get(inferenceEntityId); + if (maybeDefault != null) { + getDefaultConfig(true, maybeDefault, listener); } else { delegate.onFailure(inferenceNotFoundException(inferenceEntityId)); } @@ -196,6 +198,27 @@ public void getModel(String inferenceEntityId, ActionListener lis client.search(modelSearch, searchListener); } + /** + * Retrieves the {@link MinimalServiceSettings} associated with the specified {@code inferenceEntityId}. + * + * If the {@code inferenceEntityId} is not found, the method behaves as follows: + *
+     * <ul>
+     *     <li>Returns {@code null} if the id might exist but its configuration is not available locally.</li>
+     *     <li>Throws a {@link ResourceNotFoundException} if it is certain that the id does not exist in the cluster.</li>
+     * </ul>
+ * + * @param inferenceEntityId the unique identifier for the inference entity. + * @return the {@link MinimalServiceSettings} associated with the provided ID, or {@code null} if unavailable locally. + * @throws ResourceNotFoundException if the specified id is guaranteed to not exist in the cluster. + */ + public MinimalServiceSettings getMinimalServiceSettings(String inferenceEntityId) throws ResourceNotFoundException { + var config = defaultConfigIds.get(inferenceEntityId); + if (config != null) { + return config.settings(); + } + return null; + } + private ResourceNotFoundException inferenceNotFoundException(String inferenceEntityId) { return new ResourceNotFoundException("Inference endpoint not found [{}]", inferenceEntityId); } @@ -209,7 +232,7 @@ private ResourceNotFoundException inferenceNotFoundException(String inferenceEnt public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistry::unparsedModelFromMap).toList(); - var defaultConfigsForTaskType = taskTypeMatchedDefaults(taskType, defaultConfigIds); + var defaultConfigsForTaskType = taskTypeMatchedDefaults(taskType, defaultConfigIds.values()); addAllDefaultConfigsIfMissing(true, modelConfigs, defaultConfigsForTaskType, delegate); }); @@ -240,7 +263,7 @@ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { var foundConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistry::unparsedModelFromMap).toList(); - addAllDefaultConfigsIfMissing(persistDefaultEndpoints, foundConfigs, defaultConfigIds, delegate); + addAllDefaultConfigsIfMissing(persistDefaultEndpoints, foundConfigs, defaultConfigIds.values(), delegate); }); // In theory the index should only contain model config documents @@ -261,7 +284,7 @@ public void getAllModels(boolean persistDefaultEndpoints, ActionListener foundConfigs, - List matchedDefaults, + Collection matchedDefaults, ActionListener> listener ) { var foundIds = foundConfigs.stream().map(UnparsedModel::inferenceEntityId).collect(Collectors.toSet()); @@ -671,10 +694,10 @@ static Optional idMatchedDefault( static List taskTypeMatchedDefaults( TaskType taskType, - List defaultConfigIds + Collection defaultConfigIds ) { return defaultConfigIds.stream() - .filter(defaultConfigId -> defaultConfigId.taskType().equals(taskType)) + .filter(defaultConfigId -> defaultConfigId.settings().taskType().equals(taskType)) .collect(Collectors.toList()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 9807a079140db..9dfa21a323c33 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -25,6 +25,7 @@ import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.MinimalServiceSettings; import 
org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.SettingsConfiguration; @@ -828,9 +829,9 @@ private RankedDocsResults textSimilarityResultsToRankedDocs( public List defaultConfigIds() { return List.of( - new DefaultConfigId(DEFAULT_ELSER_ID, TaskType.SPARSE_EMBEDDING, this), - new DefaultConfigId(DEFAULT_E5_ID, TaskType.TEXT_EMBEDDING, this), - new DefaultConfigId(DEFAULT_RERANK_ID, TaskType.RERANK, this) + new DefaultConfigId(DEFAULT_ELSER_ID, ElserInternalServiceSettings.minimalServiceSettings(), this), + new DefaultConfigId(DEFAULT_E5_ID, MultilingualE5SmallInternalServiceSettings.minimalServiceSettings(), this), + new DefaultConfigId(DEFAULT_RERANK_ID, MinimalServiceSettings.rerank(), this) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java index f7bcd95c8bd28..da9164bf3f288 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java @@ -11,6 +11,7 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import java.io.IOException; @@ -21,6 +22,10 @@ public class ElserInternalServiceSettings extends ElasticsearchInternalServiceSe public static final String NAME = "elser_mlnode_service_settings"; + public static MinimalServiceSettings minimalServiceSettings() { + return MinimalServiceSettings.sparseEmbedding(); + } + public static Builder fromRequestMap(Map map) { ValidationException validationException = new ValidationException(); var baseSettings = ElasticsearchInternalServiceSettings.fromMap(map, validationException); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java index 2f27fa073b4f0..317cc48172fca 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; @@ -24,6 +25,10 @@ public class MultilingualE5SmallInternalServiceSettings extends ElasticsearchInt static final int DIMENSIONS = 384; static final SimilarityMeasure SIMILARITY = SimilarityMeasure.COSINE; + public static MinimalServiceSettings minimalServiceSettings() { + return 
MinimalServiceSettings.textEmbedding(DIMENSIONS, SIMILARITY, DenseVectorFieldMapper.ElementType.FLOAT); + } + public MultilingualE5SmallInternalServiceSettings(ElasticsearchInternalServiceSettings other) { super(other); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index e5084b253d7e9..e837e1b0db989 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -54,6 +54,7 @@ import org.elasticsearch.index.mapper.vectors.XFeatureField; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.search.ESToParentBlockJoinQuery; +import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; @@ -297,6 +298,28 @@ public void testInvalidInferenceEndpoints() { } } + public void testInvalidTaskTypes() { + for (var taskType : TaskType.values()) { + if (taskType == TaskType.TEXT_EMBEDDING || taskType == TaskType.SPARSE_EMBEDDING) { + continue; + } + Exception e = expectThrows( + MapperParsingException.class, + () -> createMapperService( + fieldMapping( + b -> b.field("type", "semantic_text") + .field(INFERENCE_ID_FIELD, "test1") + .startObject("model_settings") + .field("task_type", taskType) + .endObject() + ), + useLegacyFormat + ) + ); + assertThat(e.getMessage(), containsString("Failed to parse mapping: Wrong [task_type]")); + } + } + public void testMultiFieldsSupport() throws IOException { if (useLegacyFormat) { Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> { @@ -397,7 +420,7 @@ public void testDynamicUpdate() throws IOException { MapperService mapperService = mapperServiceForFieldWithModelSettings( fieldName, inferenceId, - new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null) + new MinimalServiceSettings(TaskType.SPARSE_EMBEDDING, null, null, null) ); assertSemanticTextField(mapperService, fieldName, true); assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId); @@ -408,7 +431,7 @@ public void testDynamicUpdate() throws IOException { fieldName, inferenceId, searchInferenceId, - new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null) + new MinimalServiceSettings(TaskType.SPARSE_EMBEDDING, null, null, null) ); assertSemanticTextField(mapperService, fieldName, true); assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId); @@ -528,7 +551,7 @@ public void testUpdateSearchInferenceId() throws IOException { mapperService = mapperServiceForFieldWithModelSettings( fieldName, inferenceId, - new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null) + new MinimalServiceSettings(TaskType.SPARSE_EMBEDDING, null, null, null) ); assertSemanticTextField(mapperService, fieldName, true); assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId); @@ -747,7 +770,7 @@ public void testMissingInferenceId() throws IOException { useLegacyFormat, b -> b.startObject("field") .startObject(INFERENCE_FIELD) - .field(MODEL_SETTINGS_FIELD, new 
SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null)) + .field(MODEL_SETTINGS_FIELD, new MinimalServiceSettings(TaskType.SPARSE_EMBEDDING, null, null, null)) .field(CHUNKS_FIELD, useLegacyFormat ? List.of() : Map.of()) .endObject() .endObject() @@ -809,24 +832,14 @@ public void testDenseVectorElementType() throws IOException { MapperService floatMapperService = mapperServiceForFieldWithModelSettings( fieldName, inferenceId, - new SemanticTextField.ModelSettings( - TaskType.TEXT_EMBEDDING, - 1024, - SimilarityMeasure.COSINE, - DenseVectorFieldMapper.ElementType.FLOAT - ) + new MinimalServiceSettings(TaskType.TEXT_EMBEDDING, 1024, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT) ); assertMapperService.accept(floatMapperService, DenseVectorFieldMapper.ElementType.FLOAT); MapperService byteMapperService = mapperServiceForFieldWithModelSettings( fieldName, inferenceId, - new SemanticTextField.ModelSettings( - TaskType.TEXT_EMBEDDING, - 1024, - SimilarityMeasure.COSINE, - DenseVectorFieldMapper.ElementType.BYTE - ) + new MinimalServiceSettings(TaskType.TEXT_EMBEDDING, 1024, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.BYTE) ); assertMapperService.accept(byteMapperService, DenseVectorFieldMapper.ElementType.BYTE); } @@ -860,11 +873,8 @@ public void testModelSettingsRequiredWithChunks() throws IOException { assertThat(ex.getMessage(), containsString("[model_settings] must be set for field [field] when chunks are provided")); } - private MapperService mapperServiceForFieldWithModelSettings( - String fieldName, - String inferenceId, - SemanticTextField.ModelSettings modelSettings - ) throws IOException { + private MapperService mapperServiceForFieldWithModelSettings(String fieldName, String inferenceId, MinimalServiceSettings modelSettings) + throws IOException { return mapperServiceForFieldWithModelSettings(fieldName, inferenceId, null, modelSettings); } @@ -872,7 +882,7 @@ private MapperService mapperServiceForFieldWithModelSettings( String fieldName, String inferenceId, String searchInferenceId, - SemanticTextField.ModelSettings modelSettings + MinimalServiceSettings modelSettings ) throws IOException { String mappingParams = "type=semantic_text,inference_id=" + inferenceId; if (searchInferenceId != null) { @@ -919,7 +929,7 @@ public void testExistsQuerySparseVector() throws IOException { MapperService mapperService = mapperServiceForFieldWithModelSettings( fieldName, inferenceId, - new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null) + new MinimalServiceSettings(TaskType.SPARSE_EMBEDDING, null, null, null) ); Mapper mapper = mapperService.mappingLookup().getMapper(fieldName); @@ -936,12 +946,7 @@ public void testExistsQueryDenseVector() throws IOException { MapperService mapperService = mapperServiceForFieldWithModelSettings( fieldName, inferenceId, - new SemanticTextField.ModelSettings( - TaskType.TEXT_EMBEDDING, - 1024, - SimilarityMeasure.COSINE, - DenseVectorFieldMapper.ElementType.FLOAT - ) + new MinimalServiceSettings(TaskType.TEXT_EMBEDDING, 1024, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT) ); Mapper mapper = mapperService.mappingLookup().getMapper(fieldName); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java index 6a25ed506c2a3..404713581eddd 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ChunkedInference; +import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; @@ -66,7 +67,7 @@ protected void assertEqualInstances(SemanticTextField expectedInstance, Semantic assertThat(newInstance.originalValues(), equalTo(expectedInstance.originalValues())); assertThat(newInstance.inference().modelSettings(), equalTo(expectedInstance.inference().modelSettings())); assertThat(newInstance.inference().chunks().size(), equalTo(expectedInstance.inference().chunks().size())); - SemanticTextField.ModelSettings modelSettings = newInstance.inference().modelSettings(); + MinimalServiceSettings modelSettings = newInstance.inference().modelSettings(); for (var entry : newInstance.inference().chunks().entrySet()) { var expectedChunks = expectedInstance.inference().chunks().get(entry.getKey()); assertNotNull(expectedChunks); @@ -133,53 +134,37 @@ protected boolean supportsUnknownFields() { public void testModelSettingsValidation() { NullPointerException npe = expectThrows(NullPointerException.class, () -> { - new SemanticTextField.ModelSettings(null, 10, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT); + new MinimalServiceSettings(null, 10, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT); }); assertThat(npe.getMessage(), equalTo("task type must not be null")); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> { - new SemanticTextField.ModelSettings( - TaskType.COMPLETION, - 10, - SimilarityMeasure.COSINE, - DenseVectorFieldMapper.ElementType.FLOAT - ); + new MinimalServiceSettings(TaskType.SPARSE_EMBEDDING, 10, null, null); }); - assertThat(ex.getMessage(), containsString("Wrong [task_type]")); - - ex = expectThrows( - IllegalArgumentException.class, - () -> { new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, 10, null, null); } - ); assertThat(ex.getMessage(), containsString("[dimensions] is not allowed")); ex = expectThrows(IllegalArgumentException.class, () -> { - new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, SimilarityMeasure.COSINE, null); + new MinimalServiceSettings(TaskType.SPARSE_EMBEDDING, null, SimilarityMeasure.COSINE, null); }); assertThat(ex.getMessage(), containsString("[similarity] is not allowed")); ex = expectThrows(IllegalArgumentException.class, () -> { - new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, DenseVectorFieldMapper.ElementType.FLOAT); + new MinimalServiceSettings(TaskType.SPARSE_EMBEDDING, null, null, DenseVectorFieldMapper.ElementType.FLOAT); }); assertThat(ex.getMessage(), containsString("[element_type] is not allowed")); ex = expectThrows(IllegalArgumentException.class, () -> { - new SemanticTextField.ModelSettings( - TaskType.TEXT_EMBEDDING, - null, - SimilarityMeasure.COSINE, - DenseVectorFieldMapper.ElementType.FLOAT - ); + new MinimalServiceSettings(TaskType.TEXT_EMBEDDING, null, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT); }); assertThat(ex.getMessage(), 
containsString("required [dimensions] field is missing")); ex = expectThrows(IllegalArgumentException.class, () -> { - new SemanticTextField.ModelSettings(TaskType.TEXT_EMBEDDING, 10, null, DenseVectorFieldMapper.ElementType.FLOAT); + new MinimalServiceSettings(TaskType.TEXT_EMBEDDING, 10, null, DenseVectorFieldMapper.ElementType.FLOAT); }); assertThat(ex.getMessage(), containsString("required [similarity] field is missing")); ex = expectThrows(IllegalArgumentException.class, () -> { - new SemanticTextField.ModelSettings(TaskType.TEXT_EMBEDDING, 10, SimilarityMeasure.COSINE, null); + new MinimalServiceSettings(TaskType.TEXT_EMBEDDING, 10, SimilarityMeasure.COSINE, null); }); assertThat(ex.getMessage(), containsString("required [element_type] field is missing")); } @@ -285,7 +270,7 @@ public static SemanticTextField semanticTextFieldFromChunkedInferenceResults( useLegacyFormat ? inputs : null, new SemanticTextField.InferenceResult( model.getInferenceEntityId(), - new SemanticTextField.ModelSettings(model), + new MinimalServiceSettings(model), Map.of(fieldName, chunks) ), contentType diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index d5042643013e6..9a3b4eff1958a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -40,6 +40,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.search.ESToParentBlockJoinQuery; import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.plugins.Plugin; @@ -351,10 +352,10 @@ private static SourceToParse buildSemanticTextFieldWithInferenceResults( DenseVectorFieldMapper.ElementType denseVectorElementType, boolean useLegacyFormat ) throws IOException { - SemanticTextField.ModelSettings modelSettings = switch (inferenceResultType) { + var modelSettings = switch (inferenceResultType) { case NONE -> null; - case SPARSE_EMBEDDING -> new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null); - case TEXT_EMBEDDING -> new SemanticTextField.ModelSettings( + case SPARSE_EMBEDDING -> new MinimalServiceSettings(TaskType.SPARSE_EMBEDDING, null, null, null); + case TEXT_EMBEDDING -> new MinimalServiceSettings( TaskType.TEXT_EMBEDDING, TEXT_EMBEDDING_DIMENSION_COUNT, SimilarityMeasure.COSINE, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index 6f033fdfd2f22..1e47a9b8d5ab6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -22,7 +22,10 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import 
org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.MinimalServiceSettings; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.search.SearchHit; @@ -294,8 +297,12 @@ public void testStoreModel_ThrowsException_WhenFailureIsNotAVersionConflict() { public void testIdMatchedDefault() { var defaultConfigIds = new ArrayList(); - defaultConfigIds.add(new InferenceService.DefaultConfigId("foo", TaskType.SPARSE_EMBEDDING, mock(InferenceService.class))); - defaultConfigIds.add(new InferenceService.DefaultConfigId("bar", TaskType.SPARSE_EMBEDDING, mock(InferenceService.class))); + defaultConfigIds.add( + new InferenceService.DefaultConfigId("foo", MinimalServiceSettings.sparseEmbedding(), mock(InferenceService.class)) + ); + defaultConfigIds.add( + new InferenceService.DefaultConfigId("bar", MinimalServiceSettings.sparseEmbedding(), mock(InferenceService.class)) + ); var matched = ModelRegistry.idMatchedDefault("bar", defaultConfigIds); assertEquals(defaultConfigIds.get(1), matched.get()); @@ -305,10 +312,20 @@ public void testIdMatchedDefault() { public void testTaskTypeMatchedDefaults() { var defaultConfigIds = new ArrayList(); - defaultConfigIds.add(new InferenceService.DefaultConfigId("s1", TaskType.SPARSE_EMBEDDING, mock(InferenceService.class))); - defaultConfigIds.add(new InferenceService.DefaultConfigId("s2", TaskType.SPARSE_EMBEDDING, mock(InferenceService.class))); - defaultConfigIds.add(new InferenceService.DefaultConfigId("d1", TaskType.TEXT_EMBEDDING, mock(InferenceService.class))); - defaultConfigIds.add(new InferenceService.DefaultConfigId("c1", TaskType.COMPLETION, mock(InferenceService.class))); + defaultConfigIds.add( + new InferenceService.DefaultConfigId("s1", MinimalServiceSettings.sparseEmbedding(), mock(InferenceService.class)) + ); + defaultConfigIds.add( + new InferenceService.DefaultConfigId("s2", MinimalServiceSettings.sparseEmbedding(), mock(InferenceService.class)) + ); + defaultConfigIds.add( + new InferenceService.DefaultConfigId( + "d1", + MinimalServiceSettings.textEmbedding(384, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT), + mock(InferenceService.class) + ) + ); + defaultConfigIds.add(new InferenceService.DefaultConfigId("c1", MinimalServiceSettings.completion(), mock(InferenceService.class))); var matched = ModelRegistry.taskTypeMatchedDefaults(TaskType.SPARSE_EMBEDDING, defaultConfigIds); assertThat(matched, contains(defaultConfigIds.get(0), defaultConfigIds.get(1))); @@ -328,10 +345,10 @@ public void testDuplicateDefaultIds() { var mockServiceB = mock(InferenceService.class); when(mockServiceB.name()).thenReturn("service-b"); - registry.addDefaultIds(new InferenceService.DefaultConfigId(id, randomFrom(TaskType.values()), mockServiceA)); + registry.addDefaultIds(new InferenceService.DefaultConfigId(id, randomMinimalServiceSettings(), mockServiceA)); var ise = expectThrows( IllegalStateException.class, - () -> registry.addDefaultIds(new InferenceService.DefaultConfigId(id, randomFrom(TaskType.values()), mockServiceB)) + () -> registry.addDefaultIds(new InferenceService.DefaultConfigId(id, randomMinimalServiceSettings(), mockServiceB)) ); assertThat( ise.getMessage(), @@ -385,4 +402,16 @@ private static SearchResponse mockSearchResponse(SearchHit[] hits) { return searchResponse; } + + public static MinimalServiceSettings randomMinimalServiceSettings() { + TaskType type = 
randomFrom(TaskType.values()); + if (type == TaskType.TEXT_EMBEDDING) { + return MinimalServiceSettings.textEmbedding( + randomIntBetween(2, 384), + randomFrom(SimilarityMeasure.values()), + randomFrom(DenseVectorFieldMapper.ElementType.values()) + ); + } + return new MinimalServiceSettings(type, null, null, null); + } } From 241b998c7f031327a8303d7a41c5443e2ab95f0b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 05:07:18 +1100 Subject: [PATCH 081/383] Mute org.elasticsearch.xpack.esql.qa.multi_node.RestEsqlIT org.elasticsearch.xpack.esql.qa.multi_node.RestEsqlIT #120948 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 986ddff789732..3f03fbb6e3bbd 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -272,6 +272,8 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test070BindMountCustomPathConfAndJvmOptions issue: https://github.com/elastic/elasticsearch/issues/120910 +- class: org.elasticsearch.xpack.esql.qa.multi_node.RestEsqlIT + issue: https://github.com/elastic/elasticsearch/issues/120948 # Examples: # From 570efa2babcb5ee955a72e0a81acf3cc75172483 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Mon, 27 Jan 2025 19:11:55 +0100 Subject: [PATCH 082/383] Revert "Add http stream content size handler (#120246)" (#120934) This reverts commit 43e3e24d075332ed8c697113236ee4126fd56603. --- .../Netty4IncrementalRequestHandlingIT.java | 94 ++++--- .../http/netty4/Netty4HttpAggregator.java | 41 ++- .../netty4/Netty4HttpContentSizeHandler.java | 171 ------------- .../netty4/Netty4HttpServerTransport.java | 3 +- .../Netty4HttpContentSizeHandlerTests.java | 238 ------------------ 5 files changed, 81 insertions(+), 466 deletions(-) delete mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java delete mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java index 0158384b47aa4..d825ec0a83f53 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java @@ -54,6 +54,7 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.http.HttpBodyTracer; +import org.elasticsearch.http.HttpHandlingSettings; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.plugins.ActionPlugin; @@ -92,15 +93,10 @@ @ESIntegTestCase.ClusterScope(numDataNodes = 1) public class Netty4IncrementalRequestHandlingIT extends ESNetty4IntegTestCase { - private static final int MAX_CONTENT_LENGTH = ByteSizeUnit.MB.toIntBytes(50); - @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); - builder.put( - HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH.getKey(), - 
ByteSizeValue.of(MAX_CONTENT_LENGTH, ByteSizeUnit.BYTES) - ); + builder.put(HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH.getKey(), ByteSizeValue.of(50, ByteSizeUnit.MB)); return builder.build(); } @@ -139,7 +135,7 @@ public void testReceiveAllChunks() throws Exception { var opaqueId = opaqueId(reqNo); // this dataset will be compared with one on server side - var dataSize = randomIntBetween(1024, MAX_CONTENT_LENGTH); + var dataSize = randomIntBetween(1024, maxContentLength()); var sendData = Unpooled.wrappedBuffer(randomByteArrayOfLength(dataSize)); sendData.retain(); ctx.clientChannel.writeAndFlush(fullHttpRequest(opaqueId, sendData)); @@ -247,7 +243,7 @@ public void testServerExceptionMidStream() throws Exception { public void testClientBackpressure() throws Exception { try (var ctx = setupClientCtx()) { var opaqueId = opaqueId(0); - var payloadSize = MAX_CONTENT_LENGTH; + var payloadSize = maxContentLength(); var totalParts = 10; var partSize = payloadSize / totalParts; ctx.clientChannel.writeAndFlush(httpRequest(opaqueId, payloadSize)); @@ -289,7 +285,7 @@ public void test100Continue() throws Exception { try (var ctx = setupClientCtx()) { for (int reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var acceptableContentLength = randomIntBetween(0, MAX_CONTENT_LENGTH); + var acceptableContentLength = randomIntBetween(0, maxContentLength()); // send request header and await 100-continue var req = httpRequest(id, acceptableContentLength); @@ -321,7 +317,7 @@ public void test413TooLargeOnExpect100Continue() throws Exception { try (var ctx = setupClientCtx()) { for (int reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var oversized = MAX_CONTENT_LENGTH + 1; + var oversized = maxContentLength() + 1; // send request header and await 413 too large var req = httpRequest(id, oversized); @@ -337,28 +333,32 @@ public void test413TooLargeOnExpect100Continue() throws Exception { } } - // ensures that oversized chunked encoded request has maxContentLength limit and returns 413 - public void testOversizedChunkedEncoding() throws Exception { + // ensures that oversized chunked encoded request has no limits at http layer + // rest handler is responsible for oversized requests + public void testOversizedChunkedEncodingNoLimits() throws Exception { try (var ctx = setupClientCtx()) { - var id = opaqueId(0); - var contentSize = MAX_CONTENT_LENGTH + 1; - var content = randomByteArrayOfLength(contentSize); - var is = new ByteBufInputStream(Unpooled.wrappedBuffer(content)); - var chunkedIs = new ChunkedStream(is); - var httpChunkedIs = new HttpChunkedInput(chunkedIs, LastHttpContent.EMPTY_LAST_CONTENT); - var req = httpRequest(id, 0); - HttpUtil.setTransferEncodingChunked(req, true); - - ctx.clientChannel.pipeline().addLast(new ChunkedWriteHandler()); - ctx.clientChannel.writeAndFlush(req); - ctx.clientChannel.writeAndFlush(httpChunkedIs); - var handler = ctx.awaitRestChannelAccepted(id); - var consumed = handler.readAllBytes(); - assertTrue(consumed <= MAX_CONTENT_LENGTH); - - var resp = (FullHttpResponse) safePoll(ctx.clientRespQueue); - assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); - resp.release(); + for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { + var id = opaqueId(reqNo); + var contentSize = maxContentLength() + 1; + var content = randomByteArrayOfLength(contentSize); + var is = new ByteBufInputStream(Unpooled.wrappedBuffer(content)); + var chunkedIs = new ChunkedStream(is); + var 
httpChunkedIs = new HttpChunkedInput(chunkedIs, LastHttpContent.EMPTY_LAST_CONTENT); + var req = httpRequest(id, 0); + HttpUtil.setTransferEncodingChunked(req, true); + + ctx.clientChannel.pipeline().addLast(new ChunkedWriteHandler()); + ctx.clientChannel.writeAndFlush(req); + ctx.clientChannel.writeAndFlush(httpChunkedIs); + var handler = ctx.awaitRestChannelAccepted(id); + var consumed = handler.readAllBytes(); + assertEquals(contentSize, consumed); + handler.sendResponse(new RestResponse(RestStatus.OK, "")); + + var resp = (FullHttpResponse) safePoll(ctx.clientRespQueue); + assertEquals(HttpResponseStatus.OK, resp.status()); + resp.release(); + } } } @@ -369,7 +369,7 @@ public void testBadRequestReleaseQueuedChunks() throws Exception { try (var ctx = setupClientCtx()) { for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var contentSize = randomIntBetween(0, MAX_CONTENT_LENGTH); + var contentSize = randomIntBetween(0, maxContentLength()); var req = httpRequest(id, contentSize); var content = randomContent(contentSize, true); @@ -405,7 +405,7 @@ public void testHttpClientStats() throws Exception { for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var contentSize = randomIntBetween(0, MAX_CONTENT_LENGTH); + var contentSize = randomIntBetween(0, maxContentLength()); totalBytesSent += contentSize; ctx.clientChannel.writeAndFlush(httpRequest(id, contentSize)); ctx.clientChannel.writeAndFlush(randomContent(contentSize, true)); @@ -485,6 +485,10 @@ private void assertHttpBodyLogging(Function test) throws Exceptio } } + private int maxContentLength() { + return HttpHandlingSettings.fromSettings(internalCluster().getInstance(Settings.class)).maxContentLength(); + } + private String opaqueId(int reqNo) { return getTestName() + "-" + reqNo; } @@ -654,22 +658,14 @@ void sendResponse(RestResponse response) { int readBytes(int bytes) { var consumed = 0; if (recvLast == false) { - stream.next(); - while (consumed < bytes && streamClosed == false) { - try { - var recvChunk = recvChunks.poll(10, TimeUnit.MILLISECONDS); - if (recvChunk != null) { - consumed += recvChunk.chunk.length(); - recvChunk.chunk.close(); - if (recvChunk.isLast) { - recvLast = true; - break; - } - stream.next(); - } - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new AssertionError(e); + while (consumed < bytes) { + stream.next(); + var recvChunk = safePoll(recvChunks); + consumed += recvChunk.chunk.length(); + recvChunk.chunk.close(); + if (recvChunk.isLast) { + recvLast = true; + break; } } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java index 0294b4626496c..021ce09e0ed8e 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java @@ -11,10 +11,13 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http.FullHttpRequest; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpContent; import io.netty.handler.codec.http.HttpObject; import io.netty.handler.codec.http.HttpObjectAggregator; import io.netty.handler.codec.http.HttpRequest; -import io.netty.handler.codec.http.HttpRequestDecoder; +import io.netty.handler.codec.http.HttpResponseStatus; 
+import io.netty.handler.codec.http.HttpUtil; import org.elasticsearch.http.HttpPreRequest; import org.elasticsearch.http.netty4.internal.HttpHeadersAuthenticatorUtils; @@ -24,19 +27,18 @@ /** * A wrapper around {@link HttpObjectAggregator}. Provides optional content aggregation based on * predicate. {@link HttpObjectAggregator} also handles Expect: 100-continue and oversized content. - * Provides content size handling for non-aggregated requests too. + * Unfortunately, Netty does not provide handlers for oversized messages beyond HttpObjectAggregator. */ public class Netty4HttpAggregator extends HttpObjectAggregator { private static final Predicate IGNORE_TEST = (req) -> req.uri().startsWith("/_test/request-stream") == false; private final Predicate decider; - private final Netty4HttpContentSizeHandler streamContentSizeHandler; private boolean aggregating = true; + private boolean ignoreContentAfterContinueResponse = false; - public Netty4HttpAggregator(int maxContentLength, Predicate decider, HttpRequestDecoder decoder) { + public Netty4HttpAggregator(int maxContentLength, Predicate decider) { super(maxContentLength); this.decider = decider; - this.streamContentSizeHandler = new Netty4HttpContentSizeHandler(decoder, maxContentLength); } @Override @@ -49,7 +51,34 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception if (aggregating || msg instanceof FullHttpRequest) { super.channelRead(ctx, msg); } else { - streamContentSizeHandler.channelRead(ctx, msg); + handle(ctx, (HttpObject) msg); + } + } + + private void handle(ChannelHandlerContext ctx, HttpObject msg) { + if (msg instanceof HttpRequest request) { + var continueResponse = newContinueResponse(request, maxContentLength(), ctx.pipeline()); + if (continueResponse != null) { + // there are 3 responses expected: 100, 413, 417 + // on 100 we pass request further and reply to client to continue + // on 413/417 we ignore following content + ctx.writeAndFlush(continueResponse); + var resp = (FullHttpResponse) continueResponse; + if (resp.status() != HttpResponseStatus.CONTINUE) { + ignoreContentAfterContinueResponse = true; + return; + } + HttpUtil.set100ContinueExpected(request, false); + } + ignoreContentAfterContinueResponse = false; + ctx.fireChannelRead(msg); + } else { + var httpContent = (HttpContent) msg; + if (ignoreContentAfterContinueResponse) { + httpContent.release(); + } else { + ctx.fireChannelRead(msg); + } } } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java deleted file mode 100644 index 2b322fefa1262..0000000000000 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.http.netty4; - -import io.netty.buffer.Unpooled; -import io.netty.channel.ChannelFutureListener; -import io.netty.channel.ChannelHandlerContext; -import io.netty.channel.ChannelInboundHandlerAdapter; -import io.netty.handler.codec.http.DefaultFullHttpResponse; -import io.netty.handler.codec.http.DefaultHttpHeaders; -import io.netty.handler.codec.http.EmptyHttpHeaders; -import io.netty.handler.codec.http.FullHttpResponse; -import io.netty.handler.codec.http.HttpContent; -import io.netty.handler.codec.http.HttpHeaderNames; -import io.netty.handler.codec.http.HttpHeaderValues; -import io.netty.handler.codec.http.HttpObject; -import io.netty.handler.codec.http.HttpRequest; -import io.netty.handler.codec.http.HttpRequestDecoder; -import io.netty.handler.codec.http.HttpResponseStatus; -import io.netty.handler.codec.http.HttpUtil; -import io.netty.handler.codec.http.HttpVersion; -import io.netty.handler.codec.http.LastHttpContent; - -import org.elasticsearch.core.SuppressForbidden; - -import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION; -import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH; - -/** - * Provides handling for 'Expect' header and content size. Implements HTTP1.1 spec. - * Allows {@code Expect: 100-continue} header only. Other 'Expect' headers will be rejected with - * {@code 417 Expectation Failed} reason. - *
- * Replies {@code 100 Continue} to requests with allowed maxContentLength.
- * <p>
- * Replies {@code 413 Request Entity Too Large} when content size exceeds maxContentLength.
- * <p>
- * Channel can be reused for requests with "Expect:100-Continue" header that exceed allowed content length,
- * as long as request does not include content. If oversized request already contains content then
- * we cannot safely proceed and connection will be closed.
- * <p>
- * TODO: move to RestController to allow content limits per RestHandler.
- * Ideally we should be able to handle Continue and oversized request in the RestController.
- * <ul>
- *     <li>
- *         100 Continue is interim response, means RestChannel will send 2 responses for a single request. See
- *         rfc9110.html#status.100
- *     </li>
- *     <li>
- *         RestChannel should be able to close underlying HTTP channel connection.
- *     </li>
- * </ul>
- */ -@SuppressForbidden(reason = "use of default ChannelFutureListener's CLOSE and CLOSE_ON_FAILURE") -public class Netty4HttpContentSizeHandler extends ChannelInboundHandlerAdapter { - - // copied from netty's HttpObjectAggregator - static final FullHttpResponse CONTINUE = new DefaultFullHttpResponse( - HttpVersion.HTTP_1_1, - HttpResponseStatus.CONTINUE, - Unpooled.EMPTY_BUFFER - ); - static final FullHttpResponse EXPECTATION_FAILED_CLOSE = new DefaultFullHttpResponse( - HttpVersion.HTTP_1_1, - HttpResponseStatus.EXPECTATION_FAILED, - Unpooled.EMPTY_BUFFER, - new DefaultHttpHeaders().add(CONTENT_LENGTH, 0).add(CONNECTION, HttpHeaderValues.CLOSE), - EmptyHttpHeaders.INSTANCE - ); - static final FullHttpResponse TOO_LARGE_CLOSE = new DefaultFullHttpResponse( - HttpVersion.HTTP_1_1, - HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, - Unpooled.EMPTY_BUFFER, - new DefaultHttpHeaders().add(CONTENT_LENGTH, 0).add(CONNECTION, HttpHeaderValues.CLOSE), - EmptyHttpHeaders.INSTANCE - ); - static final FullHttpResponse TOO_LARGE = new DefaultFullHttpResponse( - HttpVersion.HTTP_1_1, - HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, - Unpooled.EMPTY_BUFFER, - new DefaultHttpHeaders().add(CONTENT_LENGTH, 0), - EmptyHttpHeaders.INSTANCE - ); - - private final int maxContentLength; - private final HttpRequestDecoder decoder; // need to reset decoder after sending 413 - private int currentContentLength; // chunked encoding does not provide content length, need to track actual length - private boolean contentExpected; - - public Netty4HttpContentSizeHandler(HttpRequestDecoder decoder, int maxContentLength) { - this.maxContentLength = maxContentLength; - this.decoder = decoder; - } - - @Override - public void channelRead(ChannelHandlerContext ctx, Object msg) { - assert msg instanceof HttpObject; - if (msg instanceof HttpRequest request) { - handleRequest(ctx, request); - } else { - handleContent(ctx, (HttpContent) msg); - } - } - - private void handleRequest(ChannelHandlerContext ctx, HttpRequest request) { - contentExpected = false; - if (request.decoderResult().isFailure()) { - ctx.fireChannelRead(request); - return; - } - - final var expectValue = request.headers().get(HttpHeaderNames.EXPECT); - boolean isContinueExpected = false; - // Only "Expect: 100-Continue" header is supported - if (expectValue != null) { - if (HttpHeaderValues.CONTINUE.toString().equalsIgnoreCase(expectValue)) { - isContinueExpected = true; - } else { - ctx.writeAndFlush(EXPECTATION_FAILED_CLOSE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE); - return; - } - } - - boolean isOversized = HttpUtil.getContentLength(request, -1) > maxContentLength; - if (isOversized) { - if (isContinueExpected) { - // Client is allowed to send content without waiting for Continue. - // See https://www.rfc-editor.org/rfc/rfc9110.html#section-10.1.1-11.3 - // this content will result in HttpRequestDecoder failure and send downstream - decoder.reset(); - ctx.writeAndFlush(TOO_LARGE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE_ON_FAILURE); - } else { - // Client is sending oversized content, we cannot safely take it. Closing channel. 
- ctx.writeAndFlush(TOO_LARGE_CLOSE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE); - } - } else { - contentExpected = true; - currentContentLength = 0; - if (isContinueExpected) { - ctx.writeAndFlush(CONTINUE.retainedDuplicate()); - HttpUtil.set100ContinueExpected(request, false); - } - ctx.fireChannelRead(request); - } - } - - private void handleContent(ChannelHandlerContext ctx, HttpContent msg) { - if (contentExpected) { - currentContentLength += msg.content().readableBytes(); - if (currentContentLength > maxContentLength) { - msg.release(); - ctx.writeAndFlush(TOO_LARGE_CLOSE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE); - } else { - ctx.fireChannelRead(msg); - } - } else { - msg.release(); - if (msg != LastHttpContent.EMPTY_LAST_CONTENT) { - ctx.close(); // there is no reliable recovery from unexpected content, closing channel - } - } - } - -} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 9ffa4b479be17..36c860f1fb90b 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -381,8 +381,7 @@ protected HttpMessage createMessage(String[] initialLine) throws Exception { handlingSettings.maxContentLength(), httpPreRequest -> enabled.get() == false || ((httpPreRequest.rawPath().endsWith("/_bulk") == false) - || httpPreRequest.rawPath().startsWith("/_xpack/monitoring/_bulk")), - decoder + || httpPreRequest.rawPath().startsWith("/_xpack/monitoring/_bulk")) ); aggregator.setMaxCumulationBufferComponents(transport.maxCompositeBufferComponents); ch.pipeline() diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java deleted file mode 100644 index 3f8fe0075689f..0000000000000 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java +++ /dev/null @@ -1,238 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.http.netty4; - -import io.netty.buffer.ByteBuf; -import io.netty.buffer.Unpooled; -import io.netty.channel.embedded.EmbeddedChannel; -import io.netty.handler.codec.http.DefaultHttpContent; -import io.netty.handler.codec.http.DefaultHttpRequest; -import io.netty.handler.codec.http.DefaultLastHttpContent; -import io.netty.handler.codec.http.FullHttpResponse; -import io.netty.handler.codec.http.HttpContent; -import io.netty.handler.codec.http.HttpHeaderNames; -import io.netty.handler.codec.http.HttpHeaderValues; -import io.netty.handler.codec.http.HttpMethod; -import io.netty.handler.codec.http.HttpObject; -import io.netty.handler.codec.http.HttpRequest; -import io.netty.handler.codec.http.HttpRequestDecoder; -import io.netty.handler.codec.http.HttpRequestEncoder; -import io.netty.handler.codec.http.HttpResponseStatus; -import io.netty.handler.codec.http.HttpUtil; -import io.netty.handler.codec.http.HttpVersion; -import io.netty.handler.codec.http.LastHttpContent; - -import org.elasticsearch.test.ESTestCase; - -import java.util.Arrays; - -public class Netty4HttpContentSizeHandlerTests extends ESTestCase { - - private static final int MAX_CONTENT_LENGTH = 1024; - private static final int OVERSIZED_LENGTH = MAX_CONTENT_LENGTH + 1; - private static final int REPS = 1000; - private EmbeddedChannel channel; - private EmbeddedChannel encoder; // channel to encode HTTP objects into bytes - - private static HttpContent httpContent(int size) { - return new DefaultHttpContent(Unpooled.wrappedBuffer(randomByteArrayOfLength(size))); - } - - private static LastHttpContent lastHttpContent(int size) { - return new DefaultLastHttpContent(Unpooled.wrappedBuffer(randomByteArrayOfLength(size))); - } - - private HttpRequest httpRequest() { - return new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/"); - } - - // encodes multiple HTTP objects into single ByteBuf - private ByteBuf encode(HttpObject... objs) { - var out = Unpooled.compositeBuffer(); - Arrays.stream(objs).forEach(encoder::writeOutbound); - while (encoder.outboundMessages().isEmpty() == false) { - out.addComponent(true, encoder.readOutbound()); - } - return out; - } - - @Override - public void setUp() throws Exception { - super.setUp(); - var decoder = new HttpRequestDecoder(); - encoder = new EmbeddedChannel(new HttpRequestEncoder()); - channel = new EmbeddedChannel(decoder, new Netty4HttpContentSizeHandler(decoder, MAX_CONTENT_LENGTH)); - } - - /** - * Assert that handler replies 100-continue for acceptable request and pass request further. - */ - public void testContinue() { - for (var i = 0; i < REPS; i++) { - var sendRequest = httpRequest(); - HttpUtil.set100ContinueExpected(sendRequest, true); - channel.writeInbound(encode(sendRequest)); - assertEquals("should send back 100-continue", Netty4HttpContentSizeHandler.CONTINUE, channel.readOutbound()); - var recvRequest = (HttpRequest) channel.readInbound(); - assertNotNull(recvRequest); - assertFalse(HttpUtil.is100ContinueExpected(recvRequest)); - channel.writeInbound(encode(LastHttpContent.EMPTY_LAST_CONTENT)); - assertEquals(LastHttpContent.EMPTY_LAST_CONTENT, channel.readInbound()); - } - } - - /** - * Assert that handler pass through acceptable request. 
- */ - public void testWithoutContinue() { - for (var i = 0; i < REPS; i++) { - var sendRequest = httpRequest(); - channel.writeInbound(encode(sendRequest)); - assertNull("should not receive response", channel.readOutbound()); - assertNotNull("request should pass", channel.readInbound()); - channel.writeInbound(encode(LastHttpContent.EMPTY_LAST_CONTENT)); - assertEquals(LastHttpContent.EMPTY_LAST_CONTENT, channel.readInbound()); - } - } - - /** - * Assert that handler pass through request and content for acceptable request. - */ - public void testContinueWithContent() { - for (var i = 0; i < REPS; i++) { - var sendRequest = httpRequest(); - HttpUtil.set100ContinueExpected(sendRequest, true); - HttpUtil.setContentLength(sendRequest, MAX_CONTENT_LENGTH); - var sendContent = lastHttpContent(MAX_CONTENT_LENGTH); - channel.writeInbound(encode(sendRequest, sendContent)); - var resp = (FullHttpResponse) channel.readOutbound(); - assertEquals("should send back 100-continue", Netty4HttpContentSizeHandler.CONTINUE, resp); - resp.release(); - var recvRequest = (HttpRequest) channel.readInbound(); - assertNotNull(recvRequest); - var recvContent = (HttpContent) channel.readInbound(); - assertNotNull(recvContent); - assertEquals(MAX_CONTENT_LENGTH, recvContent.content().readableBytes()); - recvContent.release(); - } - } - - /** - * Assert that handler return 417 Expectation Failed and closes channel on request - * with "Expect" header other than "100-Continue". - */ - public void testExpectationFailed() { - var sendRequest = httpRequest(); - sendRequest.headers().set(HttpHeaderNames.EXPECT, randomValueOtherThan(HttpHeaderValues.CONTINUE, ESTestCase::randomIdentifier)); - channel.writeInbound(encode(sendRequest)); - var resp = (FullHttpResponse) channel.readOutbound(); - assertEquals(HttpResponseStatus.EXPECTATION_FAILED, resp.status()); - assertFalse(channel.isOpen()); - resp.release(); - } - - /** - * Assert that handler returns 413 Request Entity Too Large for oversized request - * and does not close channel if following content is not present. - */ - public void testEntityTooLarge() { - for (var i = 0; i < REPS; i++) { - var sendRequest = httpRequest(); - HttpUtil.set100ContinueExpected(sendRequest, true); - HttpUtil.setContentLength(sendRequest, OVERSIZED_LENGTH); - channel.writeInbound(encode(sendRequest, LastHttpContent.EMPTY_LAST_CONTENT)); - var resp = (FullHttpResponse) channel.readOutbound(); - assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); - assertNull("request should not pass", channel.readInbound()); - assertTrue("should not close channel", channel.isOpen()); - resp.release(); - } - } - - /** - * Mixed load of oversized and normal requests with Exepct:100-Continue. 
- */ - public void testMixedContent() { - for (int i = 0; i < REPS; i++) { - var isOversized = randomBoolean(); - var sendRequest = httpRequest(); - HttpUtil.set100ContinueExpected(sendRequest, true); - if (isOversized) { - HttpUtil.setContentLength(sendRequest, OVERSIZED_LENGTH); - channel.writeInbound(encode(sendRequest)); - var resp = (FullHttpResponse) channel.readOutbound(); - assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); - channel.writeInbound(encode(LastHttpContent.EMPTY_LAST_CONTENT)); // terminate - assertNull(channel.readInbound()); - resp.release(); - } else { - var normalSize = between(1, MAX_CONTENT_LENGTH); - HttpUtil.setContentLength(sendRequest, normalSize); - channel.writeInbound(encode(sendRequest)); - var resp = (FullHttpResponse) channel.readOutbound(); - assertEquals(HttpResponseStatus.CONTINUE, resp.status()); - resp.release(); - var sendContent = lastHttpContent(normalSize); - channel.writeInbound(encode(sendContent)); - var recvRequest = (HttpRequest) channel.readInbound(); - var recvContent = (LastHttpContent) channel.readInbound(); - assertEquals("content length header should match", normalSize, HttpUtil.getContentLength(recvRequest)); - assertFalse("should remove expect header", HttpUtil.is100ContinueExpected(recvRequest)); - assertEquals("actual content size should match", normalSize, recvContent.content().readableBytes()); - recvContent.release(); - } - } - } - - /** - * Assert that handler returns 413 Request Entity Too Large and close channel for - * oversized request with content. - */ - public void testEntityTooLargeWithContentWithoutExpect() { - var sendRequest = httpRequest(); - HttpUtil.setContentLength(sendRequest, OVERSIZED_LENGTH); - var unexpectedContent = lastHttpContent(OVERSIZED_LENGTH); - channel.writeInbound(encode(sendRequest, unexpectedContent)); - var resp = (FullHttpResponse) channel.readOutbound(); - assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); - assertFalse(channel.isOpen()); - resp.release(); - } - - /** - * Assert that handler return 413 Request Entity Too Large and closes channel for oversized - * requests with chunked content. 
- */ - public void testEntityTooLargeWithChunkedContent() { - var sendRequest = httpRequest(); - HttpUtil.setTransferEncodingChunked(sendRequest, true); - channel.writeInbound(encode(sendRequest)); - assertTrue("request should pass", channel.readInbound() instanceof HttpRequest); - - int contentBytesSent = 0; - do { - var thisPartSize = between(1, MAX_CONTENT_LENGTH * 2); - channel.writeInbound(encode(httpContent(thisPartSize))); - contentBytesSent += thisPartSize; - - if (contentBytesSent <= MAX_CONTENT_LENGTH) { - ((HttpContent) channel.readInbound()).release(); - } else { - break; - } - } while (true); - - var resp = (FullHttpResponse) channel.readOutbound(); - assertEquals("should respond with 413", HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); - assertFalse("should close channel", channel.isOpen()); - resp.release(); - } - -} From b6145919f6f35d47597cfce31f4a8c3904cc756f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 05:21:10 +1100 Subject: [PATCH 083/383] Mute org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level} #120950 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 3f03fbb6e3bbd..b917c6c92cb0d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -274,6 +274,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/120910 - class: org.elasticsearch.xpack.esql.qa.multi_node.RestEsqlIT issue: https://github.com/elastic/elasticsearch/issues/120948 +- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT + method: test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level} + issue: https://github.com/elastic/elasticsearch/issues/120950 # Examples: # From a4482d4c4c2d24418553510afa6a11a5f316983a Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Mon, 27 Jan 2025 19:25:15 +0100 Subject: [PATCH 084/383] ESQL: Implement a MetricsAware interface (#120527) This implements an interface that export the names of the plan nodes and functions that need to be counted in the metrics. Also, the metrics are now counted from within the parser. This should allow correct accounting for the cases where some nodes can appear both standalone or part other nodes' children (like Aggregate being a child of INLINESTATS, so no STATS counting should occur). The functions counting now also validates that behind a name there is actually a function registered. Closes #115992. 
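Illustration only, not part of this patch: a minimal sketch of how the parser-side
counting is expected to be wired, using the PlanTelemetry, EsqlParser and QueryParams
types changed below; the query text is hypothetical and imports are omitted.

    // Sketch: parse a query and collect command/function telemetry while the AST is built.
    PlanTelemetry telemetry = new PlanTelemetry(new EsqlFunctionRegistry());
    LogicalPlan plan = new EsqlParser().createStatement(
        "FROM idx | WHERE id > 0 | STATS max(id)",
        new QueryParams(),
        telemetry
    );
    // LogicalPlanBuilder#plan reports each TelemetryAware node via telemetry.command(node),
    // counted under its telemetryLabel() (e.g. WHERE for Filter, STATS for Aggregate), and
    // ExpressionBuilder#visitFunctionName reports functions via telemetry.function("max"),
    // which is checked against the function registry before being counted.

TelemetryIT below exercises the same path end to end against a running cluster.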
--- docs/changelog/120527.yaml | 6 ++ .../xpack/esql/EsqlTestUtils.java | 2 +- .../xpack/esql/action/TelemetryIT.java | 35 ++++++---- .../xpack/esql/analysis/Analyzer.java | 8 +-- .../xpack/esql/analysis/Verifier.java | 4 +- .../esql/capabilities/TelemetryAware.java | 23 +++++++ .../xpack/esql/execution/PlanExecutor.java | 20 +++--- .../function/EsqlFunctionRegistry.java | 15 ++++ .../xpack/esql/parser/AstBuilder.java | 4 +- .../xpack/esql/parser/EsqlParser.java | 13 +++- .../xpack/esql/parser/ExpressionBuilder.java | 39 +++++++---- .../xpack/esql/parser/LogicalPlanBuilder.java | 16 +++-- .../xpack/esql/plan/logical/Aggregate.java | 5 +- .../xpack/esql/plan/logical/Dissect.java | 8 +-- .../xpack/esql/plan/logical/Drop.java | 7 +- .../xpack/esql/plan/logical/Enrich.java | 7 +- .../xpack/esql/plan/logical/EsRelation.java | 5 -- .../xpack/esql/plan/logical/Eval.java | 8 +-- .../xpack/esql/plan/logical/Explain.java | 8 +-- .../xpack/esql/plan/logical/Filter.java | 5 +- .../xpack/esql/plan/logical/Grok.java | 8 +-- .../xpack/esql/plan/logical/InlineStats.java | 8 +-- .../xpack/esql/plan/logical/Keep.java | 8 +-- .../xpack/esql/plan/logical/Limit.java | 8 +-- .../xpack/esql/plan/logical/LogicalPlan.java | 2 - .../xpack/esql/plan/logical/Lookup.java | 8 +-- .../xpack/esql/plan/logical/MvExpand.java | 5 +- .../xpack/esql/plan/logical/OrderBy.java | 5 +- .../xpack/esql/plan/logical/Project.java | 8 --- .../xpack/esql/plan/logical/Rename.java | 8 +-- .../xpack/esql/plan/logical/Row.java | 8 +-- .../xpack/esql/plan/logical/TopN.java | 7 -- .../esql/plan/logical/UnresolvedRelation.java | 19 +++++- .../xpack/esql/plan/logical/join/Join.java | 5 -- .../esql/plan/logical/join/LookupJoin.java | 8 ++- .../esql/plan/logical/join/StubRelation.java | 5 -- .../plan/logical/local/LocalRelation.java | 8 --- .../esql/plan/logical/show/ShowInfo.java | 5 +- .../xpack/esql/session/EsqlSession.java | 11 ++- .../xpack/esql/stats/PlanningMetrics.java | 41 ----------- .../{stats => telemetry}/FeatureMetric.java | 2 +- .../esql/{stats => telemetry}/Metrics.java | 2 +- .../xpack/esql/telemetry/PlanTelemetry.java | 68 +++++++++++++++++++ .../PlanTelemetryManager.java} | 12 ++-- .../{stats => telemetry}/QueryMetric.java | 2 +- .../elasticsearch/xpack/esql/CsvTests.java | 4 +- .../function/CheckLicenseTests.java | 2 +- .../LocalLogicalPlanOptimizerTests.java | 5 -- .../LocalPhysicalPlanOptimizerTests.java | 2 +- .../esql/planner/QueryTranslatorTests.java | 2 +- .../PlanExecutorMetricsTests.java | 2 +- .../VerifierMetricsTests.java | 36 +++++----- 52 files changed, 297 insertions(+), 265 deletions(-) create mode 100644 docs/changelog/120527.yaml create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{stats => telemetry}/FeatureMetric.java (98%) rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{stats => telemetry}/Metrics.java (99%) create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{stats/PlanningMetricsManager.java => telemetry/PlanTelemetryManager.java} (89%) rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{stats => telemetry}/QueryMetric.java (93%) rename x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/{stats => 
telemetry}/PlanExecutorMetricsTests.java (99%) rename x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/{stats => telemetry}/VerifierMetricsTests.java (93%) diff --git a/docs/changelog/120527.yaml b/docs/changelog/120527.yaml new file mode 100644 index 0000000000000..a8e8088ea2aba --- /dev/null +++ b/docs/changelog/120527.yaml @@ -0,0 +1,6 @@ +pr: 120527 +summary: Implement a `MetricsAware` interface +area: ES|QL +type: enhancement +issues: + - 115992 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index f3b2ea0d864ff..01195e0040a75 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -73,8 +73,8 @@ import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; -import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchStats; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import org.elasticsearch.xpack.versionfield.Version; import org.junit.Assert; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java index 25603acece3cb..a27b64044ca9c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java @@ -20,7 +20,7 @@ import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; -import org.elasticsearch.xpack.esql.stats.PlanningMetricsManager; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetryManager; import org.junit.Before; import java.util.Collection; @@ -113,6 +113,17 @@ public static Iterable parameters() { Map.ofEntries(Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)), true ) }, + new Object[] { + new Test( + // Using the `::` cast operator and a function alias + """ + FROM idx + | EVAL ip = host::ip::string, y = to_str(host) + """, + Map.ofEntries(Map.entry("FROM", 1), Map.entry("EVAL", 1)), + Map.ofEntries(Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)), + true + ) }, new Object[] { new Test( "METRICS idx | LIMIT 10", @@ -123,9 +134,7 @@ public static Iterable parameters() { new Object[] { new Test( "METRICS idx max(id) BY host | LIMIT 10", - Build.current().isSnapshot() - ? Map.ofEntries(Map.entry("METRICS", 1), Map.entry("LIMIT", 1), Map.entry("FROM TS", 1)) - : Collections.emptyMap(), + Build.current().isSnapshot() ? Map.ofEntries(Map.entry("METRICS", 1), Map.entry("LIMIT", 1)) : Collections.emptyMap(), Build.current().isSnapshot() ? Map.ofEntries(Map.entry("MAX", 1)) : Collections.emptyMap(), Build.current().isSnapshot() ) } @@ -138,7 +147,7 @@ public static Iterable parameters() { // | EVAL ip = to_ip(host), x = to_string(host), y = to_string(host) // | INLINESTATS max(id) // """, - // Build.current().isSnapshot() ? Map.of("FROM", 1, "EVAL", 1, "INLINESTATS", 1, "STATS", 1) : Collections.emptyMap(), + // Build.current().isSnapshot() ? 
Map.of("FROM", 1, "EVAL", 1, "INLINESTATS", 1) : Collections.emptyMap(), // Build.current().isSnapshot() // ? Map.ofEntries(Map.entry("MAX", 1), Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)) // : Collections.emptyMap(), @@ -186,19 +195,19 @@ private static void testQuery( client(dataNode.getName()).execute(EsqlQueryAction.INSTANCE, request, ActionListener.running(() -> { try { // test total commands used - final List commandMeasurementsAll = measurements(plugin, PlanningMetricsManager.FEATURE_METRICS_ALL); + final List commandMeasurementsAll = measurements(plugin, PlanTelemetryManager.FEATURE_METRICS_ALL); assertAllUsages(expectedCommands, commandMeasurementsAll, iteration, success); // test num of queries using a command - final List commandMeasurements = measurements(plugin, PlanningMetricsManager.FEATURE_METRICS); + final List commandMeasurements = measurements(plugin, PlanTelemetryManager.FEATURE_METRICS); assertUsageInQuery(expectedCommands, commandMeasurements, iteration, success); // test total functions used - final List functionMeasurementsAll = measurements(plugin, PlanningMetricsManager.FUNCTION_METRICS_ALL); + final List functionMeasurementsAll = measurements(plugin, PlanTelemetryManager.FUNCTION_METRICS_ALL); assertAllUsages(expectedFunctions, functionMeasurementsAll, iteration, success); // test number of queries using a function - final List functionMeasurements = measurements(plugin, PlanningMetricsManager.FUNCTION_METRICS); + final List functionMeasurements = measurements(plugin, PlanTelemetryManager.FUNCTION_METRICS); assertUsageInQuery(expectedFunctions, functionMeasurements, iteration, success); } finally { latch.countDown(); @@ -216,8 +225,8 @@ private static void assertAllUsages(Map expected, List found = featureNames(metrics); assertThat(found, is(expected.keySet())); for (Measurement metric : metrics) { - assertThat(metric.attributes().get(PlanningMetricsManager.SUCCESS), is(success)); - String featureName = (String) metric.attributes().get(PlanningMetricsManager.FEATURE_NAME); + assertThat(metric.attributes().get(PlanTelemetryManager.SUCCESS), is(success)); + String featureName = (String) metric.attributes().get(PlanTelemetryManager.FEATURE_NAME); assertThat(metric.getLong(), is(iteration * expected.get(featureName))); } } @@ -227,7 +236,7 @@ private static void assertUsageInQuery(Map expected, List measurements(TestTelemetryPlugin plugin, String private static Set featureNames(List functionMeasurements) { return functionMeasurements.stream() - .map(x -> x.attributes().get(PlanningMetricsManager.FEATURE_NAME)) + .map(x -> x.attributes().get(PlanTelemetryManager.FEATURE_NAME)) .map(String.class::cast) .collect(Collectors.toSet()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 4f5ff35b84054..812080085b5a7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -92,7 +92,7 @@ import org.elasticsearch.xpack.esql.rule.Rule; import org.elasticsearch.xpack.esql.rule.RuleExecutor; import org.elasticsearch.xpack.esql.session.Configuration; -import org.elasticsearch.xpack.esql.stats.FeatureMetric; +import org.elasticsearch.xpack.esql.telemetry.FeatureMetric; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.time.Duration; @@ -133,7 +133,7 @@ import static 
org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.core.type.DataType.isTemporalAmount; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.LIMIT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.maybeParseTemporalAmount; /** @@ -220,7 +220,7 @@ private LogicalPlan resolveIndex(UnresolvedRelation plan, IndexResolution indexR plan.metadataFields(), plan.indexMode(), indexResolutionMessage, - plan.commandName() + plan.telemetryLabel() ); } IndexPattern table = plan.indexPattern(); @@ -233,7 +233,7 @@ private LogicalPlan resolveIndex(UnresolvedRelation plan, IndexResolution indexR plan.metadataFields(), plan.indexMode(), "invalid [" + table + "] resolution to [" + indexResolution + "]", - plan.commandName() + plan.telemetryLabel() ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index b59a112b1adb6..c2663650685eb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -32,8 +32,8 @@ import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.Project; -import org.elasticsearch.xpack.esql.stats.FeatureMetric; -import org.elasticsearch.xpack.esql.stats.Metrics; +import org.elasticsearch.xpack.esql.telemetry.FeatureMetric; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import java.util.ArrayList; import java.util.BitSet; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java new file mode 100644 index 0000000000000..9116c18b7a9bc --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.capabilities; + +import java.util.Locale; + +/** + * Interface for plan nodes that need to be accounted in the statistics + */ +public interface TelemetryAware { + + /** + * @return the label reported in the telemetry data. Only needs to be overwritten if the label doesn't match the class name. 
+ */ + default String telemetryLabel() { + return getClass().getSimpleName().toUpperCase(Locale.ROOT); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index 94913581f696d..81f63fd9d37a6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -26,10 +26,10 @@ import org.elasticsearch.xpack.esql.session.IndexResolver; import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; import org.elasticsearch.xpack.esql.session.Result; -import org.elasticsearch.xpack.esql.stats.Metrics; -import org.elasticsearch.xpack.esql.stats.PlanningMetrics; -import org.elasticsearch.xpack.esql.stats.PlanningMetricsManager; -import org.elasticsearch.xpack.esql.stats.QueryMetric; +import org.elasticsearch.xpack.esql.telemetry.Metrics; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetryManager; +import org.elasticsearch.xpack.esql.telemetry.QueryMetric; import static org.elasticsearch.action.ActionListener.wrap; @@ -41,7 +41,7 @@ public class PlanExecutor { private final Mapper mapper; private final Metrics metrics; private final Verifier verifier; - private final PlanningMetricsManager planningMetricsManager; + private final PlanTelemetryManager planTelemetryManager; public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry, XPackLicenseState licenseState) { this.indexResolver = indexResolver; @@ -50,7 +50,7 @@ public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry, XP this.mapper = new Mapper(); this.metrics = new Metrics(functionRegistry); this.verifier = new Verifier(metrics, licenseState); - this.planningMetricsManager = new PlanningMetricsManager(meterRegistry); + this.planTelemetryManager = new PlanTelemetryManager(meterRegistry); } public void esql( @@ -65,7 +65,7 @@ public void esql( QueryBuilderResolver queryBuilderResolver, ActionListener listener ) { - final PlanningMetrics planningMetrics = new PlanningMetrics(); + final PlanTelemetry planTelemetry = new PlanTelemetry(functionRegistry); final var session = new EsqlSession( sessionId, cfg, @@ -76,7 +76,7 @@ public void esql( new LogicalPlanOptimizer(new LogicalOptimizerContext(cfg, foldContext)), mapper, verifier, - planningMetrics, + planTelemetry, indicesExpressionGrouper, queryBuilderResolver ); @@ -84,12 +84,12 @@ public void esql( metrics.total(clientId); ActionListener executeListener = wrap(x -> { - planningMetricsManager.publish(planningMetrics, true); + planTelemetryManager.publish(planTelemetry, true); listener.onResponse(x); }, ex -> { // TODO when we decide if we will differentiate Kibana from REST, this String value will likely come from the request metrics.failed(clientId); - planningMetricsManager.publish(planningMetrics, false); + planTelemetryManager.publish(planTelemetry, false); listener.onFailure(ex); }); // Wrap it in a listener so that if we have any exceptions during execution, the listener picks it up diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 3b1e4828f0707..9c469d55c21c4 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -225,6 +225,7 @@ public class EsqlFunctionRegistry { // it has with the alias name associated to the FunctionDefinition instance private final Map defs = new LinkedHashMap<>(); private final Map aliases = new HashMap<>(); + private final Map, String> names = new HashMap<>(); private SnapshotFunctionRegistry snapshotRegistry = null; @@ -259,6 +260,12 @@ public boolean functionExists(String functionName) { return defs.containsKey(functionName); } + public String functionName(Class clazz) { + String name = names.get(clazz); + Check.notNull(name, "Cannot find function by class {}", clazz); + return name; + } + public Collection listFunctions() { // It is worth double checking if we need this copy. These are immutable anyway. return defs.values(); @@ -762,6 +769,14 @@ void register(FunctionDefinition... functions) { } aliases.put(alias, f.name()); } + Check.isTrue( + names.containsKey(f.clazz()) == false, + "function type [{}} is registered twice with names [{}] and [{}]", + f.clazz(), + names.get(f.clazz()), + f.name() + ); + names.put(f.clazz(), f.name()); } // sort the temporary map by key name and add it to the global map of functions defs.putAll( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java index 3b39e6a9d1fdb..ec23783fe1a2c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.esql.parser; public class AstBuilder extends LogicalPlanBuilder { - public AstBuilder(QueryParams params) { - super(params); + public AstBuilder(ParsingContext context) { + super(context); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java index 9538e3ba495db..5912f1fe58bcd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java @@ -18,7 +18,9 @@ import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.xpack.esql.core.util.StringUtils; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import java.util.BitSet; import java.util.function.BiFunction; @@ -52,20 +54,27 @@ public void setEsqlConfig(EsqlConfig config) { this.config = config; } + // testing utility public LogicalPlan createStatement(String query) { return createStatement(query, new QueryParams()); } + // testing utility public LogicalPlan createStatement(String query, QueryParams params) { + return createStatement(query, params, new PlanTelemetry(new EsqlFunctionRegistry())); + } + + public LogicalPlan createStatement(String query, QueryParams params, PlanTelemetry metrics) { if (log.isDebugEnabled()) { log.debug("Parsing as statement: {}", query); } - return invokeParser(query, params, EsqlBaseParser::singleStatement, AstBuilder::plan); + return 
invokeParser(query, params, metrics, EsqlBaseParser::singleStatement, AstBuilder::plan); } private T invokeParser( String query, QueryParams params, + PlanTelemetry metrics, Function parseFunction, BiFunction result ) { @@ -99,7 +108,7 @@ private T invokeParser( log.trace("Parse tree: {}", tree.toStringTree()); } - return result.apply(new AstBuilder(params), tree); + return result.apply(new AstBuilder(new ExpressionBuilder.ParsingContext(params, metrics)), tree); } catch (StackOverflowError e) { throw new ParsingException("ESQL statement is too large, causing stack overflow when generating the parsing tree: [{}]", query); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 114fcda1e634a..78c3044257f9f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -62,6 +62,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.math.BigInteger; @@ -115,10 +116,12 @@ public abstract class ExpressionBuilder extends IdentifierBuilder { */ public static final int MAX_EXPRESSION_DEPTH = 400; - protected final QueryParams params; + protected final ParsingContext context; - ExpressionBuilder(QueryParams params) { - this.params = params; + public record ParsingContext(QueryParams params, PlanTelemetry telemetry) {} + + ExpressionBuilder(ParsingContext context) { + this.context = context; } protected Expression expression(ParseTree ctx) { @@ -621,7 +624,9 @@ public Expression visitFunctionExpression(EsqlBaseParser.FunctionExpressionConte @Override public String visitFunctionName(EsqlBaseParser.FunctionNameContext ctx) { - return visitIdentifierOrParameter(ctx.identifierOrParameter()); + var name = visitIdentifierOrParameter(ctx.identifierOrParameter()); + context.telemetry().function(name); + return name; } @Override @@ -683,7 +688,9 @@ private Expression castToType(Source source, ParseTree parseTree, EsqlBaseParser throw new ParsingException(source, "Unsupported conversion to type [{}]", dataType); } Expression expr = expression(parseTree); - return converterToFactory.apply(source, expr); + var convertFunction = converterToFactory.apply(source, expr); + context.telemetry().function(convertFunction.getClass()); + return convertFunction; } @Override @@ -915,10 +922,10 @@ QueryParam paramByToken(TerminalNode node) { return null; } Token token = node.getSymbol(); - if (params.contains(token) == false) { + if (context.params().contains(token) == false) { throw new ParsingException(source(node), "Unexpected parameter"); } - return params.get(token); + return context.params().get(token); } QueryParam paramByNameOrPosition(TerminalNode node) { @@ -929,26 +936,28 @@ QueryParam paramByNameOrPosition(TerminalNode node) { String nameOrPosition = token.getText().substring(1); if (isInteger(nameOrPosition)) { int index = Integer.parseInt(nameOrPosition); - if (params.get(index) == null) { + if (context.params().get(index) == null) { String message = ""; - int np 
= params.size(); + int np = context.params().size(); if (np > 0) { message = ", did you mean " + (np == 1 ? "position 1?" : "any position between 1 and " + np + "?"); } - params.addParsingError(new ParsingException(source(node), "No parameter is defined for position " + index + message)); + context.params() + .addParsingError(new ParsingException(source(node), "No parameter is defined for position " + index + message)); } - return params.get(index); + return context.params().get(index); } else { - if (params.contains(nameOrPosition) == false) { + if (context.params().contains(nameOrPosition) == false) { String message = ""; - List potentialMatches = StringUtils.findSimilar(nameOrPosition, params.namedParams().keySet()); + List potentialMatches = StringUtils.findSimilar(nameOrPosition, context.params().namedParams().keySet()); if (potentialMatches.size() > 0) { message = ", did you mean " + (potentialMatches.size() == 1 ? "[" + potentialMatches.get(0) + "]?" : "any of " + potentialMatches + "?"); } - params.addParsingError(new ParsingException(source(node), "Unknown query parameter [" + nameOrPosition + "]" + message)); + context.params() + .addParsingError(new ParsingException(source(node), "Unknown query parameter [" + nameOrPosition + "]" + message)); } - return params.get(nameOrPosition); + return context.params().get(nameOrPosition); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 7ddd3dafd2784..82f3e18912325 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -18,6 +18,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ -94,15 +95,18 @@ interface PlanFactory extends Function {} */ public static final int MAX_QUERY_DEPTH = 500; - public LogicalPlanBuilder(QueryParams params) { - super(params); + public LogicalPlanBuilder(ParsingContext context) { + super(context); } private int queryDepth = 0; protected LogicalPlan plan(ParseTree ctx) { LogicalPlan p = ParserUtils.typedParsing(this, ctx, LogicalPlan.class); - var errors = this.params.parsingErrors(); + if (p instanceof TelemetryAware ma) { + this.context.telemetry().command(ma); + } + var errors = this.context.params().parsingErrors(); if (errors.hasNext() == false) { return p; } else { @@ -482,8 +486,7 @@ public LogicalPlan visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) false, List.of(new MetadataAttribute(source, MetadataAttribute.TSID_FIELD, DataType.KEYWORD, false)), IndexMode.TIME_SERIES, - null, - "FROM TS" + null ); return new Aggregate(source, relation, Aggregate.AggregateType.METRICS, stats.groupings, stats.aggregates); } @@ -543,8 +546,7 @@ public PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { false, emptyList(), IndexMode.LOOKUP, - null, - "???" 
+ null ); var condition = ctx.joinCondition(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java index 0111d23fac281..5c40bfce32064 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -39,7 +40,7 @@ import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; import static org.elasticsearch.xpack.esql.plan.logical.Filter.checkFilterConditionDataType; -public class Aggregate extends UnaryPlan implements PostAnalysisVerificationAware { +public class Aggregate extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "Aggregate", @@ -142,7 +143,7 @@ public List aggregates() { } @Override - public String commandName() { + public String telemetryLabel() { return switch (aggregateType) { case STANDARD -> "STATS"; case METRICS -> "METRICS"; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java index a83e102e51005..9200850b2f9db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.dissect.DissectParser; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; @@ -25,7 +26,7 @@ import java.util.List; import java.util.Objects; -public class Dissect extends RegexExtract { +public class Dissect extends RegexExtract implements TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Dissect", Dissect::new); private final Parser parser; @@ -123,11 +124,6 @@ public boolean equals(Object o) { return Objects.equals(parser, dissect.parser); } - @Override - public String commandName() { - return "DISSECT"; - } - @Override public int hashCode() { return Objects.hash(super.hashCode(), parser); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java index add5a2d576c00..483c3508013ab 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java @@ -8,6 +8,7 @@ package 
org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -16,7 +17,7 @@ import java.util.List; import java.util.Objects; -public class Drop extends UnaryPlan { +public class Drop extends UnaryPlan implements TelemetryAware { private final List removals; public Drop(Source source, LogicalPlan child, List removals) { @@ -38,10 +39,6 @@ public List removals() { return removals; } - public String commandName() { - return "DROP"; - } - @Override public boolean expressionsResolved() { return Resolvables.resolved(removals); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java index 9b81060349815..4e9fc87318029 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java @@ -18,6 +18,7 @@ import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -48,7 +49,7 @@ import static org.elasticsearch.xpack.esql.core.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; -public class Enrich extends UnaryPlan implements GeneratingPlan, PostAnalysisPlanVerificationAware { +public class Enrich extends UnaryPlan implements GeneratingPlan, PostAnalysisPlanVerificationAware, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "Enrich", @@ -202,10 +203,6 @@ protected AttributeSet computeReferences() { return matchField.references(); } - public String commandName() { - return "ENRICH"; - } - @Override public boolean expressionsResolved() { return policyName.resolved() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java index 90b3aa8625087..448085df1e831 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java @@ -172,11 +172,6 @@ public Set concreteIndices() { return indexNameWithModes.keySet(); } - @Override - public String commandName() { - return "FROM"; - } - @Override public boolean expressionsResolved() { // For unresolved expressions to exist in EsRelation is fine, as long as they are not used in later operations diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java index cbd79011032df..7c437dac03409 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -37,7 +38,7 @@ import static org.elasticsearch.xpack.esql.core.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; -public class Eval extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware { +public class Eval extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Eval", Eval::new); private final List fields; @@ -131,11 +132,6 @@ private List renameAliases(List originalAttributes, List n return newFieldsWithUpdatedRefs; } - @Override - public String commandName() { - return "EVAL"; - } - @Override public boolean expressionsResolved() { return Resolvables.resolved(fields); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java index 38e7c19522df6..bd49ed04881cc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -17,7 +18,7 @@ import java.util.List; import java.util.Objects; -public class Explain extends LeafPlan { +public class Explain extends LeafPlan implements TelemetryAware { public enum Type { PARSED, @@ -69,11 +70,6 @@ public List output() { ); } - @Override - public String commandName() { - return "EXPLAIN"; - } - @Override public boolean expressionsResolved() { return true; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java index 0fae5e5831fc7..6931c320007fe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -28,7 +29,7 @@ * {@code SELECT x FROM y WHERE z ..} the "WHERE" clause is a Filter. 
A * {@code Filter} has a "condition" Expression that does the filtering. */ -public class Filter extends UnaryPlan implements PostAnalysisVerificationAware { +public class Filter extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Filter", Filter::new); private final Expression condition; @@ -69,7 +70,7 @@ public Expression condition() { } @Override - public String commandName() { + public String telemetryLabel() { return "WHERE"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java index fcfd1ac0f04da..1fab2cbecd034 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java @@ -15,6 +15,7 @@ import org.elasticsearch.grok.GrokCaptureType; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; @@ -31,7 +32,7 @@ import java.util.Objects; import java.util.stream.Collectors; -public class Grok extends RegexExtract { +public class Grok extends RegexExtract implements TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Grok", Grok::readFrom); public record Parser(String pattern, org.elasticsearch.grok.Grok grok) { @@ -148,11 +149,6 @@ public boolean equals(Object o) { return Objects.equals(parser, grok.parser); } - @Override - public String commandName() { - return "GROK"; - } - @Override public int hashCode() { return Objects.hash(super.hashCode(), parser); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java index 4211f8a0d45b6..527ba28d377f1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; @@ -36,7 +37,7 @@ * underlying aggregate. *

*/ -public class InlineStats extends UnaryPlan implements NamedWriteable, SurrogateLogicalPlan { +public class InlineStats extends UnaryPlan implements NamedWriteable, SurrogateLogicalPlan, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "InlineStats", @@ -80,11 +81,6 @@ public Aggregate aggregate() { return aggregate; } - @Override - public String commandName() { - return "INLINESTATS"; - } - @Override public boolean expressionsResolved() { return aggregate.expressionsResolved(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java index 4c03d68e6e6f7..67108afb94668 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.plan.logical; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -14,7 +15,7 @@ import java.util.List; import java.util.Objects; -public class Keep extends Project { +public class Keep extends Project implements TelemetryAware { public Keep(Source source, LogicalPlan child, List projections) { super(source, child, projections); @@ -44,9 +45,4 @@ public int hashCode() { public boolean equals(Object obj) { return super.equals(obj); } - - @Override - public String commandName() { - return "KEEP"; - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java index ea64b7687f4c0..1bb89acf1942d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -17,7 +18,7 @@ import java.io.IOException; import java.util.Objects; -public class Limit extends UnaryPlan { +public class Limit extends UnaryPlan implements TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Limit", Limit::new); private final Expression limit; @@ -57,11 +58,6 @@ public Expression limit() { return limit; } - @Override - public String commandName() { - return "LIMIT"; - } - @Override public boolean expressionsResolved() { return limit.resolved(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java index e845c25bd3b32..ac4baea8bc853 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java @@ -75,8 +75,6 @@ public boolean resolved() { return lazyResolved; } - public abstract String commandName(); - public abstract boolean expressionsResolved(); @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java index 6e7f421003292..1c05ceb124529 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -31,7 +32,7 @@ * Looks up values from the associated {@code tables}. * The class is supposed to be substituted by a {@link Join}. */ -public class Lookup extends UnaryPlan implements SurrogateLogicalPlan { +public class Lookup extends UnaryPlan implements SurrogateLogicalPlan, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Lookup", Lookup::new); private final Expression tableName; @@ -117,11 +118,6 @@ public JoinConfig joinConfig() { return new JoinConfig(JoinTypes.LEFT, matchFields, leftFields, rightFields); } - @Override - public String commandName() { - return "LOOKUP"; - } - @Override public boolean expressionsResolved() { return tableName.resolved() && Resolvables.resolved(matchFields) && localRelation != null; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java index 949e4906e5033..f5a3c8230b124 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -22,7 +23,7 @@ import java.util.List; import java.util.Objects; -public class MvExpand extends UnaryPlan { +public class MvExpand extends UnaryPlan implements TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "MvExpand", MvExpand::new); private final NamedExpression target; @@ -95,7 +96,7 @@ protected AttributeSet computeReferences() { return target.references(); } - public String commandName() { + public String telemetryLabel() { return "MV_EXPAND"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java index 
d927d78701c65..051e2c7769bde 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -24,7 +25,7 @@ import static org.elasticsearch.xpack.esql.common.Failure.fail; -public class OrderBy extends UnaryPlan implements PostAnalysisVerificationAware { +public class OrderBy extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "OrderBy", OrderBy::new); private final List order; @@ -69,7 +70,7 @@ public List order() { } @Override - public String commandName() { + public String telemetryLabel() { return "SORT"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java index 841e7fbe81896..e12a8cb557fde 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java @@ -78,14 +78,6 @@ public boolean resolved() { return super.resolved() && Expressions.anyMatch(projections, Functions::isAggregate) == false; } - @Override - public String commandName() { - // this could represent multiple commands (KEEP, DROP, RENAME) - // and should not be present in a pre-analyzed plan. - // maybe it should throw exception? 
- return ""; - } - @Override public boolean expressionsResolved() { return Resolvables.resolved(projections); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java index 773d3fd015e5f..7887d8ed66b99 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.analysis.Analyzer.ResolveRefs; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expressions; @@ -20,7 +21,7 @@ import java.util.List; import java.util.Objects; -public class Rename extends UnaryPlan { +public class Rename extends UnaryPlan implements TelemetryAware { private final List renamings; @@ -51,11 +52,6 @@ public List output() { return Expressions.asAttributes(projectionsAfterResolution); } - @Override - public String commandName() { - return "RENAME"; - } - @Override public boolean expressionsResolved() { for (var alias : renamings) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java index 65d1adf5e2799..005ca45d19131 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -23,7 +24,7 @@ import static org.elasticsearch.xpack.esql.common.Failure.fail; -public class Row extends LeafPlan implements PostAnalysisVerificationAware { +public class Row extends LeafPlan implements PostAnalysisVerificationAware, TelemetryAware { private final List fields; @@ -51,11 +52,6 @@ public List output() { return Expressions.asAttributes(fields); } - @Override - public String commandName() { - return "ROW"; - } - @Override public boolean expressionsResolved() { return Resolvables.resolved(fields); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java index d6e0e4334bd47..a9a5dbddc544f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java @@ -55,13 +55,6 @@ public String getWriteableName() { return ENTRY.name; } - @Override - public String commandName() { - // this is the result of optimizations, it will never appear in a pre-analyzed plan - // maybe we should throw exception? 
- return ""; - } - @Override public boolean expressionsResolved() { return limit.resolved() && Resolvables.resolved(order); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java index 0a20e1dd9080d..5d22a86b2cdf7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java @@ -8,11 +8,13 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.plan.IndexPattern; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import java.util.Collections; import java.util.List; @@ -20,7 +22,7 @@ import static java.util.Collections.singletonList; -public class UnresolvedRelation extends LeafPlan implements Unresolvable { +public class UnresolvedRelation extends LeafPlan implements Unresolvable, TelemetryAware { private final IndexPattern indexPattern; private final boolean frozen; @@ -56,6 +58,17 @@ public UnresolvedRelation( this.commandName = commandName; } + public UnresolvedRelation( + Source source, + IndexPattern table, + boolean frozen, + List metadataFields, + IndexMode indexMode, + String unresolvedMessage + ) { + this(source, table, frozen, metadataFields, indexMode, unresolvedMessage, null); + } + @Override public void writeTo(StreamOutput out) { throw new UnsupportedOperationException("not serialized"); @@ -86,7 +99,7 @@ public boolean resolved() { /** * - * This is used by {@link org.elasticsearch.xpack.esql.stats.PlanningMetrics} to collect query statistics + * This is used by {@link PlanTelemetry} to collect query statistics * It can return *
    *
  • "FROM" if this a |FROM idx command
  • @@ -95,7 +108,7 @@ public boolean resolved() { *
*/ @Override - public String commandName() { + public String telemetryLabel() { return commandName; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java index a541142f952e0..997bff70663bd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java @@ -189,11 +189,6 @@ public Join replaceChildren(LogicalPlan left, LogicalPlan right) { return new Join(source(), left, right, config); } - @Override - public String commandName() { - return "JOIN"; - } - @Override public int hashCode() { return Objects.hash(config, left(), right()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java index c29cf0ec7f414..5f1f569e3671b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java @@ -9,6 +9,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -27,7 +28,7 @@ /** * Lookup join - specialized LEFT (OUTER) JOIN between the main left side and a lookup index (index_mode = lookup) on the right. 
*/ -public class LookupJoin extends Join implements SurrogateLogicalPlan, PostAnalysisVerificationAware { +public class LookupJoin extends Join implements SurrogateLogicalPlan, PostAnalysisVerificationAware, TelemetryAware { public LookupJoin(Source source, LogicalPlan left, LogicalPlan right, List joinFields) { this(source, left, right, new UsingJoinType(LEFT, joinFields), emptyList(), emptyList(), emptyList()); @@ -77,6 +78,11 @@ protected NodeInfo info() { ); } + @Override + public String telemetryLabel() { + return "LOOKUP JOIN"; + } + @Override public void postAnalysisVerification(Failures failures) { super.postAnalysisVerification(failures); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java index 4f04024d61d46..33e1f385f9eec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java @@ -67,11 +67,6 @@ protected NodeInfo info() { return NodeInfo.create(this, StubRelation::new, output); } - @Override - public String commandName() { - return ""; - } - @Override public int hashCode() { return Objects.hash(StubRelation.class, output); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java index 07432481d2341..d6106bae6b6b8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java @@ -63,14 +63,6 @@ public LocalSupplier supplier() { return supplier; } - @Override - public String commandName() { - // this colud be an empty source, a lookup table or something else - // but it should not be present in a pre-analyzed plan - // maybe we sholud throw exception? 
- return ""; - } - @Override public boolean expressionsResolved() { return true; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java index fa432537d27e3..99c917ba803a9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Build; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -22,7 +23,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; -public class ShowInfo extends LeafPlan { +public class ShowInfo extends LeafPlan implements TelemetryAware { private final List attributes; @@ -59,7 +60,7 @@ public List> values() { } @Override - public String commandName() { + public String telemetryLabel() { return "SHOW"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index b40e49df49c84..5a340adca4396 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -73,7 +73,7 @@ import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.mapper.Mapper; -import org.elasticsearch.xpack.esql.stats.PlanningMetrics; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import java.util.ArrayList; import java.util.Arrays; @@ -112,7 +112,7 @@ public interface PlanRunner { private final Mapper mapper; private final PhysicalPlanOptimizer physicalPlanOptimizer; - private final PlanningMetrics planningMetrics; + private final PlanTelemetry planTelemetry; private final IndicesExpressionGrouper indicesExpressionGrouper; private final QueryBuilderResolver queryBuilderResolver; @@ -126,7 +126,7 @@ public EsqlSession( LogicalPlanOptimizer logicalPlanOptimizer, Mapper mapper, Verifier verifier, - PlanningMetrics planningMetrics, + PlanTelemetry planTelemetry, IndicesExpressionGrouper indicesExpressionGrouper, QueryBuilderResolver queryBuilderResolver ) { @@ -140,7 +140,7 @@ public EsqlSession( this.mapper = mapper; this.logicalPlanOptimizer = logicalPlanOptimizer; this.physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(configuration)); - this.planningMetrics = planningMetrics; + this.planTelemetry = planTelemetry; this.indicesExpressionGrouper = indicesExpressionGrouper; this.queryBuilderResolver = queryBuilderResolver; } @@ -280,7 +280,7 @@ private LocalRelation resultToPlan(LogicalPlan plan, Result result) { } private LogicalPlan parse(String query, QueryParams params) { - var parsed = new EsqlParser().createStatement(query, params); + var parsed = new EsqlParser().createStatement(query, params, planTelemetry); LOGGER.debug("Parsed logical plan:\n{}", parsed); return parsed; } @@ -297,7 +297,6 @@ public void analyzedPlan( } Function 
analyzeAction = (l) -> { - planningMetrics.gatherPreAnalysisMetrics(parsed); Analyzer analyzer = new Analyzer( new AnalyzerContext(configuration, functionRegistry, l.indices, l.lookupIndices, l.enrichResolution), verifier diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java deleted file mode 100644 index 7b452e50fd525..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.stats; - -import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; - -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; - -/** - * This class is responsible for collecting metrics related to ES|QL planning. - */ -public class PlanningMetrics { - private Map commands = new HashMap<>(); - private Map functions = new HashMap<>(); - - public void gatherPreAnalysisMetrics(LogicalPlan plan) { - plan.forEachDown(p -> add(commands, p.commandName())); - plan.forEachExpressionDown(UnresolvedFunction.class, p -> add(functions, p.name().toUpperCase(Locale.ROOT))); - } - - private void add(Map map, String key) { - Integer cmd = map.get(key); - map.put(key, cmd == null ? 1 : cmd + 1); - } - - public Map commands() { - return commands; - } - - public Map functions() { - return functions; - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/FeatureMetric.java similarity index 98% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/FeatureMetric.java index 4cae2a9c247f3..3a36f5b0d7c04 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/FeatureMetric.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/Metrics.java similarity index 99% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/Metrics.java index 092fecb3142db..b8962b47809a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/Metrics.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.util.Maps; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java new file mode 100644 index 0000000000000..6fe1314524f10 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.telemetry; + +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; +import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.function.Function; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Locale; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.common.Strings.format; + +/** + * This class is responsible for collecting metrics related to ES|QL planning. + */ +public class PlanTelemetry { + private final EsqlFunctionRegistry functionRegistry; + private final Set telemetryAwares = new HashSet<>(); + private final Map commands = new HashMap<>(); + private final Map functions = new HashMap<>(); + + public PlanTelemetry(EsqlFunctionRegistry functionRegistry) { + this.functionRegistry = functionRegistry; + } + + private void add(Map map, String key) { + map.compute(key.toUpperCase(Locale.ROOT), (k, count) -> count == null ? 1 : count + 1); + } + + public void command(TelemetryAware command) { + if (telemetryAwares.add(command)) { + if (command.telemetryLabel() == null) { + throw new QlIllegalArgumentException(format("TelemetryAware [{}] has no metric name", command)); + } + add(commands, command.telemetryLabel()); + } + } + + public void function(String name) { + var functionName = functionRegistry.resolveAlias(name); + if (functionRegistry.functionExists(functionName)) { + // The metrics have been collected initially with their uppercase spelling + add(functions, functionName); + } + } + + public void function(Class clazz) { + add(functions, functionRegistry.functionName(clazz)); + } + + public Map commands() { + return commands; + } + + public Map functions() { + return functions; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetricsManager.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetryManager.java similarity index 89% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetricsManager.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetryManager.java index a2d00a1f530e9..2cd536daf389c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetricsManager.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetryManager.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.telemetry.metric.LongCounter; import org.elasticsearch.telemetry.metric.MeterRegistry; @@ -17,7 +17,7 @@ * * @see METERING */ -public class PlanningMetricsManager { +public class PlanTelemetryManager { // APM counters private final LongCounter featuresCounter; @@ -59,7 +59,7 @@ public class PlanningMetricsManager { */ public static final String SUCCESS = "success"; - public PlanningMetricsManager(MeterRegistry meterRegistry) { + public PlanTelemetryManager(MeterRegistry meterRegistry) { featuresCounter = meterRegistry.registerLongCounter( FEATURE_METRICS, "ESQL features, total number of queries that use them", @@ -77,9 +77,9 @@ public PlanningMetricsManager(MeterRegistry meterRegistry) { /** * Publishes the collected metrics to the meter registry */ - public void publish(PlanningMetrics metrics, boolean success) { - metrics.commands().entrySet().forEach(x -> incCommand(x.getKey(), x.getValue(), success)); - metrics.functions().entrySet().forEach(x -> incFunction(x.getKey(), x.getValue(), success)); + public void publish(PlanTelemetry metrics, boolean success) { + metrics.commands().forEach((key, value) -> incCommand(key, value, success)); + metrics.functions().forEach((key, value) -> incFunction(key, value, success)); } private void incCommand(String name, int count, boolean success) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/QueryMetric.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/QueryMetric.java similarity index 93% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/QueryMetric.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/QueryMetric.java index e862006d058ac..567b4b0a84937 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/QueryMetric.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/QueryMetric.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import java.util.Locale; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 02e683542df7c..321897c8a062e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -90,7 +90,7 @@ import org.elasticsearch.xpack.esql.session.EsqlSession.PlanRunner; import org.elasticsearch.xpack.esql.session.Result; import org.elasticsearch.xpack.esql.stats.DisabledSearchStats; -import org.elasticsearch.xpack.esql.stats.PlanningMetrics; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import org.junit.After; import org.junit.Before; import org.mockito.Mockito; @@ -514,7 +514,7 @@ private ActualResults executePlan(BigArrays bigArrays) throws Exception { new LogicalPlanOptimizer(new LogicalOptimizerContext(configuration, foldCtx)), mapper, TEST_VERIFIER, - new PlanningMetrics(), + new PlanTelemetry(functionRegistry), null, EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java index e507640c7b23c..cf2de30e44456 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Limit; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.stats.Metrics; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import java.util.List; import java.util.Objects; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index 310d680cfbf41..d99118df7e684 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -248,11 +248,6 @@ public UnaryPlan replaceChild(LogicalPlan newChild) { return new MockFieldAttributeCommand(source(), newChild, field); } - @Override - public String commandName() { - return "MOCK"; - } - @Override public boolean expressionsResolved() { return true; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 1536ed7f99fec..1749876a0d3e3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -70,9 +70,9 @@ import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.rule.Rule; import org.elasticsearch.xpack.esql.session.Configuration; -import 
org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchContextStats; import org.elasticsearch.xpack.esql.stats.SearchStats; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import org.elasticsearch.xpack.kql.query.KqlQueryBuilder; import org.junit.Before; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java index 57210fda07f2b..f9732272dbd74 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.xpack.esql.optimizer.TestPlannerOptimizer; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; -import org.elasticsearch.xpack.esql.stats.Metrics; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import org.hamcrest.Matcher; import org.junit.BeforeClass; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java similarity index 99% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java index a3c5cd9168b4f..4c2913031271f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java similarity index 93% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java index eda906b147956..de377fe78588c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; @@ -22,23 +22,23 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyzer; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.DISSECT; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.DROP; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.ENRICH; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.EVAL; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.FROM; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.GROK; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.KEEP; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.MV_EXPAND; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.RENAME; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.ROW; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.SHOW; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.SORT; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.STATS; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.WHERE; -import static org.elasticsearch.xpack.esql.stats.Metrics.FPREFIX; -import static org.elasticsearch.xpack.esql.stats.Metrics.FUNC_PREFIX; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.DISSECT; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.DROP; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.ENRICH; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.EVAL; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.FROM; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.GROK; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.KEEP; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.MV_EXPAND; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.RENAME; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.ROW; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.SHOW; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.SORT; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.STATS; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.WHERE; +import static org.elasticsearch.xpack.esql.telemetry.Metrics.FPREFIX; +import static org.elasticsearch.xpack.esql.telemetry.Metrics.FUNC_PREFIX; public class VerifierMetricsTests extends ESTestCase { From 01edab58ff2941aa6b80f42a028d27b878313eee Mon Sep 17 00:00:00 2001 From: Pawan Kartik Date: Mon, 27 Jan 2025 18:49:13 +0000 Subject: [PATCH 085/383] Fix NPE caused by race condition in async search when minimise round trips is true (#117504) * Fix NPE caused by race condition in async search when minimise round trips is true Previously, the `notifyListShards()` initialised and updated the required pre-requisites (`searchResponse` being amongst them) when a search op began. This function takes in arguments that contain shard-specific details amongst others. 
Because this information is not immediately available when the search begins, it is not immediately called. In some specific cases, there can be a race condition that can cause the prerequisites (such as `searchResponse`) to be accessed before they're initialised, causing an NPE. This fix addresses the race condition by splitting the initialisation and subsequent updates amongst 2 different methods. This way, the prerequisites are always initialised and do not lead to an NPE. * Try: call `notifyListShards()` after `notifySearchStart()` when minimize round trips is true * Add removed code comment * Pass `Clusters` to `SearchTask` rather than using progress listener to signify search start. To prevent polluting the progress listener with unnecessary search specific details, we now pass the `Clusters` object to `SearchTask` when a search op begins. This lets `AsyncSearchTask` access it and use it to initialise `MutableSearchResponse` appropriately. * Use appropriate `clusters` object rather than re-building it * Do not double set `mutableSearchResponse` * Move mutable entities such as shard counts out of `MutableSearchResponse` * Address PR review: revert moving out mutable entities from `MutableSearchResponse` * Update docs/changelog/117504.yaml * Get rid of `SetOnce` for `searchResponse` * Drop redundant check around shards count * Add a test that calls `onListShards()` at last and clarify `updateShardsAndClusters()`'s comment * Fix test: ref count * Address review comment: rewrite comment and test --- docs/changelog/117504.yaml | 6 +++ .../xpack/search/AsyncSearchTask.java | 43 ++++++++----------- .../xpack/search/MutableSearchResponse.java | 34 +++++++++------ .../xpack/search/AsyncSearchTaskTests.java | 42 ++++++++++++++++++ 4 files changed, 87 insertions(+), 38 deletions(-) create mode 100644 docs/changelog/117504.yaml diff --git a/docs/changelog/117504.yaml b/docs/changelog/117504.yaml new file mode 100644 index 0000000000000..91a62c61b88f2 --- /dev/null +++ b/docs/changelog/117504.yaml @@ -0,0 +1,6 @@ +pr: 117504 +summary: Fix NPE caused by race condition in async search when minimise round trips + is true +area: Search +type: bug +issues: [] diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java index 5068ac69e462a..484683fc6ffdd 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.search; import org.apache.lucene.search.TotalHits; -import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; @@ -73,7 +72,7 @@ final class AsyncSearchTask extends SearchTask implements AsyncTask, Releasable private volatile long expirationTimeMillis; private final AtomicBoolean isCancelling = new AtomicBoolean(false); - private final SetOnce searchResponse = new SetOnce<>(); + private final MutableSearchResponse searchResponse; /** * Creates an instance of {@link AsyncSearchTask}.
@@ -112,6 +111,7 @@ final class AsyncSearchTask extends SearchTask implements AsyncTask, Releasable this.aggReduceContextSupplier = aggReduceContextSupplierFactory.apply(this::isCancelled); this.progressListener = new Listener(); setProgressListener(progressListener); + searchResponse = new MutableSearchResponse(threadPool.getThreadContext()); } /** @@ -340,7 +340,7 @@ private AsyncSearchResponse getResponseWithHeaders() { } private AsyncSearchResponse getResponse(boolean restoreResponseHeaders) { - MutableSearchResponse mutableSearchResponse = searchResponse.get(); + MutableSearchResponse mutableSearchResponse = searchResponse; assert mutableSearchResponse != null; checkCancellation(); AsyncSearchResponse asyncSearchResponse; @@ -370,7 +370,7 @@ private synchronized void checkCancellation() { * Returns the status from {@link AsyncSearchTask} */ public static AsyncStatusResponse getStatusResponse(AsyncSearchTask asyncTask) { - MutableSearchResponse mutableSearchResponse = asyncTask.searchResponse.get(); + MutableSearchResponse mutableSearchResponse = asyncTask.searchResponse; assert mutableSearchResponse != null; return mutableSearchResponse.toStatusResponse( asyncTask.searchId.getEncoded(), @@ -381,7 +381,7 @@ public static AsyncStatusResponse getStatusResponse(AsyncSearchTask asyncTask) { @Override public void close() { - Releasables.close(searchResponse.get()); + Releasables.close(searchResponse); } class Listener extends SearchProgressActionListener { @@ -420,12 +420,11 @@ protected void onQueryFailure(int shardIndex, SearchShardTarget shardTarget, Exc if (delegate != null) { delegate.onQueryFailure(shardIndex, shardTarget, exc); } - searchResponse.get() - .addQueryFailure( - shardIndex, - // the nodeId is null if all replicas of this shard failed - new ShardSearchFailure(exc, shardTarget.getNodeId() != null ? shardTarget : null) - ); + searchResponse.addQueryFailure( + shardIndex, + // the nodeId is null if all replicas of this shard failed + new ShardSearchFailure(exc, shardTarget.getNodeId() != null ? 
shardTarget : null) + ); } @Override @@ -467,9 +466,7 @@ protected void onListShards( delegate = new CCSSingleCoordinatorSearchProgressListener(); delegate.onListShards(shards, skipped, clusters, fetchPhase, timeProvider); } - searchResponse.set( - new MutableSearchResponse(shards.size() + skipped.size(), skipped.size(), clusters, threadPool.getThreadContext()) - ); + searchResponse.updateShardsAndClusters(shards.size() + skipped.size(), skipped.size(), clusters); executeInitListeners(); } @@ -496,7 +493,7 @@ public void onPartialReduce(List shards, TotalHits totalHits, Inter */ reducedAggs = () -> InternalAggregations.topLevelReduce(singletonList(aggregations), aggReduceContextSupplier.get()); } - searchResponse.get().updatePartialResponse(shards.size(), totalHits, reducedAggs, reducePhase); + searchResponse.updatePartialResponse(shards.size(), totalHits, reducedAggs, reducePhase); } /** @@ -510,7 +507,7 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna if (delegate != null) { delegate.onFinalReduce(shards, totalHits, aggregations, reducePhase); } - searchResponse.get().updatePartialResponse(shards.size(), totalHits, () -> aggregations, reducePhase); + searchResponse.updatePartialResponse(shards.size(), totalHits, () -> aggregations, reducePhase); } /** @@ -523,24 +520,20 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna @Override public void onClusterResponseMinimizeRoundtrips(String clusterAlias, SearchResponse clusterResponse) { // no need to call the delegate progress listener, since this method is only called for minimize_roundtrips=true - searchResponse.get().updateResponseMinimizeRoundtrips(clusterAlias, clusterResponse); + searchResponse.updateResponseMinimizeRoundtrips(clusterAlias, clusterResponse); } @Override public void onResponse(SearchResponse response) { - searchResponse.get().updateFinalResponse(response, ccsMinimizeRoundtrips); + searchResponse.updateFinalResponse(response, ccsMinimizeRoundtrips); executeCompletionListeners(); } @Override public void onFailure(Exception exc) { - // if the failure occurred before calling onListShards - var r = new MutableSearchResponse(-1, -1, null, threadPool.getThreadContext()); - if (searchResponse.trySet(r) == false) { - r.close(); - } - searchResponse.get() - .updateWithFailure(new ElasticsearchStatusException("error while executing search", ExceptionsHelper.status(exc), exc)); + searchResponse.updateWithFailure( + new ElasticsearchStatusException("error while executing search", ExceptionsHelper.status(exc), exc) + ); executeInitListeners(); executeCompletionListeners(); } diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java index b046b5ca46e83..11ff403237888 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java @@ -39,10 +39,10 @@ * run concurrently to 1 and ensures that we pause the search progress when an {@link AsyncSearchResponse} is built. 
*/ class MutableSearchResponse implements Releasable { - private final int totalShards; - private final int skippedShards; - private final Clusters clusters; - private final AtomicArray queryFailures; + private int totalShards; + private int skippedShards; + private Clusters clusters; + private AtomicArray queryFailures; private final ThreadContext threadContext; private boolean isPartial; @@ -82,23 +82,31 @@ class MutableSearchResponse implements Releasable { /** * Creates a new mutable search response. * - * @param totalShards The number of shards that participate in the request, or -1 to indicate a failure. - * @param skippedShards The number of skipped shards, or -1 to indicate a failure. - * @param clusters The remote clusters statistics. * @param threadContext The thread context to retrieve the final response headers. */ - MutableSearchResponse(int totalShards, int skippedShards, Clusters clusters, ThreadContext threadContext) { - this.totalShards = totalShards; - this.skippedShards = skippedShards; - - this.clusters = clusters; - this.queryFailures = totalShards == -1 ? null : new AtomicArray<>(totalShards - skippedShards); + MutableSearchResponse(ThreadContext threadContext) { this.isPartial = true; this.threadContext = threadContext; this.totalHits = Lucene.TOTAL_HITS_GREATER_OR_EQUAL_TO_ZERO; this.localClusterComplete = false; } + /** + * Updates the response with the number of total and skipped shards. + * + * @param totalShards The number of shards that participate in the request. + * @param skippedShards The number of shards skipped. + *

+ * Shards in this context depend on the value of minimize round trips (MRT): + * They are the shards being searched by this coordinator (local only for MRT=true, local + remote otherwise). + */ + synchronized void updateShardsAndClusters(int totalShards, int skippedShards, Clusters clusters) { + this.totalShards = totalShards; + this.skippedShards = skippedShards; + this.queryFailures = new AtomicArray<>(totalShards - skippedShards); + this.clusters = clusters; + } + /** * Updates the response with the result of a partial reduction. * diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java index 6083436bd09d3..cf08cdbf09367 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.core.async.AsyncExecutionId; import org.elasticsearch.xpack.core.search.action.AsyncSearchResponse; import org.junit.After; @@ -424,6 +425,47 @@ public void onFailure(Exception e) { assertThat(failure.get(), instanceOf(RuntimeException.class)); } + public void testDelayedOnListShardsShouldNotResultInExceptions() throws InterruptedException { + try (AsyncSearchTask task = createAsyncSearchTask()) { + int numShards = randomIntBetween(0, 10); + List shards = new ArrayList<>(); + + // All local shards. + for (int i = 0; i < numShards; i++) { + shards.add(new SearchShard(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, new ShardId("0", "0", 1))); + } + + int numSkippedShards = randomIntBetween(0, 10); + List skippedShards = new ArrayList<>(); + for (int i = 0; i < numSkippedShards; i++) { + skippedShards.add(new SearchShard(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, new ShardId("0", "0", 1))); + } + + int totalShards = numShards + numSkippedShards; + for (int i = 0; i < numShards; i++) { + task.getSearchProgressActionListener() + .onPartialReduce(shards.subList(i, i + 1), new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); + } + + task.getSearchProgressActionListener() + .onFinalReduce(shards, new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), null, 0); + + SearchResponse searchResponse = newSearchResponse(totalShards, totalShards, numSkippedShards); + task.getSearchProgressActionListener() + .onClusterResponseMinimizeRoundtrips(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, searchResponse); + + /** + * We're calling onListShards() at last. Previously, this delay would have resulted in an NPE for other `onABC()` methods. + * Now, we should not see any Exceptions or errors (be it NPE or anything else). 
+ */ + task.getSearchProgressActionListener() + .onListShards(shards, skippedShards, SearchResponse.Clusters.EMPTY, false, createTimeProvider()); + + ActionListener.respondAndRelease((AsyncSearchTask.Listener) task.getProgressListener(), searchResponse); + assertCompletionListeners(task, totalShards, totalShards, numSkippedShards, 0, false, false); + } + } + private static SearchResponse newSearchResponse( int totalShards, int successfulShards, From 9bc9ba788bd66c009bcb9dd024bfc9e9211d27ed Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Mon, 27 Jan 2025 14:32:46 -0500 Subject: [PATCH 086/383] Add a replicate_for option to the ILM searchable_snapshot action (#119003) --- docs/changelog/119003.yaml | 5 + .../actions/ilm-searchable-snapshot.asciidoc | 48 ++++++ .../org/elasticsearch/TransportVersions.java | 1 + .../core/ilm/SearchableSnapshotAction.java | 75 ++++++++- .../core/ilm/TimeseriesLifecycleType.java | 102 ++++++++++++ .../WaitUntilReplicateForTimePassesStep.java | 131 +++++++++++++++ .../xpack/core/ilm/LifecyclePolicyTests.java | 3 +- .../ilm/SearchableSnapshotActionTests.java | 32 +++- .../ilm/TimeseriesLifecycleTypeTests.java | 78 +++++++++ ...tUntilReplicateForTimePassesStepTests.java | 156 ++++++++++++++++++ .../actions/SearchableSnapshotActionIT.java | 102 +++++++++++- 11 files changed, 714 insertions(+), 19 deletions(-) create mode 100644 docs/changelog/119003.yaml create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitUntilReplicateForTimePassesStep.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitUntilReplicateForTimePassesStepTests.java diff --git a/docs/changelog/119003.yaml b/docs/changelog/119003.yaml new file mode 100644 index 0000000000000..d7edcf53a8129 --- /dev/null +++ b/docs/changelog/119003.yaml @@ -0,0 +1,5 @@ +pr: 119003 +summary: Add a `replicate_for` option to the ILM `searchable_snapshot` action +area: ILM+SLM +type: enhancement +issues: [] diff --git a/docs/reference/ilm/actions/ilm-searchable-snapshot.asciidoc b/docs/reference/ilm/actions/ilm-searchable-snapshot.asciidoc index 73a77bef09bde..a4b7fb993adea 100644 --- a/docs/reference/ilm/actions/ilm-searchable-snapshot.asciidoc +++ b/docs/reference/ilm/actions/ilm-searchable-snapshot.asciidoc @@ -55,6 +55,13 @@ snapshot retention runs off the index lifecycle management (ILM) policies and is (Required, string) <> used to store the snapshot. +`replicate_for`:: +(Optional, TimeValue) +By default, searchable snapshot indices are mounted without replicas. Using this will +result in a searchable snapshot index being mounted with a single replica for the time period +specified, after which the replica will be removed. This option is only permitted on the +first searchable snapshot action of a policy. + `force_merge_index`:: (Optional, Boolean) Force merges the managed index to one segment. @@ -109,3 +116,44 @@ PUT _ilm/policy/my_policy } } -------------------------------------------------- + +[[ilm-searchable-snapshot-replicate-for-ex]] +===== Mount a searchable snapshot with replicas for fourteen days + +This policy mounts a searchable snapshot in the hot phase with a +single replica and maintains that replica for fourteen days. After +that time has elapsed, the searchable snapshot index will remain (with +no replicas) for another fourteen days, at which point it will proceed +into the delete phase and will be deleted. 
+ +[source,console] +-------------------------------------------------- +PUT _ilm/policy/my_policy +{ + "policy": { + "phases": { + "hot": { + "actions": { + "rollover" : { + "max_primary_shard_size": "50gb" + }, + "searchable_snapshot" : { + "snapshot_repository" : "backing_repo", + "replicate_for": "14d" + } + } + }, + "delete": { + "min_age": "28d", + "actions": { + "delete" : { } + } + } + } + } +} +-------------------------------------------------- + +[NOTE] +If the `replicate_for` option is specified, its value must be +less than the minimum age of the next phase in the policy. diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 8cbdb908f38e5..77aac3b2de96e 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -165,6 +165,7 @@ static TransportVersion def(int id) { public static final TransportVersion ADD_INCLUDE_FAILURE_INDICES_OPTION = def(8_831_00_0); public static final TransportVersion ESQL_RESPONSE_PARTIAL = def(8_832_00_0); public static final TransportVersion RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN = def(8_833_00_0); + public static final TransportVersion ILM_ADD_SEARCHABLE_SNAPSHOT_ADD_REPLICATE_FOR = def(8_834_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java index b746ee8ea7c07..c0f717abaac36 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java @@ -17,10 +17,13 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -33,6 +36,7 @@ import java.util.List; import java.util.Objects; +import static org.elasticsearch.TransportVersions.ILM_ADD_SEARCHABLE_SNAPSHOT_ADD_REPLICATE_FOR; import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOTS_REPOSITORY_NAME_SETTING_KEY; import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOTS_SNAPSHOT_NAME_SETTING_KEY; import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_PARTIAL_SETTING_KEY; @@ -51,6 +55,7 @@ public class SearchableSnapshotAction implements LifecycleAction { public static final ParseField SNAPSHOT_REPOSITORY = new ParseField("snapshot_repository"); public static final ParseField FORCE_MERGE_INDEX = new ParseField("force_merge_index"); public static final ParseField TOTAL_SHARDS_PER_NODE = new ParseField("total_shards_per_node"); + public static final ParseField REPLICATE_FOR = new ParseField("replicate_for"); public static final String CONDITIONAL_DATASTREAM_CHECK_KEY = BranchingStep.NAME + "-on-datastream-check"; 
public static final String CONDITIONAL_SKIP_ACTION_STEP = BranchingStep.NAME + "-check-prerequisites"; public static final String CONDITIONAL_SKIP_GENERATE_AND_CLEAN = BranchingStep.NAME + "-check-existing-snapshot"; @@ -60,13 +65,19 @@ public class SearchableSnapshotAction implements LifecycleAction { private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( NAME, - a -> new SearchableSnapshotAction((String) a[0], a[1] == null || (boolean) a[1], (Integer) a[2]) + a -> new SearchableSnapshotAction((String) a[0], a[1] == null || (boolean) a[1], (Integer) a[2], (TimeValue) a[3]) ); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), SNAPSHOT_REPOSITORY); PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), FORCE_MERGE_INDEX); PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), TOTAL_SHARDS_PER_NODE); + PARSER.declareField( + ConstructingObjectParser.optionalConstructorArg(), + p -> TimeValue.parseTimeValue(p.textOrNull(), REPLICATE_FOR.getPreferredName()), + REPLICATE_FOR, + ObjectParser.ValueType.STRING + ); } public static SearchableSnapshotAction parse(XContentParser parser) { @@ -77,8 +88,15 @@ public static SearchableSnapshotAction parse(XContentParser parser) { private final boolean forceMergeIndex; @Nullable private final Integer totalShardsPerNode; - - public SearchableSnapshotAction(String snapshotRepository, boolean forceMergeIndex, @Nullable Integer totalShardsPerNode) { + @Nullable + private final TimeValue replicateFor; + + public SearchableSnapshotAction( + String snapshotRepository, + boolean forceMergeIndex, + @Nullable Integer totalShardsPerNode, + @Nullable TimeValue replicateFor + ) { if (Strings.hasText(snapshotRepository) == false) { throw new IllegalArgumentException("the snapshot repository must be specified"); } @@ -89,20 +107,30 @@ public SearchableSnapshotAction(String snapshotRepository, boolean forceMergeInd throw new IllegalArgumentException("[" + TOTAL_SHARDS_PER_NODE.getPreferredName() + "] must be >= 1"); } this.totalShardsPerNode = totalShardsPerNode; + + if (replicateFor != null && replicateFor.millis() <= 0) { + throw new IllegalArgumentException( + "[" + REPLICATE_FOR.getPreferredName() + "] must be positive [" + replicateFor.getStringRep() + "]" + ); + } + this.replicateFor = replicateFor; } public SearchableSnapshotAction(String snapshotRepository, boolean forceMergeIndex) { - this(snapshotRepository, forceMergeIndex, null); + this(snapshotRepository, forceMergeIndex, null, null); } public SearchableSnapshotAction(String snapshotRepository) { - this(snapshotRepository, true, null); + this(snapshotRepository, true, null, null); } public SearchableSnapshotAction(StreamInput in) throws IOException { this.snapshotRepository = in.readString(); this.forceMergeIndex = in.readBoolean(); this.totalShardsPerNode = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readOptionalInt() : null; + this.replicateFor = in.getTransportVersion().onOrAfter(ILM_ADD_SEARCHABLE_SNAPSHOT_ADD_REPLICATE_FOR) + ? 
in.readOptionalTimeValue() + : null; } boolean isForceMergeIndex() { @@ -118,6 +146,11 @@ public Integer getTotalShardsPerNode() { return totalShardsPerNode; } + @Nullable + public TimeValue getReplicateFor() { + return replicateFor; + } + @Override public List toSteps(Client client, String phase, StepKey nextStepKey) { assert false; @@ -145,6 +178,8 @@ public List toSteps(Client client, String phase, StepKey nextStepKey, XPac StepKey swapAliasesKey = new StepKey(phase, NAME, SwapAliasesAndDeleteSourceIndexStep.NAME); StepKey replaceDataStreamIndexKey = new StepKey(phase, NAME, ReplaceDataStreamBackingIndexStep.NAME); StepKey deleteIndexKey = new StepKey(phase, NAME, DeleteStep.NAME); + StepKey replicateForKey = new StepKey(phase, NAME, WaitUntilReplicateForTimePassesStep.NAME); + StepKey dropReplicasKey = new StepKey(phase, NAME, UpdateSettingsStep.NAME); // Before going through all these steps, first check if we need to do them at all. For example, the index could already be // a searchable snapshot of the same type and repository, in which case we don't need to do anything. If that is detected, @@ -319,7 +354,7 @@ public List toSteps(Client client, String phase, StepKey nextStepKey, XPac getRestoredIndexPrefix(mountSnapshotKey), storageType, totalShardsPerNode, - 0 + replicateFor != null ? 1 : 0 // if the 'replicate_for' option is set, then have a replica, otherwise don't ); WaitForIndexColorStep waitForGreenIndexHealthStep = new WaitForIndexColorStep( waitForGreenRestoredIndexKey, @@ -327,11 +362,12 @@ public List toSteps(Client client, String phase, StepKey nextStepKey, XPac ClusterHealthStatus.GREEN, getRestoredIndexPrefix(waitForGreenRestoredIndexKey) ); + StepKey keyForReplicateForOrContinue = replicateFor != null ? replicateForKey : nextStepKey; CopyExecutionStateStep copyMetadataStep = new CopyExecutionStateStep( copyMetadataKey, copyLifecyclePolicySettingKey, (index, executionState) -> getRestoredIndexPrefix(copyMetadataKey) + index, - nextStepKey + keyForReplicateForOrContinue ); CopySettingsStep copySettingsStep = new CopySettingsStep( copyLifecyclePolicySettingKey, @@ -364,6 +400,16 @@ public List toSteps(Client client, String phase, StepKey nextStepKey, XPac getRestoredIndexPrefix(swapAliasesKey) ); + // note that the replicateForStep and dropReplicasStep will only be used if replicateFor != null, see the construction of + // the list of steps below + Step replicateForStep = new WaitUntilReplicateForTimePassesStep(replicateForKey, dropReplicasKey, replicateFor); + UpdateSettingsStep dropReplicasStep = new UpdateSettingsStep( + dropReplicasKey, + nextStepKey, + client, + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() + ); + List steps = new ArrayList<>(); steps.add(conditionalSkipActionStep); steps.add(checkNoWriteIndexStep); @@ -382,6 +428,10 @@ public List toSteps(Client client, String phase, StepKey nextStepKey, XPac steps.add(waitForGreenIndexHealthStep); steps.add(copyMetadataStep); steps.add(copySettingsStep); + if (replicateFor != null) { + steps.add(replicateForStep); + steps.add(dropReplicasStep); + } steps.add(isDataStreamBranchingStep); steps.add(replaceDataStreamBackingIndex); steps.add(deleteSourceIndexStep); @@ -426,6 +476,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalInt(totalShardsPerNode); } + if (out.getTransportVersion().onOrAfter(ILM_ADD_SEARCHABLE_SNAPSHOT_ADD_REPLICATE_FOR)) { + out.writeOptionalTimeValue(replicateFor); + 
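+        // replicate_for is never written to nodes on older transport versions; the stream constructor above falls back to null when reading from them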
} } @Override @@ -436,6 +489,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (totalShardsPerNode != null) { builder.field(TOTAL_SHARDS_PER_NODE.getPreferredName(), totalShardsPerNode); } + if (replicateFor != null) { + builder.field(REPLICATE_FOR.getPreferredName(), replicateFor); + } builder.endObject(); return builder; } @@ -451,12 +507,13 @@ public boolean equals(Object o) { SearchableSnapshotAction that = (SearchableSnapshotAction) o; return Objects.equals(snapshotRepository, that.snapshotRepository) && Objects.equals(forceMergeIndex, that.forceMergeIndex) - && Objects.equals(totalShardsPerNode, that.totalShardsPerNode); + && Objects.equals(totalShardsPerNode, that.totalShardsPerNode) + && Objects.equals(replicateFor, that.replicateFor); } @Override public int hashCode() { - return Objects.hash(snapshotRepository, forceMergeIndex, totalShardsPerNode); + return Objects.hash(snapshotRepository, forceMergeIndex, totalShardsPerNode, replicateFor); } @Nullable diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java index 10a4c7086a0cc..39bd7ad39339a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java @@ -246,6 +246,7 @@ && definesAllocationRules((AllocateAction) phase.getActions().get(AllocateAction validateAllSearchableSnapshotActionsUseSameRepository(phases); validateFrozenPhaseHasSearchableSnapshotAction(phases); validateDownsamplingIntervals(phases); + validateReplicateFor(phases); } static void validateActionsFollowingSearchableSnapshot(Collection phases) { @@ -473,6 +474,107 @@ static void validateDownsamplingIntervals(Collection phases) { } } + /** + * Performs two validations of the 'replicate_for' attribute on searchable_snapshot actions: + * - If 'replicate_for' is present on a searchable_snapshot action, then it is the *first* searchable_snapshot action + * in phase order + * - If 'replicate_for' is present on a searchable_snapshot action, then for any subsequent phases that have an explicit 'min_age' + * the min_age must be greater than or equal to the 'replicate_for' time itself + */ + static void validateReplicateFor(Collection phases) { + final Map phasesWithSearchableSnapshotActions = phases.stream() + .filter(phase -> phase.getActions().containsKey(SearchableSnapshotAction.NAME)) + .collect(Collectors.toMap(Phase::getName, Function.identity())); + + // if there are no phases with searchable_snapshot actions, then none of the rest of this logic applies + if (phasesWithSearchableSnapshotActions.isEmpty()) { + return; + } + + // Order phases and extract the searchable_snapshot action instances per phase + final List orderedPhases = INSTANCE.getOrderedPhases(phasesWithSearchableSnapshotActions); + final var searchableSnapshotActions = orderedPhases.stream() + .map(phase -> Tuple.tuple(phase.getName(), (SearchableSnapshotAction) phase.getActions().get(SearchableSnapshotAction.NAME))) + .toList(); // Returns a list of tuples (phase name, searchable_snapshot action) + + // first validation rule: if there's more than one searchable_snapshot action, then we confirm that 'replicate_for' isn't present + // except possibly on the first searchable_snapshot action (n.b. 
this doesn't actually check the first action, since the value + // doesn't actually matter) + if (searchableSnapshotActions.size() > 1) { + for (int i = 1; i < searchableSnapshotActions.size(); i++) { // iterating from the second phase/action tuple + final var phaseAndAction = searchableSnapshotActions.get(i); + final String phase = phaseAndAction.v1(); + final boolean hasReplicateFor = phaseAndAction.v2().getReplicateFor() != null; + if (hasReplicateFor) { + throw new IllegalArgumentException( + Strings.format( + "only the first searchable_snapshot action in a policy may specify 'replicate_for', " + + "but it was specified in the [%s] phase", + phase + ) + ); + } + } + } + + final var firstSearchableSnapshotPhase = searchableSnapshotActions.getFirst().v1(); + final var firstSearchableSnapshotAction = searchableSnapshotActions.getFirst().v2(); + // second validation rule: if the first searchable_snapshot action has a 'replicate_for', then the replication time + // must be less than the next explicit min_age (if there is a min_age) + final TimeValue firstReplicateFor = firstSearchableSnapshotAction.getReplicateFor(); + if (firstReplicateFor != null) { + final Map allPhases = phases.stream().collect(Collectors.toMap(Phase::getName, Function.identity())); + final List allPhasesInOrder = INSTANCE.getOrderedPhases(allPhases); + + // find the 'implied min_age' of the phase that contains the searchable_snapshot action with a replicate_for, + // it's the latest non-zero min_age of the phases up to and including the phase in question (reminder that min_age values + // are either absent/zero or increasing) + TimeValue impliedMinAge = TimeValue.ZERO; + for (Phase phase : allPhasesInOrder) { + // if there's a rollover (in the hot phase) then the hot phase is implicitly a 'zero', since we calculate subsequent + // phases from the time of *rollover* + final var phaseMinAge = phase.getActions().containsKey(RolloverAction.NAME) ? TimeValue.ZERO : phase.getMinimumAge(); + + // TimeValue.ZERO is the null value for minimumAge in Phase + if (phaseMinAge != TimeValue.ZERO) { + impliedMinAge = phaseMinAge; + } + // loop until we find the phase that has the searchable_snapshot action with a replicate_for + if (phase.getName().equals(firstSearchableSnapshotPhase)) { + break; + } + } + + boolean afterReplicatorFor = false; + for (Phase phase : allPhasesInOrder) { + // loop until we find the phase after the one that has a searchable_snapshot with replicate_for + if (phase.getName().equals(firstSearchableSnapshotPhase)) { + afterReplicatorFor = true; + continue; // because we don't want to check the min_age on *this* phase, but on the next ones + } + // check the min_age requirement for all phases after the one that has the replicate_for set + if (afterReplicatorFor) { + final var phaseMinAge = phase.getMinimumAge(); + // TimeValue.ZERO is the null value for minimumAge in Phase + final long minAgeDeltaMillis = phaseMinAge.millis() - impliedMinAge.millis(); + if (phaseMinAge != TimeValue.ZERO && minAgeDeltaMillis < firstReplicateFor.millis()) { + throw new IllegalArgumentException( + Strings.format( + "The time a searchable snapshot is replicated in replicate_for [%s] may not exceed the time until the " + + "next phase is configured to begin. 
Based on the min_age [%s] of the [%s] phase, the maximum time " + + "the snapshot can be replicated is [%s].", + firstReplicateFor, + phaseMinAge, + phase.getName(), + TimeValue.timeValueMillis(minAgeDeltaMillis).toString() + ) + ); + } + } + } + } + } + private static boolean definesAllocationRules(AllocateAction action) { return action.getRequire().isEmpty() == false || action.getInclude().isEmpty() == false || action.getExclude().isEmpty() == false; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitUntilReplicateForTimePassesStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitUntilReplicateForTimePassesStep.java new file mode 100644 index 0000000000000..05081075edfb3 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitUntilReplicateForTimePassesStep.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ilm; + +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.LifecycleExecutionState; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.xpack.core.ilm.step.info.EmptyInfo; +import org.elasticsearch.xpack.core.ilm.step.info.SingleMessageFieldInfo; + +import java.time.Instant; +import java.util.Objects; +import java.util.function.Supplier; + +/** + * This {@link Step} waits until the `replicate_for` time of a searchable_snapshot action to pass. + *

+ * It's an {@link AsyncWaitStep} rather than a {@link ClusterStateWaitStep} because we aren't guaranteed to + * receive a new cluster state in timely fashion when the waiting finishes -- by extending {@link AsyncWaitStep} + * we are guaranteed to check the condition on each ILM execution. + */ +public class WaitUntilReplicateForTimePassesStep extends AsyncWaitStep { + + public static final String NAME = "check-replicate-for-time-passed"; + + private final TimeValue replicateFor; + private final Supplier nowSupplier; + + WaitUntilReplicateForTimePassesStep(StepKey key, StepKey nextStepKey, TimeValue replicateFor, Supplier nowSupplier) { + super(key, nextStepKey, null); + this.replicateFor = replicateFor; + this.nowSupplier = nowSupplier; + } + + WaitUntilReplicateForTimePassesStep(StepKey key, StepKey nextStepKey, TimeValue replicateFor) { + this(key, nextStepKey, replicateFor, Instant::now); + } + + public TimeValue getReplicateFor() { + return this.replicateFor; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), this.replicateFor); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (obj.getClass() != getClass()) { + return false; + } + WaitUntilReplicateForTimePassesStep other = (WaitUntilReplicateForTimePassesStep) obj; + return super.equals(obj) && Objects.equals(this.replicateFor, other.replicateFor); + } + + @Override + public void evaluateCondition(Metadata metadata, Index index, Listener listener, TimeValue masterTimeout) { + IndexMetadata indexMetadata = metadata.index(index); + assert indexMetadata != null + : "the index metadata for index [" + index.getName() + "] must exist in the cluster state for step [" + NAME + "]"; + + final LifecycleExecutionState executionState = metadata.index(index.getName()).getLifecycleExecutionState(); + assert executionState != null + : "the lifecycle execution state for index [" + index.getName() + "] must exist in the cluster state for step [" + NAME + "]"; + + if (replicateFor == null) { + // assert at dev-time, but treat this as a no-op at runtime if somehow this should happen (which it shouldn't) + assert false : "the replicate_for time value for index [" + index.getName() + "] must not be null for step [" + NAME + "]"; + listener.onResponse(true, EmptyInfo.INSTANCE); + return; + } + + final Instant endTime = Instant.ofEpochMilli(executionState.phaseTime() + this.replicateFor.millis()); + final Instant nowTime = nowSupplier.get(); + if (nowTime.isBefore(endTime)) { + final TimeValue remaining = TimeValue.timeValueMillis(endTime.toEpochMilli() - nowTime.toEpochMilli()); + listener.onResponse( + false, + new SingleMessageFieldInfo( + Strings.format( + "Waiting [%s] until the replicate_for time [%s] has elapsed for index [%s] before removing replicas.", + // note: we're sacrificing specificity for stability of string representation. if this string stays the same then + // there isn't a cluster state change to update the string (since it is lazy) -- and we'd rather avoid unnecessary + // cluster state changes. this approach gives us one cluster state change per day, which seems like a reasonable + // balance between precision and efficiency. + approximateTimeRemaining(remaining), + this.replicateFor, + index.getName() + ) + ) + ); + return; + } + + listener.onResponse(true, EmptyInfo.INSTANCE); + } + + private static final TimeValue TWENTY_FOUR_HOURS = TimeValue.timeValueHours(24); + + /** + * Turns a {@link TimeValue} into a very approximate time value String. 
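+     * The result is deliberately coarse (whole days), keeping the step info message stable so it does not trigger needless cluster state updates.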
+ * + * @param remaining the time remaining + * @return a String representing the approximate time remaining in days (e.g. "approximately 2d" OR "less than 1d") + */ + // visible for testing + static String approximateTimeRemaining(TimeValue remaining) { + if (remaining.compareTo(TWENTY_FOUR_HOURS) >= 0) { + return "approximately " + Math.round(remaining.daysFrac()) + "d"; + } else { + return "less than 1d"; + } + } + + @Override + public boolean isRetryable() { + return true; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java index 4d34115919710..fd41f17c7c760 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java @@ -221,7 +221,8 @@ public static LifecyclePolicy randomTimeseriesLifecyclePolicy(@Nullable String l new SearchableSnapshotAction( randomAlphaOfLength(10), randomBoolean(), - (randomBoolean() ? null : randomIntBetween(1, 100)) + (randomBoolean() ? null : randomIntBetween(1, 100)), + null ) ) ) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotActionTests.java index 5304b7885f96c..c6119b272a8c0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotActionTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ilm.Step.StepKey; import org.elasticsearch.xpack.core.searchablesnapshots.MountSearchableSnapshotRequest; @@ -32,7 +33,8 @@ public void testToSteps() { List steps = action.toSteps(null, phase, nextStepKey, null); - List expectedSteps = expectedStepKeys(phase, action.isForceMergeIndex()); + List expectedSteps = expectedStepKeys(phase, action.isForceMergeIndex(), action.getReplicateFor() != null); + assertThat(steps.size(), is(expectedSteps.size())); for (int i = 0; i < expectedSteps.size(); i++) { assertThat("steps match expectation at index " + i, steps.get(i).getKey(), is(expectedSteps.get(i))); @@ -88,12 +90,12 @@ public void testCreateWithInvalidTotalShardsPerNode() { IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> new SearchableSnapshotAction("test", true, invalidTotalShardsPerNode) + () -> new SearchableSnapshotAction("test", true, invalidTotalShardsPerNode, null) ); assertEquals("[" + TOTAL_SHARDS_PER_NODE.getPreferredName() + "] must be >= 1", exception.getMessage()); } - private List expectedStepKeys(String phase, boolean forceMergeIndex) { + private List expectedStepKeys(String phase, boolean forceMergeIndex, boolean hasReplicateFor) { return Stream.of( new StepKey(phase, NAME, SearchableSnapshotAction.CONDITIONAL_SKIP_ACTION_STEP), new StepKey(phase, NAME, CheckNotDataStreamWriteIndexStep.NAME), @@ -110,6 +112,8 @@ private List expectedStepKeys(String phase, boolean forceMergeIndex) { new StepKey(phase, NAME, WaitForIndexColorStep.NAME), new StepKey(phase, NAME, CopyExecutionStateStep.NAME), new StepKey(phase, NAME, 
CopySettingsStep.NAME), + hasReplicateFor ? new StepKey(phase, NAME, WaitUntilReplicateForTimePassesStep.NAME) : null, + hasReplicateFor ? new StepKey(phase, NAME, UpdateSettingsStep.NAME) : null, new StepKey(phase, NAME, SearchableSnapshotAction.CONDITIONAL_DATASTREAM_CHECK_KEY), new StepKey(phase, NAME, ReplaceDataStreamBackingIndexStep.NAME), new StepKey(phase, NAME, DeleteStep.NAME), @@ -134,21 +138,32 @@ protected Writeable.Reader instanceReader() { @Override protected SearchableSnapshotAction mutateInstance(SearchableSnapshotAction instance) { - return switch (randomIntBetween(0, 2)) { + return switch (randomIntBetween(0, 3)) { case 0 -> new SearchableSnapshotAction( randomAlphaOfLengthBetween(5, 10), instance.isForceMergeIndex(), - instance.getTotalShardsPerNode() + instance.getTotalShardsPerNode(), + instance.getReplicateFor() ); case 1 -> new SearchableSnapshotAction( instance.getSnapshotRepository(), instance.isForceMergeIndex() == false, - instance.getTotalShardsPerNode() + instance.getTotalShardsPerNode(), + instance.getReplicateFor() ); case 2 -> new SearchableSnapshotAction( instance.getSnapshotRepository(), instance.isForceMergeIndex(), - instance.getTotalShardsPerNode() == null ? 1 : instance.getTotalShardsPerNode() + randomIntBetween(1, 100) + instance.getTotalShardsPerNode() == null ? 1 : instance.getTotalShardsPerNode() + randomIntBetween(1, 100), + instance.getReplicateFor() + ); + case 3 -> new SearchableSnapshotAction( + instance.getSnapshotRepository(), + instance.isForceMergeIndex(), + instance.getTotalShardsPerNode(), + instance.getReplicateFor() == null + ? TimeValue.timeValueDays(1) + : TimeValue.timeValueDays(instance.getReplicateFor().getDays() + randomIntBetween(1, 10)) ); default -> throw new IllegalArgumentException("Invalid mutation branch"); }; @@ -158,7 +173,8 @@ static SearchableSnapshotAction randomInstance() { return new SearchableSnapshotAction( randomAlphaOfLengthBetween(5, 10), randomBoolean(), - (randomBoolean() ? null : randomIntBetween(1, 100)) + (randomBoolean() ? null : randomIntBetween(1, 100)), + (randomBoolean() ? 
null : TimeValue.timeValueDays(randomIntBetween(1, 10))) ); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java index f7d1ff5294f58..57bddad8479bf 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java @@ -41,6 +41,7 @@ import static org.elasticsearch.xpack.core.ilm.TimeseriesLifecycleType.validateAllSearchableSnapshotActionsUseSameRepository; import static org.elasticsearch.xpack.core.ilm.TimeseriesLifecycleType.validateFrozenPhaseHasSearchableSnapshotAction; import static org.elasticsearch.xpack.core.ilm.TimeseriesLifecycleType.validateMonotonicallyIncreasingPhaseTimings; +import static org.elasticsearch.xpack.core.ilm.TimeseriesLifecycleType.validateReplicateFor; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -801,6 +802,83 @@ public void testValidateFrozenPhaseHasSearchableSnapshot() { } } + public void testValidateReplicateFor() { + IllegalArgumentException e; + + // a searchable_snapshot action with replicate_for set to 10d + final var searchableSnapshotAction = new SearchableSnapshotAction( + "repo", + randomBoolean(), + randomBoolean() ? null : randomIntBetween(1, 100), // the ESTestCase utility can produce zeroes, which we can't have here + TimeValue.timeValueDays(10) + ); + + // first test case: there's a replicate_for, but it isn't on the first searchable_snapshot action + e = expectThrows( + IllegalArgumentException.class, + () -> validateReplicateFor( + List.of( + new Phase( + HOT_PHASE, + TimeValue.ZERO, + Map.of(RolloverAction.NAME, TEST_ROLLOVER_ACTION, SearchableSnapshotAction.NAME, searchableSnapshotAction) + ), + new Phase(COLD_PHASE, TimeValue.ZERO, Map.of(SearchableSnapshotAction.NAME, searchableSnapshotAction)) + ) + ) + ); + assertThat( + e.getMessage(), + is( + "only the first searchable_snapshot action in a policy may specify 'replicate_for', " + + "but it was specified in the [cold] phase" + ) + ); + + // second test case: there's a replicate_for, but the next phase has a shorter min_age + e = expectThrows( + IllegalArgumentException.class, + () -> validateReplicateFor( + List.of( + new Phase( + HOT_PHASE, + TimeValue.ZERO, + Map.of(RolloverAction.NAME, TEST_ROLLOVER_ACTION, SearchableSnapshotAction.NAME, searchableSnapshotAction) + ), + new Phase(WARM_PHASE, TimeValue.timeValueDays(5), Map.of(TEST_MIGRATE_ACTION.getWriteableName(), MigrateAction.ENABLED)) + ) + ) + ); + assertThat( + e.getMessage(), + is( + "The time a searchable snapshot is replicated in replicate_for [10d] may not exceed the time " + + "until the next phase is configured to begin. Based on the min_age [5d] of the [warm] phase, " + + "the maximum time the snapshot can be replicated is [5d]." 
+ ) + ); + + // third test case: there's a replicate_for, but the implied min_age difference isn't sufficient + e = expectThrows( + IllegalArgumentException.class, + () -> validateReplicateFor( + List.of( + new Phase(HOT_PHASE, TimeValue.ZERO, Map.of(RolloverAction.NAME, TEST_ROLLOVER_ACTION)), + new Phase(COLD_PHASE, TimeValue.timeValueDays(5), Map.of(SearchableSnapshotAction.NAME, searchableSnapshotAction)), + new Phase(DELETE_PHASE, TimeValue.timeValueDays(12), Map.of()) + ) + ) + ); + assertThat( + e.getMessage(), + is( + "The time a searchable snapshot is replicated in replicate_for [10d] may not exceed the time " + + "until the next phase is configured to begin. Based on the min_age [12d] of the [delete] phase, " + + "the maximum time the snapshot can be replicated is [7d]." + ) + ); + } + /** * checks whether an ordered list of objects (usually Phase and LifecycleAction) are found in the same * order as the ordered VALID_PHASES/VALID_HOT_ACTIONS/... lists diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitUntilReplicateForTimePassesStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitUntilReplicateForTimePassesStepTests.java new file mode 100644 index 0000000000000..f6d45f8a1cc4e --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitUntilReplicateForTimePassesStepTests.java @@ -0,0 +1,156 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ilm; + +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.LifecycleExecutionState; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xpack.core.ilm.step.info.EmptyInfo; + +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.cluster.metadata.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY; +import static org.elasticsearch.xpack.core.ilm.WaitUntilReplicateForTimePassesStep.approximateTimeRemaining; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class WaitUntilReplicateForTimePassesStepTests extends AbstractStepTestCase { + + @Override + protected WaitUntilReplicateForTimePassesStep createRandomInstance() { + Step.StepKey stepKey = randomStepKey(); + Step.StepKey nextStepKey = randomStepKey(); + TimeValue replicateFor = randomPositiveTimeValue(); + return new WaitUntilReplicateForTimePassesStep(stepKey, nextStepKey, replicateFor, Instant::now); + } + + @Override + protected WaitUntilReplicateForTimePassesStep mutateInstance(WaitUntilReplicateForTimePassesStep instance) { + Step.StepKey key = instance.getKey(); + Step.StepKey nextKey = instance.getNextStepKey(); + TimeValue replicateFor = instance.getReplicateFor(); + + switch (between(0, 2)) { + case 0 -> key = new Step.StepKey(key.phase(), key.action(), key.name() + randomAlphaOfLength(5)); + case 1 -> nextKey = new Step.StepKey(nextKey.phase(), nextKey.action(), nextKey.name() + randomAlphaOfLength(5)); + 
case 2 -> replicateFor = randomValueOtherThan(replicateFor, ESTestCase::randomPositiveTimeValue); + } + return new WaitUntilReplicateForTimePassesStep(key, nextKey, replicateFor, Instant::now); + } + + @Override + protected WaitUntilReplicateForTimePassesStep copyInstance(WaitUntilReplicateForTimePassesStep instance) { + return new WaitUntilReplicateForTimePassesStep( + instance.getKey(), + instance.getNextStepKey(), + instance.getReplicateFor(), + Instant::now + ); + } + + public void testEvaluateCondition() { + // a mutable box that we can put Instants into + final AtomicReference returnVal = new AtomicReference<>(); + + final WaitUntilReplicateForTimePassesStep step = new WaitUntilReplicateForTimePassesStep( + randomStepKey(), + randomStepKey(), + TimeValue.timeValueHours(1), + () -> returnVal.get() + ); + + final Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); + final Instant t1 = now.minus(2, ChronoUnit.HOURS); + final Instant t2 = now.plus(2, ChronoUnit.HOURS); + + final IndexMetadata indexMeta = getIndexMetadata(randomAlphaOfLengthBetween(10, 30), randomAlphaOfLengthBetween(10, 30), step); + final Metadata metadata = Metadata.builder().put(indexMeta, true).build(); + final Index index = indexMeta.getIndex(); + + // if we evaluate the condition now, it hasn't been met, because it hasn't been an hour + returnVal.set(now); + step.evaluateCondition(metadata, index, new AsyncWaitStep.Listener() { + @Override + public void onResponse(boolean complete, ToXContentObject informationContext) { + assertThat(complete, is(false)); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError("Unexpected method call", e); + } + }, MASTER_TIMEOUT); + + returnVal.set(t1); // similarly, if we were in the past, enough time also wouldn't have passed + step.evaluateCondition(metadata, index, new AsyncWaitStep.Listener() { + @Override + public void onResponse(boolean complete, ToXContentObject informationContext) { + assertThat(complete, is(false)); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError("Unexpected method call", e); + } + }, MASTER_TIMEOUT); + + returnVal.set(t2); // but two hours from now in the future, an hour will have passed + step.evaluateCondition(metadata, index, new AsyncWaitStep.Listener() { + @Override + public void onResponse(boolean complete, ToXContentObject informationContext) { + assertThat(complete, is(true)); + assertThat(informationContext, is(EmptyInfo.INSTANCE)); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError("Unexpected method call", e); + } + }, MASTER_TIMEOUT); + } + + public void testApproximateTimeRemaining() { + assertThat(approximateTimeRemaining(TimeValue.ZERO), equalTo("less than 1d")); + + for (int i : new int[] { -2000, 0, 2000 }) { + assertThat( + approximateTimeRemaining(TimeValue.timeValueMillis(TimeValue.timeValueDays(2).millis() + i)), + equalTo("approximately 2d") + ); + } + + assertThat(approximateTimeRemaining(TimeValue.timeValueHours(24)), equalTo("approximately 1d")); + assertThat(approximateTimeRemaining(TimeValue.timeValueMillis(TimeValue.timeValueHours(24).millis() - 1)), equalTo("less than 1d")); + } + + private IndexMetadata getIndexMetadata(String index, String lifecycleName, WaitUntilReplicateForTimePassesStep step) { + IndexMetadata idxMetadata = IndexMetadata.builder(index) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, lifecycleName)) + .numberOfShards(randomIntBetween(1, 5)) + 
.numberOfReplicas(randomIntBetween(0, 5)) + .build(); + + LifecycleExecutionState.Builder lifecycleState = LifecycleExecutionState.builder(); + lifecycleState.setPhase(step.getKey().phase()); + lifecycleState.setAction(step.getKey().action()); + lifecycleState.setStep(step.getKey().name()); + long stateTimes = System.currentTimeMillis(); + lifecycleState.setPhaseTime(stateTimes); + lifecycleState.setActionTime(stateTimes); + lifecycleState.setStepTime(stateTimes); + lifecycleState.setIndexCreationDate(randomNonNegativeLong()); + return IndexMetadata.builder(idxMetadata).putCustom(ILM_CUSTOM_METADATA_KEY, lifecycleState.build().asMap()).build(); + } +} diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java index bae3064971a5e..708c74960343a 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java @@ -38,6 +38,7 @@ import org.elasticsearch.xpack.core.ilm.SetPriorityAction; import org.elasticsearch.xpack.core.ilm.ShrinkAction; import org.elasticsearch.xpack.core.ilm.Step; +import org.elasticsearch.xpack.core.ilm.WaitUntilReplicateForTimePassesStep; import org.junit.Before; import java.io.IOException; @@ -48,6 +49,7 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING; import static org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createComposableTemplate; @@ -954,7 +956,7 @@ public void testSearchableSnapshotTotalShardsPerNode() throws Exception { new Phase( "frozen", TimeValue.ZERO, - Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean(), totalShardsPerNode)) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean(), totalShardsPerNode, null)) ), null ); @@ -992,6 +994,104 @@ public void testSearchableSnapshotTotalShardsPerNode() throws Exception { ); } + public void testSearchableSnapshotReplicateFor() throws Exception { + createSnapshotRepo(client(), snapshotRepo, randomBoolean()); + + final boolean forceMergeIndex = randomBoolean(); + createPolicy( + client(), + policy, + null, + null, + null, + new Phase( + "cold", + TimeValue.ZERO, + Map.of( + SearchableSnapshotAction.NAME, + new SearchableSnapshotAction(snapshotRepo, forceMergeIndex, null, TimeValue.timeValueHours(2)) + ) + ), + new Phase("delete", TimeValue.timeValueDays(1), Map.of(DeleteAction.NAME, WITH_SNAPSHOT_DELETE)) + ); + + createComposableTemplate( + client(), + randomAlphaOfLengthBetween(5, 10).toLowerCase(Locale.ROOT), + dataStream, + new Template(Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, policy).build(), null, null) + ); + + indexDocument(client(), dataStream, true); + + // rolling over the data stream so we can apply the searchable snapshot policy to a backing index that's not the write index + rolloverMaxOneDocCondition(client(), dataStream); + + String backingIndexName = DataStream.getDefaultBackingIndexName(dataStream, 
1L); + String restoredIndexName = SearchableSnapshotAction.FULL_RESTORED_INDEX_PREFIX + backingIndexName; + assertTrue(waitUntil(() -> { + try { + return indexExists(restoredIndexName); + } catch (IOException e) { + return false; + } + }, 30, TimeUnit.SECONDS)); + + // check that the index is in the expected step and has the expected step_info.message + assertBusy(() -> { + triggerStateChange(); + Map explainResponse = explainIndex(client(), restoredIndexName); + assertThat(explainResponse.get("step"), is(WaitUntilReplicateForTimePassesStep.NAME)); + @SuppressWarnings("unchecked") + final var stepInfo = (Map) explainResponse.get("step_info"); + String message = stepInfo == null ? "" : stepInfo.get("message"); + assertThat(message, containsString("Waiting [less than 1d] until the replicate_for time [2h] has elapsed")); + assertThat(message, containsString("for index [" + restoredIndexName + "] before removing replicas.")); + }, 30, TimeUnit.SECONDS); + + // check that it has the right number of replicas + { + Map indexSettings = getIndexSettingsAsMap(restoredIndexName); + assertNotNull("expected number_of_replicas to exist", indexSettings.get(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey())); + Integer numberOfReplicas = Integer.valueOf((String) indexSettings.get(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey())); + assertThat(numberOfReplicas, is(1)); + } + + // tweak the policy to replicate_for hardly any time at all + createPolicy( + client(), + policy, + null, + null, + null, + new Phase( + "cold", + TimeValue.ZERO, + Map.of( + SearchableSnapshotAction.NAME, + new SearchableSnapshotAction(snapshotRepo, forceMergeIndex, null, TimeValue.timeValueSeconds(10)) + ) + ), + new Phase("delete", TimeValue.timeValueDays(1), Map.of(DeleteAction.NAME, WITH_SNAPSHOT_DELETE)) + ); + + // check that the index has progressed because enough time has passed now that the policy is different + assertBusy(() -> { + triggerStateChange(); + Map explainResponse = explainIndex(client(), restoredIndexName); + assertThat(explainResponse.get("phase"), is("cold")); + assertThat(explainResponse.get("step"), is(PhaseCompleteStep.NAME)); + }, 30, TimeUnit.SECONDS); + + // check that it has the right number of replicas + { + Map indexSettings = getIndexSettingsAsMap(restoredIndexName); + assertNotNull("expected number_of_replicas to exist", indexSettings.get(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey())); + Integer numberOfReplicas = Integer.valueOf((String) indexSettings.get(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey())); + assertThat(numberOfReplicas, is(0)); + } + } + /** * Cause a bit of cluster activity using an empty reroute call in case the `wait-for-index-colour` ILM step missed the * notification that partial-index is now GREEN. 
From e3c5e975f9e49f8d922a395c6f28ea82e8e09777 Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Mon, 27 Jan 2025 15:57:15 -0500 Subject: [PATCH 087/383] Entitled plugin for testing (#120840) * Entitled plugin * [CI] Auto commit changes from spotless * SuppressForbidden in entitled plugin * Respond to PR comments * Reinstate entitled plugin * Make System_clearProperty package-private --------- Co-authored-by: elasticsearchmachine --- libs/entitlement/qa/build.gradle | 2 + .../qa/entitled-plugin/build.gradle | 34 +++++++++++++ .../src/main/java/module-info.java | 17 +++++++ .../qa/entitled/EntitledActions.java | 22 ++++++++ .../qa/entitled/EntitledPlugin.java | 50 +++++++++++++++++++ .../plugin-metadata/entitlement-policy.yaml | 4 ++ .../qa/entitlement-test-plugin/build.gradle | 6 ++- .../src/main/java/module-info.java | 1 + .../qa/test/RestEntitlementsCheckAction.java | 1 - .../qa/test/WritePropertiesCheckActions.java | 2 + .../entitlement/qa/EntitlementsTestRule.java | 1 + 11 files changed, 137 insertions(+), 3 deletions(-) create mode 100644 libs/entitlement/qa/entitled-plugin/build.gradle create mode 100644 libs/entitlement/qa/entitled-plugin/src/main/java/module-info.java create mode 100644 libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java create mode 100644 libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledPlugin.java create mode 100644 libs/entitlement/qa/entitled-plugin/src/main/plugin-metadata/entitlement-policy.yaml diff --git a/libs/entitlement/qa/build.gradle b/libs/entitlement/qa/build.gradle index b3b1c830a1b82..4c699de114a03 100644 --- a/libs/entitlement/qa/build.gradle +++ b/libs/entitlement/qa/build.gradle @@ -13,5 +13,7 @@ apply plugin: 'elasticsearch.internal-test-artifact' dependencies { javaRestTestImplementation project(':libs:entitlement:qa:entitlement-test-plugin') + javaRestTestImplementation project(':libs:entitlement:qa:entitled-plugin') clusterModules project(':libs:entitlement:qa:entitlement-test-plugin') + clusterModules project(':libs:entitlement:qa:entitled-plugin') } diff --git a/libs/entitlement/qa/entitled-plugin/build.gradle b/libs/entitlement/qa/entitled-plugin/build.gradle new file mode 100644 index 0000000000000..f6dfbd8322c6a --- /dev/null +++ b/libs/entitlement/qa/entitled-plugin/build.gradle @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask + +apply plugin: 'elasticsearch.base-internal-es-plugin' +apply plugin: 'elasticsearch.build' + +esplugin { + name = 'entitled' + description = 'A utility plugin that provides access to functionality denied to the main test plugin' + classname = 'org.elasticsearch.entitlement.qa.entitled.EntitledPlugin' +} + +dependencies { + compileOnly project(':server') + compileOnly project(':libs:logging') + compileOnly project(':libs:entitlement') +} + +tasks.named("javadoc").configure { + enabled = false +} + +tasks.withType(CheckForbiddenApisTask).configureEach { + replaceSignatureFiles 'jdk-signatures' +} + diff --git a/libs/entitlement/qa/entitled-plugin/src/main/java/module-info.java b/libs/entitlement/qa/entitled-plugin/src/main/java/module-info.java new file mode 100644 index 0000000000000..eafac9006daec --- /dev/null +++ b/libs/entitlement/qa/entitled-plugin/src/main/java/module-info.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +module org.elasticsearch.entitlement.qa.entitled { + requires org.elasticsearch.server; + requires org.elasticsearch.entitlement; + requires org.elasticsearch.base; // SuppressForbidden + requires org.elasticsearch.logging; + + exports org.elasticsearch.entitlement.qa.entitled; // Must be unqualified so non-modular IT tests can call us +} diff --git a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java new file mode 100644 index 0000000000000..282860e1cdf60 --- /dev/null +++ b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.qa.entitled; + +import org.elasticsearch.core.SuppressForbidden; + +public final class EntitledActions { + private EntitledActions() {} + + @SuppressForbidden(reason = "Exposes forbidden APIs for testing purposes") + static void System_clearProperty(String key) { + System.clearProperty(key); + } + +} diff --git a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledPlugin.java b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledPlugin.java new file mode 100644 index 0000000000000..7a60d92ecc552 --- /dev/null +++ b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledPlugin.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.entitled; + +import org.elasticsearch.entitlement.runtime.api.NotEntitledException; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.plugins.ExtensiblePlugin; +import org.elasticsearch.plugins.Plugin; + +import static org.elasticsearch.entitlement.qa.entitled.EntitledActions.System_clearProperty; + +public class EntitledPlugin extends Plugin implements ExtensiblePlugin { + + /** + * Runs some actions that should be allowed or denied for this plugin, + * to ensure the entitlement system is handling them correctly. 
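+     * (selfTestEntitled exercises an operation that this plugin's entitlement policy grants, while selfTestNotEntitled
+     * performs an operation the plugin is not entitled to and expects a NotEntitledException.)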
+     */
+    public static void selfTest() {
+        selfTestEntitled();
+        selfTestNotEntitled();
+    }
+
+    private static final String SELF_TEST_PROPERTY = "org.elasticsearch.entitlement.qa.selfTest";
+
+    private static void selfTestEntitled() {
+        logger.debug("selfTestEntitled");
+        System_clearProperty(SELF_TEST_PROPERTY);
+    }
+
+    private static void selfTestNotEntitled() {
+        logger.debug("selfTestNotEntitled");
+        try {
+            System.setIn(System.in);
+        } catch (NotEntitledException e) {
+            // All is well
+            return;
+        }
+        throw new AssertionError("Expected self-test not to be entitled");
+    }
+
+    private static final Logger logger = LogManager.getLogger(EntitledPlugin.class);
+}
diff --git a/libs/entitlement/qa/entitled-plugin/src/main/plugin-metadata/entitlement-policy.yaml b/libs/entitlement/qa/entitled-plugin/src/main/plugin-metadata/entitlement-policy.yaml
new file mode 100644
index 0000000000000..81acd4c467f94
--- /dev/null
+++ b/libs/entitlement/qa/entitled-plugin/src/main/plugin-metadata/entitlement-policy.yaml
@@ -0,0 +1,4 @@
+org.elasticsearch.entitlement.qa.entitled:
+  - write_system_properties:
+      properties:
+        - org.elasticsearch.entitlement.qa.selfTest
diff --git a/libs/entitlement/qa/entitlement-test-plugin/build.gradle b/libs/entitlement/qa/entitlement-test-plugin/build.gradle
index f23a8e979e36b..3ee9b510089ba 100644
--- a/libs/entitlement/qa/entitlement-test-plugin/build.gradle
+++ b/libs/entitlement/qa/entitlement-test-plugin/build.gradle
@@ -17,11 +17,13 @@ esplugin {
   name = 'entitlement-test-plugin'
   description = 'A test plugin that invokes methods checked by entitlements'
   classname = 'org.elasticsearch.entitlement.qa.test.EntitlementTestPlugin'
+  extendedPlugins = ['entitled']
 }

 dependencies {
-  implementation project(':server')
-  implementation project(':libs:logging')
+  compileOnly project(':server')
+  compileOnly project(':libs:logging')
+  compileOnly project(":libs:entitlement:qa:entitled-plugin")
 }

 tasks.named("javadoc").configure {
diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/module-info.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/module-info.java
index a1df03ad5c974..bb4c6fd759426 100644
--- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/module-info.java
+++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/module-info.java
@@ -11,6 +11,7 @@
     requires org.elasticsearch.server;
     requires org.elasticsearch.base;
     requires org.elasticsearch.logging;
+    requires org.elasticsearch.entitlement.qa.entitled;

     // Modules we'll attempt to use in order to exercise entitlements
     requires java.logging;
diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java
index 8672620dbbd58..1e754f657e260 100644
--- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java
+++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java
@@ -74,7 +74,6 @@ public class RestEntitlementsCheckAction extends BaseRestHandler {
     record CheckAction(CheckedRunnable action, boolean isAlwaysDeniedToPlugins, Integer fromJavaVersion) {
         /**
          * These cannot be granted to plugins, so our test plugins cannot test the "allowed" case.
-         * Used both for always-denied entitlements and those granted only to the server itself.
          */
         static CheckAction deniedToPlugins(CheckedRunnable action) {
             return new CheckAction(action, true, null);
diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/WritePropertiesCheckActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/WritePropertiesCheckActions.java
index 7d7fcd2175fed..ba4bfa5e896b9 100644
--- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/WritePropertiesCheckActions.java
+++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/WritePropertiesCheckActions.java
@@ -10,6 +10,7 @@
 package org.elasticsearch.entitlement.qa.test;

 import org.elasticsearch.core.SuppressForbidden;
+import org.elasticsearch.entitlement.qa.entitled.EntitledPlugin;

 import java.util.Locale;
 import java.util.TimeZone;
@@ -29,6 +30,7 @@ static void setSystemProperty() {
     }

     static void clearSystemProperty() {
+        EntitledPlugin.selfTest(); // TODO: find a better home
         System.clearProperty("es.entitlements.checkClearSystemProperty");
     }

diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java
index 1a0a75588f02c..8c9dcb6dd0efe 100644
--- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java
+++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java
@@ -39,6 +39,7 @@ interface PolicyBuilder {
     EntitlementsTestRule(boolean modular, PolicyBuilder policyBuilder) {
         testDir = new TemporaryFolder();
         cluster = ElasticsearchCluster.local()
+            .module("entitled")
             .module("entitlement-test-plugin", spec -> setupEntitlements(spec, modular, policyBuilder))
            .systemProperty("es.entitlements.enabled", "true")
            .systemProperty("es.entitlements.testdir", () -> testDir.getRoot().getAbsolutePath())
From 842d21e581e6a31bf9c9454c4eff258b1ddf36c0 Mon Sep 17 00:00:00 2001
From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com>
Date: Mon, 27 Jan 2025 22:05:36 +0100
Subject: [PATCH 088/383] Skip non-snapshot builds (#120881)

---
 muted-tests.yml | 2 --
 .../xpack/logsdb/seqno/RetentionLeaseRestIT.java | 7 +++++++
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/muted-tests.yml b/muted-tests.yml
index b917c6c92cb0d..572ba6f251b4a 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -208,8 +208,6 @@ tests:
 - class: org.elasticsearch.search.ccs.CrossClusterIT
   method: testCancel
   issue: https://github.com/elastic/elasticsearch/issues/108061
-- class: org.elasticsearch.xpack.logsdb.seqno.RetentionLeaseRestIT
-  issue: https://github.com/elastic/elasticsearch/issues/120434
 - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests
   method: testInvalidJSON
   issue: https://github.com/elastic/elasticsearch/issues/120482
diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/seqno/RetentionLeaseRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/seqno/RetentionLeaseRestIT.java
index a1fa73768a1d3..fa2d92a8fdb89 100644
--- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/seqno/RetentionLeaseRestIT.java
+++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/seqno/RetentionLeaseRestIT.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.logsdb.seqno;

 import org.apache.http.util.EntityUtils;
+import org.elasticsearch.Build;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.ResponseException;
@@ -20,6 +21,7 @@
 import org.elasticsearch.test.cluster.local.distribution.DistributionType;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.xcontent.json.JsonXContent;
+import org.junit.Before;
 import org.junit.ClassRule;

 import java.io.IOException;
@@ -35,6 +37,11 @@ public class RetentionLeaseRestIT extends ESRestTestCase {
     private static final String BULK_INDEX_ENDPOINT = "/%s/_bulk";
     private static final String[] DOCUMENT_NAMES = { "alpha", "beta", "gamma", "delta" };

+    @Before
+    public void assumeSnapshotBuild() {
+        assumeTrue("/{index}/seq_no/add_retention_lease endpoint only available in snapshot builds", Build.current().isSnapshot());
+    }
+
     @ClassRule
     public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
         .distribution(DistributionType.DEFAULT)
From e0f5a60d324886e8bb054fcf2c40afa7b9c581e2 Mon Sep 17 00:00:00 2001
From: Lee Hinman
Date: Mon, 27 Jan 2025 15:17:48 -0700
Subject: [PATCH 089/383] Document that disabling stack templates is not recommended (#120963)

There are many features of the Elasticsearch ecosystem that may malfunction,
or fail to work entirely, if these templates are not installed. This commit
adds documentation cautioning against disabling the installation of templates.
---
 docs/reference/indices/index-templates.asciidoc | 3 ++-
 docs/reference/modules/indices/index_management.asciidoc | 8 +++++++-
 2 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/docs/reference/indices/index-templates.asciidoc b/docs/reference/indices/index-templates.asciidoc
index b13921d263f71..ed6a8e89040f2 100644
--- a/docs/reference/indices/index-templates.asciidoc
+++ b/docs/reference/indices/index-templates.asciidoc
@@ -58,7 +58,8 @@ applying the templates, do one or more of the following:

 - To disable all built-in index and component templates, set
 <> to `false` using the
-<>.
+<>. Note, however, that this is not
+recommended, see the <> for more information.

 - Use a non-overlapping index pattern.

diff --git a/docs/reference/modules/indices/index_management.asciidoc b/docs/reference/modules/indices/index_management.asciidoc
index 7aea86bd5ac76..bc5b0b76a3776 100644
--- a/docs/reference/modules/indices/index_management.asciidoc
+++ b/docs/reference/modules/indices/index_management.asciidoc
@@ -37,6 +37,12 @@ If `true`, enables built-in index and component templates.
 streams. If `false`, {es} disables these index and component templates.
 Defaults to `true`.

+NOTE: It is not recommended to disable the built-in stack templates, as some functionality of {es}
+or Kibana will not work correctly when disabled. Features like log and metric collection, as well as
+Kibana reporting, may malfunction without the built-in stack templates. Stack templates should only
+be disabled temporarily, if necessary, to resolve upgrade issues, then re-enabled after any issues
+have been resolved.
+
 This setting affects the following built-in index templates:

 include::{es-ref-dir}/indices/index-templates.asciidoc[tag=built-in-index-template-patterns]
@@ -142,4 +148,4 @@ and certificate forgery.
 One of `full` (verify the hostname and the certificate path), `certificate`
 (verify the certificate path, but not the hostname) or `none` (perform no
 verification - this is strongly discouraged in production environments).
-Defaults to `full`.
\ No newline at end of file
+Defaults to `full`.
From f7ee67e57ae5b9e3ffbfdc7c20014c3a0239050e Mon Sep 17 00:00:00 2001
From: Gal Lalouche
Date: Tue, 28 Jan 2025 00:49:04 +0200
Subject: [PATCH 090/383] ESQL: Speed type error testing for numeric package (#120962)

Following in the same vein as #119678 and #119945, this PR moves the errors
tests in the numeric package to their own class.

43,303 tests -> 34,521 tests
6m 30s -> 5m 50s
---
 .../function/scalar/math/AbsErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/AbsTests.java | 2 +-
 .../function/scalar/math/AcosErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/AcosTests.java | 2 +-
 .../function/scalar/math/AsinErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/AsinTests.java | 2 +-
 .../function/scalar/math/AtanErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/AtanTests.java | 2 +-
 .../function/scalar/math/CbrtErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/CbrtTests.java | 2 +-
 .../function/scalar/math/CeilErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/CeilTests.java | 2 +-
 .../function/scalar/math/CosErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/CosTests.java | 2 +-
 .../function/scalar/math/CoshErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/CoshTests.java | 2 +-
 .../function/scalar/math/ETests.java | 5 +--
 .../function/scalar/math/ExpErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/ExpTests.java | 2 +-
 .../function/scalar/math/FloorErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/FloorTests.java | 2 +-
 .../function/scalar/math/HypotErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/HypotTests.java | 2 +-
 .../function/scalar/math/Log10ErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/Log10Tests.java | 2 +-
 .../function/scalar/math/LogErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/LogTests.java | 2 +-
 .../function/scalar/math/PiTests.java | 5 +--
 .../function/scalar/math/PowErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/PowTests.java | 2 +-
 .../scalar/math/SignumErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/SignumTests.java | 2 +-
 .../function/scalar/math/SinhTests.java | 2 +-
 .../function/scalar/math/SqrtErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/SqrtTests.java | 2 +-
 .../function/scalar/math/TanErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/TanTests.java | 2 +-
 .../function/scalar/math/TanhErrorTests.java | 37 +++++++++++++++++++
 .../function/scalar/math/TanhTests.java | 2 +-
 .../function/scalar/math/TauTests.java | 5 +--
 40 files changed, 691 insertions(+), 28 deletions(-)
 create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsErrorTests.java
 create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosErrorTests.java
 create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinErrorTests.java
 create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanErrorTests.java
 create
mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10ErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhErrorTests.java diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsErrorTests.java new file mode 100644 index 0000000000000..781ee1be1804f --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class AbsErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(AbsTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Abs(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsTests.java index 493e9e0e9d900..d2e9b5c7fd2ca 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsTests.java @@ -63,7 +63,7 @@ public static Iterable parameters() { equalTo(Math.abs(arg)) ); })); - return parameterSuppliersFromTypedDataWithDefaultChecks(false, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, suppliers); } public AbsTests(@Name("TestCase") Supplier testCaseSupplier) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosErrorTests.java new file mode 100644 index 0000000000000..693e04b009db6 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class AcosErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(AcosTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Acos(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java index 6531e7bee90ab..3032c0171ddb3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java @@ -56,7 +56,7 @@ public static Iterable parameters() { ) ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinErrorTests.java new file mode 100644 index 0000000000000..30cb6b7055cad --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class AsinErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(AsinTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Asin(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java index 410dc61ec5fa6..805224396df40 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java @@ -56,7 +56,7 @@ public static Iterable parameters() { ) ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanErrorTests.java new file mode 100644 index 0000000000000..3ef47cf0863d5 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class AtanErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(AtanTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Atan(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanTests.java index b51154515de82..439775c7f9af6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanTests.java @@ -33,7 +33,7 @@ public static Iterable parameters() { Double.POSITIVE_INFINITY, List.of() ); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtErrorTests.java new file mode 100644 index 0000000000000..deb706b242384 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class CbrtErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(CbrtTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Cbrt(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java index d702e28baf9d8..9b86134711433 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java @@ -72,7 +72,7 @@ public static Iterable parameters() { ); suppliers = anyNullIsNull(true, suppliers); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilErrorTests.java new file mode 100644 index 0000000000000..7e3a170116820 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class CeilErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(CeilTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Ceil(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java index ddc099a2ad0b1..a911c34fd298a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java @@ -66,7 +66,7 @@ public static Iterable parameters() { UNSIGNED_LONG_MAX, List.of() ); - return parameterSuppliersFromTypedDataWithDefaultChecks(false, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosErrorTests.java new file mode 100644 index 0000000000000..d105ddae9ba10 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class CosErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(CosTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Cos(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosTests.java index 47dc99f2c13f9..3281dccff22a1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosTests.java @@ -33,7 +33,7 @@ public static Iterable parameters() { Double.POSITIVE_INFINITY, List.of() ); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshErrorTests.java new file mode 100644 index 0000000000000..5a8154d4014e6 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class CoshErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(CoshTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Cosh(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java index ad4208420f481..1e4cdeada4f1f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java @@ -61,7 +61,7 @@ public static Iterable parameters() { ) ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java index f3922a355180d..fd6eca5894e80 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java @@ -29,7 +29,7 @@ public ETests(@Name("TestCase") Supplier testCaseSupp @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedDataWithDefaultChecks( + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( true, List.of( new TestCaseSupplier( @@ -42,8 +42,7 @@ public static Iterable parameters() { equalTo(Math.E) ) ) - ), - (v, p) -> "" + ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpErrorTests.java new file mode 100644 index 0000000000000..8e33b57563673 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class ExpErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(ExpTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Exp(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpTests.java index bc5faf1b2560d..72b393f194a89 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpTests.java @@ -75,7 +75,7 @@ public static Iterable parameters() { suppliers = anyNullIsNull(true, suppliers); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorErrorTests.java new file mode 100644 index 0000000000000..54d2928204d22 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class FloorErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(FloorTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Floor(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java index 1d35e034de908..feda82c900160 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java @@ -50,7 +50,7 @@ public static Iterable parameters() { Double.POSITIVE_INFINITY, List.of() ); - return parameterSuppliersFromTypedDataWithDefaultChecks(false, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotErrorTests.java new file mode 100644 index 0000000000000..87e135a388c4a --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class HypotErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(HypotTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Hypot(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotTests.java index 0161abc2b9560..f19c02ba94432 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotTests.java @@ -36,7 +36,7 @@ public static Iterable parameters() { Double.POSITIVE_INFINITY, List.of() ); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10ErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10ErrorTests.java new file mode 100644 index 0000000000000..489909ceae3c9 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10ErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class Log10ErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(Log10Tests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Log10(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java index 7942320656f3f..63535f68e38a8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java @@ -124,7 +124,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogErrorTests.java new file mode 100644 index 0000000000000..f308210757bc2 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class LogErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(LogTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Log(source, args.get(0), args.size() == 1 ? 
null : args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java index 0ee277dbcadb2..7d3778346f4ae 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java @@ -187,7 +187,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java index 79742952dbf59..bf44647fbf6c5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java @@ -29,7 +29,7 @@ public PiTests(@Name("TestCase") Supplier testCaseSup @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedDataWithDefaultChecks( + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( true, List.of( new TestCaseSupplier( @@ -42,8 +42,7 @@ public static Iterable parameters() { equalTo(Math.PI) ) ) - ), - (v, p) -> "numeric" + ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowErrorTests.java new file mode 100644 index 0000000000000..f91f8437340ee --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class PowErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(PowTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Pow(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java index 2fc139a5458c3..10e3afc62f889 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java @@ -77,7 +77,7 @@ public static Iterable parameters() { ) ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumErrorTests.java new file mode 100644 index 0000000000000..fc40eefca0f14 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class SignumErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(SignumTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Signum(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java index 4bf1351969d79..0509c853a349c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java @@ -72,7 +72,7 @@ public static Iterable parameters() { suppliers = anyNullIsNull(true, suppliers); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java index d24dcd1f18f8f..37a72c417757d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java @@ -61,7 +61,7 @@ public static Iterable parameters() { ) ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtErrorTests.java new file mode 100644 index 0000000000000..1609b2d27ad53 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class SqrtErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(SqrtTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Sqrt(source, args.getFirst()); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java index 7cba5d6d57d45..f7c5e8b849835 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java @@ -109,7 +109,7 @@ public static Iterable parameters() { "Line -1:-1: java.lang.ArithmeticException: Square root of negative" ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanErrorTests.java new file mode 100644 index 0000000000000..e1ddad987f6a0 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class TanErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(TanTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Tan(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, i) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanTests.java index 995894fec5259..c397bbf90be8b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanTests.java @@ -33,7 +33,7 @@ public static Iterable parameters() { Double.POSITIVE_INFINITY, List.of() ); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhErrorTests.java new file mode 100644 index 0000000000000..5594e6ea77262 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.core.type.DataType;
+import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase;
+import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier;
+import org.hamcrest.Matcher;
+
+import java.util.List;
+import java.util.Set;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class TanhErrorTests extends ErrorsForCasesWithoutExamplesTestCase {
+    @Override
+    protected List cases() {
+        return paramsToSuppliers(TanhTests.parameters());
+    }
+
+    @Override
+    protected Expression build(Source source, List args) {
+        return new Tanh(source, args.get(0));
+    }
+
+    @Override
+    protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) {
+        return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, i) -> "numeric"));
+    }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhTests.java
index 73a86fd5a114c..9e7a70bbca522 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhTests.java
@@ -33,7 +33,7 @@ public static Iterable parameters() {
             Double.POSITIVE_INFINITY,
             List.of()
         );
-        return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "numeric");
+        return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers);
     }

     @Override
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java
index 40e66333f953e..c139ab1d0c2a7 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java
@@ -29,7 +29,7 @@ public TauTests(@Name("TestCase") Supplier testCaseSu

     @ParametersFactory
     public static Iterable parameters() {
-        return parameterSuppliersFromTypedDataWithDefaultChecks(
+        return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(
             true,
             List.of(
                 new TestCaseSupplier(
@@ -42,8 +42,7 @@ public static Iterable parameters() {
                     equalTo(Tau.TAU)
                 )
             )
-            ),
-            (v, p) -> "numeric"
+            )
         );
     }

From 75a0a7057f5d59d5290f96e43e8af743019b6e98 Mon Sep 17 00:00:00 2001
From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Tue, 28 Jan 2025 10:21:54 +1100
Subject: [PATCH 091/383] Mute org.elasticsearch.packaging.test.DockerTests test071BindMountCustomPathWithDifferentUID #120918

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index 572ba6f251b4a..5a3a05412ccde 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -275,6 +275,9 @@ tests:
 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
   method: test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level}
   issue: https://github.com/elastic/elasticsearch/issues/120950
+- class: org.elasticsearch.packaging.test.DockerTests
+  method: test071BindMountCustomPathWithDifferentUID
+  issue: https://github.com/elastic/elasticsearch/issues/120918

 # Examples:
 #
From 2f81efd0543200c4af9b16b91108fed81d063f44 Mon Sep 17 00:00:00 2001
From: Joe Gallo
Date: Mon, 27 Jan 2025 18:23:00 -0500
Subject: [PATCH 092/383] Re-enable geoip FullClusterRestartIT (#120966)

---
 .../ingest-geoip/qa/full-cluster-restart/build.gradle | 3 ---
 .../elasticsearch/ingest/geoip/FullClusterRestartIT.java | 9 ++-------
 2 files changed, 2 insertions(+), 10 deletions(-)

diff --git a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle
index 29cc6d7184bf2..e53e0e080cce6 100644
--- a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle
+++ b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle
@@ -20,9 +20,6 @@ dependencies {
   javaRestTestImplementation(testArtifact(project(":qa:full-cluster-restart"), "javaRestTest"))
 }

-
-// once we are ready to test migrations from 8.x to 9.x, we can set the compatible version to 8.0.0
-// see https://github.com/elastic/elasticsearch/pull/93666
 buildParams.bwcVersions.withWireCompatible(v -> v.before("9.0.0")) { bwcVersion, baseName ->
   tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) {
     usesBwcDistribution(bwcVersion)
diff --git a/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java b/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java
index 392d97321e387..1dfcb524f46a0 100644
--- a/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java
+++ b/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java
@@ -13,9 +13,7 @@
 import com.carrotsearch.randomizedtesting.annotations.Name;

 import org.apache.http.util.EntityUtils;
-import org.apache.lucene.tests.util.LuceneTestCase;
 import org.elasticsearch.client.Request;
-import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.test.cluster.FeatureFlag;
 import org.elasticsearch.test.cluster.local.distribution.DistributionType;
@@ -34,8 +32,6 @@
 import static org.hamcrest.Matchers.contains;

-@UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT)
-@LuceneTestCase.AwaitsFix(bugUrl = "we need to figure out the index migrations here for 9.0")
 public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCase {

     private static final boolean useFixture = Boolean.getBoolean("geoip_use_service") == false;

@@ -46,9 +42,8 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas
         .distribution(DistributionType.DEFAULT)
         .version(getOldClusterTestVersion())
         .nodes(2)
-        .setting("indices.memory.shard_inactive_time", "60m")
-        .setting("xpack.security.enabled", "false")
         .setting("ingest.geoip.downloader.endpoint", () -> fixture.getAddress(), s -> useFixture)
+        .setting("xpack.security.enabled", "false")
         .feature(FeatureFlag.TIME_SERIES_MODE)
         .build();
@@ -97,7 +92,7 @@ public void testGeoIpSystemFeaturesMigration() throws Exception {
         Request migrateSystemFeatures = new Request("POST", "/_migration/system_features");
         assertOK(client().performRequest(migrateSystemFeatures));
-        assertBusy(() -> testCatIndices(".geoip_databases-reindexed-for-8", "my-index-00001"));
+        assertBusy(()
-> testCatIndices(".geoip_databases-reindexed-for-10", "my-index-00001")); assertBusy(() -> testIndexGeoDoc()); Request disableDownloader = new Request("PUT", "/_cluster/settings"); From dddf6481f6231c46218ffc10905302a27ddb1830 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Mon, 27 Jan 2025 15:30:33 -0800 Subject: [PATCH 093/383] Remove unnecessary entitlement (#120959) This removes an unnecessary entitlement from reindex that is causing test failures. --- docs/changelog/120959.yaml | 5 +++++ .../reindex/src/main/plugin-metadata/entitlement-policy.yaml | 2 -- 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/120959.yaml diff --git a/docs/changelog/120959.yaml b/docs/changelog/120959.yaml new file mode 100644 index 0000000000000..2bb01c7f8f5a2 --- /dev/null +++ b/docs/changelog/120959.yaml @@ -0,0 +1,5 @@ +pr: 120959 +summary: Remove unnecessary entitlement +area: Infra/Plugins +type: bug +issues: [] diff --git a/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml b/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml index e9c8a53ef24be..df557f9944253 100644 --- a/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,4 +1,2 @@ ALL-UNNAMED: - outbound_network -org.elasticsearch.painless: - - create_class_loader From 26e5373de1db0a9e7d72cbf79b2a4c6bb7d17cfd Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 10:38:57 +1100 Subject: [PATCH 094/383] Mute org.elasticsearch.packaging.test.DockerTests test171AdditionalCliOptionsAreForwarded #120925 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 5a3a05412ccde..fb670fe2cd916 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -278,6 +278,9 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test071BindMountCustomPathWithDifferentUID issue: https://github.com/elastic/elasticsearch/issues/120918 +- class: org.elasticsearch.packaging.test.DockerTests + method: test171AdditionalCliOptionsAreForwarded + issue: https://github.com/elastic/elasticsearch/issues/120925 # Examples: # From 39def294bdae2c2a5ba80c6c2be25fc42891045c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 17:51:22 +1100 Subject: [PATCH 095/383] Mute org.elasticsearch.xpack.inference.InferenceGetServicesIT org.elasticsearch.xpack.inference.InferenceGetServicesIT #120986 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index fb670fe2cd916..d8cc9fd2f0547 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -281,6 +281,8 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test171AdditionalCliOptionsAreForwarded issue: https://github.com/elastic/elasticsearch/issues/120925 +- class: org.elasticsearch.xpack.inference.InferenceGetServicesIT + issue: https://github.com/elastic/elasticsearch/issues/120986 # Examples: # From ddb97cc0422e5eb5d8f266853b86aefe4636bd9e Mon Sep 17 00:00:00 2001 From: Navarone Feekery <13634519+navarone-feekery@users.noreply.github.com> Date: Tue, 28 Jan 2025 08:11:51 +0100 Subject: [PATCH 096/383] Revert "[Search] Add system index descriptors to Connector indices (#118991)" (#120951) This reverts commit 385e1fdf21fb30c20adf94c29f35703b344b97f5. 
--- .../xpack/core/ClientHelper.java | 3 - .../elastic-connectors-mappings.json} | 51 +++---- .../elastic-connectors-settings.json | 14 ++ ...lastic-connectors-sync-jobs-mappings.json} | 30 ++-- .../elastic-connectors-sync-jobs.json | 14 ++ .../connector/elastic-connectors.json | 14 ++ .../xpack/application/EnterpriseSearch.java | 9 +- .../connector/ConnectorIndexService.java | 50 +----- .../connector/ConnectorTemplateRegistry.java | 81 +++++++++- .../syncjob/ConnectorSyncJobIndexService.java | 47 +----- .../connector/ConnectorIndexServiceTests.java | 25 +-- .../ConnectorTemplateRegistryTests.java | 144 +++++++++++++++++- .../connector/ConnectorTestUtils.java | 53 ++++++- .../ConnectorSyncJobIndexServiceTests.java | 44 ++---- .../syncjob/ConnectorSyncJobTestUtils.java | 26 ++-- 15 files changed, 375 insertions(+), 230 deletions(-) rename x-pack/plugin/core/template-resources/src/main/resources/{elastic-connectors.json => entsearch/connector/elastic-connectors-mappings.json} (92%) create mode 100644 x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-settings.json rename x-pack/plugin/core/template-resources/src/main/resources/{elastic-connectors-sync-jobs.json => entsearch/connector/elastic-connectors-sync-jobs-mappings.json} (88%) create mode 100644 x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs.json create mode 100644 x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors.json diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java index 680b72cb970c9..9a0d1a58a30a1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java @@ -196,9 +196,6 @@ private static String maybeRewriteSingleAuthenticationHeaderForVersion( public static final String APM_ORIGIN = "apm"; public static final String OTEL_ORIGIN = "otel"; public static final String REINDEX_DATA_STREAM_ORIGIN = "reindex_data_stream"; - // TODO consolidate the Kibana origin with the one defined in org/elasticsearch/kibana/KibanaPlugin.java - public static final String KIBANA_ORIGIN = "kibana"; - public static final String CLOUD_ORIGIN = "cloud"; private ClientHelper() {} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/elastic-connectors.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json similarity index 92% rename from x-pack/plugin/core/template-resources/src/main/resources/elastic-connectors.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json index a98018e76f0e0..25409dbf8460e 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/elastic-connectors.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json @@ -1,35 +1,29 @@ { - "settings": { - "index": { - "number_of_shards": "1", - "auto_expand_replicas": "0-1" - } - }, - "mappings": { - "_doc": { - "dynamic": "strict", + "template": { + "aliases": { + ".elastic-connectors": {} + }, + "mappings": { + "dynamic": "false", "_meta": { - "version": "${elastic-connectors.version}", - "managed_index_mappings_version": ${elastic-connectors.managed.index.version} + "pipeline": { + 
"default_name": "search-default-ingestion", + "default_extract_binary_content": true, + "default_run_ml_inference": true, + "default_reduce_whitespace": true + }, + "version": ${xpack.application.connector.template.version} }, "properties": { "api_key_id": { "type": "keyword" }, - "api_key_secret_id": { - "type": "keyword" - }, "configuration": { - "dynamic": "false", "type": "object" }, "custom_scheduling": { - "dynamic": "false", "type": "object" }, - "deleted": { - "type": "boolean" - }, "description": { "type": "text" }, @@ -37,7 +31,6 @@ "type": "keyword" }, "features": { - "dynamic": "false", "properties": { "filtering_advanced_config": { "type": "boolean" @@ -73,7 +66,6 @@ } }, "filtering": { - "dynamic": "false", "properties": { "active": { "properties": { @@ -86,7 +78,6 @@ "type": "date" }, "value": { - "dynamic": "false", "type": "object" } } @@ -152,7 +143,6 @@ "type": "date" }, "value": { - "dynamic": "false", "type": "object" } } @@ -252,7 +242,6 @@ "type": "keyword" }, "pipeline": { - "dynamic": "false", "properties": { "extract_binary_content": { "type": "boolean" @@ -269,7 +258,6 @@ } }, "scheduling": { - "dynamic": "false", "properties": { "access_control": { "properties": { @@ -310,13 +298,22 @@ "type": "keyword" }, "sync_cursor": { - "dynamic": "false", "type": "object" }, "sync_now": { "type": "boolean" + }, + "deleted": { + "type": "boolean" } } } - } + }, + "_meta": { + "description": "Built-in mappings applied by default to elastic-connectors indices", + "managed": true + }, + "version": ${xpack.application.connector.template.version} } + + diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-settings.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-settings.json new file mode 100644 index 0000000000000..6ff9510574281 --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-settings.json @@ -0,0 +1,14 @@ +{ + "template": { + "settings": { + "hidden": true, + "number_of_shards": "1", + "auto_expand_replicas": "0-1" + } + }, + "_meta": { + "description": "Built-in settings applied by default to connector management indices", + "managed": true + }, + "version": ${xpack.application.connector.template.version} +} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/elastic-connectors-sync-jobs.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs-mappings.json similarity index 88% rename from x-pack/plugin/core/template-resources/src/main/resources/elastic-connectors-sync-jobs.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs-mappings.json index 7d1e7fa3a0418..4dd6e0681c7cc 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/elastic-connectors-sync-jobs.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs-mappings.json @@ -1,16 +1,12 @@ { - "settings": { - "index": { - "number_of_shards": "1", - "auto_expand_replicas": "0-1" - } - }, - "mappings": { - "_doc": { - "dynamic": "strict", + "template": { + "aliases": { + ".elastic-connectors-sync-jobs": {} + }, + "mappings": { + "dynamic": "false", "_meta": { - "version": "${elastic-connectors-sync-jobs.version}", - "managed_index_mappings_version": ${elastic-connectors-sync-jobs.managed.index.version} + "version": 
${xpack.application.connector.template.version} }, "properties": { "cancelation_requested_at": { @@ -25,11 +21,9 @@ "connector": { "properties": { "configuration": { - "dynamic": "false", "type": "object" }, "filtering": { - "dynamic": "false", "properties": { "advanced_snippet": { "properties": { @@ -97,7 +91,6 @@ "type": "keyword" }, "pipeline": { - "dynamic": "false", "properties": { "extract_binary_content": { "type": "boolean" @@ -117,7 +110,6 @@ "type": "keyword" }, "sync_cursor": { - "dynamic": "false", "type": "object" } } @@ -144,7 +136,6 @@ "type": "date" }, "metadata": { - "dynamic": "false", "type": "object" }, "started_at": { @@ -164,5 +155,10 @@ } } } - } + }, + "_meta": { + "description": "Built-in mappings applied by default to elastic-connectors indices", + "managed": true + }, + "version": ${xpack.application.connector.template.version} } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs.json new file mode 100644 index 0000000000000..db5404a30c6e4 --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs.json @@ -0,0 +1,14 @@ +{ + "index_patterns": ["${connectors-sync-jobs.index_pattern}"], + "priority": 100, + "composed_of": [ + "elastic-connectors-settings", + "elastic-connectors-sync-jobs-mappings" + ], + "allow_auto_create": true, + "_meta": { + "description": "Built-in template for elastic-connectors-sync-jobs", + "managed": true + }, + "version": ${xpack.application.connector.template.version} +} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors.json new file mode 100644 index 0000000000000..17c0b1eef0610 --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors.json @@ -0,0 +1,14 @@ +{ + "index_patterns": ["${connectors.index_pattern}"], + "priority": 100, + "composed_of": [ + "elastic-connectors-settings", + "elastic-connectors-mappings" + ], + "allow_auto_create": true, + "_meta": { + "description": "Built-in template for elastic-connectors", + "managed": true + }, + "version": ${xpack.application.connector.template.version} +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 4142d907d0c5c..df1c76ccf770f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -46,7 +46,6 @@ import org.elasticsearch.xpack.application.analytics.action.TransportPutAnalyticsCollectionAction; import org.elasticsearch.xpack.application.analytics.ingest.AnalyticsEventIngestConfig; import org.elasticsearch.xpack.application.connector.ConnectorAPIFeature; -import org.elasticsearch.xpack.application.connector.ConnectorIndexService; import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import org.elasticsearch.xpack.application.connector.action.DeleteConnectorAction; import org.elasticsearch.xpack.application.connector.action.GetConnectorAction; @@ -125,7 +124,6 @@ import 
org.elasticsearch.xpack.application.connector.secrets.action.TransportGetConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.TransportPostConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.TransportPutConnectorSecretAction; -import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.ClaimConnectorSyncJobAction; @@ -479,12 +477,7 @@ public Collection createComponents(PluginServices services) { @Override public Collection getSystemIndexDescriptors(Settings settings) { Collection systemIndices = new ArrayList<>( - List.of( - SearchApplicationIndexService.getSystemIndexDescriptor(), - QueryRulesIndexService.getSystemIndexDescriptor(), - ConnectorSyncJobIndexService.getSystemIndexDescriptor(), - ConnectorIndexService.getSystemIndexDescriptor() - ) + List.of(SearchApplicationIndexService.getSystemIndexDescriptor(), QueryRulesIndexService.getSystemIndexDescriptor()) ); if (ConnectorSecretsFeature.isEnabled()) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index a9ca8552feeea..3120124c17523 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -10,12 +10,10 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; @@ -35,7 +33,6 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.query.WildcardQueryBuilder; -import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; @@ -62,7 +59,6 @@ import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationState; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; -import org.elasticsearch.xpack.core.template.TemplateUtils; import java.time.Instant; import java.util.ArrayList; @@ -80,7 +76,6 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.application.connector.ConnectorFiltering.fromXContentBytesConnectorFiltering; import static org.elasticsearch.xpack.application.connector.ConnectorFiltering.sortFilteringRulesByOrder; -import static 
org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTORS_ALLOWED_PRODUCT_ORIGINS; import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.MANAGED_CONNECTOR_INDEX_PREFIX; import static org.elasticsearch.xpack.core.ClientHelper.CONNECTORS_ORIGIN; @@ -92,20 +87,7 @@ public class ConnectorIndexService { // The client to interact with the system index (internal user). private final Client clientWithOrigin; - // TODO use proper version IDs (see org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java) - // TODO if this version is updated, a test should be added to - // javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java - private static final int CONNECTORS_INDEX_VERSION = 1; - // TODO rename to CONNECTOR_ALIAS_NAME - public static final String CONNECTOR_INDEX_NAME = ".elastic-connectors"; - public static final String CONNECTOR_INDEX_PREFIX = ".elastic-connectors-v"; - public static final String CONNECTOR_CONCRETE_INDEX_NAME = CONNECTOR_INDEX_PREFIX + CONNECTORS_INDEX_VERSION; - // The index pattern needs a stricter regex to prevent conflicts with .elastic-connectors-sync-jobs - - public static final String CONNECTOR_INDEX_NAME_PATTERN = CONNECTOR_INDEX_PREFIX + "*"; - - private static final String CONNECTORS_MAPPING_VERSION_VARIABLE = "elastic-connectors.version"; - private static final String CONNECTORS_MAPPING_MANAGED_VERSION_VARIABLE = "elastic-connectors.managed.index.version"; + public static final String CONNECTOR_INDEX_NAME = ConnectorTemplateRegistry.CONNECTOR_INDEX_NAME_PATTERN; /** * @param client A client for executing actions on the connector index @@ -114,36 +96,6 @@ public ConnectorIndexService(Client client) { this.clientWithOrigin = new OriginSettingClient(client, CONNECTORS_ORIGIN); } - /** - * Returns the {@link SystemIndexDescriptor} for the Connector system index. - * - * @return The {@link SystemIndexDescriptor} for the Connector system index. - */ - public static SystemIndexDescriptor getSystemIndexDescriptor() { - PutIndexTemplateRequest request = new PutIndexTemplateRequest(); - String templateSource = TemplateUtils.loadTemplate( - "/elastic-connectors.json", - Version.CURRENT.toString(), - CONNECTORS_MAPPING_VERSION_VARIABLE, - Map.of(CONNECTORS_MAPPING_MANAGED_VERSION_VARIABLE, Integer.toString(CONNECTORS_INDEX_VERSION)) - ); - request.source(templateSource, XContentType.JSON); - - // The index pattern needs a stricter regex to prevent conflicts with .elastic-connectors-sync-jobs - return SystemIndexDescriptor.builder() - .setIndexPattern(CONNECTOR_INDEX_NAME_PATTERN) - .setPrimaryIndex(CONNECTOR_CONCRETE_INDEX_NAME) - .setAliasName(CONNECTOR_INDEX_NAME) - .setDescription("Search connectors") - .setMappings(request.mappings()) - .setSettings(request.settings()) - .setOrigin(CONNECTORS_ORIGIN) - .setType(SystemIndexDescriptor.Type.EXTERNAL_MANAGED) - .setAllowedElasticProductOrigins(CONNECTORS_ALLOWED_PRODUCT_ORIGINS) - .setNetNew() - .build(); - } - /** * Creates or updates the {@link Connector} in the underlying index with a specific doc ID * if connectorId is provided. Otherwise, the connector doc is indexed with auto-generated doc ID. 
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java index 97ac05c443ad0..fd35acc89db5c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java @@ -8,23 +8,25 @@ package org.elasticsearch.xpack.application.connector; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.template.IndexTemplateConfig; import org.elasticsearch.xpack.core.template.IndexTemplateRegistry; import org.elasticsearch.xpack.core.template.IngestPipelineConfig; import org.elasticsearch.xpack.core.template.JsonIngestPipelineConfig; +import java.io.IOException; +import java.util.HashMap; import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.core.ClientHelper.CLOUD_ORIGIN; -import static org.elasticsearch.xpack.core.ClientHelper.CONNECTORS_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN; -import static org.elasticsearch.xpack.core.ClientHelper.KIBANA_ORIGIN; public class ConnectorTemplateRegistry extends IndexTemplateRegistry { @@ -32,6 +34,13 @@ public class ConnectorTemplateRegistry extends IndexTemplateRegistry { static final int REGISTRY_VERSION = 3; // Connector indices constants + + public static final String CONNECTOR_INDEX_NAME_PATTERN = ".elastic-connectors-v1"; + public static final String CONNECTOR_TEMPLATE_NAME = "elastic-connectors"; + + public static final String CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN = ".elastic-connectors-sync-jobs-v1"; + public static final String CONNECTOR_SYNC_JOBS_TEMPLATE_NAME = "elastic-connectors-sync-jobs"; + public static final String ACCESS_CONTROL_INDEX_PREFIX = ".search-acl-filter-"; public static final String ACCESS_CONTROL_INDEX_NAME_PATTERN = ".search-acl-filter-*"; public static final String ACCESS_CONTROL_TEMPLATE_NAME = "search-acl-filter"; @@ -49,8 +58,51 @@ public class ConnectorTemplateRegistry extends IndexTemplateRegistry { // Variable used to replace template version in index templates public static final String TEMPLATE_VERSION_VARIABLE = "xpack.application.connector.template.version"; - // Sources allowed to access system indices using X-elastic-product-origin header - public static final List CONNECTORS_ALLOWED_PRODUCT_ORIGINS = List.of(KIBANA_ORIGIN, CONNECTORS_ORIGIN, CLOUD_ORIGIN); + private static final String MAPPINGS_SUFFIX = "-mappings"; + + private static final String SETTINGS_SUFFIX = "-settings"; + + private static final String JSON_EXTENSION = ".json"; + + static final Map COMPONENT_TEMPLATES; + + static { + final Map componentTemplates = new HashMap<>(); + for (IndexTemplateConfig config : List.of( + new IndexTemplateConfig( + CONNECTOR_TEMPLATE_NAME + MAPPINGS_SUFFIX, + ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_TEMPLATE_NAME + 
MAPPINGS_SUFFIX + JSON_EXTENSION, + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + CONNECTOR_TEMPLATE_NAME + SETTINGS_SUFFIX, + ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_TEMPLATE_NAME + SETTINGS_SUFFIX + JSON_EXTENSION, + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + MAPPINGS_SUFFIX, + ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + MAPPINGS_SUFFIX + JSON_EXTENSION, + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + SETTINGS_SUFFIX, + ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_TEMPLATE_NAME + SETTINGS_SUFFIX + JSON_EXTENSION, + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ) + )) { + + try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, config.loadBytes())) { + componentTemplates.put(config.getTemplateName(), ComponentTemplate.parse(parser)); + } catch (IOException e) { + throw new AssertionError(e); + } + } + COMPONENT_TEMPLATES = Map.copyOf(componentTemplates); + } @Override protected List getIngestPipelines() { @@ -65,6 +117,20 @@ protected List getIngestPipelines() { } static final Map COMPOSABLE_INDEX_TEMPLATES = parseComposableTemplates( + new IndexTemplateConfig( + CONNECTOR_TEMPLATE_NAME, + ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_TEMPLATE_NAME + ".json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE, + Map.of("connectors.index_pattern", CONNECTOR_INDEX_NAME_PATTERN) + ), + new IndexTemplateConfig( + CONNECTOR_SYNC_JOBS_TEMPLATE_NAME, + ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + ".json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE, + Map.of("connectors-sync-jobs.index_pattern", CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN) + ), new IndexTemplateConfig( ACCESS_CONTROL_TEMPLATE_NAME, ROOT_TEMPLATE_RESOURCE_PATH + ACCESS_CONTROL_TEMPLATE_NAME + ".json", @@ -88,6 +154,11 @@ protected String getOrigin() { return ENT_SEARCH_ORIGIN; } + @Override + protected Map getComponentTemplateConfigs() { + return COMPONENT_TEMPLATES; + } + @Override protected Map getComposableTemplateConfigs() { return COMPOSABLE_INDEX_TEMPLATES; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 85de2f900ddff..f46d915a7123f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -11,12 +11,10 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; @@ -42,7 +40,6 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import 
org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; -import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -52,10 +49,10 @@ import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; +import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import org.elasticsearch.xpack.application.connector.filtering.FilteringRules; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; -import org.elasticsearch.xpack.core.template.TemplateUtils; import java.io.IOException; import java.time.Instant; @@ -72,7 +69,6 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.application.connector.ConnectorIndexService.CONNECTOR_INDEX_NAME; -import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTORS_ALLOWED_PRODUCT_ORIGINS; import static org.elasticsearch.xpack.core.ClientHelper.CONNECTORS_ORIGIN; /** @@ -85,17 +81,7 @@ public class ConnectorSyncJobIndexService { // The client to interact with the system index (internal user). private final Client clientWithOrigin; - // TODO use proper version IDs (see org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java) - // TODO if this version is updated, a test should be added to - // javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java - private static final int CONNECTOR_SYNC_JOB_INDEX_VERSION = 1; - public static final String CONNECTOR_SYNC_JOB_INDEX_NAME = ".elastic-connectors-sync-jobs"; - public static final String CONNECTOR_SYNC_JOB_INDEX_PREFIX = ".elastic-connectors-sync-jobs-v"; - public static final String CONNECTOR_SYNC_JOB_CONCRETE_INDEX_NAME = CONNECTOR_SYNC_JOB_INDEX_PREFIX + CONNECTOR_SYNC_JOB_INDEX_VERSION; - public static final String CONNECTOR_SYNC_JOB_INDEX_NAME_PATTERN = CONNECTOR_SYNC_JOB_INDEX_NAME + "*"; - - private static final String CONNECTOR_SYNC_JOB_MAPPING_VERSION_VARIABLE = "elastic-connectors-sync-jobs.version"; - private static final String CONNECTOR_SYNC_JOB_MAPPING_MANAGED_VERSION_VARIABLE = "elastic-connectors-sync-jobs.managed.index.version"; + public static final String CONNECTOR_SYNC_JOB_INDEX_NAME = ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN; /** * @param client A client for executing actions on the connectors sync jobs index. @@ -104,35 +90,6 @@ public ConnectorSyncJobIndexService(Client client) { this.clientWithOrigin = new OriginSettingClient(client, CONNECTORS_ORIGIN); } - /** - * Returns the {@link SystemIndexDescriptor} for the Connector system index. - * - * @return The {@link SystemIndexDescriptor} for the Connector system index. 
- */ - public static SystemIndexDescriptor getSystemIndexDescriptor() { - PutIndexTemplateRequest request = new PutIndexTemplateRequest(); - String templateSource = TemplateUtils.loadTemplate( - "/elastic-connectors-sync-jobs.json", - Version.CURRENT.toString(), - CONNECTOR_SYNC_JOB_MAPPING_VERSION_VARIABLE, - Map.of(CONNECTOR_SYNC_JOB_MAPPING_MANAGED_VERSION_VARIABLE, Integer.toString(CONNECTOR_SYNC_JOB_INDEX_VERSION)) - ); - request.source(templateSource, XContentType.JSON); - - return SystemIndexDescriptor.builder() - .setIndexPattern(CONNECTOR_SYNC_JOB_INDEX_NAME_PATTERN) - .setPrimaryIndex(CONNECTOR_SYNC_JOB_CONCRETE_INDEX_NAME) - .setAliasName(CONNECTOR_SYNC_JOB_INDEX_NAME) - .setDescription("Search connectors sync jobs") - .setMappings(request.mappings()) - .setSettings(request.settings()) - .setOrigin(CONNECTORS_ORIGIN) - .setType(SystemIndexDescriptor.Type.EXTERNAL_MANAGED) - .setAllowedElasticProductOrigins(CONNECTORS_ALLOWED_PRODUCT_ORIGINS) - .setNetNew() - .build(); - } - /** * @param request Request for creating a connector sync job. * @param listener Listener to respond to a successful response or an error. diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 53a8c7ac96944..7b6d9c9b14df9 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -14,9 +14,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; -import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.MockScriptPlugin; @@ -61,6 +59,7 @@ import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.getRandomConnectorFeatures; import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.getRandomCronExpression; import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.randomConnectorFeatureEnabled; +import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.registerSimplifiedConnectorIndexTemplates; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; @@ -73,6 +72,7 @@ public class ConnectorIndexServiceTests extends ESSingleNodeTestCase { @Before public void setup() { + registerSimplifiedConnectorIndexTemplates(indicesAdmin()); this.connectorIndexService = new ConnectorIndexService(client()); } @@ -80,7 +80,6 @@ public void setup() { protected Collection> getPlugins() { List> plugins = new ArrayList<>(super.getPlugins()); plugins.add(MockPainlessScriptEngine.TestPlugin.class); - plugins.add(ConnectorIndexServiceTests.TestPlugin.class); return plugins; } @@ -1613,24 +1612,4 @@ public void execute() { } } - /** - * Test plugin to register the {@link ConnectorIndexService} system index descriptor. 
- */ - public static class TestPlugin extends Plugin implements SystemIndexPlugin { - @Override - public Collection getSystemIndexDescriptors(Settings settings) { - return List.of(ConnectorIndexService.getSystemIndexDescriptor()); - } - - @Override - public String getFeatureName() { - return this.getClass().getSimpleName(); - } - - @Override - public String getFeatureDescription() { - return this.getClass().getCanonicalName(); - } - } - } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java index 89bdabe78300c..068b99626af9d 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java @@ -55,13 +55,15 @@ import java.util.stream.Collectors; import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.ACCESS_CONTROL_INDEX_NAME_PATTERN; +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_INDEX_NAME_PATTERN; +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.oneOf; import static org.hamcrest.Matchers.sameInstance; -import static org.junit.Assert.assertNotNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; @@ -90,6 +92,14 @@ public void testThatNonExistingComposableTemplatesAreAddedImmediately() throws E DiscoveryNode node = DiscoveryNodeUtils.create("node"); DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); Map existingComponentTemplates = Map.of( + ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-mappings", + ConnectorTemplateRegistry.REGISTRY_VERSION, + ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-settings", + ConnectorTemplateRegistry.REGISTRY_VERSION, + ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-mappings", + ConnectorTemplateRegistry.REGISTRY_VERSION, + ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-settings", + ConnectorTemplateRegistry.REGISTRY_VERSION, ConnectorTemplateRegistry.ACCESS_CONTROL_TEMPLATE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION ); @@ -115,6 +125,131 @@ public void testThatNonExistingComposableTemplatesAreAddedImmediately() throws E }); } + public void testThatNonExistingComponentTemplatesAreAddedImmediately() throws Exception { + DiscoveryNode node = DiscoveryNodeUtils.create("node"); + DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); + + ClusterChangedEvent event = createClusterChangedEvent( + Collections.emptyMap(), + Collections.emptyMap(), + Collections.singletonMap(ConnectorTemplateRegistry.SEARCH_DEFAULT_PIPELINE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION), + Collections.emptyMap(), + nodes + ); + + AtomicInteger calledTimes = new AtomicInteger(0); + client.setVerifier((action, request, listener) -> 
verifyComponentTemplateInstalled(calledTimes, action, request, listener)); + registry.clusterChanged(event); + assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getComponentTemplateConfigs().size()))); + + calledTimes.set(0); + + // attempting to register the event multiple times as a race condition can yield this test flaky, namely: + // when calling registry.clusterChanged(newEvent) the templateCreationsInProgress state that the IndexTemplateRegistry maintains + // might've not yet been updated to reflect that the first template registration was complete, so a second template registration + // will not be issued anymore, leaving calledTimes to 0 + assertBusy(() -> { + // now delete all templates from the cluster state and let's retry + ClusterChangedEvent newEvent = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes); + registry.clusterChanged(newEvent); + assertThat(calledTimes.get(), greaterThan(4)); + }); + } + + public void testThatVersionedOldComponentTemplatesAreUpgraded() throws Exception { + DiscoveryNode node = DiscoveryNodeUtils.create("node"); + DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); + + ClusterChangedEvent event = createClusterChangedEvent( + Collections.emptyMap(), + Collections.singletonMap( + ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-settings", + ConnectorTemplateRegistry.REGISTRY_VERSION - 1 + ), + Collections.singletonMap(ConnectorTemplateRegistry.SEARCH_DEFAULT_PIPELINE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION), + Collections.emptyMap(), + nodes + ); + AtomicInteger calledTimes = new AtomicInteger(0); + client.setVerifier((action, request, listener) -> verifyComponentTemplateInstalled(calledTimes, action, request, listener)); + registry.clusterChanged(event); + assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getComponentTemplateConfigs().size()))); + } + + public void testThatUnversionedOldComponentTemplatesAreUpgraded() throws Exception { + DiscoveryNode node = DiscoveryNodeUtils.create("node"); + DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); + + ClusterChangedEvent event = createClusterChangedEvent( + Collections.emptyMap(), + Collections.singletonMap(ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-mappings", null), + Collections.singletonMap(ConnectorTemplateRegistry.SEARCH_DEFAULT_PIPELINE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION), + Collections.emptyMap(), + nodes + ); + AtomicInteger calledTimes = new AtomicInteger(0); + client.setVerifier((action, request, listener) -> verifyComponentTemplateInstalled(calledTimes, action, request, listener)); + registry.clusterChanged(event); + assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getComponentTemplateConfigs().size()))); + } + + public void testSameOrHigherVersionComponentTemplateNotUpgraded() { + DiscoveryNode node = DiscoveryNodeUtils.create("node"); + DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); + + Map versions = new HashMap<>(); + versions.put(ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-mappings", ConnectorTemplateRegistry.REGISTRY_VERSION); + versions.put(ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-settings", ConnectorTemplateRegistry.REGISTRY_VERSION); + versions.put(ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-mappings", ConnectorTemplateRegistry.REGISTRY_VERSION); + 
versions.put(ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-settings", ConnectorTemplateRegistry.REGISTRY_VERSION); + versions.put(ConnectorTemplateRegistry.ACCESS_CONTROL_TEMPLATE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION); + ClusterChangedEvent sameVersionEvent = createClusterChangedEvent(Collections.emptyMap(), versions, nodes); + client.setVerifier((action, request, listener) -> { + if (action == PutPipelineTransportAction.TYPE) { + // Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } + if (action instanceof PutComponentTemplateAction) { + fail("template should not have been re-installed"); + return null; + } else if (action == ILMActions.PUT) { + // Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } else if (action == TransportPutComposableIndexTemplateAction.TYPE) { + // Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } else { + fail("client called with unexpected request:" + request.toString()); + return null; + } + }); + registry.clusterChanged(sameVersionEvent); + + versions.clear(); + versions.put( + ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-mappings", + ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) + ); + versions.put( + ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-settings", + ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) + ); + versions.put( + ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-mappings", + ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) + ); + versions.put( + ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-settings", + ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) + ); + versions.put( + ConnectorTemplateRegistry.ACCESS_CONTROL_TEMPLATE_NAME, + ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) + ); + ClusterChangedEvent higherVersionEvent = createClusterChangedEvent(Collections.emptyMap(), versions, nodes); + registry.clusterChanged(higherVersionEvent); + } + public void testThatMissingMasterNodeDoesNothing() { DiscoveryNode localNode = DiscoveryNodeUtils.create("node"); DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").add(localNode).build(); @@ -125,7 +260,7 @@ public void testThatMissingMasterNodeDoesNothing() { }); ClusterChangedEvent event = createClusterChangedEvent( - Collections.singletonMap(ConnectorTemplateRegistry.ACCESS_CONTROL_TEMPLATE_NAME, null), + Collections.singletonMap(ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME, null), Collections.emptyMap(), nodes ); @@ -222,7 +357,10 @@ private ActionResponse verifyComposableTemplateInstalled( assertThat(putRequest.indexTemplate().version(), equalTo((long) ConnectorTemplateRegistry.REGISTRY_VERSION)); final List indexPatterns = putRequest.indexTemplate().indexPatterns(); assertThat(indexPatterns, hasSize(1)); - assertThat(indexPatterns, contains(ACCESS_CONTROL_INDEX_NAME_PATTERN)); + assertThat( + indexPatterns, + contains(oneOf(ACCESS_CONTROL_INDEX_NAME_PATTERN, CONNECTOR_INDEX_NAME_PATTERN, CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN)) + ); assertNotNull(listener); return new TestPutIndexTemplateResponse(true); } else { diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index 3f2f47e190882..c563bc0a14ee3 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.client.internal.IndicesAdminClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.XContentType; @@ -26,6 +27,7 @@ import org.elasticsearch.xpack.application.connector.filtering.FilteringValidation; import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationInfo; import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationState; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobType; import org.elasticsearch.xpack.core.scheduler.Cron; @@ -45,14 +47,55 @@ import static org.elasticsearch.test.ESTestCase.randomFrom; import static org.elasticsearch.test.ESTestCase.randomInt; import static org.elasticsearch.test.ESTestCase.randomList; +import static org.elasticsearch.test.ESTestCase.randomLong; import static org.elasticsearch.test.ESTestCase.randomLongBetween; -import static org.elasticsearch.test.ESTestCase.randomNonNegativeLong; -import static org.elasticsearch.test.ESTestCase.randomShort; +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_INDEX_NAME_PATTERN; +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN; +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME; +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME; public final class ConnectorTestUtils { public static final String NULL_STRING = null; + /** + * Registers index templates for instances of {@link Connector} and {@link ConnectorSyncJob} with essential field mappings. This method + * only includes mappings for fields relevant to test cases, specifying field types to ensure correct ES query logic behavior. + * + * @param indicesAdminClient The Elasticsearch indices admin client used for template registration. 
+ */ + + public static void registerSimplifiedConnectorIndexTemplates(IndicesAdminClient indicesAdminClient) { + + indicesAdminClient.preparePutTemplate(CONNECTOR_TEMPLATE_NAME) + .setPatterns(List.of(CONNECTOR_INDEX_NAME_PATTERN)) + .setVersion(0) + .setMapping( + "service_type", + "type=keyword,store=true", + "status", + "type=keyword,store=true", + "index_name", + "type=keyword,store=true", + "configuration", + "type=object" + ) + .get(); + + indicesAdminClient.preparePutTemplate(CONNECTOR_SYNC_JOBS_TEMPLATE_NAME) + .setPatterns(List.of(CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN)) + .setVersion(0) + .setMapping( + "job_type", + "type=keyword,store=true", + "connector.id", + "type=keyword,store=true", + "status", + "type=keyword,store=true" + ) + .get(); + } + public static PutConnectorAction.Request getRandomPutConnectorActionRequest() { return new PutConnectorAction.Request( randomAlphaOfLengthBetween(5, 15), @@ -101,9 +144,9 @@ public static ConnectorSyncInfo getRandomConnectorSyncInfo() { return new ConnectorSyncInfo.Builder().setLastAccessControlSyncError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setLastAccessControlSyncScheduledAt(randomFrom(new Instant[] { null, ConnectorTestUtils.randomInstant() })) .setLastAccessControlSyncStatus(randomFrom(new ConnectorSyncStatus[] { null, getRandomSyncStatus() })) - .setLastDeletedDocumentCount(randomNonNegativeLong()) + .setLastDeletedDocumentCount(randomLong()) .setLastIncrementalSyncScheduledAt(randomFrom(new Instant[] { null, ConnectorTestUtils.randomInstant() })) - .setLastIndexedDocumentCount(randomNonNegativeLong()) + .setLastIndexedDocumentCount(randomLong()) .setLastSyncError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setLastSyncScheduledAt(randomFrom(new Instant[] { null, ConnectorTestUtils.randomInstant() })) .setLastSyncStatus(randomFrom(new ConnectorSyncStatus[] { null, getRandomSyncStatus() })) @@ -154,7 +197,7 @@ private static FilteringValidation getRandomFilteringValidationError() { public static ConnectorFiltering getRandomConnectorFiltering() { Instant currentTimestamp = Instant.now(); - int order = randomShort(); + int order = randomInt(); return new ConnectorFiltering.Builder().setActive( new FilteringRules.Builder().setAdvancedSnippet( diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index fe6d97a871e0c..f6c0a54f107b4 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -20,11 +20,8 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.reindex.BulkByScrollResponse; -import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -61,6 +58,7 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; 
import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.ACCESS_CONTROL_INDEX_PREFIX; +import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.registerSimplifiedConnectorIndexTemplates; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -88,12 +86,14 @@ protected Collection> getPlugins() { List> plugins = new ArrayList<>(super.getPlugins()); // Reindex plugin is required for testDeleteAllSyncJobsByConnectorId (supports delete_by_query) plugins.add(ReindexPlugin.class); - plugins.add(TestPlugin.class); return plugins; } @Before public void setup() throws Exception { + + registerSimplifiedConnectorIndexTemplates(indicesAdmin()); + connectorOneId = createConnector(ConnectorTestUtils.getRandomConnector()); connectorTwoId = createConnector(ConnectorTestUtils.getRandomConnector()); connectorThreeId = createConnector(ConnectorTestUtils.getRandomConnectorWithDetachedIndex()); @@ -805,18 +805,18 @@ public void testUpdateConnectorSyncJobIngestionStats() throws Exception { Instant requestLastSeen = request.getLastSeen(); Map metadata = request.getMetadata(); - Long deletedDocumentCountAfterUpdate = ((Number) syncJobSourceAfterUpdate.get( + Long deletedDocumentCountAfterUpdate = (Long) syncJobSourceAfterUpdate.get( ConnectorSyncJob.DELETED_DOCUMENT_COUNT_FIELD.getPreferredName() - )).longValue(); - Long indexedDocumentCountAfterUpdate = ((Number) syncJobSourceAfterUpdate.get( + ); + Long indexedDocumentCountAfterUpdate = (Long) syncJobSourceAfterUpdate.get( ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName() - )).longValue(); - Long indexedDocumentVolumeAfterUpdate = ((Number) syncJobSourceAfterUpdate.get( + ); + Long indexedDocumentVolumeAfterUpdate = (Long) syncJobSourceAfterUpdate.get( ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName() - )).longValue(); - Long totalDocumentCountAfterUpdate = ((Number) syncJobSourceAfterUpdate.get( + ); + Long totalDocumentCountAfterUpdate = (Long) syncJobSourceAfterUpdate.get( ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName() - )).longValue(); + ); Instant lastSeenAfterUpdate = Instant.parse( (String) syncJobSourceAfterUpdate.get(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName()) ); @@ -1411,24 +1411,4 @@ private String updateConnectorSyncJobStatusWithoutStateMachineGuard(String syncJ // wait 10 seconds for connector creation return index.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).getId(); } - - /** - * Test plugin to register the {@link ConnectorSyncJobIndexService} system index descriptor. 
- */ - public static class TestPlugin extends Plugin implements SystemIndexPlugin { - @Override - public Collection getSystemIndexDescriptors(Settings settings) { - return List.of(ConnectorSyncJobIndexService.getSystemIndexDescriptor()); - } - - @Override - public String getFeatureName() { - return this.getClass().getSimpleName(); - } - - @Override - public String getFeatureDescription() { - return this.getClass().getCanonicalName(); - } - } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index 1e6426051e04b..e72bf04fb7e55 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -36,7 +36,7 @@ import static org.elasticsearch.test.ESTestCase.randomInt; import static org.elasticsearch.test.ESTestCase.randomLong; import static org.elasticsearch.test.ESTestCase.randomMap; -import static org.elasticsearch.test.ESTestCase.randomNonNegativeInt; +import static org.elasticsearch.test.ESTestCase.randomNonNegativeLong; public class ConnectorSyncJobTestUtils { @@ -51,11 +51,11 @@ public static ConnectorSyncJob getRandomConnectorSyncJob() { .setCompletedAt(randomFrom(new Instant[] { null, randomInstantBetween(lowerBoundInstant, upperBoundInstant) })) .setConnector(ConnectorTestUtils.getRandomSyncJobConnectorInfo()) .setCreatedAt(randomInstantBetween(lowerBoundInstant, upperBoundInstant)) - .setDeletedDocumentCount(randomNonNegativeInt()) + .setDeletedDocumentCount(randomLong()) .setError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setId(randomAlphaOfLength(10)) - .setIndexedDocumentCount(randomNonNegativeInt()) - .setIndexedDocumentVolume(randomNonNegativeInt()) + .setIndexedDocumentCount(randomLong()) + .setIndexedDocumentVolume(randomLong()) .setJobType(getRandomConnectorJobType()) .setLastSeen(randomFrom(new Instant[] { null, randomInstantBetween(lowerBoundInstant, upperBoundInstant) })) .setMetadata( @@ -67,7 +67,7 @@ public static ConnectorSyncJob getRandomConnectorSyncJob() { ) .setStartedAt(randomFrom(new Instant[] { null, randomInstantBetween(lowerBoundInstant, upperBoundInstant) })) .setStatus(ConnectorTestUtils.getRandomSyncStatus()) - .setTotalDocumentCount(randomNonNegativeInt()) + .setTotalDocumentCount(randomLong()) .setTriggerMethod(getRandomConnectorSyncJobTriggerMethod()) .setWorkerHostname(randomAlphaOfLength(10)) .build(); @@ -156,10 +156,10 @@ public static UpdateConnectorSyncJobIngestionStatsAction.Request getRandomUpdate return new UpdateConnectorSyncJobIngestionStatsAction.Request( randomAlphaOfLength(10), - (long) randomNonNegativeInt(), - (long) randomNonNegativeInt(), - (long) randomNonNegativeInt(), - (long) randomNonNegativeInt(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), randomInstantBetween(lowerBoundInstant, upperBoundInstant), randomMap(2, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4))) ); @@ -173,10 +173,10 @@ public static UpdateConnectorSyncJobIngestionStatsAction.Request getRandomUpdate return new UpdateConnectorSyncJobIngestionStatsAction.Request( syncJobId, - (long) randomNonNegativeInt(), - (long) randomNonNegativeInt(), - (long) 
randomNonNegativeInt(), - (long) randomNonNegativeInt(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), randomInstantBetween(lowerBoundInstant, upperBoundInstant), randomMap(2, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4))) ); From d91d51600e0e35e24087f6fcf793ec0efd8907d5 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Tue, 28 Jan 2025 08:54:33 +0100 Subject: [PATCH 097/383] ESQL - Add Match function options (#120360) --- docs/changelog/120360.yaml | 5 + docs/reference/esql/esql-syntax.asciidoc | 44 + .../esql/functions/description/match.asciidoc | 2 +- .../esql/functions/examples/match.asciidoc | 10 +- .../functionNamedParams/match.asciidoc | 20 + .../functions/kibana/definition/match.json | 208 ++- .../esql/functions/kibana/docs/match.md | 3 + .../esql/functions/layout/match.asciidoc | 1 + .../esql/functions/parameters/match.asciidoc | 3 + .../esql/functions/signature/match.svg | 2 +- .../esql/functions/types/match.asciidoc | 60 +- .../src/main/resources/kql-function.csv-spec | 5 +- .../src/main/resources/map-functions.csv-spec | 122 -- .../main/resources/match-function.csv-spec | 49 +- .../main/resources/match-operator.csv-spec | 5 +- .../src/main/resources/qstr-function.csv-spec | 5 +- .../esql/src/main/antlr/EsqlBaseLexer.g4 | 4 +- .../esql/src/main/antlr/EsqlBaseLexer.tokens | 2 + .../esql/src/main/antlr/EsqlBaseParser.g4 | 2 +- .../esql/src/main/antlr/EsqlBaseParser.tokens | 2 + .../scalar/map/LogWithBaseInMapEvaluator.java | 139 -- .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../esql/expression/ExpressionWritables.java | 2 - .../function/EsqlFunctionRegistry.java | 12 +- .../esql/expression/function/MapParam.java | 2 + .../AbstractMatchFullTextFunction.java | 219 --- .../function/fulltext/FullTextFunction.java | 54 +- .../function/fulltext/FullTextWritables.java | 3 - .../expression/function/fulltext/Match.java | 435 +++++- .../function/fulltext/MatchOperator.java | 36 +- .../expression/function/fulltext/Term.java | 8 +- .../function/scalar/map/LogWithBaseInMap.java | 221 --- .../fulltext/MatchQueryPredicate.java | 74 - .../xpack/esql/parser/EsqlBaseLexer.interp | 6 +- .../xpack/esql/parser/EsqlBaseLexer.java | 889 ++++++------ .../xpack/esql/parser/EsqlBaseParser.interp | 6 +- .../xpack/esql/parser/EsqlBaseParser.java | 1212 ++++++++--------- .../xpack/esql/querydsl/query/MatchQuery.java | 59 +- .../xpack/esql/analysis/AnalyzerTests.java | 48 +- .../xpack/esql/analysis/VerifierTests.java | 100 +- .../function/AbstractFunctionTestCase.java | 48 +- .../AbstractMatchFullTextFunctionTests.java | 9 +- .../function/fulltext/MatchErrorTests.java | 23 +- .../fulltext/MatchOperatorErrorTests.java | 77 ++ .../function/fulltext/MatchOperatorTests.java | 11 +- .../function/fulltext/MatchTests.java | 53 +- .../function/fulltext/TermTests.java | 2 +- .../LogWithBaseInMapSerializationTests.java | 38 - .../MatchQuerySerializationTests.java | 33 - .../LocalPhysicalPlanOptimizerTests.java | 32 + .../optimizer/LogicalPlanOptimizerTests.java | 28 +- .../esql/parser/StatementParserTests.java | 50 +- .../esql/querydsl/query/MatchQueryTests.java | 18 +- .../rest-api-spec/test/esql/60_usage.yml | 5 +- 54 files changed, 2256 insertions(+), 2257 deletions(-) create mode 100644 docs/changelog/120360.yaml create mode 100644 docs/reference/esql/functions/functionNamedParams/match.asciidoc delete mode 100644 
x-pack/plugin/esql/qa/testFixtures/src/main/resources/map-functions.csv-spec delete mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMapEvaluator.java delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunction.java delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMap.java delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/fulltext/MatchQueryPredicate.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorErrorTests.java delete mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogWithBaseInMapSerializationTests.java delete mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/fulltext/MatchQuerySerializationTests.java diff --git a/docs/changelog/120360.yaml b/docs/changelog/120360.yaml new file mode 100644 index 0000000000000..d590cbf8f36c6 --- /dev/null +++ b/docs/changelog/120360.yaml @@ -0,0 +1,5 @@ +pr: 120360 +summary: ESQL - Add Match function options +area: ES|QL +type: feature +issues: [] diff --git a/docs/reference/esql/esql-syntax.asciidoc b/docs/reference/esql/esql-syntax.asciidoc index ba1c4ca820381..4a6a83f7095aa 100644 --- a/docs/reference/esql/esql-syntax.asciidoc +++ b/docs/reference/esql/esql-syntax.asciidoc @@ -169,3 +169,47 @@ Timespan literals are not whitespace sensitive. These expressions are all valid: * `1 day` * `1 day` +[discrete] +[[esql-function-named-params]] +==== Function named parameters + +Some functions like <> use named parameters to provide additional options. + +Named parameters allow specifying name value pairs, using the following syntax: + +`{"option_name": option_value, "another_option_name": another_value}` + +Valid value types are strings, numbers and booleans. + +An example using <>: + +[source,console] +---- +POST /_query +{ +"query": """ +FROM library +| WHERE match(author, "Frank Herbert", {"minimum_should_match": 2, "operator": "AND"}) +| LIMIT 5 +""" +} +---- +// TEST[setup:library] + +You can also use <> in function named parameters: + +[source,console] +---- +POST /_query +{ +"query": """ +FROM library +| EVAL year = DATE_EXTRACT("year", release_date) +| WHERE page_count > ? AND match(author, ?, {"minimum_should_match": ?}) +| LIMIT 5 +""", +"params": [300, "Frank Herbert", 2] +} +---- +// TEST[setup:library] + diff --git a/docs/reference/esql/functions/description/match.asciidoc b/docs/reference/esql/functions/description/match.asciidoc index 0724f0f108e3c..2375fcc3b4521 100644 --- a/docs/reference/esql/functions/description/match.asciidoc +++ b/docs/reference/esql/functions/description/match.asciidoc @@ -2,4 +2,4 @@ *Description* -Use `MATCH` to perform a <> on the specified field. Using `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL. Match can be used on fields from the text family like <> and <>, as well as other field types like keyword, boolean, dates, and numeric types. For a simplified syntax, you can use the <> `:` operator instead of `MATCH`. `MATCH` returns true if the provided query matches the row. +Use `MATCH` to perform a <> on the specified field. Using `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL. 
Match can be used on fields from the text family like <> and <>, as well as other field types like keyword, boolean, dates, and numeric types. Match can use <> to specify additional options for the match query. All <> are supported. For a simplified syntax, you can use the <> `:` operator instead of `MATCH`. `MATCH` returns true if the provided query matches the row.
diff --git a/docs/reference/esql/functions/examples/match.asciidoc b/docs/reference/esql/functions/examples/match.asciidoc
index 3f31d68ea9abb..afb77c388c830 100644
--- a/docs/reference/esql/functions/examples/match.asciidoc
+++ b/docs/reference/esql/functions/examples/match.asciidoc
@@ -1,6 +1,6 @@
 // This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
-*Example*
+*Examples*
 [source.merge.styled,esql]
 ----
@@ -10,4 +10,12 @@ include::{esql-specs}/match-function.csv-spec[tag=match-with-field]
 |===
 include::{esql-specs}/match-function.csv-spec[tag=match-with-field-result]
 |===
+[source.merge.styled,esql]
+----
+include::{esql-specs}/match-function.csv-spec[tag=match-with-named-function-params]
+----
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+include::{esql-specs}/match-function.csv-spec[tag=match-with-named-function-params-result]
+|===
diff --git a/docs/reference/esql/functions/functionNamedParams/match.asciidoc b/docs/reference/esql/functions/functionNamedParams/match.asciidoc
new file mode 100644
index 0000000000000..924a4be0efb0e
--- /dev/null
+++ b/docs/reference/esql/functions/functionNamedParams/match.asciidoc
@@ -0,0 +1,20 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+
+*Supported function named parameters*
+
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+name | types | description
+fuzziness | [keyword] | Maximum edit distance allowed for matching.
+auto_generate_synonyms_phrase_query | [boolean] | If true, match phrase queries are automatically created for multi-term synonyms.
+analyzer | [keyword] | Analyzer used to convert the text in the query value into tokens.
+minimum_should_match | [integer] | Minimum number of clauses that must match for a document to be returned.
+zero_terms_query | [keyword] | Indicates whether no documents are returned if the analyzer removes all tokens, such as when using a stop filter.
+boost | [float] | Floating point number used to decrease or increase the relevance scores of the query.
+fuzzy_transpositions | [boolean] | If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).
+fuzzy_rewrite | [keyword] | Method used to rewrite the query. See the rewrite parameter for valid values and more information.
+prefix_length | [integer] | Number of beginning characters left unchanged for fuzzy matching.
+lenient | [boolean] | If false, format-based errors, such as providing a text query value for a numeric field, are returned.
+operator | [keyword] | Boolean logic used to interpret text in the query value.
+max_expansions | [integer] | Maximum number of terms to which the query will expand.
+|===
diff --git a/docs/reference/esql/functions/kibana/definition/match.json b/docs/reference/esql/functions/kibana/definition/match.json
index eb206cb9ddf4d..23a81ba34e387 100644
--- a/docs/reference/esql/functions/kibana/definition/match.json
+++ b/docs/reference/esql/functions/kibana/definition/match.json
@@ -2,7 +2,7 @@
 "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it.
See ../README.md for how to regenerate it.", "type" : "eval", "name" : "match", - "description" : "Use `MATCH` to perform a <> on the specified field.\nUsing `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL.\n\nMatch can be used on fields from the text family like <> and <>,\nas well as other field types like keyword, boolean, dates, and numeric types.\n\nFor a simplified syntax, you can use the <> `:` operator instead of `MATCH`.\n\n`MATCH` returns true if the provided query matches the row.", + "description" : "Use `MATCH` to perform a <> on the specified field.\nUsing `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL.\n\nMatch can be used on fields from the text family like <> and <>,\nas well as other field types like keyword, boolean, dates, and numeric types.\n\nMatch can use <> to specify additional options for the match query.\nAll <> are supported.\n\nFor a simplified syntax, you can use the <> `:` operator instead of `MATCH`.\n\n`MATCH` returns true if the provided query matches the row.", "signatures" : [ { "params" : [ @@ -17,6 +17,13 @@ "type" : "boolean", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -35,6 +42,13 @@ "type" : "keyword", "optional" : false, "description" : "Value to find in the provided field." 
+ }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -53,6 +67,13 @@ "type" : "date", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -71,6 +92,13 @@ "type" : "keyword", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -89,6 +117,13 @@ "type" : "date_nanos", "optional" : false, "description" : "Value to find in the provided field." 
+ }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -107,6 +142,13 @@ "type" : "keyword", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -125,6 +167,13 @@ "type" : "double", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -143,6 +192,13 @@ "type" : "integer", "optional" : false, "description" : "Value to find in the provided field." 
+ }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -161,6 +217,13 @@ "type" : "keyword", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -179,6 +242,13 @@ "type" : "long", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -197,6 +267,13 @@ "type" : "double", "optional" : false, "description" : "Value to find in the provided field." 
+ }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -215,6 +292,13 @@ "type" : "integer", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -233,6 +317,13 @@ "type" : "keyword", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -251,6 +342,13 @@ "type" : "long", "optional" : false, "description" : "Value to find in the provided field." 
+ }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -269,6 +367,13 @@ "type" : "ip", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -287,6 +392,13 @@ "type" : "keyword", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -305,6 +417,13 @@ "type" : "keyword", "optional" : false, "description" : "Value to find in the provided field." 
+ }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -323,6 +442,13 @@ "type" : "double", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -341,6 +467,13 @@ "type" : "integer", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -359,6 +492,13 @@ "type" : "keyword", "optional" : false, "description" : "Value to find in the provided field." 
+ }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -377,6 +517,13 @@ "type" : "long", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -395,6 +542,13 @@ "type" : "keyword", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -413,6 +567,13 @@ "type" : "double", "optional" : false, "description" : "Value to find in the provided field." 
+ }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -431,6 +592,13 @@ "type" : "integer", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -449,6 +617,13 @@ "type" : "keyword", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -467,6 +642,13 @@ "type" : "long", "optional" : false, "description" : "Value to find in the provided field." 
+ }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -485,6 +667,13 @@ "type" : "unsigned_long", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. 
See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -503,6 +692,13 @@ "type" : "keyword", "optional" : false, "description" : "Value to find in the provided field." + }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -521,6 +717,13 @@ "type" : "version", "optional" : false, "description" : "Value to find in the provided field." 
+ }, + { + "name" : "options", + "type" : "function named parameters", + "mapParams" : "{name='fuzziness', values=[AUTO, 1, 2], description='Maximum edit distance allowed for matching.'}, {name='auto_generate_synonyms_phrase_query', values=[true, false], description='If true, match phrase queries are automatically created for multi-term synonyms.'}, {name='analyzer', values=[standard], description='Analyzer used to convert the text in the query value into token.'}, {name='minimum_should_match', values=[2], description='Minimum number of clauses that must match for a document to be returned.'}, {name='zero_terms_query', values=[none, all], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='boost', values=[2.5], description='Floating point number used to decrease or increase the relevance scores of the query.'}, {name='fuzzy_transpositions', values=[true, false], description='If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba).'}, {name='fuzzy_rewrite', values=[constant_score_blended, constant_score, constant_score_boolean, top_terms_blended_freqs_N, top_terms_boost_N, top_terms_N], description='Method used to rewrite the query. See the rewrite parameter for valid values and more information.'}, {name='prefix_length', values=[1], description='Number of beginning characters left unchanged for fuzzy matching.'}, {name='lenient', values=[true, false], description='If false, format-based errors, such as providing a text query value for a numeric field, are returned.'}, {name='operator', values=[AND, OR], description='Boolean logic used to interpret text in the query value.'}, {name='max_expansions', values=[50], description='Maximum number of terms to which the query will expand.'}", + "optional" : true, + "description" : "Match additional options as <>. See <> for more information." } ], "variadic" : false, @@ -528,7 +731,8 @@ } ], "examples" : [ - "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;" + "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;", + "FROM books \n| WHERE MATCH(title, \"Hobbit Back Again\", {\"operator\": \"AND\"})\n| KEEP title;" ], "preview" : true, "snapshot_only" : false diff --git a/docs/reference/esql/functions/kibana/docs/match.md b/docs/reference/esql/functions/kibana/docs/match.md index 80bf84351c188..6526d9e84168e 100644 --- a/docs/reference/esql/functions/kibana/docs/match.md +++ b/docs/reference/esql/functions/kibana/docs/match.md @@ -9,6 +9,9 @@ Using `MATCH` is equivalent to using the `match` query in the Elasticsearch Quer Match can be used on fields from the text family like <> and <>, as well as other field types like keyword, boolean, dates, and numeric types. +Match can use <> to specify additional options for the match query. +All <> are supported. + For a simplified syntax, you can use the <> `:` operator instead of `MATCH`. `MATCH` returns true if the provided query matches the row. 
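For example, the third argument carries these options as a map of named parameters. The queries below are an illustrative sketch only, written against the `books`/`title` test data set that the csv-spec files in this patch already use; `{"operator": "AND"}` requires every term in the query value to match, and `{"fuzziness": 1}` tolerates a one-character edit:

    FROM books
    | WHERE MATCH(title, "Hobbit Back Again", {"operator": "AND"})
    | KEEP title

    FROM books
    | WHERE MATCH(title, "Pings", {"fuzziness": 1})
    | KEEP book_no

The map is optional; without it, MATCH behaves exactly as before.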
diff --git a/docs/reference/esql/functions/layout/match.asciidoc b/docs/reference/esql/functions/layout/match.asciidoc index e62c81548c2b1..7765cc707e390 100644 --- a/docs/reference/esql/functions/layout/match.asciidoc +++ b/docs/reference/esql/functions/layout/match.asciidoc @@ -14,4 +14,5 @@ image::esql/functions/signature/match.svg[Embedded,opts=inline] include::../parameters/match.asciidoc[] include::../description/match.asciidoc[] include::../types/match.asciidoc[] +include::../functionNamedParams/match.asciidoc[] include::../examples/match.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/match.asciidoc b/docs/reference/esql/functions/parameters/match.asciidoc index 46f6acad9e128..5ded9745025ab 100644 --- a/docs/reference/esql/functions/parameters/match.asciidoc +++ b/docs/reference/esql/functions/parameters/match.asciidoc @@ -7,3 +7,6 @@ Field that the query will target. `query`:: Value to find in the provided field. + +`options`:: +(Optional) Match additional options as <>. See <> for more information. diff --git a/docs/reference/esql/functions/signature/match.svg b/docs/reference/esql/functions/signature/match.svg index 14ddb87468e70..2f05a3802fd80 100644 --- a/docs/reference/esql/functions/signature/match.svg +++ b/docs/reference/esql/functions/signature/match.svg @@ -1 +1 @@ -MATCH(field,query) \ No newline at end of file +MATCH(field,query,options) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/match.asciidoc b/docs/reference/esql/functions/types/match.asciidoc index 402277af44749..9b6ad09142c34 100644 --- a/docs/reference/esql/functions/types/match.asciidoc +++ b/docs/reference/esql/functions/types/match.asciidoc @@ -4,34 +4,34 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -field | query | result -boolean | boolean | boolean -boolean | keyword | boolean -date | date | boolean -date | keyword | boolean -date_nanos | date_nanos | boolean -date_nanos | keyword | boolean -double | double | boolean -double | integer | boolean -double | keyword | boolean -double | long | boolean -integer | double | boolean -integer | integer | boolean -integer | keyword | boolean -integer | long | boolean -ip | ip | boolean -ip | keyword | boolean -keyword | keyword | boolean -long | double | boolean -long | integer | boolean -long | keyword | boolean -long | long | boolean -text | keyword | boolean -unsigned_long | double | boolean -unsigned_long | integer | boolean -unsigned_long | keyword | boolean -unsigned_long | long | boolean -unsigned_long | unsigned_long | boolean -version | keyword | boolean -version | version | boolean +field | query | options | result +boolean | boolean | named parameters | boolean +boolean | keyword | named parameters | boolean +date | date | named parameters | boolean +date | keyword | named parameters | boolean +date_nanos | date_nanos | named parameters | boolean +date_nanos | keyword | named parameters | boolean +double | double | named parameters | boolean +double | integer | named parameters | boolean +double | keyword | named parameters | boolean +double | long | named parameters | boolean +integer | double | named parameters | boolean +integer | integer | named parameters | boolean +integer | keyword | named parameters | boolean +integer | long | named parameters | boolean +ip | ip | named parameters | boolean +ip | keyword | named parameters | boolean +keyword | keyword | named parameters | boolean +long | double | named parameters | boolean +long | integer | named parameters | boolean +long | keyword | 
named parameters | boolean +long | long | named parameters | boolean +text | keyword | named parameters | boolean +unsigned_long | double | named parameters | boolean +unsigned_long | integer | named parameters | boolean +unsigned_long | keyword | named parameters | boolean +unsigned_long | long | named parameters | boolean +unsigned_long | unsigned_long | named parameters | boolean +version | keyword | named parameters | boolean +version | version | named parameters | boolean |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/kql-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/kql-function.csv-spec index 02be58efac774..f9dfbc8634c6f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/kql-function.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/kql-function.csv-spec @@ -10,8 +10,9 @@ FROM books | WHERE KQL("author: Faulkner") | KEEP book_no, author | SORT book_no -| LIMIT 5; +| LIMIT 5 // end::kql-with-field[] +; // tag::kql-with-field-result[] book_no:keyword | author:text @@ -20,8 +21,8 @@ book_no:keyword | author:text 2847 | Colleen Faulkner 2883 | William Faulkner 3293 | Danny Faulkner -; // end::kql-with-field-result[] +; kqlWithMultipleFields required_capability: kql_function diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/map-functions.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/map-functions.csv-spec deleted file mode 100644 index 37e493c55835a..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/map-functions.csv-spec +++ /dev/null @@ -1,122 +0,0 @@ -// Tests to validate maps as inputs to functions, these functions are under snapshot only - -logWithBaseInMapEval -required_capability: optional_named_argument_map_for_function -ROW value = 8.0 -| EVAL l = log_with_base_in_map(value, {"base":2.0}) -; - -value: double |l:double -8.0 |3.0 -; - -logWithOptionalMapMissingEval -required_capability: optional_named_argument_map_for_function -ROW value = 8.0 -| EVAL l = round(log_with_base_in_map(value)) -; - -value: double |l:double -8.0 |2.0 -; - - -logWithBaseInMapEvalIndex -required_capability: optional_named_argument_map_for_function -FROM employees -| WHERE emp_no IN (10001, 10003) -| EVAL l = log_with_base_in_map(languages, {"base":2.0}) -| KEEP emp_no, languages, l -| SORT emp_no -; - -emp_no:integer |languages:integer |l:double -10001 |2 |1.0 -10003 |4 |2.0 -; - -logWithOptionalMapMissingEvalIndex -required_capability: optional_named_argument_map_for_function -FROM employees -| WHERE emp_no IN (10001, 10003) -| EVAL l = round(log_with_base_in_map(languages)) -| KEEP emp_no, languages, l -| SORT emp_no -; - -emp_no:integer |languages:integer |l:double -10001 |2 |1.0 -10003 |4 |1.0 -; - -logWithBaseInMapWhereTrueIndex -required_capability: optional_named_argument_map_for_function -FROM employees -| WHERE emp_no IN (10001, 10003) AND log_with_base_in_map(languages, {"base":2.0}) > 1 -| KEEP emp_no, languages -| SORT emp_no -; - -emp_no:integer |languages:integer -10003 |4 -; - -logWithOptionalMapMissingWhereTrueIndex -required_capability: optional_named_argument_map_for_function -FROM employees -| WHERE emp_no IN (10001, 10003) AND log_with_base_in_map(languages) > 1 -| KEEP emp_no, languages -| SORT emp_no -; - -emp_no:integer |languages:integer -10003 |4 -; - -logWithBaseInMapWhereFalseIndex -required_capability: optional_named_argument_map_for_function -FROM employees -| WHERE emp_no IN (10001, 10003) AND log_with_base_in_map(languages, 
{"base":2.0}) < 0 -| KEEP emp_no, languages -| SORT emp_no -; - -emp_no:integer |languages:integer -; - -logWithOptionalMapMissingWhereFalseIndex -required_capability: optional_named_argument_map_for_function -FROM employees -| WHERE emp_no IN (10001, 10003) AND log_with_base_in_map(languages) < 0 -| KEEP emp_no, languages -| SORT emp_no -; - -emp_no:integer |languages:integer -; - -logWithBaseInMapSortIndex -required_capability: optional_named_argument_map_for_function -FROM employees -| WHERE emp_no IN (10001, 10003) -| SORT log_with_base_in_map(languages, {"base":2.0}) desc -| KEEP emp_no -; - -emp_no:integer -10003 -10001 -; - -logWithOptionalMapMissingSortIndex -required_capability: optional_named_argument_map_for_function -FROM employees -| WHERE emp_no IN (10001, 10003) -| SORT log_with_base_in_map(languages) desc -| KEEP emp_no -; - -emp_no:integer -10003 -10001 -; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec index 8a576e841a32e..39af991a9fc41 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec @@ -10,8 +10,9 @@ FROM books | WHERE MATCH(author, "Faulkner") | KEEP book_no, author | SORT book_no -| LIMIT 5; +| LIMIT 5 // end::match-with-field[] +; // tag::match-with-field-result[] book_no:keyword | author:text @@ -20,8 +21,8 @@ book_no:keyword | author:text 2847 | Colleen Faulkner 2883 | William Faulkner 3293 | Danny Faulkner -; // end::match-with-field-result[] +; matchWithMultipleFunctions required_capability: match_function @@ -673,3 +674,47 @@ from semantic_text host:keyword | semantic_text_field:text "host1" | live long and prosper ; + +testMatchWithOptionsFuzziness +required_capability: match_function +required_capability: match_function_options + +from books +| where match(title, "Pings", {"fuzziness": 1}) +| keep book_no; +ignoreOrder:true + +book_no:keyword +2714 +2675 +4023 +7140 +; + +testMatchWithOptionsOperator +required_capability: match_function +required_capability: match_function_options + +// tag::match-with-named-function-params[] +FROM books +| WHERE MATCH(title, "Hobbit Back Again", {"operator": "AND"}) +| KEEP title; +// end::match-with-named-function-params[] + +// tag::match-with-named-function-params-result[] +title:text +The Hobbit or There and Back Again +// end::match-with-named-function-params-result[] +; + +testMatchWithOptionsMinimumShouldMatch +required_capability: match_function +required_capability: match_function_options + +from books +| where match(title, "Hobbit Back Again", {"minimum_should_match": 2}) +| keep title; + +title:text +The Hobbit or There and Back Again +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec index 6ccf0ea734175..e0559a9bfe011 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec @@ -10,8 +10,9 @@ FROM books | WHERE author:"Faulkner" | KEEP book_no, author | SORT book_no -| LIMIT 5; +| LIMIT 5 // end::match-with-field[] +; // tag::match-with-field-result[] book_no:keyword | author:text @@ -20,8 +21,8 @@ book_no:keyword | author:text 2847 | Colleen Faulkner 2883 | William Faulkner 3293 | Danny Faulkner -; // end::match-with-field-result[] 
+; matchWithMultipleFunctions required_capability: match_operator_colon diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec index 2c84bdae6b32e..d2812a861da22 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec @@ -10,8 +10,9 @@ FROM books | WHERE QSTR("author: Faulkner") | KEEP book_no, author | SORT book_no -| LIMIT 5; +| LIMIT 5 // end::qstr-with-field[] +; // tag::qstr-with-field-result[] book_no:keyword | author:text @@ -20,8 +21,8 @@ book_no:keyword | author:text 2847 | Colleen Faulkner 2883 | William Faulkner 3293 | Danny Faulkner -; // end::qstr-with-field-result[] +; qstrWithMultipleFields required_capability: qstr_function diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 12d990550f0f9..5b731b5dac9d2 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -216,8 +216,8 @@ ASTERISK : '*'; SLASH : '/'; PERCENT : '%'; -LEFT_BRACES : {this.isDevVersion()}? '{'; -RIGHT_BRACES : {this.isDevVersion()}? '}'; +LEFT_BRACES : '{'; +RIGHT_BRACES : '}'; NESTED_WHERE : WHERE -> type(WHERE); diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 366b455f16402..218884913960f 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -181,6 +181,8 @@ CLOSING_METRICS_WS=130 '*'=66 '/'=67 '%'=68 +'{'=69 +'}'=70 ']'=73 'metadata'=82 'as'=91 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index e72c0fdafd73c..c66da879a5709 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -110,7 +110,7 @@ functionName ; mapExpression - : {this.isDevVersion()}? LEFT_BRACES entryExpression (COMMA entryExpression)* RIGHT_BRACES + : LEFT_BRACES entryExpression (COMMA entryExpression)* RIGHT_BRACES ; entryExpression diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 366b455f16402..218884913960f 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -181,6 +181,8 @@ CLOSING_METRICS_WS=130 '*'=66 '/'=67 '%'=68 +'{'=69 +'}'=70 ']'=73 'metadata'=82 'as'=91 diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMapEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMapEvaluator.java deleted file mode 100644 index 11c28c2a1f692..0000000000000 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMapEvaluator.java +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. 
-package org.elasticsearch.xpack.esql.expression.function.scalar.map; - -import java.lang.ArithmeticException; -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.compute.operator.Warnings; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LogWithBaseInMap}. - * This class is generated. Do not edit it. - */ -public final class LogWithBaseInMapEvaluator implements EvalOperator.ExpressionEvaluator { - private final Source source; - - private final EvalOperator.ExpressionEvaluator value; - - private final double base; - - private final DriverContext driverContext; - - private Warnings warnings; - - public LogWithBaseInMapEvaluator(Source source, EvalOperator.ExpressionEvaluator value, - double base, DriverContext driverContext) { - this.source = source; - this.value = value; - this.base = base; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - try (DoubleBlock valueBlock = (DoubleBlock) value.eval(page)) { - DoubleVector valueVector = valueBlock.asVector(); - if (valueVector == null) { - return eval(page.getPositionCount(), valueBlock); - } - return eval(page.getPositionCount(), valueVector); - } - } - - public DoubleBlock eval(int positionCount, DoubleBlock valueBlock) { - try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - if (valueBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (valueBlock.getValueCount(p) != 1) { - if (valueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendDouble(LogWithBaseInMap.process(valueBlock.getDouble(valueBlock.getFirstValueIndex(p)), this.base)); - } catch (ArithmeticException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public DoubleBlock eval(int positionCount, DoubleVector valueVector) { - try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendDouble(LogWithBaseInMap.process(valueVector.getDouble(p), this.base)); - } catch (ArithmeticException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "LogWithBaseInMapEvaluator[" + "value=" + value + ", base=" + base + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(value); - } - - private Warnings warnings() { - if (warnings == null) { - this.warnings = Warnings.createWarnings( - driverContext.warningsMode(), - source.source().getLineNumber(), - source.source().getColumnNumber(), - source.text() - ); - } - return warnings; - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final 
EvalOperator.ExpressionEvaluator.Factory value; - - private final double base; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value, double base) { - this.source = source; - this.value = value; - this.base = base; - } - - @Override - public LogWithBaseInMapEvaluator get(DriverContext context) { - return new LogWithBaseInMapEvaluator(source, value.get(context), base, context); - } - - @Override - public String toString() { - return "LogWithBaseInMapEvaluator[" + "value=" + value + ", base=" + base + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index ef46d71ac1de1..0179027ea7fd6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -759,7 +759,12 @@ public enum Cap { /** * Disabled support for index aliases in lookup joins */ - LOOKUP_JOIN_NO_ALIASES(JOIN_LOOKUP_V12.isEnabled()); + LOOKUP_JOIN_NO_ALIASES(JOIN_LOOKUP_V12.isEnabled()), + + /** + * Support match options in match function + */ + MATCH_FUNCTION_OPTIONS; private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java index a5ced6645196d..dba0ec799f312 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java @@ -31,7 +31,6 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; -import org.elasticsearch.xpack.esql.expression.function.scalar.map.LogWithBaseInMap; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Acos; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; @@ -162,7 +161,6 @@ public static List unaryScalars() { entries.add(IsNull.ENTRY); entries.add(Length.ENTRY); entries.add(Log10.ENTRY); - entries.add(LogWithBaseInMap.ENTRY); entries.add(LTrim.ENTRY); entries.add(Neg.ENTRY); entries.add(Not.ENTRY); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 9c469d55c21c4..a614a473ebe41 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -70,7 +70,6 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.IpPrefix; -import org.elasticsearch.xpack.esql.expression.function.scalar.map.LogWithBaseInMap; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; import 
org.elasticsearch.xpack.esql.expression.function.scalar.math.Acos; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; @@ -434,7 +433,7 @@ private static FunctionDefinition[][] functions() { // fulltext functions new FunctionDefinition[] { def(Kql.class, uni(Kql::new), "kql"), - def(Match.class, bi(Match::new), "match"), + def(Match.class, tri(Match::new), "match"), def(QueryString.class, uni(QueryString::new), "qstr") } }; } @@ -445,9 +444,6 @@ private static FunctionDefinition[][] snapshotFunctions() { // The delay() function is for debug/snapshot environments only and should never be enabled in a non-snapshot build. // This is an experimental function and can be removed without notice. def(Delay.class, Delay::new, "delay"), - // log_with_base_in_map is for debug/snapshot environments only - // and should never be enabled in a non-snapshot build. They are for the purpose of testing MapExpression only. - def(LogWithBaseInMap.class, LogWithBaseInMap::new, "log_with_base_in_map"), def(Rate.class, Rate::withUnresolvedTimestamp, "rate"), def(Term.class, bi(Term::new), "term") } }; } @@ -546,8 +542,8 @@ public String toString() { public static class MapArgSignature extends ArgSignature { private final Map mapParams; - public MapArgSignature(String description, boolean optional, Map mapParams) { - super("map", new String[] { "map" }, description, optional); + public MapArgSignature(String name, String description, boolean optional, Map mapParams) { + super(name, new String[] { "map" }, description, optional); this.mapParams = mapParams; } @@ -673,7 +669,7 @@ public static ArgSignature mapParam(MapParam mapParam) { MapEntryArgSignature mapArg = new MapEntryArgSignature(param.name(), valueHint, type, param.description()); params.put(param.name(), mapArg); } - return new EsqlFunctionRegistry.MapArgSignature(desc, mapParam.optional(), params); + return new EsqlFunctionRegistry.MapArgSignature(mapParam.name(), desc, mapParam.optional(), params); } public static ArgSignature paramWithoutAnnotation(String name) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/MapParam.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/MapParam.java index 87b6df1827680..34c31c58e8fba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/MapParam.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/MapParam.java @@ -24,6 +24,8 @@ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.PARAMETER) public @interface MapParam { + String name(); + MapParamEntry[] params() default {}; String description() default ""; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunction.java deleted file mode 100644 index 86f1f6e30108c..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunction.java +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.expression.function.fulltext; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; -import org.elasticsearch.xpack.esql.common.Failure; -import org.elasticsearch.xpack.esql.common.Failures; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.FoldContext; -import org.elasticsearch.xpack.esql.core.querydsl.query.Query; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; -import org.elasticsearch.xpack.esql.core.util.NumericUtils; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; -import org.elasticsearch.xpack.esql.planner.TranslatorHandler; -import org.elasticsearch.xpack.esql.querydsl.query.MatchQuery; -import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; - -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNull; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNullAndFoldable; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; -import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; -import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataType.IP; -import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataType.SEMANTIC_TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; -import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; -import static org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison.formatIncompatibleTypesMessage; - -/** - * This class contains the common functionalities between the match function ({@link Match}) and match operator ({@link MatchOperator}), - * so the two subclasses just contains the different code - */ -public abstract class AbstractMatchFullTextFunction extends FullTextFunction implements PostOptimizationVerificationAware { - public static final Set FIELD_DATA_TYPES = Set.of( - KEYWORD, - TEXT, - SEMANTIC_TEXT, - BOOLEAN, - DATETIME, - DATE_NANOS, - DOUBLE, - INTEGER, - IP, - LONG, - UNSIGNED_LONG, - VERSION - ); - public static final Set QUERY_DATA_TYPES = Set.of( - KEYWORD, - BOOLEAN, - DATETIME, - DATE_NANOS, - DOUBLE, - INTEGER, - IP, - LONG, - UNSIGNED_LONG, - VERSION - ); - protected final Expression field; - - protected 
AbstractMatchFullTextFunction( - Source source, - Expression query, - List children, - QueryBuilder queryBuilder, - Expression field - ) { - super(source, query, children, queryBuilder); - this.field = field; - } - - public Expression field() { - return field; - } - - @Override - protected TypeResolution resolveNonQueryParamTypes() { - return isNotNull(field, sourceText(), FIRST).and( - isType( - field, - FIELD_DATA_TYPES::contains, - sourceText(), - FIRST, - "keyword, text, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version" - ) - ); - } - - @Override - protected TypeResolution resolveQueryParamType() { - return isType( - query(), - QUERY_DATA_TYPES::contains, - sourceText(), - queryParamOrdinal(), - "keyword, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version" - ).and(isNotNullAndFoldable(query(), sourceText(), queryParamOrdinal())); - } - - @Override - protected TypeResolution checkParamCompatibility() { - DataType fieldType = field().dataType(); - DataType queryType = query().dataType(); - - // Field and query types should match. If the query is a string, then it can match any field type. - if ((fieldType == queryType) || (queryType == KEYWORD)) { - return TypeResolution.TYPE_RESOLVED; - } - - if (fieldType.isNumeric() && queryType.isNumeric()) { - // When doing an unsigned long query, field must be an unsigned long - if ((queryType == UNSIGNED_LONG && fieldType != UNSIGNED_LONG) == false) { - return TypeResolution.TYPE_RESOLVED; - } - } - - return new TypeResolution(formatIncompatibleTypesMessage(fieldType, queryType, sourceText())); - } - - @Override - public void postOptimizationVerification(Failures failures) { - Expression fieldExpression = field(); - // Field may be converted to other data type (field_name :: data_type), so we need to check the original field - if (fieldExpression instanceof AbstractConvertFunction convertFunction) { - fieldExpression = convertFunction.field(); - } - if (fieldExpression instanceof FieldAttribute == false) { - failures.add( - Failure.fail( - field, - "[{}] {} cannot operate on [{}], which is not a field from an index mapping", - functionName(), - functionType(), - field.sourceText() - ) - ); - } - } - - @Override - public Object queryAsObject() { - Object queryAsObject = query().fold(FoldContext.small() /* TODO remove me */); - - // Convert BytesRef to string for string-based values - if (queryAsObject instanceof BytesRef bytesRef) { - return switch (query().dataType()) { - case IP -> EsqlDataTypeConverter.ipToString(bytesRef); - case VERSION -> EsqlDataTypeConverter.versionToString(bytesRef); - default -> bytesRef.utf8ToString(); - }; - } - - // Converts specific types to the correct type for the query - if (query().dataType() == DataType.UNSIGNED_LONG) { - return NumericUtils.unsignedLongAsBigInteger((Long) queryAsObject); - } else if (query().dataType() == DataType.DATETIME && queryAsObject instanceof Long) { - // When casting to date and datetime, we get a long back. 
But Match query needs a date string - return EsqlDataTypeConverter.dateTimeToString((Long) queryAsObject); - } else if (query().dataType() == DATE_NANOS && queryAsObject instanceof Long) { - return EsqlDataTypeConverter.nanoTimeToString((Long) queryAsObject); - } - - return queryAsObject; - } - - @Override - protected Query translate(TranslatorHandler handler) { - Expression fieldExpression = field; - // Field may be converted to other data type (field_name :: data_type), so we need to check the original field - if (fieldExpression instanceof AbstractConvertFunction convertFunction) { - fieldExpression = convertFunction.field(); - } - if (fieldExpression instanceof FieldAttribute fieldAttribute) { - String fieldName = fieldAttribute.name(); - if (fieldAttribute.field() instanceof MultiTypeEsField multiTypeEsField) { - // If we have multiple field types, we allow the query to be done, but getting the underlying field name - fieldName = multiTypeEsField.getName(); - } - // Make query lenient so mixed field types can be queried when a field type is incompatible with the value provided - return new MatchQuery(source(), fieldName, queryAsObject(), Map.of("lenient", "true")); - } - - throw new IllegalArgumentException("Match must have a field attribute as the first argument"); - } - - @Override - public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { - return new Match(source(), field, query(), queryBuilder); - } - - protected ParamOrdinal queryParamOrdinal() { - return SECOND; - } - -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index d2e2135a4bf83..687c3b1d23ec5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -72,34 +72,25 @@ protected final TypeResolution resolveType() { return new TypeResolution("Unresolved children"); } - return resolveNonQueryParamTypes().and(resolveQueryParamType().and(checkParamCompatibility())); + return resolveParams(); } /** - * Checks parameter specific compatibility, to be overriden by subclasses + * Resolves the type for the function parameters, as part of the type resolution for the function * - * @return TypeResolution for param compatibility + * @return type resolution for the function parameters */ - protected TypeResolution checkParamCompatibility() { - return TypeResolution.TYPE_RESOLVED; + protected TypeResolution resolveParams() { + return resolveQuery(DEFAULT); } /** * Resolves the type for the query parameter, as part of the type resolution for the function * - * @return type resolution for query parameter + * @return type resolution for the query parameter */ - protected TypeResolution resolveQueryParamType() { - return isString(query(), sourceText(), queryParamOrdinal()).and(isNotNullAndFoldable(query(), sourceText(), queryParamOrdinal())); - } - - /** - * Subclasses can override this method for custom type resolution for additional function parameters - * - * @return type resolution for non-query parameter types - */ - protected TypeResolution resolveNonQueryParamTypes() { - return TypeResolution.TYPE_RESOLVED; + protected TypeResolution resolveQuery(TypeResolutions.ParamOrdinal queryOrdinal) { + return isString(query(), sourceText(), 
queryOrdinal).and(isNotNullAndFoldable(query(), sourceText(), queryOrdinal)); } public Expression query() { @@ -120,15 +111,6 @@ public Object queryAsObject() { return queryAsObject; } - /** - * Returns the param ordinal for the query parameter so it can be used in error messages - * - * @return Query ordinal for the - */ - protected TypeResolutions.ParamOrdinal queryParamOrdinal() { - return DEFAULT; - } - @Override public Nullability nullable() { return Nullability.FALSE; @@ -283,26 +265,6 @@ private static boolean onlyFullTextFunctionsInExpression(Expression expression) return false; } - /** - * Checks whether an expression contains a full text function as part of it - * - * @param expression expression to check - * @return true if the expression or any of its children is a full text function, false otherwise - */ - private static boolean anyFullTextFunctionsInExpression(Expression expression) { - if (expression instanceof FullTextFunction) { - return true; - } - - for (Expression child : expression.children()) { - if (anyFullTextFunctionsInExpression(child)) { - return true; - } - } - - return false; - } - /** * Checks all commands that exist before a specific type satisfy conditions. * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java index f42433c22e775..5c0a3857d7783 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; -import org.elasticsearch.xpack.esql.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.esql.expression.predicate.fulltext.MultiMatchQueryPredicate; import java.util.ArrayList; @@ -21,11 +20,9 @@ public class FullTextWritables { public static List getNamedWriteables() { List entries = new ArrayList<>(); - entries.add(MatchQueryPredicate.ENTRY); entries.add(MultiMatchQueryPredicate.ENTRY); entries.add(QueryString.ENTRY); entries.add(Match.ENTRY); - entries.add(MatchOperator.ENTRY); entries.add(Kql.ENTRY); if (EsqlCapabilities.Cap.TERM_FUNCTION.isEnabled()) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java index ea5f3d9b83543..3223e96da7136 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java @@ -7,30 +7,136 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; +import org.elasticsearch.xpack.esql.common.Failure; +import 
org.elasticsearch.xpack.esql.common.Failures; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.expression.EntryExpression; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.MapExpression; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.type.DataTypeConverter; +import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; +import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.MapParam; +import org.elasticsearch.xpack.esql.expression.function.OptionalArgument; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; +import org.elasticsearch.xpack.esql.querydsl.query.MatchQuery; +import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.io.IOException; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +import static java.util.Map.entry; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.index.query.AbstractQueryBuilder.BOOST_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.ANALYZER_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.FUZZY_REWRITE_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.FUZZY_TRANSPOSITIONS_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.GENERATE_SYNONYMS_PHRASE_QUERY; +import static org.elasticsearch.index.query.MatchQueryBuilder.LENIENT_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.MAX_EXPANSIONS_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.MINIMUM_SHOULD_MATCH_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.OPERATOR_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.PREFIX_LENGTH_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.ZERO_TERMS_QUERY_FIELD; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.THIRD; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isFoldable; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isMapExpression; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNull; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNullAndFoldable; +import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.FLOAT; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.SEMANTIC_TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; +import static org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison.formatIncompatibleTypesMessage; /** * Full text function that performs a {@link org.elasticsearch.xpack.esql.querydsl.query.MatchQuery} . */ -public class Match extends AbstractMatchFullTextFunction { +public class Match extends FullTextFunction implements OptionalArgument, PostOptimizationVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Match", Match::readFrom); + public static final Set FIELD_DATA_TYPES = Set.of( + KEYWORD, + TEXT, + SEMANTIC_TEXT, + BOOLEAN, + DATETIME, + DATE_NANOS, + DOUBLE, + INTEGER, + IP, + LONG, + UNSIGNED_LONG, + VERSION + ); + public static final Set QUERY_DATA_TYPES = Set.of( + KEYWORD, + BOOLEAN, + DATETIME, + DATE_NANOS, + DOUBLE, + INTEGER, + IP, + LONG, + UNSIGNED_LONG, + VERSION + ); + + protected final Expression field; + + // Options for match function. They don't need to be serialized as the data nodes will retrieve them from the query builder + private final transient Expression options; - private transient Boolean isOperator; + public static final Map ALLOWED_OPTIONS = Map.ofEntries( + entry(ANALYZER_FIELD.getPreferredName(), KEYWORD), + entry(GENERATE_SYNONYMS_PHRASE_QUERY.getPreferredName(), BOOLEAN), + entry(Fuzziness.FIELD.getPreferredName(), KEYWORD), + entry(BOOST_FIELD.getPreferredName(), FLOAT), + entry(FUZZY_TRANSPOSITIONS_FIELD.getPreferredName(), BOOLEAN), + entry(FUZZY_REWRITE_FIELD.getPreferredName(), KEYWORD), + entry(LENIENT_FIELD.getPreferredName(), BOOLEAN), + entry(MAX_EXPANSIONS_FIELD.getPreferredName(), INTEGER), + entry(MINIMUM_SHOULD_MATCH_FIELD.getPreferredName(), KEYWORD), + entry(OPERATOR_FIELD.getPreferredName(), KEYWORD), + entry(PREFIX_LENGTH_FIELD.getPreferredName(), INTEGER), + entry(ZERO_TERMS_QUERY_FIELD.getPreferredName(), KEYWORD) + ); @FunctionInfo( returnType = "boolean", @@ -42,10 +148,15 @@ public class Match extends AbstractMatchFullTextFunction { Match can be used on fields from the text family like <> and <>, as well as other field types like keyword, boolean, dates, and numeric types. + Match can use <> to specify additional options for the match query. + All <> are supported. + For a simplified syntax, you can use the <> `:` operator instead of `MATCH`. 
`MATCH` returns true if the provided query matches the row.""", - examples = { @Example(file = "match-function", tag = "match-with-field") } + examples = { + @Example(file = "match-function", tag = "match-with-field"), + @Example(file = "match-function", tag = "match-with-named-function-params") } ) public Match( Source source, @@ -58,13 +169,105 @@ public Match( name = "query", type = { "keyword", "boolean", "date", "date_nanos", "double", "integer", "ip", "long", "unsigned_long", "version" }, description = "Value to find in the provided field." - ) Expression matchQuery + ) Expression matchQuery, + @MapParam( + name = "options", + params = { + @MapParam.MapParamEntry( + name = "analyzer", + type = "keyword", + valueHint = { "standard" }, + description = "Analyzer used to convert the text in the query value into tokens." + ), + @MapParam.MapParamEntry( + name = "auto_generate_synonyms_phrase_query", + type = "boolean", + valueHint = { "true", "false" }, + description = "If true, match phrase queries are automatically created for multi-term synonyms." + ), + @MapParam.MapParamEntry( + name = "fuzziness", + type = "keyword", + valueHint = { "AUTO", "1", "2" }, + description = "Maximum edit distance allowed for matching." + ), + @MapParam.MapParamEntry( + name = "boost", + type = "float", + valueHint = { "2.5" }, + description = "Floating point number used to decrease or increase the relevance scores of the query." + ), + @MapParam.MapParamEntry( + name = "fuzzy_transpositions", + type = "boolean", + valueHint = { "true", "false" }, + description = "If true, edits for fuzzy matching include transpositions of two adjacent characters (ab → ba)." + ), + @MapParam.MapParamEntry( + name = "fuzzy_rewrite", + type = "keyword", + valueHint = { + "constant_score_blended", + "constant_score", + "constant_score_boolean", + "top_terms_blended_freqs_N", + "top_terms_boost_N", + "top_terms_N" }, + description = "Method used to rewrite the query. See the rewrite parameter for valid values and more information." + ), + @MapParam.MapParamEntry( + name = "lenient", + type = "boolean", + valueHint = { "true", "false" }, + description = "If false, format-based errors, such as providing a text query value for a numeric field, are returned." + ), + @MapParam.MapParamEntry( + name = "max_expansions", + type = "integer", + valueHint = { "50" }, + description = "Maximum number of terms to which the query will expand." + ), + @MapParam.MapParamEntry( + name = "minimum_should_match", + type = "integer", + valueHint = { "2" }, + description = "Minimum number of clauses that must match for a document to be returned." + ), + @MapParam.MapParamEntry( + name = "operator", + type = "keyword", + valueHint = { "AND", "OR" }, + description = "Boolean logic used to interpret text in the query value." + ), + @MapParam.MapParamEntry( + name = "prefix_length", + type = "integer", + valueHint = { "1" }, + description = "Number of beginning characters left unchanged for fuzzy matching." + ), + @MapParam.MapParamEntry( + name = "zero_terms_query", + type = "keyword", + valueHint = { "none", "all" }, + description = "Indicates whether no documents are returned if the analyzer removes all tokens, such as when using a stop filter." + ) }, + description = "Match additional options as <>."
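Illustrative aside, not part of the patch: the named options above mirror the parameters of the existing Elasticsearch match query that ES|QL's MATCH is translated into. A minimal sketch of the same settings expressed directly on the server-side MatchQueryBuilder follows; the "message" field, the query text and every option value are made-up examples, and the ES|QL form with a trailing options map, e.g. MATCH(message, "connection error", {"operator": "AND", "fuzziness": "AUTO"}), is assumed syntax rather than taken from this diff.

    import org.elasticsearch.common.unit.Fuzziness;
    import org.elasticsearch.index.query.MatchQueryBuilder;
    import org.elasticsearch.index.query.Operator;

    public class MatchOptionsSketch {
        // Roughly the match query that the options above are expected to configure.
        // Field name, query text and option values are illustrative only.
        public static MatchQueryBuilder example() {
            return new MatchQueryBuilder("message", "connection error")
                .operator(Operator.AND)        // "operator"
                .fuzziness(Fuzziness.AUTO)     // "fuzziness"
                .prefixLength(1)               // "prefix_length"
                .maxExpansions(50)             // "max_expansions"
                .minimumShouldMatch("2")       // "minimum_should_match"
                .lenient(true)                 // "lenient" (the ES|QL function defaults this to true)
                .boost(2.5f);                  // "boost"
        }
    }
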
+ + " See <> for more information.", + optional = true + ) Expression options ) { - this(source, field, matchQuery, null); + this(source, field, matchQuery, options, null); } - public Match(Source source, Expression field, Expression matchQuery, QueryBuilder queryBuilder) { - super(source, matchQuery, List.of(field, matchQuery), queryBuilder, field); + public Match(Source source, Expression field, Expression matchQuery, Expression options, QueryBuilder queryBuilder) { + super(source, matchQuery, options == null ? List.of(field, matchQuery) : List.of(field, matchQuery, options), queryBuilder); + this.field = field; + this.options = options; + } + + @Override + public String getWriteableName() { + return ENTRY.name; } private static Match readFrom(StreamInput in) throws IOException { @@ -75,11 +278,12 @@ private static Match readFrom(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class); } - return new Match(source, field, query, queryBuilder); + return new Match(source, field, query, null, queryBuilder); } + // This is not meant to be overriden by MatchOperator - MatchOperator should be serialized to Match @Override - public void writeTo(StreamOutput out) throws IOException { + public final void writeTo(StreamOutput out) throws IOException { source().writeTo(out); out.writeNamedWriteable(field()); out.writeNamedWriteable(query()); @@ -89,17 +293,220 @@ public void writeTo(StreamOutput out) throws IOException { } @Override - public String getWriteableName() { - return ENTRY.name; + protected TypeResolution resolveParams() { + return resolveField().and(resolveQuery()).and(resolveOptions()).and(checkParamCompatibility()); + } + + private TypeResolution resolveField() { + return isNotNull(field, sourceText(), FIRST).and( + isType( + field, + FIELD_DATA_TYPES::contains, + sourceText(), + FIRST, + "keyword, text, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version" + ) + ); + } + + private TypeResolution resolveQuery() { + return isType( + query(), + QUERY_DATA_TYPES::contains, + sourceText(), + SECOND, + "keyword, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version" + ).and(isNotNullAndFoldable(query(), sourceText(), SECOND)); + } + + private TypeResolution checkParamCompatibility() { + DataType fieldType = field().dataType(); + DataType queryType = query().dataType(); + + // Field and query types should match. If the query is a string, then it can match any field type. 
+ if ((fieldType == queryType) || (queryType == KEYWORD)) { + return TypeResolution.TYPE_RESOLVED; + } + + if (fieldType.isNumeric() && queryType.isNumeric()) { + // When doing an unsigned long query, field must be an unsigned long + if ((queryType == UNSIGNED_LONG && fieldType != UNSIGNED_LONG) == false) { + return TypeResolution.TYPE_RESOLVED; + } + } + + return new TypeResolution(formatIncompatibleTypesMessage(fieldType, queryType, sourceText())); + } + + private TypeResolution resolveOptions() { + if (options() != null) { + TypeResolution resolution = isNotNull(options(), sourceText(), THIRD); + if (resolution.unresolved()) { + return resolution; + } + // MapExpression does not have a DataType associated with it + resolution = isMapExpression(options(), sourceText(), THIRD); + if (resolution.unresolved()) { + return resolution; + } + + try { + matchQueryOptions(); + } catch (InvalidArgumentException e) { + return new TypeResolution(e.getMessage()); + } + } + return TypeResolution.TYPE_RESOLVED; + } + + private Map matchQueryOptions() throws InvalidArgumentException { + + if (options() == null) { + return Map.of(LENIENT_FIELD.getPreferredName(), true); + } + + Map matchOptions = new HashMap<>(); + // Match is lenient by default to avoid failing on incompatible types + matchOptions.put(LENIENT_FIELD.getPreferredName(), true); + + for (EntryExpression entry : ((MapExpression) options()).entryExpressions()) { + Expression optionExpr = entry.key(); + Expression valueExpr = entry.value(); + TypeResolution resolution = isFoldable(optionExpr, sourceText(), SECOND).and(isFoldable(valueExpr, sourceText(), SECOND)); + if (resolution.unresolved()) { + throw new InvalidArgumentException(resolution.message()); + } + Object optionExprLiteral = ((Literal) optionExpr).value(); + Object valueExprLiteral = ((Literal) valueExpr).value(); + String optionName = optionExprLiteral instanceof BytesRef br ? br.utf8ToString() : optionExprLiteral.toString(); + String optionValue = valueExprLiteral instanceof BytesRef br ? br.utf8ToString() : valueExprLiteral.toString(); + // validate the optionExpr is supported + DataType dataType = ALLOWED_OPTIONS.get(optionName); + if (dataType == null) { + throw new InvalidArgumentException( + format(null, "Invalid option [{}] in [{}], expected one of {}", optionName, sourceText(), ALLOWED_OPTIONS.keySet()) + ); + } + try { + matchOptions.put(optionName, DataTypeConverter.convert(optionValue, dataType)); + } catch (InvalidArgumentException e) { + throw new InvalidArgumentException( + format(null, "Invalid option [{}] in [{}], {}", optionName, sourceText(), e.getMessage()) + ); + } + } + + return matchOptions; + } + + public Expression field() { + return field; + } + + public Expression options() { + return options; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Match::new, field(), query(), options(), queryBuilder()); } @Override public Expression replaceChildren(List newChildren) { - return new Match(source(), newChildren.get(0), newChildren.get(1), queryBuilder()); + return new Match( + source(), + newChildren.get(0), + newChildren.get(1), + newChildren.size() > 2 ? 
newChildren.get(2) : null, + queryBuilder() + ); } @Override - protected NodeInfo info() { - return NodeInfo.create(this, Match::new, field(), query(), queryBuilder()); + public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { + return new Match(source(), field, query(), options(), queryBuilder); + } + + @Override + public void postOptimizationVerification(Failures failures) { + Expression fieldExpression = field(); + // Field may be converted to other data type (field_name :: data_type), so we need to check the original field + if (fieldExpression instanceof AbstractConvertFunction convertFunction) { + fieldExpression = convertFunction.field(); + } + if (fieldExpression instanceof FieldAttribute == false) { + failures.add( + Failure.fail( + field, + "[{}] {} cannot operate on [{}], which is not a field from an index mapping", + functionName(), + functionType(), + field.sourceText() + ) + ); + } + } + + @Override + public Object queryAsObject() { + Object queryAsObject = query().fold(FoldContext.small() /* TODO remove me */); + + // Convert BytesRef to string for string-based values + if (queryAsObject instanceof BytesRef bytesRef) { + return switch (query().dataType()) { + case IP -> EsqlDataTypeConverter.ipToString(bytesRef); + case VERSION -> EsqlDataTypeConverter.versionToString(bytesRef); + default -> bytesRef.utf8ToString(); + }; + } + + // Converts specific types to the correct type for the query + if (query().dataType() == DataType.UNSIGNED_LONG) { + return NumericUtils.unsignedLongAsBigInteger((Long) queryAsObject); + } else if (query().dataType() == DataType.DATETIME && queryAsObject instanceof Long) { + // When casting to date and datetime, we get a long back. But Match query needs a date string + return EsqlDataTypeConverter.dateTimeToString((Long) queryAsObject); + } else if (query().dataType() == DATE_NANOS && queryAsObject instanceof Long) { + return EsqlDataTypeConverter.nanoTimeToString((Long) queryAsObject); + } + + return queryAsObject; + } + + @Override + protected Query translate(TranslatorHandler handler) { + Expression fieldExpression = field; + // Field may be converted to other data type (field_name :: data_type), so we need to check the original field + if (fieldExpression instanceof AbstractConvertFunction convertFunction) { + fieldExpression = convertFunction.field(); + } + if (fieldExpression instanceof FieldAttribute fieldAttribute) { + String fieldName = fieldAttribute.name(); + if (fieldAttribute.field() instanceof MultiTypeEsField multiTypeEsField) { + // If we have multiple field types, we allow the query to be done, but getting the underlying field name + fieldName = multiTypeEsField.getName(); + } + // Make query lenient so mixed field types can be queried when a field type is incompatible with the value provided + return new MatchQuery(source(), fieldName, queryAsObject(), matchQueryOptions()); + } + + throw new IllegalArgumentException("Match must have a field attribute as the first argument"); + } + + @Override + public boolean equals(Object o) { + // Match does not serialize options, as they get included in the query builder. 
We need to override equals and hashcode to + // ignore options when comparing two Match functions + if (o == null || getClass() != o.getClass()) return false; + Match match = (Match) o; + return Objects.equals(field(), match.field()) + && Objects.equals(query(), match.query()) + && Objects.equals(queryBuilder(), match.queryBuilder()); + } + + @Override + public int hashCode() { + return Objects.hash(field(), query(), queryBuilder()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperator.java index e3e4bc4678089..38b5022b34351 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperator.java @@ -7,30 +7,24 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import java.io.IOException; import java.util.List; /** * This class performs a {@link org.elasticsearch.xpack.esql.querydsl.query.MatchQuery} using an operator. + * This is used as a convenience for generating documentation and for error message purposes - it's a way to represent + * the match operator in the function syntax. + * Serialization is provided as a way to pass the corresponding tests - serialization must be done to a Match class. */ public class MatchOperator extends Match { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( - Expression.class, - "MatchOperator", - MatchOperator::readFrom - ); - @FunctionInfo( returnType = "boolean", operator = ":", @@ -60,15 +54,11 @@ public MatchOperator( description = "Value to find in the provided field." 
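Illustrative aside, not part of the patch: as the MatchOperator class comment above notes, the ":" operator is only alternate syntax for the MATCH function and is serialized as a plain Match node. A minimal sketch of that equivalence, using a made-up "logs" index and "message" field in ES|QL query strings:

    public class MatchOperatorEquivalenceSketch {
        // Two spellings of the same full text predicate; both are expected to plan to the same Match node
        // and, after translation, the same underlying match query.
        static final String AS_FUNCTION = "FROM logs | WHERE MATCH(message, \"connection error\")";
        static final String AS_OPERATOR = "FROM logs | WHERE message : \"connection error\"";
    }
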
) Expression matchQuery ) { - super(source, field, matchQuery); + super(source, field, matchQuery, null, null); } - private static Match readFrom(StreamInput in) throws IOException { - Source source = Source.readFrom((PlanStreamInput) in); - Expression field = in.readNamedWriteable(Expression.class); - Expression query = in.readNamedWriteable(Expression.class); - - return new MatchOperator(source, field, query); + private MatchOperator(Source source, Expression field, Expression matchQuery, QueryBuilder queryBuilder) { + super(source, field, matchQuery, null, queryBuilder); } @Override @@ -81,11 +71,6 @@ public String functionName() { return ":"; } - @Override - public String getWriteableName() { - return ENTRY.name; - } - @Override protected NodeInfo info() { return NodeInfo.create(this, MatchOperator::new, field(), query()); @@ -93,6 +78,11 @@ protected NodeInfo info() { @Override public Expression replaceChildren(List newChildren) { - return new MatchOperator(source(), newChildren.get(0), newChildren.get(1)); + return new MatchOperator(source(), newChildren.get(0), newChildren.get(1), queryBuilder()); + } + + @Override + public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { + return new MatchOperator(source(), field, query(), queryBuilder); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java index c96d1c46fa529..4db1c38694757 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java @@ -95,8 +95,12 @@ public String getWriteableName() { } @Override - protected TypeResolution resolveNonQueryParamTypes() { - return isNotNull(field, sourceText(), FIRST).and(isString(field, sourceText(), FIRST)).and(super.resolveNonQueryParamTypes()); + protected TypeResolution resolveParams() { + return resolveField().and(resolveQuery(SECOND)); + } + + private TypeResolution resolveField() { + return isNotNull(field, sourceText(), FIRST).and(isString(field, sourceText(), FIRST)); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMap.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMap.java deleted file mode 100644 index 447bac738162b..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMap.java +++ /dev/null @@ -1,221 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.expression.function.scalar.map; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.ann.Fixed; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.xpack.esql.core.expression.EntryExpression; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.MapExpression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; -import org.elasticsearch.xpack.esql.expression.function.MapParam; -import org.elasticsearch.xpack.esql.expression.function.OptionalArgument; -import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; - -import static org.elasticsearch.common.logging.LoggerMessageFormat.format; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isFoldable; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isMapExpression; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; -import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; - -public class LogWithBaseInMap extends EsqlScalarFunction implements OptionalArgument { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( - Expression.class, - "LogWithBaseInMap", - LogWithBaseInMap::new - ); - - private final Expression number; - - private final Expression map; - - private static final String BASE = "base"; - - @FunctionInfo( - returnType = "double", - description = "Returns the logarithm of a value to a base. The input can be any numeric value, " - + "the return value is always a double.\n" - + "\n" - + "Logs of zero, negative numbers, and base of one return `null` as well as a warning." - ) - public LogWithBaseInMap( - Source source, - @Param( - name = "number", - type = { "double", "integer", "long" }, - description = "Numeric expression. If `null`, the function returns `null`." - ) Expression number, - @MapParam( - params = { @MapParam.MapParamEntry(name = "base", valueHint = { "2", "2.0" }) }, - description = "Input value. The input is a valid constant map expression.", - optional = true - ) Expression option - ) { - super(source, option == null ? 
Collections.singletonList(number) : List.of(number, option)); - this.number = number; - this.map = option; - } - - private LogWithBaseInMap(StreamInput in) throws IOException { - this( - Source.readFrom((PlanStreamInput) in), - in.readNamedWriteable(Expression.class), - in.readOptionalNamedWriteable(Expression.class) - ); - } - - @Override - public final void writeTo(StreamOutput out) throws IOException { - source().writeTo(out); - out.writeNamedWriteable(number); - out.writeOptionalNamedWriteable(map); - } - - @Override - public String getWriteableName() { - return ENTRY.name; - } - - @Override - protected TypeResolution resolveType() { - if (childrenResolved() == false) { - return new TypeResolution("Unresolved children"); - } - // validate field type - TypeResolution resolution = isNumeric(number, sourceText(), FIRST); - if (resolution.unresolved()) { - return resolution; - } - - if (map != null) { - // MapExpression does not have a DataType associated with it - resolution = isMapExpression(map, sourceText(), SECOND); - if (resolution.unresolved()) { - return resolution; - } - return validateOptions(); - } - return TypeResolution.TYPE_RESOLVED; - } - - @Override - public DataType dataType() { - return DOUBLE; - } - - @Override - public boolean foldable() { - return number.foldable(); - } - - @Override - public Expression replaceChildren(List newChildren) { - return new LogWithBaseInMap(source(), newChildren.get(0), newChildren.size() > 1 ? newChildren.get(1) : null); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, LogWithBaseInMap::new, number, map); - } - - @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { - var valueEval = Cast.cast(source(), number.dataType(), DataType.DOUBLE, toEvaluator.apply(number)); - double base = Math.E; - if (map instanceof MapExpression me) { - Expression b = me.get(BASE); - if (b != null && b.foldable()) { - Object v = b.fold(toEvaluator.foldCtx()); - if (v instanceof BytesRef br) { - v = br.utf8ToString(); - } - base = Double.parseDouble(v.toString()); - } - } - return new LogWithBaseInMapEvaluator.Factory(source(), valueEval, base); - } - - @Evaluator(warnExceptions = { ArithmeticException.class }) - static double process(double value, @Fixed double base) throws ArithmeticException { - if (base <= 0d || value <= 0d) { - throw new ArithmeticException("Log of non-positive number"); - } - if (base == 1d) { - throw new ArithmeticException("Log of base 1"); - } - return Math.log10(value) / Math.log10(base); - } - - public Expression number() { - return number; - } - - public Expression base() { - return map; - } - - private TypeResolution validateOptions() { - for (EntryExpression entry : ((MapExpression) map).entryExpressions()) { - Expression key = entry.key(); - Expression value = entry.value(); - TypeResolution resolution = isFoldable(key, sourceText(), SECOND).and(isFoldable(value, sourceText(), SECOND)); - if (resolution.unresolved()) { - return resolution; - } - Object k = key instanceof Literal l ? l.value() : null; - Object v = value instanceof Literal l ? l.value() : null; - if (k == null) { - return new TypeResolution( - format(null, "Invalid option key in [{}], expected a literal value but got [{}]", sourceText(), key.sourceText()) - ); - } - - if (v == null) { - return new TypeResolution( - format(null, "Invalid option value in [{}], expected a constant value but got [{}]", sourceText(), value.sourceText()) - ); - } - String base = k instanceof BytesRef br ? 
br.utf8ToString() : k.toString(); - String number = v instanceof BytesRef br ? br.utf8ToString() : v.toString(); - // validate the key is in SUPPORTED_OPTIONS - if (base.equals(BASE) == false) { - return new TypeResolution( - format(null, "Invalid option key in [{}], expected base but got [{}]", sourceText(), key.sourceText()) - ); - } - // validate the value is valid for the key provided - try { - Double.parseDouble(number); - } catch (NumberFormatException e) { - return new TypeResolution( - format(null, "Invalid option value in [{}], expected a numeric number but got [{}]", sourceText(), v) - ); - } - - } - return TypeResolution.TYPE_RESOLVED; - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/fulltext/MatchQueryPredicate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/fulltext/MatchQueryPredicate.java deleted file mode 100644 index 66c6d8995b24e..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/fulltext/MatchQueryPredicate.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.expression.predicate.fulltext; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static java.util.Collections.singletonList; - -public class MatchQueryPredicate extends FullTextPredicate { - - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( - Expression.class, - "MatchQueryPredicate", - MatchQueryPredicate::new - ); - - private final Expression field; - - public MatchQueryPredicate(Source source, Expression field, String query, String options) { - super(source, query, options, singletonList(field)); - this.field = field; - } - - MatchQueryPredicate(StreamInput in) throws IOException { - super(in); - assert super.children().size() == 1; - field = super.children().get(0); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, MatchQueryPredicate::new, field, query(), options()); - } - - @Override - public MatchQueryPredicate replaceChildren(List newChildren) { - return new MatchQueryPredicate(source(), newChildren.get(0), query(), options()); - } - - public Expression field() { - return field; - } - - @Override - public int hashCode() { - return Objects.hash(field, super.hashCode()); - } - - @Override - public boolean equals(Object obj) { - if (super.equals(obj)) { - MatchQueryPredicate other = (MatchQueryPredicate) obj; - return Objects.equals(field, other.field); - } - return false; - } - - @Override - public String getWriteableName() { - return ENTRY.name; - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 92274ebe15513..1d050bd91e66c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp 
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -68,8 +68,8 @@ null '*' '/' '%' -null -null +'{' +'}' null null ']' @@ -507,4 +507,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 130, 1629, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 
7, 213, 2, 214, 7, 214, 2, 215, 7, 215, 2, 216, 7, 216, 2, 217, 7, 217, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 664, 8, 24, 11, 24, 12, 24, 665, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 674, 8, 25, 10, 25, 12, 25, 677, 9, 25, 1, 25, 3, 25, 680, 8, 25, 1, 25, 3, 25, 683, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 692, 8, 26, 10, 26, 12, 26, 695, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 703, 8, 27, 11, 27, 12, 27, 704, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 724, 8, 33, 1, 33, 4, 33, 727, 8, 33, 11, 33, 12, 33, 728, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 738, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 745, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 750, 8, 39, 10, 39, 12, 39, 753, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 761, 8, 39, 10, 39, 12, 39, 764, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 771, 8, 39, 1, 39, 3, 39, 774, 8, 39, 3, 39, 776, 8, 39, 1, 40, 4, 40, 779, 8, 40, 11, 40, 12, 40, 780, 1, 41, 4, 41, 784, 8, 41, 11, 41, 12, 41, 785, 1, 41, 1, 41, 5, 41, 790, 8, 41, 10, 41, 12, 41, 793, 9, 41, 1, 41, 1, 41, 4, 41, 797, 8, 41, 11, 41, 12, 41, 798, 1, 41, 4, 41, 802, 8, 41, 11, 41, 12, 41, 803, 1, 41, 1, 41, 5, 41, 808, 8, 41, 10, 41, 12, 41, 811, 9, 41, 3, 41, 813, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 819, 8, 41, 11, 41, 12, 41, 820, 1, 41, 1, 41, 3, 41, 825, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 
71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 3, 81, 959, 8, 81, 1, 81, 5, 81, 962, 8, 81, 10, 81, 12, 81, 965, 9, 81, 1, 81, 1, 81, 4, 81, 969, 8, 81, 11, 81, 12, 81, 970, 3, 81, 973, 8, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 5, 84, 987, 8, 84, 10, 84, 12, 84, 990, 9, 84, 1, 84, 1, 84, 3, 84, 994, 8, 84, 1, 84, 4, 84, 997, 8, 84, 11, 84, 12, 84, 998, 3, 84, 1001, 8, 84, 1, 85, 1, 85, 4, 85, 1005, 8, 85, 11, 85, 12, 85, 1006, 1, 85, 1, 85, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 3, 102, 1084, 8, 102, 1, 103, 4, 103, 1087, 8, 103, 11, 103, 12, 103, 1088, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 3, 114, 1138, 8, 114, 1, 115, 1, 115, 3, 115, 1142, 8, 115, 1, 115, 5, 115, 1145, 8, 115, 10, 115, 12, 115, 1148, 9, 115, 1, 115, 1, 115, 3, 115, 1152, 8, 115, 1, 115, 4, 115, 1155, 8, 115, 11, 115, 12, 115, 1156, 3, 115, 1159, 8, 115, 1, 116, 1, 116, 4, 116, 1163, 8, 116, 11, 116, 12, 116, 1164, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 136, 4, 136, 1250, 8, 136, 11, 136, 12, 136, 1251, 1, 136, 1, 136, 3, 136, 1256, 8, 136, 1, 136, 4, 136, 1259, 8, 136, 11, 136, 12, 136, 1260, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 
161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 4, 169, 1406, 8, 169, 11, 169, 12, 169, 1407, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 217, 1, 217, 1, 217, 1, 217, 1, 217, 2, 693, 762, 0, 218, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 69, 174, 70, 176, 0, 178, 71, 180, 72, 182, 73, 184, 74, 186, 0, 188, 75, 190, 76, 192, 77, 194, 78, 196, 0, 198, 0, 200, 79, 202, 80, 204, 81, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 82, 220, 0, 222, 83, 224, 0, 226, 0, 228, 84, 230, 85, 232, 86, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 0, 246, 0, 248, 87, 250, 88, 252, 89, 254, 90, 256, 0, 258, 0, 260, 0, 262, 0, 264, 0, 266, 0, 268, 91, 270, 0, 272, 92, 274, 93, 276, 94, 278, 0, 280, 0, 282, 95, 284, 96, 286, 0, 288, 97, 290, 0, 292, 98, 294, 99, 296, 100, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 0, 314, 0, 316, 101, 318, 102, 320, 103, 322, 0, 324, 0, 326, 0, 328, 0, 330, 0, 332, 0, 334, 104, 336, 105, 338, 106, 340, 0, 342, 107, 344, 108, 346, 109, 348, 110, 350, 0, 352, 
0, 354, 111, 356, 112, 358, 113, 360, 114, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 0, 374, 0, 376, 115, 378, 116, 380, 117, 382, 0, 384, 0, 386, 0, 388, 0, 390, 118, 392, 119, 394, 120, 396, 0, 398, 0, 400, 0, 402, 0, 404, 121, 406, 0, 408, 0, 410, 0, 412, 0, 414, 0, 416, 122, 418, 123, 420, 124, 422, 0, 424, 0, 426, 0, 428, 125, 430, 126, 432, 127, 434, 0, 436, 0, 438, 128, 440, 129, 442, 130, 444, 0, 446, 0, 448, 0, 450, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1656, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 182, 1, 0, 0, 0, 1, 184, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 1, 192, 1, 0, 0, 0, 1, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 2, 202, 1, 0, 0, 0, 2, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 3, 230, 1, 0, 0, 0, 3, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 240, 
1, 0, 0, 0, 4, 242, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 4, 252, 1, 0, 0, 0, 4, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 5, 274, 1, 0, 0, 0, 5, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 282, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 6, 294, 1, 0, 0, 0, 6, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 7, 318, 1, 0, 0, 0, 7, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 8, 336, 1, 0, 0, 0, 8, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 9, 346, 1, 0, 0, 0, 9, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 10, 358, 1, 0, 0, 0, 10, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 11, 378, 1, 0, 0, 0, 11, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 12, 392, 1, 0, 0, 0, 12, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 13, 412, 1, 0, 0, 0, 13, 414, 1, 0, 0, 0, 13, 416, 1, 0, 0, 0, 13, 418, 1, 0, 0, 0, 13, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 14, 424, 1, 0, 0, 0, 14, 426, 1, 0, 0, 0, 14, 428, 1, 0, 0, 0, 14, 430, 1, 0, 0, 0, 14, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 15, 442, 1, 0, 0, 0, 15, 444, 1, 0, 0, 0, 15, 446, 1, 0, 0, 0, 15, 448, 1, 0, 0, 0, 15, 450, 1, 0, 0, 0, 16, 452, 1, 0, 0, 0, 18, 462, 1, 0, 0, 0, 20, 469, 1, 0, 0, 0, 22, 478, 1, 0, 0, 0, 24, 485, 1, 0, 0, 0, 26, 495, 1, 0, 0, 0, 28, 502, 1, 0, 0, 0, 30, 509, 1, 0, 0, 0, 32, 516, 1, 0, 0, 0, 34, 524, 1, 0, 0, 0, 36, 536, 1, 0, 0, 0, 38, 545, 1, 0, 0, 0, 40, 551, 1, 0, 0, 0, 42, 558, 1, 0, 0, 0, 44, 565, 1, 0, 0, 0, 46, 573, 1, 0, 0, 0, 48, 581, 1, 0, 0, 0, 50, 596, 1, 0, 0, 0, 52, 608, 1, 0, 0, 0, 54, 619, 1, 0, 0, 0, 56, 627, 1, 0, 0, 0, 58, 635, 1, 0, 0, 0, 60, 643, 1, 0, 0, 0, 62, 652, 1, 0, 0, 0, 64, 663, 1, 0, 0, 0, 66, 669, 1, 0, 0, 0, 68, 686, 1, 0, 0, 0, 70, 702, 1, 0, 0, 0, 72, 708, 1, 0, 0, 0, 74, 712, 1, 0, 0, 0, 76, 714, 1, 0, 0, 0, 78, 716, 1, 0, 0, 0, 80, 719, 1, 0, 0, 0, 82, 721, 1, 0, 0, 0, 84, 730, 1, 0, 0, 0, 86, 732, 1, 0, 0, 0, 88, 737, 1, 0, 0, 0, 90, 739, 1, 0, 0, 0, 92, 744, 1, 0, 0, 0, 94, 775, 1, 0, 0, 0, 96, 778, 1, 0, 0, 0, 98, 824, 1, 0, 0, 0, 100, 826, 1, 0, 0, 0, 102, 829, 1, 0, 0, 0, 104, 833, 1, 0, 0, 0, 106, 837, 1, 0, 0, 0, 108, 839, 1, 0, 0, 0, 110, 842, 1, 0, 0, 0, 112, 844, 1, 0, 0, 0, 114, 846, 1, 0, 0, 0, 116, 851, 1, 0, 0, 0, 118, 853, 1, 0, 0, 0, 120, 859, 1, 0, 0, 0, 122, 865, 1, 0, 0, 0, 124, 868, 1, 0, 0, 0, 126, 871, 1, 0, 0, 0, 128, 876, 1, 0, 0, 0, 130, 881, 1, 0, 0, 0, 132, 883, 1, 0, 0, 0, 134, 887, 1, 0, 0, 0, 136, 892, 1, 0, 0, 0, 138, 898, 1, 0, 0, 0, 140, 901, 1, 0, 0, 0, 142, 903, 1, 0, 0, 0, 144, 909, 1, 0, 0, 0, 146, 911, 1, 0, 0, 0, 148, 916, 1, 0, 0, 0, 150, 919, 1, 0, 0, 
0, 152, 922, 1, 0, 0, 0, 154, 925, 1, 0, 0, 0, 156, 927, 1, 0, 0, 0, 158, 930, 1, 0, 0, 0, 160, 932, 1, 0, 0, 0, 162, 935, 1, 0, 0, 0, 164, 937, 1, 0, 0, 0, 166, 939, 1, 0, 0, 0, 168, 941, 1, 0, 0, 0, 170, 943, 1, 0, 0, 0, 172, 945, 1, 0, 0, 0, 174, 948, 1, 0, 0, 0, 176, 951, 1, 0, 0, 0, 178, 972, 1, 0, 0, 0, 180, 974, 1, 0, 0, 0, 182, 979, 1, 0, 0, 0, 184, 1000, 1, 0, 0, 0, 186, 1002, 1, 0, 0, 0, 188, 1010, 1, 0, 0, 0, 190, 1012, 1, 0, 0, 0, 192, 1016, 1, 0, 0, 0, 194, 1020, 1, 0, 0, 0, 196, 1024, 1, 0, 0, 0, 198, 1029, 1, 0, 0, 0, 200, 1034, 1, 0, 0, 0, 202, 1038, 1, 0, 0, 0, 204, 1042, 1, 0, 0, 0, 206, 1046, 1, 0, 0, 0, 208, 1051, 1, 0, 0, 0, 210, 1055, 1, 0, 0, 0, 212, 1059, 1, 0, 0, 0, 214, 1063, 1, 0, 0, 0, 216, 1067, 1, 0, 0, 0, 218, 1071, 1, 0, 0, 0, 220, 1083, 1, 0, 0, 0, 222, 1086, 1, 0, 0, 0, 224, 1090, 1, 0, 0, 0, 226, 1094, 1, 0, 0, 0, 228, 1098, 1, 0, 0, 0, 230, 1102, 1, 0, 0, 0, 232, 1106, 1, 0, 0, 0, 234, 1110, 1, 0, 0, 0, 236, 1115, 1, 0, 0, 0, 238, 1119, 1, 0, 0, 0, 240, 1123, 1, 0, 0, 0, 242, 1128, 1, 0, 0, 0, 244, 1137, 1, 0, 0, 0, 246, 1158, 1, 0, 0, 0, 248, 1162, 1, 0, 0, 0, 250, 1166, 1, 0, 0, 0, 252, 1170, 1, 0, 0, 0, 254, 1174, 1, 0, 0, 0, 256, 1178, 1, 0, 0, 0, 258, 1183, 1, 0, 0, 0, 260, 1187, 1, 0, 0, 0, 262, 1191, 1, 0, 0, 0, 264, 1195, 1, 0, 0, 0, 266, 1200, 1, 0, 0, 0, 268, 1205, 1, 0, 0, 0, 270, 1208, 1, 0, 0, 0, 272, 1212, 1, 0, 0, 0, 274, 1216, 1, 0, 0, 0, 276, 1220, 1, 0, 0, 0, 278, 1224, 1, 0, 0, 0, 280, 1229, 1, 0, 0, 0, 282, 1234, 1, 0, 0, 0, 284, 1239, 1, 0, 0, 0, 286, 1246, 1, 0, 0, 0, 288, 1255, 1, 0, 0, 0, 290, 1262, 1, 0, 0, 0, 292, 1266, 1, 0, 0, 0, 294, 1270, 1, 0, 0, 0, 296, 1274, 1, 0, 0, 0, 298, 1278, 1, 0, 0, 0, 300, 1284, 1, 0, 0, 0, 302, 1288, 1, 0, 0, 0, 304, 1292, 1, 0, 0, 0, 306, 1296, 1, 0, 0, 0, 308, 1300, 1, 0, 0, 0, 310, 1304, 1, 0, 0, 0, 312, 1308, 1, 0, 0, 0, 314, 1313, 1, 0, 0, 0, 316, 1318, 1, 0, 0, 0, 318, 1322, 1, 0, 0, 0, 320, 1326, 1, 0, 0, 0, 322, 1330, 1, 0, 0, 0, 324, 1335, 1, 0, 0, 0, 326, 1339, 1, 0, 0, 0, 328, 1344, 1, 0, 0, 0, 330, 1349, 1, 0, 0, 0, 332, 1353, 1, 0, 0, 0, 334, 1357, 1, 0, 0, 0, 336, 1361, 1, 0, 0, 0, 338, 1365, 1, 0, 0, 0, 340, 1369, 1, 0, 0, 0, 342, 1374, 1, 0, 0, 0, 344, 1379, 1, 0, 0, 0, 346, 1383, 1, 0, 0, 0, 348, 1387, 1, 0, 0, 0, 350, 1391, 1, 0, 0, 0, 352, 1396, 1, 0, 0, 0, 354, 1405, 1, 0, 0, 0, 356, 1409, 1, 0, 0, 0, 358, 1413, 1, 0, 0, 0, 360, 1417, 1, 0, 0, 0, 362, 1421, 1, 0, 0, 0, 364, 1426, 1, 0, 0, 0, 366, 1430, 1, 0, 0, 0, 368, 1434, 1, 0, 0, 0, 370, 1438, 1, 0, 0, 0, 372, 1443, 1, 0, 0, 0, 374, 1447, 1, 0, 0, 0, 376, 1451, 1, 0, 0, 0, 378, 1455, 1, 0, 0, 0, 380, 1459, 1, 0, 0, 0, 382, 1463, 1, 0, 0, 0, 384, 1469, 1, 0, 0, 0, 386, 1473, 1, 0, 0, 0, 388, 1477, 1, 0, 0, 0, 390, 1481, 1, 0, 0, 0, 392, 1485, 1, 0, 0, 0, 394, 1489, 1, 0, 0, 0, 396, 1493, 1, 0, 0, 0, 398, 1498, 1, 0, 0, 0, 400, 1502, 1, 0, 0, 0, 402, 1506, 1, 0, 0, 0, 404, 1512, 1, 0, 0, 0, 406, 1521, 1, 0, 0, 0, 408, 1525, 1, 0, 0, 0, 410, 1529, 1, 0, 0, 0, 412, 1533, 1, 0, 0, 0, 414, 1537, 1, 0, 0, 0, 416, 1541, 1, 0, 0, 0, 418, 1545, 1, 0, 0, 0, 420, 1549, 1, 0, 0, 0, 422, 1553, 1, 0, 0, 0, 424, 1558, 1, 0, 0, 0, 426, 1564, 1, 0, 0, 0, 428, 1570, 1, 0, 0, 0, 430, 1574, 1, 0, 0, 0, 432, 1578, 1, 0, 0, 0, 434, 1582, 1, 0, 0, 0, 436, 1588, 1, 0, 0, 0, 438, 1594, 1, 0, 0, 0, 440, 1598, 1, 0, 0, 0, 442, 1602, 1, 0, 0, 0, 444, 1606, 1, 0, 0, 0, 446, 1612, 1, 0, 0, 0, 448, 1618, 1, 0, 0, 0, 450, 1624, 1, 0, 0, 0, 452, 453, 7, 0, 0, 0, 453, 454, 7, 1, 0, 0, 454, 455, 7, 2, 0, 0, 455, 456, 7, 2, 0, 0, 456, 457, 7, 3, 0, 0, 457, 
458, 7, 4, 0, 0, 458, 459, 7, 5, 0, 0, 459, 460, 1, 0, 0, 0, 460, 461, 6, 0, 0, 0, 461, 17, 1, 0, 0, 0, 462, 463, 7, 0, 0, 0, 463, 464, 7, 6, 0, 0, 464, 465, 7, 7, 0, 0, 465, 466, 7, 8, 0, 0, 466, 467, 1, 0, 0, 0, 467, 468, 6, 1, 1, 0, 468, 19, 1, 0, 0, 0, 469, 470, 7, 3, 0, 0, 470, 471, 7, 9, 0, 0, 471, 472, 7, 6, 0, 0, 472, 473, 7, 1, 0, 0, 473, 474, 7, 4, 0, 0, 474, 475, 7, 10, 0, 0, 475, 476, 1, 0, 0, 0, 476, 477, 6, 2, 2, 0, 477, 21, 1, 0, 0, 0, 478, 479, 7, 3, 0, 0, 479, 480, 7, 11, 0, 0, 480, 481, 7, 12, 0, 0, 481, 482, 7, 13, 0, 0, 482, 483, 1, 0, 0, 0, 483, 484, 6, 3, 0, 0, 484, 23, 1, 0, 0, 0, 485, 486, 7, 3, 0, 0, 486, 487, 7, 14, 0, 0, 487, 488, 7, 8, 0, 0, 488, 489, 7, 13, 0, 0, 489, 490, 7, 12, 0, 0, 490, 491, 7, 1, 0, 0, 491, 492, 7, 9, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 4, 3, 0, 494, 25, 1, 0, 0, 0, 495, 496, 7, 15, 0, 0, 496, 497, 7, 6, 0, 0, 497, 498, 7, 7, 0, 0, 498, 499, 7, 16, 0, 0, 499, 500, 1, 0, 0, 0, 500, 501, 6, 5, 4, 0, 501, 27, 1, 0, 0, 0, 502, 503, 7, 17, 0, 0, 503, 504, 7, 6, 0, 0, 504, 505, 7, 7, 0, 0, 505, 506, 7, 18, 0, 0, 506, 507, 1, 0, 0, 0, 507, 508, 6, 6, 0, 0, 508, 29, 1, 0, 0, 0, 509, 510, 7, 18, 0, 0, 510, 511, 7, 3, 0, 0, 511, 512, 7, 3, 0, 0, 512, 513, 7, 8, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 6, 7, 1, 0, 515, 31, 1, 0, 0, 0, 516, 517, 7, 13, 0, 0, 517, 518, 7, 1, 0, 0, 518, 519, 7, 16, 0, 0, 519, 520, 7, 1, 0, 0, 520, 521, 7, 5, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 8, 0, 0, 523, 33, 1, 0, 0, 0, 524, 525, 7, 16, 0, 0, 525, 526, 7, 11, 0, 0, 526, 527, 5, 95, 0, 0, 527, 528, 7, 3, 0, 0, 528, 529, 7, 14, 0, 0, 529, 530, 7, 8, 0, 0, 530, 531, 7, 12, 0, 0, 531, 532, 7, 9, 0, 0, 532, 533, 7, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 535, 6, 9, 5, 0, 535, 35, 1, 0, 0, 0, 536, 537, 7, 6, 0, 0, 537, 538, 7, 3, 0, 0, 538, 539, 7, 9, 0, 0, 539, 540, 7, 12, 0, 0, 540, 541, 7, 16, 0, 0, 541, 542, 7, 3, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 6, 10, 6, 0, 544, 37, 1, 0, 0, 0, 545, 546, 7, 6, 0, 0, 546, 547, 7, 7, 0, 0, 547, 548, 7, 19, 0, 0, 548, 549, 1, 0, 0, 0, 549, 550, 6, 11, 0, 0, 550, 39, 1, 0, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 7, 10, 0, 0, 553, 554, 7, 7, 0, 0, 554, 555, 7, 19, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 6, 12, 7, 0, 557, 41, 1, 0, 0, 0, 558, 559, 7, 2, 0, 0, 559, 560, 7, 7, 0, 0, 560, 561, 7, 6, 0, 0, 561, 562, 7, 5, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 13, 0, 0, 564, 43, 1, 0, 0, 0, 565, 566, 7, 2, 0, 0, 566, 567, 7, 5, 0, 0, 567, 568, 7, 12, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 2, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 6, 14, 0, 0, 572, 45, 1, 0, 0, 0, 573, 574, 7, 19, 0, 0, 574, 575, 7, 10, 0, 0, 575, 576, 7, 3, 0, 0, 576, 577, 7, 6, 0, 0, 577, 578, 7, 3, 0, 0, 578, 579, 1, 0, 0, 0, 579, 580, 6, 15, 0, 0, 580, 47, 1, 0, 0, 0, 581, 582, 4, 16, 0, 0, 582, 583, 7, 1, 0, 0, 583, 584, 7, 9, 0, 0, 584, 585, 7, 13, 0, 0, 585, 586, 7, 1, 0, 0, 586, 587, 7, 9, 0, 0, 587, 588, 7, 3, 0, 0, 588, 589, 7, 2, 0, 0, 589, 590, 7, 5, 0, 0, 590, 591, 7, 12, 0, 0, 591, 592, 7, 5, 0, 0, 592, 593, 7, 2, 0, 0, 593, 594, 1, 0, 0, 0, 594, 595, 6, 16, 0, 0, 595, 49, 1, 0, 0, 0, 596, 597, 4, 17, 1, 0, 597, 598, 7, 13, 0, 0, 598, 599, 7, 7, 0, 0, 599, 600, 7, 7, 0, 0, 600, 601, 7, 18, 0, 0, 601, 602, 7, 20, 0, 0, 602, 603, 7, 8, 0, 0, 603, 604, 5, 95, 0, 0, 604, 605, 5, 128020, 0, 0, 605, 606, 1, 0, 0, 0, 606, 607, 6, 17, 8, 0, 607, 51, 1, 0, 0, 0, 608, 609, 4, 18, 2, 0, 609, 610, 7, 16, 0, 0, 610, 611, 7, 3, 0, 0, 611, 612, 7, 5, 0, 0, 612, 613, 7, 6, 0, 0, 613, 614, 7, 1, 0, 0, 614, 615, 7, 4, 0, 0, 615, 616, 7, 2, 0, 0, 616, 617, 1, 0, 0, 0, 617, 
618, 6, 18, 9, 0, 618, 53, 1, 0, 0, 0, 619, 620, 4, 19, 3, 0, 620, 621, 7, 21, 0, 0, 621, 622, 7, 7, 0, 0, 622, 623, 7, 1, 0, 0, 623, 624, 7, 9, 0, 0, 624, 625, 1, 0, 0, 0, 625, 626, 6, 19, 10, 0, 626, 55, 1, 0, 0, 0, 627, 628, 4, 20, 4, 0, 628, 629, 7, 15, 0, 0, 629, 630, 7, 20, 0, 0, 630, 631, 7, 13, 0, 0, 631, 632, 7, 13, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 6, 20, 10, 0, 634, 57, 1, 0, 0, 0, 635, 636, 4, 21, 5, 0, 636, 637, 7, 13, 0, 0, 637, 638, 7, 3, 0, 0, 638, 639, 7, 15, 0, 0, 639, 640, 7, 5, 0, 0, 640, 641, 1, 0, 0, 0, 641, 642, 6, 21, 10, 0, 642, 59, 1, 0, 0, 0, 643, 644, 4, 22, 6, 0, 644, 645, 7, 6, 0, 0, 645, 646, 7, 1, 0, 0, 646, 647, 7, 17, 0, 0, 647, 648, 7, 10, 0, 0, 648, 649, 7, 5, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 6, 22, 10, 0, 651, 61, 1, 0, 0, 0, 652, 653, 4, 23, 7, 0, 653, 654, 7, 13, 0, 0, 654, 655, 7, 7, 0, 0, 655, 656, 7, 7, 0, 0, 656, 657, 7, 18, 0, 0, 657, 658, 7, 20, 0, 0, 658, 659, 7, 8, 0, 0, 659, 660, 1, 0, 0, 0, 660, 661, 6, 23, 10, 0, 661, 63, 1, 0, 0, 0, 662, 664, 8, 22, 0, 0, 663, 662, 1, 0, 0, 0, 664, 665, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 667, 1, 0, 0, 0, 667, 668, 6, 24, 0, 0, 668, 65, 1, 0, 0, 0, 669, 670, 5, 47, 0, 0, 670, 671, 5, 47, 0, 0, 671, 675, 1, 0, 0, 0, 672, 674, 8, 23, 0, 0, 673, 672, 1, 0, 0, 0, 674, 677, 1, 0, 0, 0, 675, 673, 1, 0, 0, 0, 675, 676, 1, 0, 0, 0, 676, 679, 1, 0, 0, 0, 677, 675, 1, 0, 0, 0, 678, 680, 5, 13, 0, 0, 679, 678, 1, 0, 0, 0, 679, 680, 1, 0, 0, 0, 680, 682, 1, 0, 0, 0, 681, 683, 5, 10, 0, 0, 682, 681, 1, 0, 0, 0, 682, 683, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 684, 685, 6, 25, 11, 0, 685, 67, 1, 0, 0, 0, 686, 687, 5, 47, 0, 0, 687, 688, 5, 42, 0, 0, 688, 693, 1, 0, 0, 0, 689, 692, 3, 68, 26, 0, 690, 692, 9, 0, 0, 0, 691, 689, 1, 0, 0, 0, 691, 690, 1, 0, 0, 0, 692, 695, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 693, 691, 1, 0, 0, 0, 694, 696, 1, 0, 0, 0, 695, 693, 1, 0, 0, 0, 696, 697, 5, 42, 0, 0, 697, 698, 5, 47, 0, 0, 698, 699, 1, 0, 0, 0, 699, 700, 6, 26, 11, 0, 700, 69, 1, 0, 0, 0, 701, 703, 7, 24, 0, 0, 702, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 702, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 707, 6, 27, 11, 0, 707, 71, 1, 0, 0, 0, 708, 709, 5, 124, 0, 0, 709, 710, 1, 0, 0, 0, 710, 711, 6, 28, 12, 0, 711, 73, 1, 0, 0, 0, 712, 713, 7, 25, 0, 0, 713, 75, 1, 0, 0, 0, 714, 715, 7, 26, 0, 0, 715, 77, 1, 0, 0, 0, 716, 717, 5, 92, 0, 0, 717, 718, 7, 27, 0, 0, 718, 79, 1, 0, 0, 0, 719, 720, 8, 28, 0, 0, 720, 81, 1, 0, 0, 0, 721, 723, 7, 3, 0, 0, 722, 724, 7, 29, 0, 0, 723, 722, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 726, 1, 0, 0, 0, 725, 727, 3, 74, 29, 0, 726, 725, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 726, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 83, 1, 0, 0, 0, 730, 731, 5, 64, 0, 0, 731, 85, 1, 0, 0, 0, 732, 733, 5, 96, 0, 0, 733, 87, 1, 0, 0, 0, 734, 738, 8, 30, 0, 0, 735, 736, 5, 96, 0, 0, 736, 738, 5, 96, 0, 0, 737, 734, 1, 0, 0, 0, 737, 735, 1, 0, 0, 0, 738, 89, 1, 0, 0, 0, 739, 740, 5, 95, 0, 0, 740, 91, 1, 0, 0, 0, 741, 745, 3, 76, 30, 0, 742, 745, 3, 74, 29, 0, 743, 745, 3, 90, 37, 0, 744, 741, 1, 0, 0, 0, 744, 742, 1, 0, 0, 0, 744, 743, 1, 0, 0, 0, 745, 93, 1, 0, 0, 0, 746, 751, 5, 34, 0, 0, 747, 750, 3, 78, 31, 0, 748, 750, 3, 80, 32, 0, 749, 747, 1, 0, 0, 0, 749, 748, 1, 0, 0, 0, 750, 753, 1, 0, 0, 0, 751, 749, 1, 0, 0, 0, 751, 752, 1, 0, 0, 0, 752, 754, 1, 0, 0, 0, 753, 751, 1, 0, 0, 0, 754, 776, 5, 34, 0, 0, 755, 756, 5, 34, 0, 0, 756, 757, 5, 34, 0, 0, 757, 758, 5, 34, 0, 0, 758, 762, 1, 0, 0, 0, 759, 761, 8, 23, 0, 0, 760, 759, 1, 0, 0, 0, 761, 764, 1, 0, 0, 0, 
762, 763, 1, 0, 0, 0, 762, 760, 1, 0, 0, 0, 763, 765, 1, 0, 0, 0, 764, 762, 1, 0, 0, 0, 765, 766, 5, 34, 0, 0, 766, 767, 5, 34, 0, 0, 767, 768, 5, 34, 0, 0, 768, 770, 1, 0, 0, 0, 769, 771, 5, 34, 0, 0, 770, 769, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 773, 1, 0, 0, 0, 772, 774, 5, 34, 0, 0, 773, 772, 1, 0, 0, 0, 773, 774, 1, 0, 0, 0, 774, 776, 1, 0, 0, 0, 775, 746, 1, 0, 0, 0, 775, 755, 1, 0, 0, 0, 776, 95, 1, 0, 0, 0, 777, 779, 3, 74, 29, 0, 778, 777, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 778, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 97, 1, 0, 0, 0, 782, 784, 3, 74, 29, 0, 783, 782, 1, 0, 0, 0, 784, 785, 1, 0, 0, 0, 785, 783, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 787, 1, 0, 0, 0, 787, 791, 3, 116, 50, 0, 788, 790, 3, 74, 29, 0, 789, 788, 1, 0, 0, 0, 790, 793, 1, 0, 0, 0, 791, 789, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 825, 1, 0, 0, 0, 793, 791, 1, 0, 0, 0, 794, 796, 3, 116, 50, 0, 795, 797, 3, 74, 29, 0, 796, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 796, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 825, 1, 0, 0, 0, 800, 802, 3, 74, 29, 0, 801, 800, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 801, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 812, 1, 0, 0, 0, 805, 809, 3, 116, 50, 0, 806, 808, 3, 74, 29, 0, 807, 806, 1, 0, 0, 0, 808, 811, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 813, 1, 0, 0, 0, 811, 809, 1, 0, 0, 0, 812, 805, 1, 0, 0, 0, 812, 813, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 815, 3, 82, 33, 0, 815, 825, 1, 0, 0, 0, 816, 818, 3, 116, 50, 0, 817, 819, 3, 74, 29, 0, 818, 817, 1, 0, 0, 0, 819, 820, 1, 0, 0, 0, 820, 818, 1, 0, 0, 0, 820, 821, 1, 0, 0, 0, 821, 822, 1, 0, 0, 0, 822, 823, 3, 82, 33, 0, 823, 825, 1, 0, 0, 0, 824, 783, 1, 0, 0, 0, 824, 794, 1, 0, 0, 0, 824, 801, 1, 0, 0, 0, 824, 816, 1, 0, 0, 0, 825, 99, 1, 0, 0, 0, 826, 827, 7, 31, 0, 0, 827, 828, 7, 32, 0, 0, 828, 101, 1, 0, 0, 0, 829, 830, 7, 12, 0, 0, 830, 831, 7, 9, 0, 0, 831, 832, 7, 0, 0, 0, 832, 103, 1, 0, 0, 0, 833, 834, 7, 12, 0, 0, 834, 835, 7, 2, 0, 0, 835, 836, 7, 4, 0, 0, 836, 105, 1, 0, 0, 0, 837, 838, 5, 61, 0, 0, 838, 107, 1, 0, 0, 0, 839, 840, 5, 58, 0, 0, 840, 841, 5, 58, 0, 0, 841, 109, 1, 0, 0, 0, 842, 843, 5, 58, 0, 0, 843, 111, 1, 0, 0, 0, 844, 845, 5, 44, 0, 0, 845, 113, 1, 0, 0, 0, 846, 847, 7, 0, 0, 0, 847, 848, 7, 3, 0, 0, 848, 849, 7, 2, 0, 0, 849, 850, 7, 4, 0, 0, 850, 115, 1, 0, 0, 0, 851, 852, 5, 46, 0, 0, 852, 117, 1, 0, 0, 0, 853, 854, 7, 15, 0, 0, 854, 855, 7, 12, 0, 0, 855, 856, 7, 13, 0, 0, 856, 857, 7, 2, 0, 0, 857, 858, 7, 3, 0, 0, 858, 119, 1, 0, 0, 0, 859, 860, 7, 15, 0, 0, 860, 861, 7, 1, 0, 0, 861, 862, 7, 6, 0, 0, 862, 863, 7, 2, 0, 0, 863, 864, 7, 5, 0, 0, 864, 121, 1, 0, 0, 0, 865, 866, 7, 1, 0, 0, 866, 867, 7, 9, 0, 0, 867, 123, 1, 0, 0, 0, 868, 869, 7, 1, 0, 0, 869, 870, 7, 2, 0, 0, 870, 125, 1, 0, 0, 0, 871, 872, 7, 13, 0, 0, 872, 873, 7, 12, 0, 0, 873, 874, 7, 2, 0, 0, 874, 875, 7, 5, 0, 0, 875, 127, 1, 0, 0, 0, 876, 877, 7, 13, 0, 0, 877, 878, 7, 1, 0, 0, 878, 879, 7, 18, 0, 0, 879, 880, 7, 3, 0, 0, 880, 129, 1, 0, 0, 0, 881, 882, 5, 40, 0, 0, 882, 131, 1, 0, 0, 0, 883, 884, 7, 9, 0, 0, 884, 885, 7, 7, 0, 0, 885, 886, 7, 5, 0, 0, 886, 133, 1, 0, 0, 0, 887, 888, 7, 9, 0, 0, 888, 889, 7, 20, 0, 0, 889, 890, 7, 13, 0, 0, 890, 891, 7, 13, 0, 0, 891, 135, 1, 0, 0, 0, 892, 893, 7, 9, 0, 0, 893, 894, 7, 20, 0, 0, 894, 895, 7, 13, 0, 0, 895, 896, 7, 13, 0, 0, 896, 897, 7, 2, 0, 0, 897, 137, 1, 0, 0, 0, 898, 899, 7, 7, 0, 0, 899, 900, 7, 6, 0, 0, 900, 139, 1, 0, 0, 0, 901, 902, 5, 63, 0, 0, 902, 141, 1, 0, 0, 0, 903, 904, 7, 6, 0, 0, 904, 905, 7, 13, 0, 0, 905, 906, 7, 1, 0, 
0, 906, 907, 7, 18, 0, 0, 907, 908, 7, 3, 0, 0, 908, 143, 1, 0, 0, 0, 909, 910, 5, 41, 0, 0, 910, 145, 1, 0, 0, 0, 911, 912, 7, 5, 0, 0, 912, 913, 7, 6, 0, 0, 913, 914, 7, 20, 0, 0, 914, 915, 7, 3, 0, 0, 915, 147, 1, 0, 0, 0, 916, 917, 5, 61, 0, 0, 917, 918, 5, 61, 0, 0, 918, 149, 1, 0, 0, 0, 919, 920, 5, 61, 0, 0, 920, 921, 5, 126, 0, 0, 921, 151, 1, 0, 0, 0, 922, 923, 5, 33, 0, 0, 923, 924, 5, 61, 0, 0, 924, 153, 1, 0, 0, 0, 925, 926, 5, 60, 0, 0, 926, 155, 1, 0, 0, 0, 927, 928, 5, 60, 0, 0, 928, 929, 5, 61, 0, 0, 929, 157, 1, 0, 0, 0, 930, 931, 5, 62, 0, 0, 931, 159, 1, 0, 0, 0, 932, 933, 5, 62, 0, 0, 933, 934, 5, 61, 0, 0, 934, 161, 1, 0, 0, 0, 935, 936, 5, 43, 0, 0, 936, 163, 1, 0, 0, 0, 937, 938, 5, 45, 0, 0, 938, 165, 1, 0, 0, 0, 939, 940, 5, 42, 0, 0, 940, 167, 1, 0, 0, 0, 941, 942, 5, 47, 0, 0, 942, 169, 1, 0, 0, 0, 943, 944, 5, 37, 0, 0, 944, 171, 1, 0, 0, 0, 945, 946, 4, 78, 8, 0, 946, 947, 5, 123, 0, 0, 947, 173, 1, 0, 0, 0, 948, 949, 4, 79, 9, 0, 949, 950, 5, 125, 0, 0, 950, 175, 1, 0, 0, 0, 951, 952, 3, 46, 15, 0, 952, 953, 1, 0, 0, 0, 953, 954, 6, 80, 13, 0, 954, 177, 1, 0, 0, 0, 955, 958, 3, 140, 62, 0, 956, 959, 3, 76, 30, 0, 957, 959, 3, 90, 37, 0, 958, 956, 1, 0, 0, 0, 958, 957, 1, 0, 0, 0, 959, 963, 1, 0, 0, 0, 960, 962, 3, 92, 38, 0, 961, 960, 1, 0, 0, 0, 962, 965, 1, 0, 0, 0, 963, 961, 1, 0, 0, 0, 963, 964, 1, 0, 0, 0, 964, 973, 1, 0, 0, 0, 965, 963, 1, 0, 0, 0, 966, 968, 3, 140, 62, 0, 967, 969, 3, 74, 29, 0, 968, 967, 1, 0, 0, 0, 969, 970, 1, 0, 0, 0, 970, 968, 1, 0, 0, 0, 970, 971, 1, 0, 0, 0, 971, 973, 1, 0, 0, 0, 972, 955, 1, 0, 0, 0, 972, 966, 1, 0, 0, 0, 973, 179, 1, 0, 0, 0, 974, 975, 5, 91, 0, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 82, 0, 0, 977, 978, 6, 82, 0, 0, 978, 181, 1, 0, 0, 0, 979, 980, 5, 93, 0, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 83, 12, 0, 982, 983, 6, 83, 12, 0, 983, 183, 1, 0, 0, 0, 984, 988, 3, 76, 30, 0, 985, 987, 3, 92, 38, 0, 986, 985, 1, 0, 0, 0, 987, 990, 1, 0, 0, 0, 988, 986, 1, 0, 0, 0, 988, 989, 1, 0, 0, 0, 989, 1001, 1, 0, 0, 0, 990, 988, 1, 0, 0, 0, 991, 994, 3, 90, 37, 0, 992, 994, 3, 84, 34, 0, 993, 991, 1, 0, 0, 0, 993, 992, 1, 0, 0, 0, 994, 996, 1, 0, 0, 0, 995, 997, 3, 92, 38, 0, 996, 995, 1, 0, 0, 0, 997, 998, 1, 0, 0, 0, 998, 996, 1, 0, 0, 0, 998, 999, 1, 0, 0, 0, 999, 1001, 1, 0, 0, 0, 1000, 984, 1, 0, 0, 0, 1000, 993, 1, 0, 0, 0, 1001, 185, 1, 0, 0, 0, 1002, 1004, 3, 86, 35, 0, 1003, 1005, 3, 88, 36, 0, 1004, 1003, 1, 0, 0, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1004, 1, 0, 0, 0, 1006, 1007, 1, 0, 0, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 3, 86, 35, 0, 1009, 187, 1, 0, 0, 0, 1010, 1011, 3, 186, 85, 0, 1011, 189, 1, 0, 0, 0, 1012, 1013, 3, 66, 25, 0, 1013, 1014, 1, 0, 0, 0, 1014, 1015, 6, 87, 11, 0, 1015, 191, 1, 0, 0, 0, 1016, 1017, 3, 68, 26, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1019, 6, 88, 11, 0, 1019, 193, 1, 0, 0, 0, 1020, 1021, 3, 70, 27, 0, 1021, 1022, 1, 0, 0, 0, 1022, 1023, 6, 89, 11, 0, 1023, 195, 1, 0, 0, 0, 1024, 1025, 3, 180, 82, 0, 1025, 1026, 1, 0, 0, 0, 1026, 1027, 6, 90, 14, 0, 1027, 1028, 6, 90, 15, 0, 1028, 197, 1, 0, 0, 0, 1029, 1030, 3, 72, 28, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 91, 16, 0, 1032, 1033, 6, 91, 12, 0, 1033, 199, 1, 0, 0, 0, 1034, 1035, 3, 70, 27, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 92, 11, 0, 1037, 201, 1, 0, 0, 0, 1038, 1039, 3, 66, 25, 0, 1039, 1040, 1, 0, 0, 0, 1040, 1041, 6, 93, 11, 0, 1041, 203, 1, 0, 0, 0, 1042, 1043, 3, 68, 26, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 94, 11, 0, 1045, 205, 1, 0, 0, 0, 1046, 1047, 3, 72, 28, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 95, 16, 0, 
1049, 1050, 6, 95, 12, 0, 1050, 207, 1, 0, 0, 0, 1051, 1052, 3, 180, 82, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 96, 14, 0, 1054, 209, 1, 0, 0, 0, 1055, 1056, 3, 182, 83, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 97, 17, 0, 1058, 211, 1, 0, 0, 0, 1059, 1060, 3, 110, 47, 0, 1060, 1061, 1, 0, 0, 0, 1061, 1062, 6, 98, 18, 0, 1062, 213, 1, 0, 0, 0, 1063, 1064, 3, 112, 48, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 6, 99, 19, 0, 1066, 215, 1, 0, 0, 0, 1067, 1068, 3, 106, 45, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1070, 6, 100, 20, 0, 1070, 217, 1, 0, 0, 0, 1071, 1072, 7, 16, 0, 0, 1072, 1073, 7, 3, 0, 0, 1073, 1074, 7, 5, 0, 0, 1074, 1075, 7, 12, 0, 0, 1075, 1076, 7, 0, 0, 0, 1076, 1077, 7, 12, 0, 0, 1077, 1078, 7, 5, 0, 0, 1078, 1079, 7, 12, 0, 0, 1079, 219, 1, 0, 0, 0, 1080, 1084, 8, 33, 0, 0, 1081, 1082, 5, 47, 0, 0, 1082, 1084, 8, 34, 0, 0, 1083, 1080, 1, 0, 0, 0, 1083, 1081, 1, 0, 0, 0, 1084, 221, 1, 0, 0, 0, 1085, 1087, 3, 220, 102, 0, 1086, 1085, 1, 0, 0, 0, 1087, 1088, 1, 0, 0, 0, 1088, 1086, 1, 0, 0, 0, 1088, 1089, 1, 0, 0, 0, 1089, 223, 1, 0, 0, 0, 1090, 1091, 3, 222, 103, 0, 1091, 1092, 1, 0, 0, 0, 1092, 1093, 6, 104, 21, 0, 1093, 225, 1, 0, 0, 0, 1094, 1095, 3, 94, 39, 0, 1095, 1096, 1, 0, 0, 0, 1096, 1097, 6, 105, 22, 0, 1097, 227, 1, 0, 0, 0, 1098, 1099, 3, 66, 25, 0, 1099, 1100, 1, 0, 0, 0, 1100, 1101, 6, 106, 11, 0, 1101, 229, 1, 0, 0, 0, 1102, 1103, 3, 68, 26, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 107, 11, 0, 1105, 231, 1, 0, 0, 0, 1106, 1107, 3, 70, 27, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 108, 11, 0, 1109, 233, 1, 0, 0, 0, 1110, 1111, 3, 72, 28, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 109, 16, 0, 1113, 1114, 6, 109, 12, 0, 1114, 235, 1, 0, 0, 0, 1115, 1116, 3, 116, 50, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1118, 6, 110, 23, 0, 1118, 237, 1, 0, 0, 0, 1119, 1120, 3, 112, 48, 0, 1120, 1121, 1, 0, 0, 0, 1121, 1122, 6, 111, 19, 0, 1122, 239, 1, 0, 0, 0, 1123, 1124, 4, 112, 10, 0, 1124, 1125, 3, 140, 62, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1127, 6, 112, 24, 0, 1127, 241, 1, 0, 0, 0, 1128, 1129, 4, 113, 11, 0, 1129, 1130, 3, 178, 81, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1132, 6, 113, 25, 0, 1132, 243, 1, 0, 0, 0, 1133, 1138, 3, 76, 30, 0, 1134, 1138, 3, 74, 29, 0, 1135, 1138, 3, 90, 37, 0, 1136, 1138, 3, 166, 75, 0, 1137, 1133, 1, 0, 0, 0, 1137, 1134, 1, 0, 0, 0, 1137, 1135, 1, 0, 0, 0, 1137, 1136, 1, 0, 0, 0, 1138, 245, 1, 0, 0, 0, 1139, 1142, 3, 76, 30, 0, 1140, 1142, 3, 166, 75, 0, 1141, 1139, 1, 0, 0, 0, 1141, 1140, 1, 0, 0, 0, 1142, 1146, 1, 0, 0, 0, 1143, 1145, 3, 244, 114, 0, 1144, 1143, 1, 0, 0, 0, 1145, 1148, 1, 0, 0, 0, 1146, 1144, 1, 0, 0, 0, 1146, 1147, 1, 0, 0, 0, 1147, 1159, 1, 0, 0, 0, 1148, 1146, 1, 0, 0, 0, 1149, 1152, 3, 90, 37, 0, 1150, 1152, 3, 84, 34, 0, 1151, 1149, 1, 0, 0, 0, 1151, 1150, 1, 0, 0, 0, 1152, 1154, 1, 0, 0, 0, 1153, 1155, 3, 244, 114, 0, 1154, 1153, 1, 0, 0, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1154, 1, 0, 0, 0, 1156, 1157, 1, 0, 0, 0, 1157, 1159, 1, 0, 0, 0, 1158, 1141, 1, 0, 0, 0, 1158, 1151, 1, 0, 0, 0, 1159, 247, 1, 0, 0, 0, 1160, 1163, 3, 246, 115, 0, 1161, 1163, 3, 186, 85, 0, 1162, 1160, 1, 0, 0, 0, 1162, 1161, 1, 0, 0, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1162, 1, 0, 0, 0, 1164, 1165, 1, 0, 0, 0, 1165, 249, 1, 0, 0, 0, 1166, 1167, 3, 66, 25, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1169, 6, 117, 11, 0, 1169, 251, 1, 0, 0, 0, 1170, 1171, 3, 68, 26, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1173, 6, 118, 11, 0, 1173, 253, 1, 0, 0, 0, 1174, 1175, 3, 70, 27, 0, 1175, 1176, 1, 0, 0, 0, 1176, 1177, 6, 119, 11, 0, 1177, 255, 1, 0, 0, 0, 1178, 1179, 3, 72, 28, 0, 1179, 1180, 1, 0, 
0, 0, 1180, 1181, 6, 120, 16, 0, 1181, 1182, 6, 120, 12, 0, 1182, 257, 1, 0, 0, 0, 1183, 1184, 3, 106, 45, 0, 1184, 1185, 1, 0, 0, 0, 1185, 1186, 6, 121, 20, 0, 1186, 259, 1, 0, 0, 0, 1187, 1188, 3, 112, 48, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1190, 6, 122, 19, 0, 1190, 261, 1, 0, 0, 0, 1191, 1192, 3, 116, 50, 0, 1192, 1193, 1, 0, 0, 0, 1193, 1194, 6, 123, 23, 0, 1194, 263, 1, 0, 0, 0, 1195, 1196, 4, 124, 12, 0, 1196, 1197, 3, 140, 62, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 124, 24, 0, 1199, 265, 1, 0, 0, 0, 1200, 1201, 4, 125, 13, 0, 1201, 1202, 3, 178, 81, 0, 1202, 1203, 1, 0, 0, 0, 1203, 1204, 6, 125, 25, 0, 1204, 267, 1, 0, 0, 0, 1205, 1206, 7, 12, 0, 0, 1206, 1207, 7, 2, 0, 0, 1207, 269, 1, 0, 0, 0, 1208, 1209, 3, 248, 116, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 6, 127, 26, 0, 1211, 271, 1, 0, 0, 0, 1212, 1213, 3, 66, 25, 0, 1213, 1214, 1, 0, 0, 0, 1214, 1215, 6, 128, 11, 0, 1215, 273, 1, 0, 0, 0, 1216, 1217, 3, 68, 26, 0, 1217, 1218, 1, 0, 0, 0, 1218, 1219, 6, 129, 11, 0, 1219, 275, 1, 0, 0, 0, 1220, 1221, 3, 70, 27, 0, 1221, 1222, 1, 0, 0, 0, 1222, 1223, 6, 130, 11, 0, 1223, 277, 1, 0, 0, 0, 1224, 1225, 3, 72, 28, 0, 1225, 1226, 1, 0, 0, 0, 1226, 1227, 6, 131, 16, 0, 1227, 1228, 6, 131, 12, 0, 1228, 279, 1, 0, 0, 0, 1229, 1230, 3, 180, 82, 0, 1230, 1231, 1, 0, 0, 0, 1231, 1232, 6, 132, 14, 0, 1232, 1233, 6, 132, 27, 0, 1233, 281, 1, 0, 0, 0, 1234, 1235, 7, 7, 0, 0, 1235, 1236, 7, 9, 0, 0, 1236, 1237, 1, 0, 0, 0, 1237, 1238, 6, 133, 28, 0, 1238, 283, 1, 0, 0, 0, 1239, 1240, 7, 19, 0, 0, 1240, 1241, 7, 1, 0, 0, 1241, 1242, 7, 5, 0, 0, 1242, 1243, 7, 10, 0, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1245, 6, 134, 28, 0, 1245, 285, 1, 0, 0, 0, 1246, 1247, 8, 35, 0, 0, 1247, 287, 1, 0, 0, 0, 1248, 1250, 3, 286, 135, 0, 1249, 1248, 1, 0, 0, 0, 1250, 1251, 1, 0, 0, 0, 1251, 1249, 1, 0, 0, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 1, 0, 0, 0, 1253, 1254, 3, 110, 47, 0, 1254, 1256, 1, 0, 0, 0, 1255, 1249, 1, 0, 0, 0, 1255, 1256, 1, 0, 0, 0, 1256, 1258, 1, 0, 0, 0, 1257, 1259, 3, 286, 135, 0, 1258, 1257, 1, 0, 0, 0, 1259, 1260, 1, 0, 0, 0, 1260, 1258, 1, 0, 0, 0, 1260, 1261, 1, 0, 0, 0, 1261, 289, 1, 0, 0, 0, 1262, 1263, 3, 288, 136, 0, 1263, 1264, 1, 0, 0, 0, 1264, 1265, 6, 137, 29, 0, 1265, 291, 1, 0, 0, 0, 1266, 1267, 3, 66, 25, 0, 1267, 1268, 1, 0, 0, 0, 1268, 1269, 6, 138, 11, 0, 1269, 293, 1, 0, 0, 0, 1270, 1271, 3, 68, 26, 0, 1271, 1272, 1, 0, 0, 0, 1272, 1273, 6, 139, 11, 0, 1273, 295, 1, 0, 0, 0, 1274, 1275, 3, 70, 27, 0, 1275, 1276, 1, 0, 0, 0, 1276, 1277, 6, 140, 11, 0, 1277, 297, 1, 0, 0, 0, 1278, 1279, 3, 72, 28, 0, 1279, 1280, 1, 0, 0, 0, 1280, 1281, 6, 141, 16, 0, 1281, 1282, 6, 141, 12, 0, 1282, 1283, 6, 141, 12, 0, 1283, 299, 1, 0, 0, 0, 1284, 1285, 3, 106, 45, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 142, 20, 0, 1287, 301, 1, 0, 0, 0, 1288, 1289, 3, 112, 48, 0, 1289, 1290, 1, 0, 0, 0, 1290, 1291, 6, 143, 19, 0, 1291, 303, 1, 0, 0, 0, 1292, 1293, 3, 116, 50, 0, 1293, 1294, 1, 0, 0, 0, 1294, 1295, 6, 144, 23, 0, 1295, 305, 1, 0, 0, 0, 1296, 1297, 3, 284, 134, 0, 1297, 1298, 1, 0, 0, 0, 1298, 1299, 6, 145, 30, 0, 1299, 307, 1, 0, 0, 0, 1300, 1301, 3, 248, 116, 0, 1301, 1302, 1, 0, 0, 0, 1302, 1303, 6, 146, 26, 0, 1303, 309, 1, 0, 0, 0, 1304, 1305, 3, 188, 86, 0, 1305, 1306, 1, 0, 0, 0, 1306, 1307, 6, 147, 31, 0, 1307, 311, 1, 0, 0, 0, 1308, 1309, 4, 148, 14, 0, 1309, 1310, 3, 140, 62, 0, 1310, 1311, 1, 0, 0, 0, 1311, 1312, 6, 148, 24, 0, 1312, 313, 1, 0, 0, 0, 1313, 1314, 4, 149, 15, 0, 1314, 1315, 3, 178, 81, 0, 1315, 1316, 1, 0, 0, 0, 1316, 1317, 6, 149, 25, 0, 1317, 315, 1, 0, 0, 0, 
1318, 1319, 3, 66, 25, 0, 1319, 1320, 1, 0, 0, 0, 1320, 1321, 6, 150, 11, 0, 1321, 317, 1, 0, 0, 0, 1322, 1323, 3, 68, 26, 0, 1323, 1324, 1, 0, 0, 0, 1324, 1325, 6, 151, 11, 0, 1325, 319, 1, 0, 0, 0, 1326, 1327, 3, 70, 27, 0, 1327, 1328, 1, 0, 0, 0, 1328, 1329, 6, 152, 11, 0, 1329, 321, 1, 0, 0, 0, 1330, 1331, 3, 72, 28, 0, 1331, 1332, 1, 0, 0, 0, 1332, 1333, 6, 153, 16, 0, 1333, 1334, 6, 153, 12, 0, 1334, 323, 1, 0, 0, 0, 1335, 1336, 3, 116, 50, 0, 1336, 1337, 1, 0, 0, 0, 1337, 1338, 6, 154, 23, 0, 1338, 325, 1, 0, 0, 0, 1339, 1340, 4, 155, 16, 0, 1340, 1341, 3, 140, 62, 0, 1341, 1342, 1, 0, 0, 0, 1342, 1343, 6, 155, 24, 0, 1343, 327, 1, 0, 0, 0, 1344, 1345, 4, 156, 17, 0, 1345, 1346, 3, 178, 81, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 156, 25, 0, 1348, 329, 1, 0, 0, 0, 1349, 1350, 3, 188, 86, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 157, 31, 0, 1352, 331, 1, 0, 0, 0, 1353, 1354, 3, 184, 84, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 158, 32, 0, 1356, 333, 1, 0, 0, 0, 1357, 1358, 3, 66, 25, 0, 1358, 1359, 1, 0, 0, 0, 1359, 1360, 6, 159, 11, 0, 1360, 335, 1, 0, 0, 0, 1361, 1362, 3, 68, 26, 0, 1362, 1363, 1, 0, 0, 0, 1363, 1364, 6, 160, 11, 0, 1364, 337, 1, 0, 0, 0, 1365, 1366, 3, 70, 27, 0, 1366, 1367, 1, 0, 0, 0, 1367, 1368, 6, 161, 11, 0, 1368, 339, 1, 0, 0, 0, 1369, 1370, 3, 72, 28, 0, 1370, 1371, 1, 0, 0, 0, 1371, 1372, 6, 162, 16, 0, 1372, 1373, 6, 162, 12, 0, 1373, 341, 1, 0, 0, 0, 1374, 1375, 7, 1, 0, 0, 1375, 1376, 7, 9, 0, 0, 1376, 1377, 7, 15, 0, 0, 1377, 1378, 7, 7, 0, 0, 1378, 343, 1, 0, 0, 0, 1379, 1380, 3, 66, 25, 0, 1380, 1381, 1, 0, 0, 0, 1381, 1382, 6, 164, 11, 0, 1382, 345, 1, 0, 0, 0, 1383, 1384, 3, 68, 26, 0, 1384, 1385, 1, 0, 0, 0, 1385, 1386, 6, 165, 11, 0, 1386, 347, 1, 0, 0, 0, 1387, 1388, 3, 70, 27, 0, 1388, 1389, 1, 0, 0, 0, 1389, 1390, 6, 166, 11, 0, 1390, 349, 1, 0, 0, 0, 1391, 1392, 3, 182, 83, 0, 1392, 1393, 1, 0, 0, 0, 1393, 1394, 6, 167, 17, 0, 1394, 1395, 6, 167, 12, 0, 1395, 351, 1, 0, 0, 0, 1396, 1397, 3, 110, 47, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1399, 6, 168, 18, 0, 1399, 353, 1, 0, 0, 0, 1400, 1406, 3, 84, 34, 0, 1401, 1406, 3, 74, 29, 0, 1402, 1406, 3, 116, 50, 0, 1403, 1406, 3, 76, 30, 0, 1404, 1406, 3, 90, 37, 0, 1405, 1400, 1, 0, 0, 0, 1405, 1401, 1, 0, 0, 0, 1405, 1402, 1, 0, 0, 0, 1405, 1403, 1, 0, 0, 0, 1405, 1404, 1, 0, 0, 0, 1406, 1407, 1, 0, 0, 0, 1407, 1405, 1, 0, 0, 0, 1407, 1408, 1, 0, 0, 0, 1408, 355, 1, 0, 0, 0, 1409, 1410, 3, 66, 25, 0, 1410, 1411, 1, 0, 0, 0, 1411, 1412, 6, 170, 11, 0, 1412, 357, 1, 0, 0, 0, 1413, 1414, 3, 68, 26, 0, 1414, 1415, 1, 0, 0, 0, 1415, 1416, 6, 171, 11, 0, 1416, 359, 1, 0, 0, 0, 1417, 1418, 3, 70, 27, 0, 1418, 1419, 1, 0, 0, 0, 1419, 1420, 6, 172, 11, 0, 1420, 361, 1, 0, 0, 0, 1421, 1422, 3, 72, 28, 0, 1422, 1423, 1, 0, 0, 0, 1423, 1424, 6, 173, 16, 0, 1424, 1425, 6, 173, 12, 0, 1425, 363, 1, 0, 0, 0, 1426, 1427, 3, 110, 47, 0, 1427, 1428, 1, 0, 0, 0, 1428, 1429, 6, 174, 18, 0, 1429, 365, 1, 0, 0, 0, 1430, 1431, 3, 112, 48, 0, 1431, 1432, 1, 0, 0, 0, 1432, 1433, 6, 175, 19, 0, 1433, 367, 1, 0, 0, 0, 1434, 1435, 3, 116, 50, 0, 1435, 1436, 1, 0, 0, 0, 1436, 1437, 6, 176, 23, 0, 1437, 369, 1, 0, 0, 0, 1438, 1439, 3, 282, 133, 0, 1439, 1440, 1, 0, 0, 0, 1440, 1441, 6, 177, 33, 0, 1441, 1442, 6, 177, 34, 0, 1442, 371, 1, 0, 0, 0, 1443, 1444, 3, 222, 103, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 178, 21, 0, 1446, 373, 1, 0, 0, 0, 1447, 1448, 3, 94, 39, 0, 1448, 1449, 1, 0, 0, 0, 1449, 1450, 6, 179, 22, 0, 1450, 375, 1, 0, 0, 0, 1451, 1452, 3, 66, 25, 0, 1452, 1453, 1, 0, 0, 0, 1453, 1454, 6, 180, 11, 0, 1454, 
377, 1, 0, 0, 0, 1455, 1456, 3, 68, 26, 0, 1456, 1457, 1, 0, 0, 0, 1457, 1458, 6, 181, 11, 0, 1458, 379, 1, 0, 0, 0, 1459, 1460, 3, 70, 27, 0, 1460, 1461, 1, 0, 0, 0, 1461, 1462, 6, 182, 11, 0, 1462, 381, 1, 0, 0, 0, 1463, 1464, 3, 72, 28, 0, 1464, 1465, 1, 0, 0, 0, 1465, 1466, 6, 183, 16, 0, 1466, 1467, 6, 183, 12, 0, 1467, 1468, 6, 183, 12, 0, 1468, 383, 1, 0, 0, 0, 1469, 1470, 3, 112, 48, 0, 1470, 1471, 1, 0, 0, 0, 1471, 1472, 6, 184, 19, 0, 1472, 385, 1, 0, 0, 0, 1473, 1474, 3, 116, 50, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 185, 23, 0, 1476, 387, 1, 0, 0, 0, 1477, 1478, 3, 248, 116, 0, 1478, 1479, 1, 0, 0, 0, 1479, 1480, 6, 186, 26, 0, 1480, 389, 1, 0, 0, 0, 1481, 1482, 3, 66, 25, 0, 1482, 1483, 1, 0, 0, 0, 1483, 1484, 6, 187, 11, 0, 1484, 391, 1, 0, 0, 0, 1485, 1486, 3, 68, 26, 0, 1486, 1487, 1, 0, 0, 0, 1487, 1488, 6, 188, 11, 0, 1488, 393, 1, 0, 0, 0, 1489, 1490, 3, 70, 27, 0, 1490, 1491, 1, 0, 0, 0, 1491, 1492, 6, 189, 11, 0, 1492, 395, 1, 0, 0, 0, 1493, 1494, 3, 72, 28, 0, 1494, 1495, 1, 0, 0, 0, 1495, 1496, 6, 190, 16, 0, 1496, 1497, 6, 190, 12, 0, 1497, 397, 1, 0, 0, 0, 1498, 1499, 3, 54, 19, 0, 1499, 1500, 1, 0, 0, 0, 1500, 1501, 6, 191, 35, 0, 1501, 399, 1, 0, 0, 0, 1502, 1503, 3, 268, 126, 0, 1503, 1504, 1, 0, 0, 0, 1504, 1505, 6, 192, 36, 0, 1505, 401, 1, 0, 0, 0, 1506, 1507, 3, 282, 133, 0, 1507, 1508, 1, 0, 0, 0, 1508, 1509, 6, 193, 33, 0, 1509, 1510, 6, 193, 12, 0, 1510, 1511, 6, 193, 0, 0, 1511, 403, 1, 0, 0, 0, 1512, 1513, 7, 20, 0, 0, 1513, 1514, 7, 2, 0, 0, 1514, 1515, 7, 1, 0, 0, 1515, 1516, 7, 9, 0, 0, 1516, 1517, 7, 17, 0, 0, 1517, 1518, 1, 0, 0, 0, 1518, 1519, 6, 194, 12, 0, 1519, 1520, 6, 194, 0, 0, 1520, 405, 1, 0, 0, 0, 1521, 1522, 3, 222, 103, 0, 1522, 1523, 1, 0, 0, 0, 1523, 1524, 6, 195, 21, 0, 1524, 407, 1, 0, 0, 0, 1525, 1526, 3, 94, 39, 0, 1526, 1527, 1, 0, 0, 0, 1527, 1528, 6, 196, 22, 0, 1528, 409, 1, 0, 0, 0, 1529, 1530, 3, 110, 47, 0, 1530, 1531, 1, 0, 0, 0, 1531, 1532, 6, 197, 18, 0, 1532, 411, 1, 0, 0, 0, 1533, 1534, 3, 184, 84, 0, 1534, 1535, 1, 0, 0, 0, 1535, 1536, 6, 198, 32, 0, 1536, 413, 1, 0, 0, 0, 1537, 1538, 3, 188, 86, 0, 1538, 1539, 1, 0, 0, 0, 1539, 1540, 6, 199, 31, 0, 1540, 415, 1, 0, 0, 0, 1541, 1542, 3, 66, 25, 0, 1542, 1543, 1, 0, 0, 0, 1543, 1544, 6, 200, 11, 0, 1544, 417, 1, 0, 0, 0, 1545, 1546, 3, 68, 26, 0, 1546, 1547, 1, 0, 0, 0, 1547, 1548, 6, 201, 11, 0, 1548, 419, 1, 0, 0, 0, 1549, 1550, 3, 70, 27, 0, 1550, 1551, 1, 0, 0, 0, 1551, 1552, 6, 202, 11, 0, 1552, 421, 1, 0, 0, 0, 1553, 1554, 3, 72, 28, 0, 1554, 1555, 1, 0, 0, 0, 1555, 1556, 6, 203, 16, 0, 1556, 1557, 6, 203, 12, 0, 1557, 423, 1, 0, 0, 0, 1558, 1559, 3, 222, 103, 0, 1559, 1560, 1, 0, 0, 0, 1560, 1561, 6, 204, 21, 0, 1561, 1562, 6, 204, 12, 0, 1562, 1563, 6, 204, 37, 0, 1563, 425, 1, 0, 0, 0, 1564, 1565, 3, 94, 39, 0, 1565, 1566, 1, 0, 0, 0, 1566, 1567, 6, 205, 22, 0, 1567, 1568, 6, 205, 12, 0, 1568, 1569, 6, 205, 37, 0, 1569, 427, 1, 0, 0, 0, 1570, 1571, 3, 66, 25, 0, 1571, 1572, 1, 0, 0, 0, 1572, 1573, 6, 206, 11, 0, 1573, 429, 1, 0, 0, 0, 1574, 1575, 3, 68, 26, 0, 1575, 1576, 1, 0, 0, 0, 1576, 1577, 6, 207, 11, 0, 1577, 431, 1, 0, 0, 0, 1578, 1579, 3, 70, 27, 0, 1579, 1580, 1, 0, 0, 0, 1580, 1581, 6, 208, 11, 0, 1581, 433, 1, 0, 0, 0, 1582, 1583, 3, 110, 47, 0, 1583, 1584, 1, 0, 0, 0, 1584, 1585, 6, 209, 18, 0, 1585, 1586, 6, 209, 12, 0, 1586, 1587, 6, 209, 9, 0, 1587, 435, 1, 0, 0, 0, 1588, 1589, 3, 112, 48, 0, 1589, 1590, 1, 0, 0, 0, 1590, 1591, 6, 210, 19, 0, 1591, 1592, 6, 210, 12, 0, 1592, 1593, 6, 210, 9, 0, 1593, 437, 1, 0, 0, 0, 1594, 1595, 3, 66, 25, 
0, 1595, 1596, 1, 0, 0, 0, 1596, 1597, 6, 211, 11, 0, 1597, 439, 1, 0, 0, 0, 1598, 1599, 3, 68, 26, 0, 1599, 1600, 1, 0, 0, 0, 1600, 1601, 6, 212, 11, 0, 1601, 441, 1, 0, 0, 0, 1602, 1603, 3, 70, 27, 0, 1603, 1604, 1, 0, 0, 0, 1604, 1605, 6, 213, 11, 0, 1605, 443, 1, 0, 0, 0, 1606, 1607, 3, 188, 86, 0, 1607, 1608, 1, 0, 0, 0, 1608, 1609, 6, 214, 12, 0, 1609, 1610, 6, 214, 0, 0, 1610, 1611, 6, 214, 31, 0, 1611, 445, 1, 0, 0, 0, 1612, 1613, 3, 184, 84, 0, 1613, 1614, 1, 0, 0, 0, 1614, 1615, 6, 215, 12, 0, 1615, 1616, 6, 215, 0, 0, 1616, 1617, 6, 215, 32, 0, 1617, 447, 1, 0, 0, 0, 1618, 1619, 3, 100, 42, 0, 1619, 1620, 1, 0, 0, 0, 1620, 1621, 6, 216, 12, 0, 1621, 1622, 6, 216, 0, 0, 1622, 1623, 6, 216, 38, 0, 1623, 449, 1, 0, 0, 0, 1624, 1625, 3, 72, 28, 0, 1625, 1626, 1, 0, 0, 0, 1626, 1627, 6, 217, 16, 0, 1627, 1628, 6, 217, 12, 0, 1628, 451, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 665, 675, 679, 682, 691, 693, 704, 723, 728, 737, 744, 749, 751, 762, 770, 773, 775, 780, 785, 791, 798, 803, 809, 812, 820, 824, 958, 963, 970, 972, 988, 993, 998, 1000, 1006, 1083, 1088, 1137, 1141, 1146, 1151, 1156, 1158, 1162, 1164, 1251, 1255, 1260, 1405, 1407, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 72, 0, 5, 0, 0, 7, 29, 0, 7, 73, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 83, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 71, 0, 7, 87, 0, 5, 10, 0, 5, 7, 0, 7, 97, 0, 7, 96, 0, 7, 75, 0, 7, 74, 0, 7, 95, 0, 5, 12, 0, 7, 20, 0, 7, 91, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file +[4, 0, 130, 1627, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 
127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 2, 214, 7, 214, 2, 215, 7, 215, 2, 216, 7, 216, 2, 217, 7, 217, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 664, 8, 24, 11, 24, 12, 24, 665, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 674, 8, 25, 10, 25, 12, 25, 677, 9, 25, 1, 25, 3, 25, 680, 8, 25, 1, 25, 3, 25, 683, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 692, 8, 26, 10, 26, 12, 26, 695, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 703, 8, 27, 11, 27, 12, 27, 704, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 724, 8, 33, 1, 33, 4, 33, 727, 8, 33, 11, 33, 12, 33, 728, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 738, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 745, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 750, 8, 39, 10, 39, 
12, 39, 753, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 761, 8, 39, 10, 39, 12, 39, 764, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 771, 8, 39, 1, 39, 3, 39, 774, 8, 39, 3, 39, 776, 8, 39, 1, 40, 4, 40, 779, 8, 40, 11, 40, 12, 40, 780, 1, 41, 4, 41, 784, 8, 41, 11, 41, 12, 41, 785, 1, 41, 1, 41, 5, 41, 790, 8, 41, 10, 41, 12, 41, 793, 9, 41, 1, 41, 1, 41, 4, 41, 797, 8, 41, 11, 41, 12, 41, 798, 1, 41, 4, 41, 802, 8, 41, 11, 41, 12, 41, 803, 1, 41, 1, 41, 5, 41, 808, 8, 41, 10, 41, 12, 41, 811, 9, 41, 3, 41, 813, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 819, 8, 41, 11, 41, 12, 41, 820, 1, 41, 1, 41, 3, 41, 825, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 3, 81, 957, 8, 81, 1, 81, 5, 81, 960, 8, 81, 10, 81, 12, 81, 963, 9, 81, 1, 81, 1, 81, 4, 81, 967, 8, 81, 11, 81, 12, 81, 968, 3, 81, 971, 8, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 5, 84, 985, 8, 84, 10, 84, 12, 84, 988, 9, 84, 1, 84, 1, 84, 3, 84, 992, 8, 84, 1, 84, 4, 84, 995, 8, 84, 11, 84, 12, 84, 996, 3, 84, 999, 8, 84, 1, 85, 1, 85, 4, 85, 1003, 8, 85, 11, 85, 12, 85, 1004, 1, 85, 1, 85, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 3, 102, 1082, 8, 102, 1, 103, 4, 103, 1085, 8, 103, 11, 103, 12, 103, 1086, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 3, 114, 1136, 8, 114, 1, 115, 1, 115, 3, 115, 1140, 8, 115, 1, 115, 5, 115, 1143, 8, 115, 10, 115, 12, 115, 1146, 9, 115, 1, 115, 1, 115, 3, 115, 1150, 8, 115, 1, 115, 4, 115, 1153, 8, 115, 11, 115, 12, 115, 1154, 3, 115, 1157, 8, 115, 1, 116, 1, 116, 4, 116, 1161, 8, 116, 11, 116, 12, 116, 1162, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 
125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 136, 4, 136, 1248, 8, 136, 11, 136, 12, 136, 1249, 1, 136, 1, 136, 3, 136, 1254, 8, 136, 1, 136, 4, 136, 1257, 8, 136, 11, 136, 12, 136, 1258, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 4, 169, 1404, 8, 169, 11, 169, 12, 169, 1405, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 217, 1, 217, 1, 217, 1, 217, 1, 217, 2, 693, 762, 0, 218, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 
32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 69, 174, 70, 176, 0, 178, 71, 180, 72, 182, 73, 184, 74, 186, 0, 188, 75, 190, 76, 192, 77, 194, 78, 196, 0, 198, 0, 200, 79, 202, 80, 204, 81, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 82, 220, 0, 222, 83, 224, 0, 226, 0, 228, 84, 230, 85, 232, 86, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 0, 246, 0, 248, 87, 250, 88, 252, 89, 254, 90, 256, 0, 258, 0, 260, 0, 262, 0, 264, 0, 266, 0, 268, 91, 270, 0, 272, 92, 274, 93, 276, 94, 278, 0, 280, 0, 282, 95, 284, 96, 286, 0, 288, 97, 290, 0, 292, 98, 294, 99, 296, 100, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 0, 314, 0, 316, 101, 318, 102, 320, 103, 322, 0, 324, 0, 326, 0, 328, 0, 330, 0, 332, 0, 334, 104, 336, 105, 338, 106, 340, 0, 342, 107, 344, 108, 346, 109, 348, 110, 350, 0, 352, 0, 354, 111, 356, 112, 358, 113, 360, 114, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 0, 374, 0, 376, 115, 378, 116, 380, 117, 382, 0, 384, 0, 386, 0, 388, 0, 390, 118, 392, 119, 394, 120, 396, 0, 398, 0, 400, 0, 402, 0, 404, 121, 406, 0, 408, 0, 410, 0, 412, 0, 414, 0, 416, 122, 418, 123, 420, 124, 422, 0, 424, 0, 426, 0, 428, 125, 430, 126, 432, 127, 434, 0, 436, 0, 438, 128, 440, 129, 442, 130, 444, 0, 446, 0, 448, 0, 450, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1654, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 
100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 182, 1, 0, 0, 0, 1, 184, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 1, 192, 1, 0, 0, 0, 1, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 2, 202, 1, 0, 0, 0, 2, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 3, 230, 1, 0, 0, 0, 3, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 240, 1, 0, 0, 0, 4, 242, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 4, 252, 1, 0, 0, 0, 4, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 5, 274, 1, 0, 0, 0, 5, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 282, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 6, 294, 1, 0, 0, 0, 6, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 7, 318, 1, 0, 0, 0, 7, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 8, 336, 1, 0, 0, 0, 8, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 9, 346, 1, 0, 0, 0, 9, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 10, 358, 1, 0, 0, 0, 10, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 11, 378, 1, 0, 0, 0, 11, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 12, 392, 1, 0, 0, 0, 12, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 13, 412, 1, 0, 0, 0, 13, 414, 1, 0, 0, 0, 13, 416, 1, 0, 0, 0, 13, 418, 1, 0, 0, 0, 13, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 14, 424, 1, 0, 0, 0, 14, 426, 1, 0, 0, 0, 14, 428, 1, 0, 0, 0, 14, 430, 1, 0, 0, 0, 14, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 15, 442, 1, 0, 0, 0, 15, 444, 1, 0, 0, 0, 15, 446, 1, 0, 0, 0, 15, 448, 1, 0, 0, 0, 15, 450, 1, 0, 0, 0, 16, 452, 1, 0, 0, 0, 18, 462, 1, 0, 0, 0, 20, 469, 1, 0, 0, 0, 22, 478, 1, 0, 0, 0, 
24, 485, 1, 0, 0, 0, 26, 495, 1, 0, 0, 0, 28, 502, 1, 0, 0, 0, 30, 509, 1, 0, 0, 0, 32, 516, 1, 0, 0, 0, 34, 524, 1, 0, 0, 0, 36, 536, 1, 0, 0, 0, 38, 545, 1, 0, 0, 0, 40, 551, 1, 0, 0, 0, 42, 558, 1, 0, 0, 0, 44, 565, 1, 0, 0, 0, 46, 573, 1, 0, 0, 0, 48, 581, 1, 0, 0, 0, 50, 596, 1, 0, 0, 0, 52, 608, 1, 0, 0, 0, 54, 619, 1, 0, 0, 0, 56, 627, 1, 0, 0, 0, 58, 635, 1, 0, 0, 0, 60, 643, 1, 0, 0, 0, 62, 652, 1, 0, 0, 0, 64, 663, 1, 0, 0, 0, 66, 669, 1, 0, 0, 0, 68, 686, 1, 0, 0, 0, 70, 702, 1, 0, 0, 0, 72, 708, 1, 0, 0, 0, 74, 712, 1, 0, 0, 0, 76, 714, 1, 0, 0, 0, 78, 716, 1, 0, 0, 0, 80, 719, 1, 0, 0, 0, 82, 721, 1, 0, 0, 0, 84, 730, 1, 0, 0, 0, 86, 732, 1, 0, 0, 0, 88, 737, 1, 0, 0, 0, 90, 739, 1, 0, 0, 0, 92, 744, 1, 0, 0, 0, 94, 775, 1, 0, 0, 0, 96, 778, 1, 0, 0, 0, 98, 824, 1, 0, 0, 0, 100, 826, 1, 0, 0, 0, 102, 829, 1, 0, 0, 0, 104, 833, 1, 0, 0, 0, 106, 837, 1, 0, 0, 0, 108, 839, 1, 0, 0, 0, 110, 842, 1, 0, 0, 0, 112, 844, 1, 0, 0, 0, 114, 846, 1, 0, 0, 0, 116, 851, 1, 0, 0, 0, 118, 853, 1, 0, 0, 0, 120, 859, 1, 0, 0, 0, 122, 865, 1, 0, 0, 0, 124, 868, 1, 0, 0, 0, 126, 871, 1, 0, 0, 0, 128, 876, 1, 0, 0, 0, 130, 881, 1, 0, 0, 0, 132, 883, 1, 0, 0, 0, 134, 887, 1, 0, 0, 0, 136, 892, 1, 0, 0, 0, 138, 898, 1, 0, 0, 0, 140, 901, 1, 0, 0, 0, 142, 903, 1, 0, 0, 0, 144, 909, 1, 0, 0, 0, 146, 911, 1, 0, 0, 0, 148, 916, 1, 0, 0, 0, 150, 919, 1, 0, 0, 0, 152, 922, 1, 0, 0, 0, 154, 925, 1, 0, 0, 0, 156, 927, 1, 0, 0, 0, 158, 930, 1, 0, 0, 0, 160, 932, 1, 0, 0, 0, 162, 935, 1, 0, 0, 0, 164, 937, 1, 0, 0, 0, 166, 939, 1, 0, 0, 0, 168, 941, 1, 0, 0, 0, 170, 943, 1, 0, 0, 0, 172, 945, 1, 0, 0, 0, 174, 947, 1, 0, 0, 0, 176, 949, 1, 0, 0, 0, 178, 970, 1, 0, 0, 0, 180, 972, 1, 0, 0, 0, 182, 977, 1, 0, 0, 0, 184, 998, 1, 0, 0, 0, 186, 1000, 1, 0, 0, 0, 188, 1008, 1, 0, 0, 0, 190, 1010, 1, 0, 0, 0, 192, 1014, 1, 0, 0, 0, 194, 1018, 1, 0, 0, 0, 196, 1022, 1, 0, 0, 0, 198, 1027, 1, 0, 0, 0, 200, 1032, 1, 0, 0, 0, 202, 1036, 1, 0, 0, 0, 204, 1040, 1, 0, 0, 0, 206, 1044, 1, 0, 0, 0, 208, 1049, 1, 0, 0, 0, 210, 1053, 1, 0, 0, 0, 212, 1057, 1, 0, 0, 0, 214, 1061, 1, 0, 0, 0, 216, 1065, 1, 0, 0, 0, 218, 1069, 1, 0, 0, 0, 220, 1081, 1, 0, 0, 0, 222, 1084, 1, 0, 0, 0, 224, 1088, 1, 0, 0, 0, 226, 1092, 1, 0, 0, 0, 228, 1096, 1, 0, 0, 0, 230, 1100, 1, 0, 0, 0, 232, 1104, 1, 0, 0, 0, 234, 1108, 1, 0, 0, 0, 236, 1113, 1, 0, 0, 0, 238, 1117, 1, 0, 0, 0, 240, 1121, 1, 0, 0, 0, 242, 1126, 1, 0, 0, 0, 244, 1135, 1, 0, 0, 0, 246, 1156, 1, 0, 0, 0, 248, 1160, 1, 0, 0, 0, 250, 1164, 1, 0, 0, 0, 252, 1168, 1, 0, 0, 0, 254, 1172, 1, 0, 0, 0, 256, 1176, 1, 0, 0, 0, 258, 1181, 1, 0, 0, 0, 260, 1185, 1, 0, 0, 0, 262, 1189, 1, 0, 0, 0, 264, 1193, 1, 0, 0, 0, 266, 1198, 1, 0, 0, 0, 268, 1203, 1, 0, 0, 0, 270, 1206, 1, 0, 0, 0, 272, 1210, 1, 0, 0, 0, 274, 1214, 1, 0, 0, 0, 276, 1218, 1, 0, 0, 0, 278, 1222, 1, 0, 0, 0, 280, 1227, 1, 0, 0, 0, 282, 1232, 1, 0, 0, 0, 284, 1237, 1, 0, 0, 0, 286, 1244, 1, 0, 0, 0, 288, 1253, 1, 0, 0, 0, 290, 1260, 1, 0, 0, 0, 292, 1264, 1, 0, 0, 0, 294, 1268, 1, 0, 0, 0, 296, 1272, 1, 0, 0, 0, 298, 1276, 1, 0, 0, 0, 300, 1282, 1, 0, 0, 0, 302, 1286, 1, 0, 0, 0, 304, 1290, 1, 0, 0, 0, 306, 1294, 1, 0, 0, 0, 308, 1298, 1, 0, 0, 0, 310, 1302, 1, 0, 0, 0, 312, 1306, 1, 0, 0, 0, 314, 1311, 1, 0, 0, 0, 316, 1316, 1, 0, 0, 0, 318, 1320, 1, 0, 0, 0, 320, 1324, 1, 0, 0, 0, 322, 1328, 1, 0, 0, 0, 324, 1333, 1, 0, 0, 0, 326, 1337, 1, 0, 0, 0, 328, 1342, 1, 0, 0, 0, 330, 1347, 1, 0, 0, 0, 332, 1351, 1, 0, 0, 0, 334, 1355, 1, 0, 0, 0, 336, 1359, 1, 0, 0, 0, 338, 1363, 1, 0, 0, 0, 340, 1367, 1, 0, 0, 0, 342, 1372, 1, 0, 
0, 0, 344, 1377, 1, 0, 0, 0, 346, 1381, 1, 0, 0, 0, 348, 1385, 1, 0, 0, 0, 350, 1389, 1, 0, 0, 0, 352, 1394, 1, 0, 0, 0, 354, 1403, 1, 0, 0, 0, 356, 1407, 1, 0, 0, 0, 358, 1411, 1, 0, 0, 0, 360, 1415, 1, 0, 0, 0, 362, 1419, 1, 0, 0, 0, 364, 1424, 1, 0, 0, 0, 366, 1428, 1, 0, 0, 0, 368, 1432, 1, 0, 0, 0, 370, 1436, 1, 0, 0, 0, 372, 1441, 1, 0, 0, 0, 374, 1445, 1, 0, 0, 0, 376, 1449, 1, 0, 0, 0, 378, 1453, 1, 0, 0, 0, 380, 1457, 1, 0, 0, 0, 382, 1461, 1, 0, 0, 0, 384, 1467, 1, 0, 0, 0, 386, 1471, 1, 0, 0, 0, 388, 1475, 1, 0, 0, 0, 390, 1479, 1, 0, 0, 0, 392, 1483, 1, 0, 0, 0, 394, 1487, 1, 0, 0, 0, 396, 1491, 1, 0, 0, 0, 398, 1496, 1, 0, 0, 0, 400, 1500, 1, 0, 0, 0, 402, 1504, 1, 0, 0, 0, 404, 1510, 1, 0, 0, 0, 406, 1519, 1, 0, 0, 0, 408, 1523, 1, 0, 0, 0, 410, 1527, 1, 0, 0, 0, 412, 1531, 1, 0, 0, 0, 414, 1535, 1, 0, 0, 0, 416, 1539, 1, 0, 0, 0, 418, 1543, 1, 0, 0, 0, 420, 1547, 1, 0, 0, 0, 422, 1551, 1, 0, 0, 0, 424, 1556, 1, 0, 0, 0, 426, 1562, 1, 0, 0, 0, 428, 1568, 1, 0, 0, 0, 430, 1572, 1, 0, 0, 0, 432, 1576, 1, 0, 0, 0, 434, 1580, 1, 0, 0, 0, 436, 1586, 1, 0, 0, 0, 438, 1592, 1, 0, 0, 0, 440, 1596, 1, 0, 0, 0, 442, 1600, 1, 0, 0, 0, 444, 1604, 1, 0, 0, 0, 446, 1610, 1, 0, 0, 0, 448, 1616, 1, 0, 0, 0, 450, 1622, 1, 0, 0, 0, 452, 453, 7, 0, 0, 0, 453, 454, 7, 1, 0, 0, 454, 455, 7, 2, 0, 0, 455, 456, 7, 2, 0, 0, 456, 457, 7, 3, 0, 0, 457, 458, 7, 4, 0, 0, 458, 459, 7, 5, 0, 0, 459, 460, 1, 0, 0, 0, 460, 461, 6, 0, 0, 0, 461, 17, 1, 0, 0, 0, 462, 463, 7, 0, 0, 0, 463, 464, 7, 6, 0, 0, 464, 465, 7, 7, 0, 0, 465, 466, 7, 8, 0, 0, 466, 467, 1, 0, 0, 0, 467, 468, 6, 1, 1, 0, 468, 19, 1, 0, 0, 0, 469, 470, 7, 3, 0, 0, 470, 471, 7, 9, 0, 0, 471, 472, 7, 6, 0, 0, 472, 473, 7, 1, 0, 0, 473, 474, 7, 4, 0, 0, 474, 475, 7, 10, 0, 0, 475, 476, 1, 0, 0, 0, 476, 477, 6, 2, 2, 0, 477, 21, 1, 0, 0, 0, 478, 479, 7, 3, 0, 0, 479, 480, 7, 11, 0, 0, 480, 481, 7, 12, 0, 0, 481, 482, 7, 13, 0, 0, 482, 483, 1, 0, 0, 0, 483, 484, 6, 3, 0, 0, 484, 23, 1, 0, 0, 0, 485, 486, 7, 3, 0, 0, 486, 487, 7, 14, 0, 0, 487, 488, 7, 8, 0, 0, 488, 489, 7, 13, 0, 0, 489, 490, 7, 12, 0, 0, 490, 491, 7, 1, 0, 0, 491, 492, 7, 9, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 4, 3, 0, 494, 25, 1, 0, 0, 0, 495, 496, 7, 15, 0, 0, 496, 497, 7, 6, 0, 0, 497, 498, 7, 7, 0, 0, 498, 499, 7, 16, 0, 0, 499, 500, 1, 0, 0, 0, 500, 501, 6, 5, 4, 0, 501, 27, 1, 0, 0, 0, 502, 503, 7, 17, 0, 0, 503, 504, 7, 6, 0, 0, 504, 505, 7, 7, 0, 0, 505, 506, 7, 18, 0, 0, 506, 507, 1, 0, 0, 0, 507, 508, 6, 6, 0, 0, 508, 29, 1, 0, 0, 0, 509, 510, 7, 18, 0, 0, 510, 511, 7, 3, 0, 0, 511, 512, 7, 3, 0, 0, 512, 513, 7, 8, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 6, 7, 1, 0, 515, 31, 1, 0, 0, 0, 516, 517, 7, 13, 0, 0, 517, 518, 7, 1, 0, 0, 518, 519, 7, 16, 0, 0, 519, 520, 7, 1, 0, 0, 520, 521, 7, 5, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 8, 0, 0, 523, 33, 1, 0, 0, 0, 524, 525, 7, 16, 0, 0, 525, 526, 7, 11, 0, 0, 526, 527, 5, 95, 0, 0, 527, 528, 7, 3, 0, 0, 528, 529, 7, 14, 0, 0, 529, 530, 7, 8, 0, 0, 530, 531, 7, 12, 0, 0, 531, 532, 7, 9, 0, 0, 532, 533, 7, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 535, 6, 9, 5, 0, 535, 35, 1, 0, 0, 0, 536, 537, 7, 6, 0, 0, 537, 538, 7, 3, 0, 0, 538, 539, 7, 9, 0, 0, 539, 540, 7, 12, 0, 0, 540, 541, 7, 16, 0, 0, 541, 542, 7, 3, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 6, 10, 6, 0, 544, 37, 1, 0, 0, 0, 545, 546, 7, 6, 0, 0, 546, 547, 7, 7, 0, 0, 547, 548, 7, 19, 0, 0, 548, 549, 1, 0, 0, 0, 549, 550, 6, 11, 0, 0, 550, 39, 1, 0, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 7, 10, 0, 0, 553, 554, 7, 7, 0, 0, 554, 555, 7, 19, 0, 0, 555, 556, 1, 0, 0, 0, 556, 
557, 6, 12, 7, 0, 557, 41, 1, 0, 0, 0, 558, 559, 7, 2, 0, 0, 559, 560, 7, 7, 0, 0, 560, 561, 7, 6, 0, 0, 561, 562, 7, 5, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 13, 0, 0, 564, 43, 1, 0, 0, 0, 565, 566, 7, 2, 0, 0, 566, 567, 7, 5, 0, 0, 567, 568, 7, 12, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 2, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 6, 14, 0, 0, 572, 45, 1, 0, 0, 0, 573, 574, 7, 19, 0, 0, 574, 575, 7, 10, 0, 0, 575, 576, 7, 3, 0, 0, 576, 577, 7, 6, 0, 0, 577, 578, 7, 3, 0, 0, 578, 579, 1, 0, 0, 0, 579, 580, 6, 15, 0, 0, 580, 47, 1, 0, 0, 0, 581, 582, 4, 16, 0, 0, 582, 583, 7, 1, 0, 0, 583, 584, 7, 9, 0, 0, 584, 585, 7, 13, 0, 0, 585, 586, 7, 1, 0, 0, 586, 587, 7, 9, 0, 0, 587, 588, 7, 3, 0, 0, 588, 589, 7, 2, 0, 0, 589, 590, 7, 5, 0, 0, 590, 591, 7, 12, 0, 0, 591, 592, 7, 5, 0, 0, 592, 593, 7, 2, 0, 0, 593, 594, 1, 0, 0, 0, 594, 595, 6, 16, 0, 0, 595, 49, 1, 0, 0, 0, 596, 597, 4, 17, 1, 0, 597, 598, 7, 13, 0, 0, 598, 599, 7, 7, 0, 0, 599, 600, 7, 7, 0, 0, 600, 601, 7, 18, 0, 0, 601, 602, 7, 20, 0, 0, 602, 603, 7, 8, 0, 0, 603, 604, 5, 95, 0, 0, 604, 605, 5, 128020, 0, 0, 605, 606, 1, 0, 0, 0, 606, 607, 6, 17, 8, 0, 607, 51, 1, 0, 0, 0, 608, 609, 4, 18, 2, 0, 609, 610, 7, 16, 0, 0, 610, 611, 7, 3, 0, 0, 611, 612, 7, 5, 0, 0, 612, 613, 7, 6, 0, 0, 613, 614, 7, 1, 0, 0, 614, 615, 7, 4, 0, 0, 615, 616, 7, 2, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 6, 18, 9, 0, 618, 53, 1, 0, 0, 0, 619, 620, 4, 19, 3, 0, 620, 621, 7, 21, 0, 0, 621, 622, 7, 7, 0, 0, 622, 623, 7, 1, 0, 0, 623, 624, 7, 9, 0, 0, 624, 625, 1, 0, 0, 0, 625, 626, 6, 19, 10, 0, 626, 55, 1, 0, 0, 0, 627, 628, 4, 20, 4, 0, 628, 629, 7, 15, 0, 0, 629, 630, 7, 20, 0, 0, 630, 631, 7, 13, 0, 0, 631, 632, 7, 13, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 6, 20, 10, 0, 634, 57, 1, 0, 0, 0, 635, 636, 4, 21, 5, 0, 636, 637, 7, 13, 0, 0, 637, 638, 7, 3, 0, 0, 638, 639, 7, 15, 0, 0, 639, 640, 7, 5, 0, 0, 640, 641, 1, 0, 0, 0, 641, 642, 6, 21, 10, 0, 642, 59, 1, 0, 0, 0, 643, 644, 4, 22, 6, 0, 644, 645, 7, 6, 0, 0, 645, 646, 7, 1, 0, 0, 646, 647, 7, 17, 0, 0, 647, 648, 7, 10, 0, 0, 648, 649, 7, 5, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 6, 22, 10, 0, 651, 61, 1, 0, 0, 0, 652, 653, 4, 23, 7, 0, 653, 654, 7, 13, 0, 0, 654, 655, 7, 7, 0, 0, 655, 656, 7, 7, 0, 0, 656, 657, 7, 18, 0, 0, 657, 658, 7, 20, 0, 0, 658, 659, 7, 8, 0, 0, 659, 660, 1, 0, 0, 0, 660, 661, 6, 23, 10, 0, 661, 63, 1, 0, 0, 0, 662, 664, 8, 22, 0, 0, 663, 662, 1, 0, 0, 0, 664, 665, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 667, 1, 0, 0, 0, 667, 668, 6, 24, 0, 0, 668, 65, 1, 0, 0, 0, 669, 670, 5, 47, 0, 0, 670, 671, 5, 47, 0, 0, 671, 675, 1, 0, 0, 0, 672, 674, 8, 23, 0, 0, 673, 672, 1, 0, 0, 0, 674, 677, 1, 0, 0, 0, 675, 673, 1, 0, 0, 0, 675, 676, 1, 0, 0, 0, 676, 679, 1, 0, 0, 0, 677, 675, 1, 0, 0, 0, 678, 680, 5, 13, 0, 0, 679, 678, 1, 0, 0, 0, 679, 680, 1, 0, 0, 0, 680, 682, 1, 0, 0, 0, 681, 683, 5, 10, 0, 0, 682, 681, 1, 0, 0, 0, 682, 683, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 684, 685, 6, 25, 11, 0, 685, 67, 1, 0, 0, 0, 686, 687, 5, 47, 0, 0, 687, 688, 5, 42, 0, 0, 688, 693, 1, 0, 0, 0, 689, 692, 3, 68, 26, 0, 690, 692, 9, 0, 0, 0, 691, 689, 1, 0, 0, 0, 691, 690, 1, 0, 0, 0, 692, 695, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 693, 691, 1, 0, 0, 0, 694, 696, 1, 0, 0, 0, 695, 693, 1, 0, 0, 0, 696, 697, 5, 42, 0, 0, 697, 698, 5, 47, 0, 0, 698, 699, 1, 0, 0, 0, 699, 700, 6, 26, 11, 0, 700, 69, 1, 0, 0, 0, 701, 703, 7, 24, 0, 0, 702, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 702, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 707, 6, 27, 11, 0, 707, 71, 1, 0, 0, 0, 708, 
709, 5, 124, 0, 0, 709, 710, 1, 0, 0, 0, 710, 711, 6, 28, 12, 0, 711, 73, 1, 0, 0, 0, 712, 713, 7, 25, 0, 0, 713, 75, 1, 0, 0, 0, 714, 715, 7, 26, 0, 0, 715, 77, 1, 0, 0, 0, 716, 717, 5, 92, 0, 0, 717, 718, 7, 27, 0, 0, 718, 79, 1, 0, 0, 0, 719, 720, 8, 28, 0, 0, 720, 81, 1, 0, 0, 0, 721, 723, 7, 3, 0, 0, 722, 724, 7, 29, 0, 0, 723, 722, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 726, 1, 0, 0, 0, 725, 727, 3, 74, 29, 0, 726, 725, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 726, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 83, 1, 0, 0, 0, 730, 731, 5, 64, 0, 0, 731, 85, 1, 0, 0, 0, 732, 733, 5, 96, 0, 0, 733, 87, 1, 0, 0, 0, 734, 738, 8, 30, 0, 0, 735, 736, 5, 96, 0, 0, 736, 738, 5, 96, 0, 0, 737, 734, 1, 0, 0, 0, 737, 735, 1, 0, 0, 0, 738, 89, 1, 0, 0, 0, 739, 740, 5, 95, 0, 0, 740, 91, 1, 0, 0, 0, 741, 745, 3, 76, 30, 0, 742, 745, 3, 74, 29, 0, 743, 745, 3, 90, 37, 0, 744, 741, 1, 0, 0, 0, 744, 742, 1, 0, 0, 0, 744, 743, 1, 0, 0, 0, 745, 93, 1, 0, 0, 0, 746, 751, 5, 34, 0, 0, 747, 750, 3, 78, 31, 0, 748, 750, 3, 80, 32, 0, 749, 747, 1, 0, 0, 0, 749, 748, 1, 0, 0, 0, 750, 753, 1, 0, 0, 0, 751, 749, 1, 0, 0, 0, 751, 752, 1, 0, 0, 0, 752, 754, 1, 0, 0, 0, 753, 751, 1, 0, 0, 0, 754, 776, 5, 34, 0, 0, 755, 756, 5, 34, 0, 0, 756, 757, 5, 34, 0, 0, 757, 758, 5, 34, 0, 0, 758, 762, 1, 0, 0, 0, 759, 761, 8, 23, 0, 0, 760, 759, 1, 0, 0, 0, 761, 764, 1, 0, 0, 0, 762, 763, 1, 0, 0, 0, 762, 760, 1, 0, 0, 0, 763, 765, 1, 0, 0, 0, 764, 762, 1, 0, 0, 0, 765, 766, 5, 34, 0, 0, 766, 767, 5, 34, 0, 0, 767, 768, 5, 34, 0, 0, 768, 770, 1, 0, 0, 0, 769, 771, 5, 34, 0, 0, 770, 769, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 773, 1, 0, 0, 0, 772, 774, 5, 34, 0, 0, 773, 772, 1, 0, 0, 0, 773, 774, 1, 0, 0, 0, 774, 776, 1, 0, 0, 0, 775, 746, 1, 0, 0, 0, 775, 755, 1, 0, 0, 0, 776, 95, 1, 0, 0, 0, 777, 779, 3, 74, 29, 0, 778, 777, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 778, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 97, 1, 0, 0, 0, 782, 784, 3, 74, 29, 0, 783, 782, 1, 0, 0, 0, 784, 785, 1, 0, 0, 0, 785, 783, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 787, 1, 0, 0, 0, 787, 791, 3, 116, 50, 0, 788, 790, 3, 74, 29, 0, 789, 788, 1, 0, 0, 0, 790, 793, 1, 0, 0, 0, 791, 789, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 825, 1, 0, 0, 0, 793, 791, 1, 0, 0, 0, 794, 796, 3, 116, 50, 0, 795, 797, 3, 74, 29, 0, 796, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 796, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 825, 1, 0, 0, 0, 800, 802, 3, 74, 29, 0, 801, 800, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 801, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 812, 1, 0, 0, 0, 805, 809, 3, 116, 50, 0, 806, 808, 3, 74, 29, 0, 807, 806, 1, 0, 0, 0, 808, 811, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 813, 1, 0, 0, 0, 811, 809, 1, 0, 0, 0, 812, 805, 1, 0, 0, 0, 812, 813, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 815, 3, 82, 33, 0, 815, 825, 1, 0, 0, 0, 816, 818, 3, 116, 50, 0, 817, 819, 3, 74, 29, 0, 818, 817, 1, 0, 0, 0, 819, 820, 1, 0, 0, 0, 820, 818, 1, 0, 0, 0, 820, 821, 1, 0, 0, 0, 821, 822, 1, 0, 0, 0, 822, 823, 3, 82, 33, 0, 823, 825, 1, 0, 0, 0, 824, 783, 1, 0, 0, 0, 824, 794, 1, 0, 0, 0, 824, 801, 1, 0, 0, 0, 824, 816, 1, 0, 0, 0, 825, 99, 1, 0, 0, 0, 826, 827, 7, 31, 0, 0, 827, 828, 7, 32, 0, 0, 828, 101, 1, 0, 0, 0, 829, 830, 7, 12, 0, 0, 830, 831, 7, 9, 0, 0, 831, 832, 7, 0, 0, 0, 832, 103, 1, 0, 0, 0, 833, 834, 7, 12, 0, 0, 834, 835, 7, 2, 0, 0, 835, 836, 7, 4, 0, 0, 836, 105, 1, 0, 0, 0, 837, 838, 5, 61, 0, 0, 838, 107, 1, 0, 0, 0, 839, 840, 5, 58, 0, 0, 840, 841, 5, 58, 0, 0, 841, 109, 1, 0, 0, 0, 842, 843, 5, 58, 0, 0, 843, 111, 1, 0, 0, 0, 844, 845, 5, 44, 0, 
0, 845, 113, 1, 0, 0, 0, 846, 847, 7, 0, 0, 0, 847, 848, 7, 3, 0, 0, 848, 849, 7, 2, 0, 0, 849, 850, 7, 4, 0, 0, 850, 115, 1, 0, 0, 0, 851, 852, 5, 46, 0, 0, 852, 117, 1, 0, 0, 0, 853, 854, 7, 15, 0, 0, 854, 855, 7, 12, 0, 0, 855, 856, 7, 13, 0, 0, 856, 857, 7, 2, 0, 0, 857, 858, 7, 3, 0, 0, 858, 119, 1, 0, 0, 0, 859, 860, 7, 15, 0, 0, 860, 861, 7, 1, 0, 0, 861, 862, 7, 6, 0, 0, 862, 863, 7, 2, 0, 0, 863, 864, 7, 5, 0, 0, 864, 121, 1, 0, 0, 0, 865, 866, 7, 1, 0, 0, 866, 867, 7, 9, 0, 0, 867, 123, 1, 0, 0, 0, 868, 869, 7, 1, 0, 0, 869, 870, 7, 2, 0, 0, 870, 125, 1, 0, 0, 0, 871, 872, 7, 13, 0, 0, 872, 873, 7, 12, 0, 0, 873, 874, 7, 2, 0, 0, 874, 875, 7, 5, 0, 0, 875, 127, 1, 0, 0, 0, 876, 877, 7, 13, 0, 0, 877, 878, 7, 1, 0, 0, 878, 879, 7, 18, 0, 0, 879, 880, 7, 3, 0, 0, 880, 129, 1, 0, 0, 0, 881, 882, 5, 40, 0, 0, 882, 131, 1, 0, 0, 0, 883, 884, 7, 9, 0, 0, 884, 885, 7, 7, 0, 0, 885, 886, 7, 5, 0, 0, 886, 133, 1, 0, 0, 0, 887, 888, 7, 9, 0, 0, 888, 889, 7, 20, 0, 0, 889, 890, 7, 13, 0, 0, 890, 891, 7, 13, 0, 0, 891, 135, 1, 0, 0, 0, 892, 893, 7, 9, 0, 0, 893, 894, 7, 20, 0, 0, 894, 895, 7, 13, 0, 0, 895, 896, 7, 13, 0, 0, 896, 897, 7, 2, 0, 0, 897, 137, 1, 0, 0, 0, 898, 899, 7, 7, 0, 0, 899, 900, 7, 6, 0, 0, 900, 139, 1, 0, 0, 0, 901, 902, 5, 63, 0, 0, 902, 141, 1, 0, 0, 0, 903, 904, 7, 6, 0, 0, 904, 905, 7, 13, 0, 0, 905, 906, 7, 1, 0, 0, 906, 907, 7, 18, 0, 0, 907, 908, 7, 3, 0, 0, 908, 143, 1, 0, 0, 0, 909, 910, 5, 41, 0, 0, 910, 145, 1, 0, 0, 0, 911, 912, 7, 5, 0, 0, 912, 913, 7, 6, 0, 0, 913, 914, 7, 20, 0, 0, 914, 915, 7, 3, 0, 0, 915, 147, 1, 0, 0, 0, 916, 917, 5, 61, 0, 0, 917, 918, 5, 61, 0, 0, 918, 149, 1, 0, 0, 0, 919, 920, 5, 61, 0, 0, 920, 921, 5, 126, 0, 0, 921, 151, 1, 0, 0, 0, 922, 923, 5, 33, 0, 0, 923, 924, 5, 61, 0, 0, 924, 153, 1, 0, 0, 0, 925, 926, 5, 60, 0, 0, 926, 155, 1, 0, 0, 0, 927, 928, 5, 60, 0, 0, 928, 929, 5, 61, 0, 0, 929, 157, 1, 0, 0, 0, 930, 931, 5, 62, 0, 0, 931, 159, 1, 0, 0, 0, 932, 933, 5, 62, 0, 0, 933, 934, 5, 61, 0, 0, 934, 161, 1, 0, 0, 0, 935, 936, 5, 43, 0, 0, 936, 163, 1, 0, 0, 0, 937, 938, 5, 45, 0, 0, 938, 165, 1, 0, 0, 0, 939, 940, 5, 42, 0, 0, 940, 167, 1, 0, 0, 0, 941, 942, 5, 47, 0, 0, 942, 169, 1, 0, 0, 0, 943, 944, 5, 37, 0, 0, 944, 171, 1, 0, 0, 0, 945, 946, 5, 123, 0, 0, 946, 173, 1, 0, 0, 0, 947, 948, 5, 125, 0, 0, 948, 175, 1, 0, 0, 0, 949, 950, 3, 46, 15, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 80, 13, 0, 952, 177, 1, 0, 0, 0, 953, 956, 3, 140, 62, 0, 954, 957, 3, 76, 30, 0, 955, 957, 3, 90, 37, 0, 956, 954, 1, 0, 0, 0, 956, 955, 1, 0, 0, 0, 957, 961, 1, 0, 0, 0, 958, 960, 3, 92, 38, 0, 959, 958, 1, 0, 0, 0, 960, 963, 1, 0, 0, 0, 961, 959, 1, 0, 0, 0, 961, 962, 1, 0, 0, 0, 962, 971, 1, 0, 0, 0, 963, 961, 1, 0, 0, 0, 964, 966, 3, 140, 62, 0, 965, 967, 3, 74, 29, 0, 966, 965, 1, 0, 0, 0, 967, 968, 1, 0, 0, 0, 968, 966, 1, 0, 0, 0, 968, 969, 1, 0, 0, 0, 969, 971, 1, 0, 0, 0, 970, 953, 1, 0, 0, 0, 970, 964, 1, 0, 0, 0, 971, 179, 1, 0, 0, 0, 972, 973, 5, 91, 0, 0, 973, 974, 1, 0, 0, 0, 974, 975, 6, 82, 0, 0, 975, 976, 6, 82, 0, 0, 976, 181, 1, 0, 0, 0, 977, 978, 5, 93, 0, 0, 978, 979, 1, 0, 0, 0, 979, 980, 6, 83, 12, 0, 980, 981, 6, 83, 12, 0, 981, 183, 1, 0, 0, 0, 982, 986, 3, 76, 30, 0, 983, 985, 3, 92, 38, 0, 984, 983, 1, 0, 0, 0, 985, 988, 1, 0, 0, 0, 986, 984, 1, 0, 0, 0, 986, 987, 1, 0, 0, 0, 987, 999, 1, 0, 0, 0, 988, 986, 1, 0, 0, 0, 989, 992, 3, 90, 37, 0, 990, 992, 3, 84, 34, 0, 991, 989, 1, 0, 0, 0, 991, 990, 1, 0, 0, 0, 992, 994, 1, 0, 0, 0, 993, 995, 3, 92, 38, 0, 994, 993, 1, 0, 0, 0, 995, 996, 1, 0, 0, 0, 996, 994, 1, 0, 0, 
0, 996, 997, 1, 0, 0, 0, 997, 999, 1, 0, 0, 0, 998, 982, 1, 0, 0, 0, 998, 991, 1, 0, 0, 0, 999, 185, 1, 0, 0, 0, 1000, 1002, 3, 86, 35, 0, 1001, 1003, 3, 88, 36, 0, 1002, 1001, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1002, 1, 0, 0, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1007, 3, 86, 35, 0, 1007, 187, 1, 0, 0, 0, 1008, 1009, 3, 186, 85, 0, 1009, 189, 1, 0, 0, 0, 1010, 1011, 3, 66, 25, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 87, 11, 0, 1013, 191, 1, 0, 0, 0, 1014, 1015, 3, 68, 26, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 88, 11, 0, 1017, 193, 1, 0, 0, 0, 1018, 1019, 3, 70, 27, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 89, 11, 0, 1021, 195, 1, 0, 0, 0, 1022, 1023, 3, 180, 82, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 90, 14, 0, 1025, 1026, 6, 90, 15, 0, 1026, 197, 1, 0, 0, 0, 1027, 1028, 3, 72, 28, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1030, 6, 91, 16, 0, 1030, 1031, 6, 91, 12, 0, 1031, 199, 1, 0, 0, 0, 1032, 1033, 3, 70, 27, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1035, 6, 92, 11, 0, 1035, 201, 1, 0, 0, 0, 1036, 1037, 3, 66, 25, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 93, 11, 0, 1039, 203, 1, 0, 0, 0, 1040, 1041, 3, 68, 26, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 94, 11, 0, 1043, 205, 1, 0, 0, 0, 1044, 1045, 3, 72, 28, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 95, 16, 0, 1047, 1048, 6, 95, 12, 0, 1048, 207, 1, 0, 0, 0, 1049, 1050, 3, 180, 82, 0, 1050, 1051, 1, 0, 0, 0, 1051, 1052, 6, 96, 14, 0, 1052, 209, 1, 0, 0, 0, 1053, 1054, 3, 182, 83, 0, 1054, 1055, 1, 0, 0, 0, 1055, 1056, 6, 97, 17, 0, 1056, 211, 1, 0, 0, 0, 1057, 1058, 3, 110, 47, 0, 1058, 1059, 1, 0, 0, 0, 1059, 1060, 6, 98, 18, 0, 1060, 213, 1, 0, 0, 0, 1061, 1062, 3, 112, 48, 0, 1062, 1063, 1, 0, 0, 0, 1063, 1064, 6, 99, 19, 0, 1064, 215, 1, 0, 0, 0, 1065, 1066, 3, 106, 45, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1068, 6, 100, 20, 0, 1068, 217, 1, 0, 0, 0, 1069, 1070, 7, 16, 0, 0, 1070, 1071, 7, 3, 0, 0, 1071, 1072, 7, 5, 0, 0, 1072, 1073, 7, 12, 0, 0, 1073, 1074, 7, 0, 0, 0, 1074, 1075, 7, 12, 0, 0, 1075, 1076, 7, 5, 0, 0, 1076, 1077, 7, 12, 0, 0, 1077, 219, 1, 0, 0, 0, 1078, 1082, 8, 33, 0, 0, 1079, 1080, 5, 47, 0, 0, 1080, 1082, 8, 34, 0, 0, 1081, 1078, 1, 0, 0, 0, 1081, 1079, 1, 0, 0, 0, 1082, 221, 1, 0, 0, 0, 1083, 1085, 3, 220, 102, 0, 1084, 1083, 1, 0, 0, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1084, 1, 0, 0, 0, 1086, 1087, 1, 0, 0, 0, 1087, 223, 1, 0, 0, 0, 1088, 1089, 3, 222, 103, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 104, 21, 0, 1091, 225, 1, 0, 0, 0, 1092, 1093, 3, 94, 39, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 105, 22, 0, 1095, 227, 1, 0, 0, 0, 1096, 1097, 3, 66, 25, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 106, 11, 0, 1099, 229, 1, 0, 0, 0, 1100, 1101, 3, 68, 26, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 107, 11, 0, 1103, 231, 1, 0, 0, 0, 1104, 1105, 3, 70, 27, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 108, 11, 0, 1107, 233, 1, 0, 0, 0, 1108, 1109, 3, 72, 28, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 109, 16, 0, 1111, 1112, 6, 109, 12, 0, 1112, 235, 1, 0, 0, 0, 1113, 1114, 3, 116, 50, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 110, 23, 0, 1116, 237, 1, 0, 0, 0, 1117, 1118, 3, 112, 48, 0, 1118, 1119, 1, 0, 0, 0, 1119, 1120, 6, 111, 19, 0, 1120, 239, 1, 0, 0, 0, 1121, 1122, 4, 112, 8, 0, 1122, 1123, 3, 140, 62, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 112, 24, 0, 1125, 241, 1, 0, 0, 0, 1126, 1127, 4, 113, 9, 0, 1127, 1128, 3, 178, 81, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 113, 25, 0, 1130, 243, 1, 0, 0, 0, 1131, 1136, 3, 76, 30, 0, 1132, 1136, 3, 74, 29, 0, 1133, 1136, 3, 90, 37, 0, 1134, 1136, 3, 166, 75, 
0, 1135, 1131, 1, 0, 0, 0, 1135, 1132, 1, 0, 0, 0, 1135, 1133, 1, 0, 0, 0, 1135, 1134, 1, 0, 0, 0, 1136, 245, 1, 0, 0, 0, 1137, 1140, 3, 76, 30, 0, 1138, 1140, 3, 166, 75, 0, 1139, 1137, 1, 0, 0, 0, 1139, 1138, 1, 0, 0, 0, 1140, 1144, 1, 0, 0, 0, 1141, 1143, 3, 244, 114, 0, 1142, 1141, 1, 0, 0, 0, 1143, 1146, 1, 0, 0, 0, 1144, 1142, 1, 0, 0, 0, 1144, 1145, 1, 0, 0, 0, 1145, 1157, 1, 0, 0, 0, 1146, 1144, 1, 0, 0, 0, 1147, 1150, 3, 90, 37, 0, 1148, 1150, 3, 84, 34, 0, 1149, 1147, 1, 0, 0, 0, 1149, 1148, 1, 0, 0, 0, 1150, 1152, 1, 0, 0, 0, 1151, 1153, 3, 244, 114, 0, 1152, 1151, 1, 0, 0, 0, 1153, 1154, 1, 0, 0, 0, 1154, 1152, 1, 0, 0, 0, 1154, 1155, 1, 0, 0, 0, 1155, 1157, 1, 0, 0, 0, 1156, 1139, 1, 0, 0, 0, 1156, 1149, 1, 0, 0, 0, 1157, 247, 1, 0, 0, 0, 1158, 1161, 3, 246, 115, 0, 1159, 1161, 3, 186, 85, 0, 1160, 1158, 1, 0, 0, 0, 1160, 1159, 1, 0, 0, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1160, 1, 0, 0, 0, 1162, 1163, 1, 0, 0, 0, 1163, 249, 1, 0, 0, 0, 1164, 1165, 3, 66, 25, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1167, 6, 117, 11, 0, 1167, 251, 1, 0, 0, 0, 1168, 1169, 3, 68, 26, 0, 1169, 1170, 1, 0, 0, 0, 1170, 1171, 6, 118, 11, 0, 1171, 253, 1, 0, 0, 0, 1172, 1173, 3, 70, 27, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1175, 6, 119, 11, 0, 1175, 255, 1, 0, 0, 0, 1176, 1177, 3, 72, 28, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 120, 16, 0, 1179, 1180, 6, 120, 12, 0, 1180, 257, 1, 0, 0, 0, 1181, 1182, 3, 106, 45, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1184, 6, 121, 20, 0, 1184, 259, 1, 0, 0, 0, 1185, 1186, 3, 112, 48, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 122, 19, 0, 1188, 261, 1, 0, 0, 0, 1189, 1190, 3, 116, 50, 0, 1190, 1191, 1, 0, 0, 0, 1191, 1192, 6, 123, 23, 0, 1192, 263, 1, 0, 0, 0, 1193, 1194, 4, 124, 10, 0, 1194, 1195, 3, 140, 62, 0, 1195, 1196, 1, 0, 0, 0, 1196, 1197, 6, 124, 24, 0, 1197, 265, 1, 0, 0, 0, 1198, 1199, 4, 125, 11, 0, 1199, 1200, 3, 178, 81, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 125, 25, 0, 1202, 267, 1, 0, 0, 0, 1203, 1204, 7, 12, 0, 0, 1204, 1205, 7, 2, 0, 0, 1205, 269, 1, 0, 0, 0, 1206, 1207, 3, 248, 116, 0, 1207, 1208, 1, 0, 0, 0, 1208, 1209, 6, 127, 26, 0, 1209, 271, 1, 0, 0, 0, 1210, 1211, 3, 66, 25, 0, 1211, 1212, 1, 0, 0, 0, 1212, 1213, 6, 128, 11, 0, 1213, 273, 1, 0, 0, 0, 1214, 1215, 3, 68, 26, 0, 1215, 1216, 1, 0, 0, 0, 1216, 1217, 6, 129, 11, 0, 1217, 275, 1, 0, 0, 0, 1218, 1219, 3, 70, 27, 0, 1219, 1220, 1, 0, 0, 0, 1220, 1221, 6, 130, 11, 0, 1221, 277, 1, 0, 0, 0, 1222, 1223, 3, 72, 28, 0, 1223, 1224, 1, 0, 0, 0, 1224, 1225, 6, 131, 16, 0, 1225, 1226, 6, 131, 12, 0, 1226, 279, 1, 0, 0, 0, 1227, 1228, 3, 180, 82, 0, 1228, 1229, 1, 0, 0, 0, 1229, 1230, 6, 132, 14, 0, 1230, 1231, 6, 132, 27, 0, 1231, 281, 1, 0, 0, 0, 1232, 1233, 7, 7, 0, 0, 1233, 1234, 7, 9, 0, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1236, 6, 133, 28, 0, 1236, 283, 1, 0, 0, 0, 1237, 1238, 7, 19, 0, 0, 1238, 1239, 7, 1, 0, 0, 1239, 1240, 7, 5, 0, 0, 1240, 1241, 7, 10, 0, 0, 1241, 1242, 1, 0, 0, 0, 1242, 1243, 6, 134, 28, 0, 1243, 285, 1, 0, 0, 0, 1244, 1245, 8, 35, 0, 0, 1245, 287, 1, 0, 0, 0, 1246, 1248, 3, 286, 135, 0, 1247, 1246, 1, 0, 0, 0, 1248, 1249, 1, 0, 0, 0, 1249, 1247, 1, 0, 0, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1251, 1, 0, 0, 0, 1251, 1252, 3, 110, 47, 0, 1252, 1254, 1, 0, 0, 0, 1253, 1247, 1, 0, 0, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1256, 1, 0, 0, 0, 1255, 1257, 3, 286, 135, 0, 1256, 1255, 1, 0, 0, 0, 1257, 1258, 1, 0, 0, 0, 1258, 1256, 1, 0, 0, 0, 1258, 1259, 1, 0, 0, 0, 1259, 289, 1, 0, 0, 0, 1260, 1261, 3, 288, 136, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 137, 29, 0, 1263, 291, 1, 0, 0, 0, 1264, 1265, 3, 
66, 25, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 138, 11, 0, 1267, 293, 1, 0, 0, 0, 1268, 1269, 3, 68, 26, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 139, 11, 0, 1271, 295, 1, 0, 0, 0, 1272, 1273, 3, 70, 27, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 140, 11, 0, 1275, 297, 1, 0, 0, 0, 1276, 1277, 3, 72, 28, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 141, 16, 0, 1279, 1280, 6, 141, 12, 0, 1280, 1281, 6, 141, 12, 0, 1281, 299, 1, 0, 0, 0, 1282, 1283, 3, 106, 45, 0, 1283, 1284, 1, 0, 0, 0, 1284, 1285, 6, 142, 20, 0, 1285, 301, 1, 0, 0, 0, 1286, 1287, 3, 112, 48, 0, 1287, 1288, 1, 0, 0, 0, 1288, 1289, 6, 143, 19, 0, 1289, 303, 1, 0, 0, 0, 1290, 1291, 3, 116, 50, 0, 1291, 1292, 1, 0, 0, 0, 1292, 1293, 6, 144, 23, 0, 1293, 305, 1, 0, 0, 0, 1294, 1295, 3, 284, 134, 0, 1295, 1296, 1, 0, 0, 0, 1296, 1297, 6, 145, 30, 0, 1297, 307, 1, 0, 0, 0, 1298, 1299, 3, 248, 116, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 146, 26, 0, 1301, 309, 1, 0, 0, 0, 1302, 1303, 3, 188, 86, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 147, 31, 0, 1305, 311, 1, 0, 0, 0, 1306, 1307, 4, 148, 12, 0, 1307, 1308, 3, 140, 62, 0, 1308, 1309, 1, 0, 0, 0, 1309, 1310, 6, 148, 24, 0, 1310, 313, 1, 0, 0, 0, 1311, 1312, 4, 149, 13, 0, 1312, 1313, 3, 178, 81, 0, 1313, 1314, 1, 0, 0, 0, 1314, 1315, 6, 149, 25, 0, 1315, 315, 1, 0, 0, 0, 1316, 1317, 3, 66, 25, 0, 1317, 1318, 1, 0, 0, 0, 1318, 1319, 6, 150, 11, 0, 1319, 317, 1, 0, 0, 0, 1320, 1321, 3, 68, 26, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1323, 6, 151, 11, 0, 1323, 319, 1, 0, 0, 0, 1324, 1325, 3, 70, 27, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 152, 11, 0, 1327, 321, 1, 0, 0, 0, 1328, 1329, 3, 72, 28, 0, 1329, 1330, 1, 0, 0, 0, 1330, 1331, 6, 153, 16, 0, 1331, 1332, 6, 153, 12, 0, 1332, 323, 1, 0, 0, 0, 1333, 1334, 3, 116, 50, 0, 1334, 1335, 1, 0, 0, 0, 1335, 1336, 6, 154, 23, 0, 1336, 325, 1, 0, 0, 0, 1337, 1338, 4, 155, 14, 0, 1338, 1339, 3, 140, 62, 0, 1339, 1340, 1, 0, 0, 0, 1340, 1341, 6, 155, 24, 0, 1341, 327, 1, 0, 0, 0, 1342, 1343, 4, 156, 15, 0, 1343, 1344, 3, 178, 81, 0, 1344, 1345, 1, 0, 0, 0, 1345, 1346, 6, 156, 25, 0, 1346, 329, 1, 0, 0, 0, 1347, 1348, 3, 188, 86, 0, 1348, 1349, 1, 0, 0, 0, 1349, 1350, 6, 157, 31, 0, 1350, 331, 1, 0, 0, 0, 1351, 1352, 3, 184, 84, 0, 1352, 1353, 1, 0, 0, 0, 1353, 1354, 6, 158, 32, 0, 1354, 333, 1, 0, 0, 0, 1355, 1356, 3, 66, 25, 0, 1356, 1357, 1, 0, 0, 0, 1357, 1358, 6, 159, 11, 0, 1358, 335, 1, 0, 0, 0, 1359, 1360, 3, 68, 26, 0, 1360, 1361, 1, 0, 0, 0, 1361, 1362, 6, 160, 11, 0, 1362, 337, 1, 0, 0, 0, 1363, 1364, 3, 70, 27, 0, 1364, 1365, 1, 0, 0, 0, 1365, 1366, 6, 161, 11, 0, 1366, 339, 1, 0, 0, 0, 1367, 1368, 3, 72, 28, 0, 1368, 1369, 1, 0, 0, 0, 1369, 1370, 6, 162, 16, 0, 1370, 1371, 6, 162, 12, 0, 1371, 341, 1, 0, 0, 0, 1372, 1373, 7, 1, 0, 0, 1373, 1374, 7, 9, 0, 0, 1374, 1375, 7, 15, 0, 0, 1375, 1376, 7, 7, 0, 0, 1376, 343, 1, 0, 0, 0, 1377, 1378, 3, 66, 25, 0, 1378, 1379, 1, 0, 0, 0, 1379, 1380, 6, 164, 11, 0, 1380, 345, 1, 0, 0, 0, 1381, 1382, 3, 68, 26, 0, 1382, 1383, 1, 0, 0, 0, 1383, 1384, 6, 165, 11, 0, 1384, 347, 1, 0, 0, 0, 1385, 1386, 3, 70, 27, 0, 1386, 1387, 1, 0, 0, 0, 1387, 1388, 6, 166, 11, 0, 1388, 349, 1, 0, 0, 0, 1389, 1390, 3, 182, 83, 0, 1390, 1391, 1, 0, 0, 0, 1391, 1392, 6, 167, 17, 0, 1392, 1393, 6, 167, 12, 0, 1393, 351, 1, 0, 0, 0, 1394, 1395, 3, 110, 47, 0, 1395, 1396, 1, 0, 0, 0, 1396, 1397, 6, 168, 18, 0, 1397, 353, 1, 0, 0, 0, 1398, 1404, 3, 84, 34, 0, 1399, 1404, 3, 74, 29, 0, 1400, 1404, 3, 116, 50, 0, 1401, 1404, 3, 76, 30, 0, 1402, 1404, 3, 90, 37, 0, 1403, 1398, 1, 0, 0, 0, 1403, 1399, 1, 0, 0, 0, 1403, 
1400, 1, 0, 0, 0, 1403, 1401, 1, 0, 0, 0, 1403, 1402, 1, 0, 0, 0, 1404, 1405, 1, 0, 0, 0, 1405, 1403, 1, 0, 0, 0, 1405, 1406, 1, 0, 0, 0, 1406, 355, 1, 0, 0, 0, 1407, 1408, 3, 66, 25, 0, 1408, 1409, 1, 0, 0, 0, 1409, 1410, 6, 170, 11, 0, 1410, 357, 1, 0, 0, 0, 1411, 1412, 3, 68, 26, 0, 1412, 1413, 1, 0, 0, 0, 1413, 1414, 6, 171, 11, 0, 1414, 359, 1, 0, 0, 0, 1415, 1416, 3, 70, 27, 0, 1416, 1417, 1, 0, 0, 0, 1417, 1418, 6, 172, 11, 0, 1418, 361, 1, 0, 0, 0, 1419, 1420, 3, 72, 28, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 173, 16, 0, 1422, 1423, 6, 173, 12, 0, 1423, 363, 1, 0, 0, 0, 1424, 1425, 3, 110, 47, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 6, 174, 18, 0, 1427, 365, 1, 0, 0, 0, 1428, 1429, 3, 112, 48, 0, 1429, 1430, 1, 0, 0, 0, 1430, 1431, 6, 175, 19, 0, 1431, 367, 1, 0, 0, 0, 1432, 1433, 3, 116, 50, 0, 1433, 1434, 1, 0, 0, 0, 1434, 1435, 6, 176, 23, 0, 1435, 369, 1, 0, 0, 0, 1436, 1437, 3, 282, 133, 0, 1437, 1438, 1, 0, 0, 0, 1438, 1439, 6, 177, 33, 0, 1439, 1440, 6, 177, 34, 0, 1440, 371, 1, 0, 0, 0, 1441, 1442, 3, 222, 103, 0, 1442, 1443, 1, 0, 0, 0, 1443, 1444, 6, 178, 21, 0, 1444, 373, 1, 0, 0, 0, 1445, 1446, 3, 94, 39, 0, 1446, 1447, 1, 0, 0, 0, 1447, 1448, 6, 179, 22, 0, 1448, 375, 1, 0, 0, 0, 1449, 1450, 3, 66, 25, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 180, 11, 0, 1452, 377, 1, 0, 0, 0, 1453, 1454, 3, 68, 26, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 181, 11, 0, 1456, 379, 1, 0, 0, 0, 1457, 1458, 3, 70, 27, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 182, 11, 0, 1460, 381, 1, 0, 0, 0, 1461, 1462, 3, 72, 28, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 183, 16, 0, 1464, 1465, 6, 183, 12, 0, 1465, 1466, 6, 183, 12, 0, 1466, 383, 1, 0, 0, 0, 1467, 1468, 3, 112, 48, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 6, 184, 19, 0, 1470, 385, 1, 0, 0, 0, 1471, 1472, 3, 116, 50, 0, 1472, 1473, 1, 0, 0, 0, 1473, 1474, 6, 185, 23, 0, 1474, 387, 1, 0, 0, 0, 1475, 1476, 3, 248, 116, 0, 1476, 1477, 1, 0, 0, 0, 1477, 1478, 6, 186, 26, 0, 1478, 389, 1, 0, 0, 0, 1479, 1480, 3, 66, 25, 0, 1480, 1481, 1, 0, 0, 0, 1481, 1482, 6, 187, 11, 0, 1482, 391, 1, 0, 0, 0, 1483, 1484, 3, 68, 26, 0, 1484, 1485, 1, 0, 0, 0, 1485, 1486, 6, 188, 11, 0, 1486, 393, 1, 0, 0, 0, 1487, 1488, 3, 70, 27, 0, 1488, 1489, 1, 0, 0, 0, 1489, 1490, 6, 189, 11, 0, 1490, 395, 1, 0, 0, 0, 1491, 1492, 3, 72, 28, 0, 1492, 1493, 1, 0, 0, 0, 1493, 1494, 6, 190, 16, 0, 1494, 1495, 6, 190, 12, 0, 1495, 397, 1, 0, 0, 0, 1496, 1497, 3, 54, 19, 0, 1497, 1498, 1, 0, 0, 0, 1498, 1499, 6, 191, 35, 0, 1499, 399, 1, 0, 0, 0, 1500, 1501, 3, 268, 126, 0, 1501, 1502, 1, 0, 0, 0, 1502, 1503, 6, 192, 36, 0, 1503, 401, 1, 0, 0, 0, 1504, 1505, 3, 282, 133, 0, 1505, 1506, 1, 0, 0, 0, 1506, 1507, 6, 193, 33, 0, 1507, 1508, 6, 193, 12, 0, 1508, 1509, 6, 193, 0, 0, 1509, 403, 1, 0, 0, 0, 1510, 1511, 7, 20, 0, 0, 1511, 1512, 7, 2, 0, 0, 1512, 1513, 7, 1, 0, 0, 1513, 1514, 7, 9, 0, 0, 1514, 1515, 7, 17, 0, 0, 1515, 1516, 1, 0, 0, 0, 1516, 1517, 6, 194, 12, 0, 1517, 1518, 6, 194, 0, 0, 1518, 405, 1, 0, 0, 0, 1519, 1520, 3, 222, 103, 0, 1520, 1521, 1, 0, 0, 0, 1521, 1522, 6, 195, 21, 0, 1522, 407, 1, 0, 0, 0, 1523, 1524, 3, 94, 39, 0, 1524, 1525, 1, 0, 0, 0, 1525, 1526, 6, 196, 22, 0, 1526, 409, 1, 0, 0, 0, 1527, 1528, 3, 110, 47, 0, 1528, 1529, 1, 0, 0, 0, 1529, 1530, 6, 197, 18, 0, 1530, 411, 1, 0, 0, 0, 1531, 1532, 3, 184, 84, 0, 1532, 1533, 1, 0, 0, 0, 1533, 1534, 6, 198, 32, 0, 1534, 413, 1, 0, 0, 0, 1535, 1536, 3, 188, 86, 0, 1536, 1537, 1, 0, 0, 0, 1537, 1538, 6, 199, 31, 0, 1538, 415, 1, 0, 0, 0, 1539, 1540, 3, 66, 25, 0, 1540, 1541, 1, 0, 0, 0, 1541, 1542, 
6, 200, 11, 0, 1542, 417, 1, 0, 0, 0, 1543, 1544, 3, 68, 26, 0, 1544, 1545, 1, 0, 0, 0, 1545, 1546, 6, 201, 11, 0, 1546, 419, 1, 0, 0, 0, 1547, 1548, 3, 70, 27, 0, 1548, 1549, 1, 0, 0, 0, 1549, 1550, 6, 202, 11, 0, 1550, 421, 1, 0, 0, 0, 1551, 1552, 3, 72, 28, 0, 1552, 1553, 1, 0, 0, 0, 1553, 1554, 6, 203, 16, 0, 1554, 1555, 6, 203, 12, 0, 1555, 423, 1, 0, 0, 0, 1556, 1557, 3, 222, 103, 0, 1557, 1558, 1, 0, 0, 0, 1558, 1559, 6, 204, 21, 0, 1559, 1560, 6, 204, 12, 0, 1560, 1561, 6, 204, 37, 0, 1561, 425, 1, 0, 0, 0, 1562, 1563, 3, 94, 39, 0, 1563, 1564, 1, 0, 0, 0, 1564, 1565, 6, 205, 22, 0, 1565, 1566, 6, 205, 12, 0, 1566, 1567, 6, 205, 37, 0, 1567, 427, 1, 0, 0, 0, 1568, 1569, 3, 66, 25, 0, 1569, 1570, 1, 0, 0, 0, 1570, 1571, 6, 206, 11, 0, 1571, 429, 1, 0, 0, 0, 1572, 1573, 3, 68, 26, 0, 1573, 1574, 1, 0, 0, 0, 1574, 1575, 6, 207, 11, 0, 1575, 431, 1, 0, 0, 0, 1576, 1577, 3, 70, 27, 0, 1577, 1578, 1, 0, 0, 0, 1578, 1579, 6, 208, 11, 0, 1579, 433, 1, 0, 0, 0, 1580, 1581, 3, 110, 47, 0, 1581, 1582, 1, 0, 0, 0, 1582, 1583, 6, 209, 18, 0, 1583, 1584, 6, 209, 12, 0, 1584, 1585, 6, 209, 9, 0, 1585, 435, 1, 0, 0, 0, 1586, 1587, 3, 112, 48, 0, 1587, 1588, 1, 0, 0, 0, 1588, 1589, 6, 210, 19, 0, 1589, 1590, 6, 210, 12, 0, 1590, 1591, 6, 210, 9, 0, 1591, 437, 1, 0, 0, 0, 1592, 1593, 3, 66, 25, 0, 1593, 1594, 1, 0, 0, 0, 1594, 1595, 6, 211, 11, 0, 1595, 439, 1, 0, 0, 0, 1596, 1597, 3, 68, 26, 0, 1597, 1598, 1, 0, 0, 0, 1598, 1599, 6, 212, 11, 0, 1599, 441, 1, 0, 0, 0, 1600, 1601, 3, 70, 27, 0, 1601, 1602, 1, 0, 0, 0, 1602, 1603, 6, 213, 11, 0, 1603, 443, 1, 0, 0, 0, 1604, 1605, 3, 188, 86, 0, 1605, 1606, 1, 0, 0, 0, 1606, 1607, 6, 214, 12, 0, 1607, 1608, 6, 214, 0, 0, 1608, 1609, 6, 214, 31, 0, 1609, 445, 1, 0, 0, 0, 1610, 1611, 3, 184, 84, 0, 1611, 1612, 1, 0, 0, 0, 1612, 1613, 6, 215, 12, 0, 1613, 1614, 6, 215, 0, 0, 1614, 1615, 6, 215, 32, 0, 1615, 447, 1, 0, 0, 0, 1616, 1617, 3, 100, 42, 0, 1617, 1618, 1, 0, 0, 0, 1618, 1619, 6, 216, 12, 0, 1619, 1620, 6, 216, 0, 0, 1620, 1621, 6, 216, 38, 0, 1621, 449, 1, 0, 0, 0, 1622, 1623, 3, 72, 28, 0, 1623, 1624, 1, 0, 0, 0, 1624, 1625, 6, 217, 16, 0, 1625, 1626, 6, 217, 12, 0, 1626, 451, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 665, 675, 679, 682, 691, 693, 704, 723, 728, 737, 744, 749, 751, 762, 770, 773, 775, 780, 785, 791, 798, 803, 809, 812, 820, 824, 956, 961, 968, 970, 986, 991, 996, 998, 1004, 1081, 1086, 1135, 1139, 1144, 1149, 1154, 1156, 1160, 1162, 1249, 1253, 1258, 1403, 1405, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 72, 0, 5, 0, 0, 7, 29, 0, 7, 73, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 83, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 71, 0, 7, 87, 0, 5, 10, 0, 5, 7, 0, 7, 97, 0, 7, 96, 0, 7, 75, 0, 7, 74, 0, 7, 95, 0, 5, 12, 0, 7, 20, 0, 7, 91, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index eb8af91bef274..28358a0f614e6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -129,7 +129,7 @@ private static String[] makeLiteralNames() { "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", 
"'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'{'", "'}'", null, null, "']'", null, null, null, null, null, null, null, null, "'metadata'", null, null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, @@ -246,10 +246,6 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return DEV_JOIN_RIGHT_sempred((RuleContext)_localctx, predIndex); case 23: return DEV_JOIN_LOOKUP_sempred((RuleContext)_localctx, predIndex); - case 78: - return LEFT_BRACES_sempred((RuleContext)_localctx, predIndex); - case 79: - return RIGHT_BRACES_sempred((RuleContext)_localctx, predIndex); case 112: return PROJECT_PARAM_sempred((RuleContext)_localctx, predIndex); case 113: @@ -325,79 +321,65 @@ private boolean DEV_JOIN_LOOKUP_sempred(RuleContext _localctx, int predIndex) { } return true; } - private boolean LEFT_BRACES_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 8: - return this.isDevVersion(); - } - return true; - } - private boolean RIGHT_BRACES_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 9: - return this.isDevVersion(); - } - return true; - } private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 10: + case 8: return this.isDevVersion(); } return true; } private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 11: + case 9: return this.isDevVersion(); } return true; } private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 12: + case 10: return this.isDevVersion(); } return true; } private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 13: + case 11: return this.isDevVersion(); } return true; } private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 14: + case 12: return this.isDevVersion(); } return true; } private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 15: + case 13: return this.isDevVersion(); } return true; } private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 16: + case 14: return this.isDevVersion(); } return true; } private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 17: + case 15: return this.isDevVersion(); } return true; } public static final String _serializedATN = - "\u0004\u0000\u0082\u065d\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\u0004\u0000\u0082\u065b\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ @@ -526,13 +508,13 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "A\u0001A\u0001A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001"+ "D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001"+ "H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001"+ - 
"M\u0001M\u0001N\u0001N\u0001N\u0001O\u0001O\u0001O\u0001P\u0001P\u0001"+ - "P\u0001P\u0001Q\u0001Q\u0001Q\u0003Q\u03bf\bQ\u0001Q\u0005Q\u03c2\bQ\n"+ - "Q\fQ\u03c5\tQ\u0001Q\u0001Q\u0004Q\u03c9\bQ\u000bQ\fQ\u03ca\u0003Q\u03cd"+ - "\bQ\u0001R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001"+ - "S\u0001T\u0001T\u0005T\u03db\bT\nT\fT\u03de\tT\u0001T\u0001T\u0003T\u03e2"+ - "\bT\u0001T\u0004T\u03e5\bT\u000bT\fT\u03e6\u0003T\u03e9\bT\u0001U\u0001"+ - "U\u0004U\u03ed\bU\u000bU\fU\u03ee\u0001U\u0001U\u0001V\u0001V\u0001W\u0001"+ + "M\u0001M\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001"+ + "Q\u0001Q\u0001Q\u0003Q\u03bd\bQ\u0001Q\u0005Q\u03c0\bQ\nQ\fQ\u03c3\tQ"+ + "\u0001Q\u0001Q\u0004Q\u03c7\bQ\u000bQ\fQ\u03c8\u0003Q\u03cb\bQ\u0001R"+ + "\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001S\u0001"+ + "T\u0001T\u0005T\u03d9\bT\nT\fT\u03dc\tT\u0001T\u0001T\u0003T\u03e0\bT"+ + "\u0001T\u0004T\u03e3\bT\u000bT\fT\u03e4\u0003T\u03e7\bT\u0001U\u0001U"+ + "\u0004U\u03eb\bU\u000bU\fU\u03ec\u0001U\u0001U\u0001V\u0001V\u0001W\u0001"+ "W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001"+ "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001"+ "[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001"+ @@ -540,15 +522,15 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001"+ "c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001"+ "e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0003"+ - "f\u043c\bf\u0001g\u0004g\u043f\bg\u000bg\fg\u0440\u0001h\u0001h\u0001"+ + "f\u043a\bf\u0001g\u0004g\u043d\bg\u000bg\fg\u043e\u0001h\u0001h\u0001"+ "h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001"+ "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ "m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001"+ "o\u0001p\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001"+ - "q\u0001r\u0001r\u0001r\u0001r\u0003r\u0472\br\u0001s\u0001s\u0003s\u0476"+ - "\bs\u0001s\u0005s\u0479\bs\ns\fs\u047c\ts\u0001s\u0001s\u0003s\u0480\b"+ - "s\u0001s\u0004s\u0483\bs\u000bs\fs\u0484\u0003s\u0487\bs\u0001t\u0001"+ - "t\u0004t\u048b\bt\u000bt\ft\u048c\u0001u\u0001u\u0001u\u0001u\u0001v\u0001"+ + "q\u0001r\u0001r\u0001r\u0001r\u0003r\u0470\br\u0001s\u0001s\u0003s\u0474"+ + "\bs\u0001s\u0005s\u0477\bs\ns\fs\u047a\ts\u0001s\u0001s\u0003s\u047e\b"+ + "s\u0001s\u0004s\u0481\bs\u000bs\fs\u0482\u0003s\u0485\bs\u0001t\u0001"+ + "t\u0004t\u0489\bt\u000bt\ft\u048a\u0001u\u0001u\u0001u\u0001u\u0001v\u0001"+ "v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001"+ "x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001"+ "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001"+ @@ -559,9 +541,9 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ "\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001"+ "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001"+ - "\u0087\u0001\u0087\u0001\u0088\u0004\u0088\u04e2\b\u0088\u000b\u0088\f"+ - "\u0088\u04e3\u0001\u0088\u0001\u0088\u0003\u0088\u04e8\b\u0088\u0001\u0088"+ - "\u0004\u0088\u04eb\b\u0088\u000b\u0088\f\u0088\u04ec\u0001\u0089\u0001"+ + "\u0087\u0001\u0087\u0001\u0088\u0004\u0088\u04e0\b\u0088\u000b\u0088\f"+ + 
"\u0088\u04e1\u0001\u0088\u0001\u0088\u0003\u0088\u04e6\b\u0088\u0001\u0088"+ + "\u0004\u0088\u04e9\b\u0088\u000b\u0088\f\u0088\u04ea\u0001\u0089\u0001"+ "\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001"+ "\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001"+ "\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001"+ @@ -585,8 +567,8 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ "\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001"+ - "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0004\u00a9\u057e\b\u00a9\u000b"+ - "\u00a9\f\u00a9\u057f\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ + "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0004\u00a9\u057c\b\u00a9\u000b"+ + "\u00a9\f\u00a9\u057d\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ "\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001"+ "\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001"+ "\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001"+ @@ -660,7 +642,7 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\t\n\r\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004"+ "\u0000\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002"+ "\u0000YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b"+ - "\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u0678\u0000\u0010\u0001\u0000\u0000"+ + "\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u0676\u0000\u0010\u0001\u0000\u0000"+ "\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000"+ "\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000"+ "\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000"+ @@ -793,76 +775,76 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u0000\u0000\u00a0\u03a4\u0001\u0000\u0000\u0000\u00a2\u03a7\u0001\u0000"+ "\u0000\u0000\u00a4\u03a9\u0001\u0000\u0000\u0000\u00a6\u03ab\u0001\u0000"+ "\u0000\u0000\u00a8\u03ad\u0001\u0000\u0000\u0000\u00aa\u03af\u0001\u0000"+ - "\u0000\u0000\u00ac\u03b1\u0001\u0000\u0000\u0000\u00ae\u03b4\u0001\u0000"+ - "\u0000\u0000\u00b0\u03b7\u0001\u0000\u0000\u0000\u00b2\u03cc\u0001\u0000"+ - "\u0000\u0000\u00b4\u03ce\u0001\u0000\u0000\u0000\u00b6\u03d3\u0001\u0000"+ - "\u0000\u0000\u00b8\u03e8\u0001\u0000\u0000\u0000\u00ba\u03ea\u0001\u0000"+ - "\u0000\u0000\u00bc\u03f2\u0001\u0000\u0000\u0000\u00be\u03f4\u0001\u0000"+ - "\u0000\u0000\u00c0\u03f8\u0001\u0000\u0000\u0000\u00c2\u03fc\u0001\u0000"+ - "\u0000\u0000\u00c4\u0400\u0001\u0000\u0000\u0000\u00c6\u0405\u0001\u0000"+ - "\u0000\u0000\u00c8\u040a\u0001\u0000\u0000\u0000\u00ca\u040e\u0001\u0000"+ - "\u0000\u0000\u00cc\u0412\u0001\u0000\u0000\u0000\u00ce\u0416\u0001\u0000"+ - "\u0000\u0000\u00d0\u041b\u0001\u0000\u0000\u0000\u00d2\u041f\u0001\u0000"+ - "\u0000\u0000\u00d4\u0423\u0001\u0000\u0000\u0000\u00d6\u0427\u0001\u0000"+ - "\u0000\u0000\u00d8\u042b\u0001\u0000\u0000\u0000\u00da\u042f\u0001\u0000"+ - "\u0000\u0000\u00dc\u043b\u0001\u0000\u0000\u0000\u00de\u043e\u0001\u0000"+ - "\u0000\u0000\u00e0\u0442\u0001\u0000\u0000\u0000\u00e2\u0446\u0001\u0000"+ - "\u0000\u0000\u00e4\u044a\u0001\u0000\u0000\u0000\u00e6\u044e\u0001\u0000"+ - "\u0000\u0000\u00e8\u0452\u0001\u0000\u0000\u0000\u00ea\u0456\u0001\u0000"+ - 
"\u0000\u0000\u00ec\u045b\u0001\u0000\u0000\u0000\u00ee\u045f\u0001\u0000"+ - "\u0000\u0000\u00f0\u0463\u0001\u0000\u0000\u0000\u00f2\u0468\u0001\u0000"+ - "\u0000\u0000\u00f4\u0471\u0001\u0000\u0000\u0000\u00f6\u0486\u0001\u0000"+ - "\u0000\u0000\u00f8\u048a\u0001\u0000\u0000\u0000\u00fa\u048e\u0001\u0000"+ - "\u0000\u0000\u00fc\u0492\u0001\u0000\u0000\u0000\u00fe\u0496\u0001\u0000"+ - "\u0000\u0000\u0100\u049a\u0001\u0000\u0000\u0000\u0102\u049f\u0001\u0000"+ - "\u0000\u0000\u0104\u04a3\u0001\u0000\u0000\u0000\u0106\u04a7\u0001\u0000"+ - "\u0000\u0000\u0108\u04ab\u0001\u0000\u0000\u0000\u010a\u04b0\u0001\u0000"+ - "\u0000\u0000\u010c\u04b5\u0001\u0000\u0000\u0000\u010e\u04b8\u0001\u0000"+ - "\u0000\u0000\u0110\u04bc\u0001\u0000\u0000\u0000\u0112\u04c0\u0001\u0000"+ - "\u0000\u0000\u0114\u04c4\u0001\u0000\u0000\u0000\u0116\u04c8\u0001\u0000"+ - "\u0000\u0000\u0118\u04cd\u0001\u0000\u0000\u0000\u011a\u04d2\u0001\u0000"+ - "\u0000\u0000\u011c\u04d7\u0001\u0000\u0000\u0000\u011e\u04de\u0001\u0000"+ - "\u0000\u0000\u0120\u04e7\u0001\u0000\u0000\u0000\u0122\u04ee\u0001\u0000"+ - "\u0000\u0000\u0124\u04f2\u0001\u0000\u0000\u0000\u0126\u04f6\u0001\u0000"+ - "\u0000\u0000\u0128\u04fa\u0001\u0000\u0000\u0000\u012a\u04fe\u0001\u0000"+ - "\u0000\u0000\u012c\u0504\u0001\u0000\u0000\u0000\u012e\u0508\u0001\u0000"+ - "\u0000\u0000\u0130\u050c\u0001\u0000\u0000\u0000\u0132\u0510\u0001\u0000"+ - "\u0000\u0000\u0134\u0514\u0001\u0000\u0000\u0000\u0136\u0518\u0001\u0000"+ - "\u0000\u0000\u0138\u051c\u0001\u0000\u0000\u0000\u013a\u0521\u0001\u0000"+ - "\u0000\u0000\u013c\u0526\u0001\u0000\u0000\u0000\u013e\u052a\u0001\u0000"+ - "\u0000\u0000\u0140\u052e\u0001\u0000\u0000\u0000\u0142\u0532\u0001\u0000"+ - "\u0000\u0000\u0144\u0537\u0001\u0000\u0000\u0000\u0146\u053b\u0001\u0000"+ - "\u0000\u0000\u0148\u0540\u0001\u0000\u0000\u0000\u014a\u0545\u0001\u0000"+ - "\u0000\u0000\u014c\u0549\u0001\u0000\u0000\u0000\u014e\u054d\u0001\u0000"+ - "\u0000\u0000\u0150\u0551\u0001\u0000\u0000\u0000\u0152\u0555\u0001\u0000"+ - "\u0000\u0000\u0154\u0559\u0001\u0000\u0000\u0000\u0156\u055e\u0001\u0000"+ - "\u0000\u0000\u0158\u0563\u0001\u0000\u0000\u0000\u015a\u0567\u0001\u0000"+ - "\u0000\u0000\u015c\u056b\u0001\u0000\u0000\u0000\u015e\u056f\u0001\u0000"+ - "\u0000\u0000\u0160\u0574\u0001\u0000\u0000\u0000\u0162\u057d\u0001\u0000"+ - "\u0000\u0000\u0164\u0581\u0001\u0000\u0000\u0000\u0166\u0585\u0001\u0000"+ - "\u0000\u0000\u0168\u0589\u0001\u0000\u0000\u0000\u016a\u058d\u0001\u0000"+ - "\u0000\u0000\u016c\u0592\u0001\u0000\u0000\u0000\u016e\u0596\u0001\u0000"+ - "\u0000\u0000\u0170\u059a\u0001\u0000\u0000\u0000\u0172\u059e\u0001\u0000"+ - "\u0000\u0000\u0174\u05a3\u0001\u0000\u0000\u0000\u0176\u05a7\u0001\u0000"+ - "\u0000\u0000\u0178\u05ab\u0001\u0000\u0000\u0000\u017a\u05af\u0001\u0000"+ - "\u0000\u0000\u017c\u05b3\u0001\u0000\u0000\u0000\u017e\u05b7\u0001\u0000"+ - "\u0000\u0000\u0180\u05bd\u0001\u0000\u0000\u0000\u0182\u05c1\u0001\u0000"+ - "\u0000\u0000\u0184\u05c5\u0001\u0000\u0000\u0000\u0186\u05c9\u0001\u0000"+ - "\u0000\u0000\u0188\u05cd\u0001\u0000\u0000\u0000\u018a\u05d1\u0001\u0000"+ - "\u0000\u0000\u018c\u05d5\u0001\u0000\u0000\u0000\u018e\u05da\u0001\u0000"+ - "\u0000\u0000\u0190\u05de\u0001\u0000\u0000\u0000\u0192\u05e2\u0001\u0000"+ - "\u0000\u0000\u0194\u05e8\u0001\u0000\u0000\u0000\u0196\u05f1\u0001\u0000"+ - "\u0000\u0000\u0198\u05f5\u0001\u0000\u0000\u0000\u019a\u05f9\u0001\u0000"+ - "\u0000\u0000\u019c\u05fd\u0001\u0000\u0000\u0000\u019e\u0601\u0001\u0000"+ - 
"\u0000\u0000\u01a0\u0605\u0001\u0000\u0000\u0000\u01a2\u0609\u0001\u0000"+ - "\u0000\u0000\u01a4\u060d\u0001\u0000\u0000\u0000\u01a6\u0611\u0001\u0000"+ - "\u0000\u0000\u01a8\u0616\u0001\u0000\u0000\u0000\u01aa\u061c\u0001\u0000"+ - "\u0000\u0000\u01ac\u0622\u0001\u0000\u0000\u0000\u01ae\u0626\u0001\u0000"+ - "\u0000\u0000\u01b0\u062a\u0001\u0000\u0000\u0000\u01b2\u062e\u0001\u0000"+ - "\u0000\u0000\u01b4\u0634\u0001\u0000\u0000\u0000\u01b6\u063a\u0001\u0000"+ - "\u0000\u0000\u01b8\u063e\u0001\u0000\u0000\u0000\u01ba\u0642\u0001\u0000"+ - "\u0000\u0000\u01bc\u0646\u0001\u0000\u0000\u0000\u01be\u064c\u0001\u0000"+ - "\u0000\u0000\u01c0\u0652\u0001\u0000\u0000\u0000\u01c2\u0658\u0001\u0000"+ + "\u0000\u0000\u00ac\u03b1\u0001\u0000\u0000\u0000\u00ae\u03b3\u0001\u0000"+ + "\u0000\u0000\u00b0\u03b5\u0001\u0000\u0000\u0000\u00b2\u03ca\u0001\u0000"+ + "\u0000\u0000\u00b4\u03cc\u0001\u0000\u0000\u0000\u00b6\u03d1\u0001\u0000"+ + "\u0000\u0000\u00b8\u03e6\u0001\u0000\u0000\u0000\u00ba\u03e8\u0001\u0000"+ + "\u0000\u0000\u00bc\u03f0\u0001\u0000\u0000\u0000\u00be\u03f2\u0001\u0000"+ + "\u0000\u0000\u00c0\u03f6\u0001\u0000\u0000\u0000\u00c2\u03fa\u0001\u0000"+ + "\u0000\u0000\u00c4\u03fe\u0001\u0000\u0000\u0000\u00c6\u0403\u0001\u0000"+ + "\u0000\u0000\u00c8\u0408\u0001\u0000\u0000\u0000\u00ca\u040c\u0001\u0000"+ + "\u0000\u0000\u00cc\u0410\u0001\u0000\u0000\u0000\u00ce\u0414\u0001\u0000"+ + "\u0000\u0000\u00d0\u0419\u0001\u0000\u0000\u0000\u00d2\u041d\u0001\u0000"+ + "\u0000\u0000\u00d4\u0421\u0001\u0000\u0000\u0000\u00d6\u0425\u0001\u0000"+ + "\u0000\u0000\u00d8\u0429\u0001\u0000\u0000\u0000\u00da\u042d\u0001\u0000"+ + "\u0000\u0000\u00dc\u0439\u0001\u0000\u0000\u0000\u00de\u043c\u0001\u0000"+ + "\u0000\u0000\u00e0\u0440\u0001\u0000\u0000\u0000\u00e2\u0444\u0001\u0000"+ + "\u0000\u0000\u00e4\u0448\u0001\u0000\u0000\u0000\u00e6\u044c\u0001\u0000"+ + "\u0000\u0000\u00e8\u0450\u0001\u0000\u0000\u0000\u00ea\u0454\u0001\u0000"+ + "\u0000\u0000\u00ec\u0459\u0001\u0000\u0000\u0000\u00ee\u045d\u0001\u0000"+ + "\u0000\u0000\u00f0\u0461\u0001\u0000\u0000\u0000\u00f2\u0466\u0001\u0000"+ + "\u0000\u0000\u00f4\u046f\u0001\u0000\u0000\u0000\u00f6\u0484\u0001\u0000"+ + "\u0000\u0000\u00f8\u0488\u0001\u0000\u0000\u0000\u00fa\u048c\u0001\u0000"+ + "\u0000\u0000\u00fc\u0490\u0001\u0000\u0000\u0000\u00fe\u0494\u0001\u0000"+ + "\u0000\u0000\u0100\u0498\u0001\u0000\u0000\u0000\u0102\u049d\u0001\u0000"+ + "\u0000\u0000\u0104\u04a1\u0001\u0000\u0000\u0000\u0106\u04a5\u0001\u0000"+ + "\u0000\u0000\u0108\u04a9\u0001\u0000\u0000\u0000\u010a\u04ae\u0001\u0000"+ + "\u0000\u0000\u010c\u04b3\u0001\u0000\u0000\u0000\u010e\u04b6\u0001\u0000"+ + "\u0000\u0000\u0110\u04ba\u0001\u0000\u0000\u0000\u0112\u04be\u0001\u0000"+ + "\u0000\u0000\u0114\u04c2\u0001\u0000\u0000\u0000\u0116\u04c6\u0001\u0000"+ + "\u0000\u0000\u0118\u04cb\u0001\u0000\u0000\u0000\u011a\u04d0\u0001\u0000"+ + "\u0000\u0000\u011c\u04d5\u0001\u0000\u0000\u0000\u011e\u04dc\u0001\u0000"+ + "\u0000\u0000\u0120\u04e5\u0001\u0000\u0000\u0000\u0122\u04ec\u0001\u0000"+ + "\u0000\u0000\u0124\u04f0\u0001\u0000\u0000\u0000\u0126\u04f4\u0001\u0000"+ + "\u0000\u0000\u0128\u04f8\u0001\u0000\u0000\u0000\u012a\u04fc\u0001\u0000"+ + "\u0000\u0000\u012c\u0502\u0001\u0000\u0000\u0000\u012e\u0506\u0001\u0000"+ + "\u0000\u0000\u0130\u050a\u0001\u0000\u0000\u0000\u0132\u050e\u0001\u0000"+ + "\u0000\u0000\u0134\u0512\u0001\u0000\u0000\u0000\u0136\u0516\u0001\u0000"+ + "\u0000\u0000\u0138\u051a\u0001\u0000\u0000\u0000\u013a\u051f\u0001\u0000"+ + 
"\u0000\u0000\u013c\u0524\u0001\u0000\u0000\u0000\u013e\u0528\u0001\u0000"+ + "\u0000\u0000\u0140\u052c\u0001\u0000\u0000\u0000\u0142\u0530\u0001\u0000"+ + "\u0000\u0000\u0144\u0535\u0001\u0000\u0000\u0000\u0146\u0539\u0001\u0000"+ + "\u0000\u0000\u0148\u053e\u0001\u0000\u0000\u0000\u014a\u0543\u0001\u0000"+ + "\u0000\u0000\u014c\u0547\u0001\u0000\u0000\u0000\u014e\u054b\u0001\u0000"+ + "\u0000\u0000\u0150\u054f\u0001\u0000\u0000\u0000\u0152\u0553\u0001\u0000"+ + "\u0000\u0000\u0154\u0557\u0001\u0000\u0000\u0000\u0156\u055c\u0001\u0000"+ + "\u0000\u0000\u0158\u0561\u0001\u0000\u0000\u0000\u015a\u0565\u0001\u0000"+ + "\u0000\u0000\u015c\u0569\u0001\u0000\u0000\u0000\u015e\u056d\u0001\u0000"+ + "\u0000\u0000\u0160\u0572\u0001\u0000\u0000\u0000\u0162\u057b\u0001\u0000"+ + "\u0000\u0000\u0164\u057f\u0001\u0000\u0000\u0000\u0166\u0583\u0001\u0000"+ + "\u0000\u0000\u0168\u0587\u0001\u0000\u0000\u0000\u016a\u058b\u0001\u0000"+ + "\u0000\u0000\u016c\u0590\u0001\u0000\u0000\u0000\u016e\u0594\u0001\u0000"+ + "\u0000\u0000\u0170\u0598\u0001\u0000\u0000\u0000\u0172\u059c\u0001\u0000"+ + "\u0000\u0000\u0174\u05a1\u0001\u0000\u0000\u0000\u0176\u05a5\u0001\u0000"+ + "\u0000\u0000\u0178\u05a9\u0001\u0000\u0000\u0000\u017a\u05ad\u0001\u0000"+ + "\u0000\u0000\u017c\u05b1\u0001\u0000\u0000\u0000\u017e\u05b5\u0001\u0000"+ + "\u0000\u0000\u0180\u05bb\u0001\u0000\u0000\u0000\u0182\u05bf\u0001\u0000"+ + "\u0000\u0000\u0184\u05c3\u0001\u0000\u0000\u0000\u0186\u05c7\u0001\u0000"+ + "\u0000\u0000\u0188\u05cb\u0001\u0000\u0000\u0000\u018a\u05cf\u0001\u0000"+ + "\u0000\u0000\u018c\u05d3\u0001\u0000\u0000\u0000\u018e\u05d8\u0001\u0000"+ + "\u0000\u0000\u0190\u05dc\u0001\u0000\u0000\u0000\u0192\u05e0\u0001\u0000"+ + "\u0000\u0000\u0194\u05e6\u0001\u0000\u0000\u0000\u0196\u05ef\u0001\u0000"+ + "\u0000\u0000\u0198\u05f3\u0001\u0000\u0000\u0000\u019a\u05f7\u0001\u0000"+ + "\u0000\u0000\u019c\u05fb\u0001\u0000\u0000\u0000\u019e\u05ff\u0001\u0000"+ + "\u0000\u0000\u01a0\u0603\u0001\u0000\u0000\u0000\u01a2\u0607\u0001\u0000"+ + "\u0000\u0000\u01a4\u060b\u0001\u0000\u0000\u0000\u01a6\u060f\u0001\u0000"+ + "\u0000\u0000\u01a8\u0614\u0001\u0000\u0000\u0000\u01aa\u061a\u0001\u0000"+ + "\u0000\u0000\u01ac\u0620\u0001\u0000\u0000\u0000\u01ae\u0624\u0001\u0000"+ + "\u0000\u0000\u01b0\u0628\u0001\u0000\u0000\u0000\u01b2\u062c\u0001\u0000"+ + "\u0000\u0000\u01b4\u0632\u0001\u0000\u0000\u0000\u01b6\u0638\u0001\u0000"+ + "\u0000\u0000\u01b8\u063c\u0001\u0000\u0000\u0000\u01ba\u0640\u0001\u0000"+ + "\u0000\u0000\u01bc\u0644\u0001\u0000\u0000\u0000\u01be\u064a\u0001\u0000"+ + "\u0000\u0000\u01c0\u0650\u0001\u0000\u0000\u0000\u01c2\u0656\u0001\u0000"+ "\u0000\u0000\u01c4\u01c5\u0007\u0000\u0000\u0000\u01c5\u01c6\u0007\u0001"+ "\u0000\u0000\u01c6\u01c7\u0007\u0002\u0000\u0000\u01c7\u01c8\u0007\u0002"+ "\u0000\u0000\u01c8\u01c9\u0007\u0003\u0000\u0000\u01c9\u01ca\u0007\u0004"+ @@ -1110,347 +1092,346 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u0000\u03aa\u00a5\u0001\u0000\u0000\u0000\u03ab\u03ac\u0005*\u0000\u0000"+ "\u03ac\u00a7\u0001\u0000\u0000\u0000\u03ad\u03ae\u0005/\u0000\u0000\u03ae"+ "\u00a9\u0001\u0000\u0000\u0000\u03af\u03b0\u0005%\u0000\u0000\u03b0\u00ab"+ - "\u0001\u0000\u0000\u0000\u03b1\u03b2\u0004N\b\u0000\u03b2\u03b3\u0005"+ - "{\u0000\u0000\u03b3\u00ad\u0001\u0000\u0000\u0000\u03b4\u03b5\u0004O\t"+ - "\u0000\u03b5\u03b6\u0005}\u0000\u0000\u03b6\u00af\u0001\u0000\u0000\u0000"+ - "\u03b7\u03b8\u0003.\u000f\u0000\u03b8\u03b9\u0001\u0000\u0000\u0000\u03b9"+ - 
"\u03ba\u0006P\r\u0000\u03ba\u00b1\u0001\u0000\u0000\u0000\u03bb\u03be"+ - "\u0003\u008c>\u0000\u03bc\u03bf\u0003L\u001e\u0000\u03bd\u03bf\u0003Z"+ - "%\u0000\u03be\u03bc\u0001\u0000\u0000\u0000\u03be\u03bd\u0001\u0000\u0000"+ - "\u0000\u03bf\u03c3\u0001\u0000\u0000\u0000\u03c0\u03c2\u0003\\&\u0000"+ - "\u03c1\u03c0\u0001\u0000\u0000\u0000\u03c2\u03c5\u0001\u0000\u0000\u0000"+ - "\u03c3\u03c1\u0001\u0000\u0000\u0000\u03c3\u03c4\u0001\u0000\u0000\u0000"+ - "\u03c4\u03cd\u0001\u0000\u0000\u0000\u03c5\u03c3\u0001\u0000\u0000\u0000"+ - "\u03c6\u03c8\u0003\u008c>\u0000\u03c7\u03c9\u0003J\u001d\u0000\u03c8\u03c7"+ - "\u0001\u0000\u0000\u0000\u03c9\u03ca\u0001\u0000\u0000\u0000\u03ca\u03c8"+ - "\u0001\u0000\u0000\u0000\u03ca\u03cb\u0001\u0000\u0000\u0000\u03cb\u03cd"+ - "\u0001\u0000\u0000\u0000\u03cc\u03bb\u0001\u0000\u0000\u0000\u03cc\u03c6"+ - "\u0001\u0000\u0000\u0000\u03cd\u00b3\u0001\u0000\u0000\u0000\u03ce\u03cf"+ - "\u0005[\u0000\u0000\u03cf\u03d0\u0001\u0000\u0000\u0000\u03d0\u03d1\u0006"+ - "R\u0000\u0000\u03d1\u03d2\u0006R\u0000\u0000\u03d2\u00b5\u0001\u0000\u0000"+ - "\u0000\u03d3\u03d4\u0005]\u0000\u0000\u03d4\u03d5\u0001\u0000\u0000\u0000"+ - "\u03d5\u03d6\u0006S\f\u0000\u03d6\u03d7\u0006S\f\u0000\u03d7\u00b7\u0001"+ - "\u0000\u0000\u0000\u03d8\u03dc\u0003L\u001e\u0000\u03d9\u03db\u0003\\"+ - "&\u0000\u03da\u03d9\u0001\u0000\u0000\u0000\u03db\u03de\u0001\u0000\u0000"+ - "\u0000\u03dc\u03da\u0001\u0000\u0000\u0000\u03dc\u03dd\u0001\u0000\u0000"+ - "\u0000\u03dd\u03e9\u0001\u0000\u0000\u0000\u03de\u03dc\u0001\u0000\u0000"+ - "\u0000\u03df\u03e2\u0003Z%\u0000\u03e0\u03e2\u0003T\"\u0000\u03e1\u03df"+ - "\u0001\u0000\u0000\u0000\u03e1\u03e0\u0001\u0000\u0000\u0000\u03e2\u03e4"+ - "\u0001\u0000\u0000\u0000\u03e3\u03e5\u0003\\&\u0000\u03e4\u03e3\u0001"+ - "\u0000\u0000\u0000\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6\u03e4\u0001"+ - "\u0000\u0000\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7\u03e9\u0001"+ - "\u0000\u0000\u0000\u03e8\u03d8\u0001\u0000\u0000\u0000\u03e8\u03e1\u0001"+ - "\u0000\u0000\u0000\u03e9\u00b9\u0001\u0000\u0000\u0000\u03ea\u03ec\u0003"+ - "V#\u0000\u03eb\u03ed\u0003X$\u0000\u03ec\u03eb\u0001\u0000\u0000\u0000"+ - "\u03ed\u03ee\u0001\u0000\u0000\u0000\u03ee\u03ec\u0001\u0000\u0000\u0000"+ - "\u03ee\u03ef\u0001\u0000\u0000\u0000\u03ef\u03f0\u0001\u0000\u0000\u0000"+ - "\u03f0\u03f1\u0003V#\u0000\u03f1\u00bb\u0001\u0000\u0000\u0000\u03f2\u03f3"+ - "\u0003\u00baU\u0000\u03f3\u00bd\u0001\u0000\u0000\u0000\u03f4\u03f5\u0003"+ - "B\u0019\u0000\u03f5\u03f6\u0001\u0000\u0000\u0000\u03f6\u03f7\u0006W\u000b"+ - "\u0000\u03f7\u00bf\u0001\u0000\u0000\u0000\u03f8\u03f9\u0003D\u001a\u0000"+ - "\u03f9\u03fa\u0001\u0000\u0000\u0000\u03fa\u03fb\u0006X\u000b\u0000\u03fb"+ - "\u00c1\u0001\u0000\u0000\u0000\u03fc\u03fd\u0003F\u001b\u0000\u03fd\u03fe"+ - "\u0001\u0000\u0000\u0000\u03fe\u03ff\u0006Y\u000b\u0000\u03ff\u00c3\u0001"+ - "\u0000\u0000\u0000\u0400\u0401\u0003\u00b4R\u0000\u0401\u0402\u0001\u0000"+ - "\u0000\u0000\u0402\u0403\u0006Z\u000e\u0000\u0403\u0404\u0006Z\u000f\u0000"+ - "\u0404\u00c5\u0001\u0000\u0000\u0000\u0405\u0406\u0003H\u001c\u0000\u0406"+ - "\u0407\u0001\u0000\u0000\u0000\u0407\u0408\u0006[\u0010\u0000\u0408\u0409"+ - "\u0006[\f\u0000\u0409\u00c7\u0001\u0000\u0000\u0000\u040a\u040b\u0003"+ - "F\u001b\u0000\u040b\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006\\"+ - "\u000b\u0000\u040d\u00c9\u0001\u0000\u0000\u0000\u040e\u040f\u0003B\u0019"+ - "\u0000\u040f\u0410\u0001\u0000\u0000\u0000\u0410\u0411\u0006]\u000b\u0000"+ - 
"\u0411\u00cb\u0001\u0000\u0000\u0000\u0412\u0413\u0003D\u001a\u0000\u0413"+ - "\u0414\u0001\u0000\u0000\u0000\u0414\u0415\u0006^\u000b\u0000\u0415\u00cd"+ - "\u0001\u0000\u0000\u0000\u0416\u0417\u0003H\u001c\u0000\u0417\u0418\u0001"+ - "\u0000\u0000\u0000\u0418\u0419\u0006_\u0010\u0000\u0419\u041a\u0006_\f"+ - "\u0000\u041a\u00cf\u0001\u0000\u0000\u0000\u041b\u041c\u0003\u00b4R\u0000"+ - "\u041c\u041d\u0001\u0000\u0000\u0000\u041d\u041e\u0006`\u000e\u0000\u041e"+ - "\u00d1\u0001\u0000\u0000\u0000\u041f\u0420\u0003\u00b6S\u0000\u0420\u0421"+ - "\u0001\u0000\u0000\u0000\u0421\u0422\u0006a\u0011\u0000\u0422\u00d3\u0001"+ - "\u0000\u0000\u0000\u0423\u0424\u0003n/\u0000\u0424\u0425\u0001\u0000\u0000"+ - "\u0000\u0425\u0426\u0006b\u0012\u0000\u0426\u00d5\u0001\u0000\u0000\u0000"+ - "\u0427\u0428\u0003p0\u0000\u0428\u0429\u0001\u0000\u0000\u0000\u0429\u042a"+ - "\u0006c\u0013\u0000\u042a\u00d7\u0001\u0000\u0000\u0000\u042b\u042c\u0003"+ - "j-\u0000\u042c\u042d\u0001\u0000\u0000\u0000\u042d\u042e\u0006d\u0014"+ - "\u0000\u042e\u00d9\u0001\u0000\u0000\u0000\u042f\u0430\u0007\u0010\u0000"+ - "\u0000\u0430\u0431\u0007\u0003\u0000\u0000\u0431\u0432\u0007\u0005\u0000"+ - "\u0000\u0432\u0433\u0007\f\u0000\u0000\u0433\u0434\u0007\u0000\u0000\u0000"+ - "\u0434\u0435\u0007\f\u0000\u0000\u0435\u0436\u0007\u0005\u0000\u0000\u0436"+ - "\u0437\u0007\f\u0000\u0000\u0437\u00db\u0001\u0000\u0000\u0000\u0438\u043c"+ - "\b!\u0000\u0000\u0439\u043a\u0005/\u0000\u0000\u043a\u043c\b\"\u0000\u0000"+ - "\u043b\u0438\u0001\u0000\u0000\u0000\u043b\u0439\u0001\u0000\u0000\u0000"+ - "\u043c\u00dd\u0001\u0000\u0000\u0000\u043d\u043f\u0003\u00dcf\u0000\u043e"+ - "\u043d\u0001\u0000\u0000\u0000\u043f\u0440\u0001\u0000\u0000\u0000\u0440"+ - "\u043e\u0001\u0000\u0000\u0000\u0440\u0441\u0001\u0000\u0000\u0000\u0441"+ - "\u00df\u0001\u0000\u0000\u0000\u0442\u0443\u0003\u00deg\u0000\u0443\u0444"+ - "\u0001\u0000\u0000\u0000\u0444\u0445\u0006h\u0015\u0000\u0445\u00e1\u0001"+ - "\u0000\u0000\u0000\u0446\u0447\u0003^\'\u0000\u0447\u0448\u0001\u0000"+ - "\u0000\u0000\u0448\u0449\u0006i\u0016\u0000\u0449\u00e3\u0001\u0000\u0000"+ - "\u0000\u044a\u044b\u0003B\u0019\u0000\u044b\u044c\u0001\u0000\u0000\u0000"+ - "\u044c\u044d\u0006j\u000b\u0000\u044d\u00e5\u0001\u0000\u0000\u0000\u044e"+ - "\u044f\u0003D\u001a\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450\u0451"+ - "\u0006k\u000b\u0000\u0451\u00e7\u0001\u0000\u0000\u0000\u0452\u0453\u0003"+ - "F\u001b\u0000\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455\u0006l\u000b"+ - "\u0000\u0455\u00e9\u0001\u0000\u0000\u0000\u0456\u0457\u0003H\u001c\u0000"+ - "\u0457\u0458\u0001\u0000\u0000\u0000\u0458\u0459\u0006m\u0010\u0000\u0459"+ - "\u045a\u0006m\f\u0000\u045a\u00eb\u0001\u0000\u0000\u0000\u045b\u045c"+ - "\u0003t2\u0000\u045c\u045d\u0001\u0000\u0000\u0000\u045d\u045e\u0006n"+ - "\u0017\u0000\u045e\u00ed\u0001\u0000\u0000\u0000\u045f\u0460\u0003p0\u0000"+ - "\u0460\u0461\u0001\u0000\u0000\u0000\u0461\u0462\u0006o\u0013\u0000\u0462"+ - "\u00ef\u0001\u0000\u0000\u0000\u0463\u0464\u0004p\n\u0000\u0464\u0465"+ - "\u0003\u008c>\u0000\u0465\u0466\u0001\u0000\u0000\u0000\u0466\u0467\u0006"+ - "p\u0018\u0000\u0467\u00f1\u0001\u0000\u0000\u0000\u0468\u0469\u0004q\u000b"+ - "\u0000\u0469\u046a\u0003\u00b2Q\u0000\u046a\u046b\u0001\u0000\u0000\u0000"+ - "\u046b\u046c\u0006q\u0019\u0000\u046c\u00f3\u0001\u0000\u0000\u0000\u046d"+ - "\u0472\u0003L\u001e\u0000\u046e\u0472\u0003J\u001d\u0000\u046f\u0472\u0003"+ - "Z%\u0000\u0470\u0472\u0003\u00a6K\u0000\u0471\u046d\u0001\u0000\u0000"+ - 
"\u0000\u0471\u046e\u0001\u0000\u0000\u0000\u0471\u046f\u0001\u0000\u0000"+ - "\u0000\u0471\u0470\u0001\u0000\u0000\u0000\u0472\u00f5\u0001\u0000\u0000"+ - "\u0000\u0473\u0476\u0003L\u001e\u0000\u0474\u0476\u0003\u00a6K\u0000\u0475"+ - "\u0473\u0001\u0000\u0000\u0000\u0475\u0474\u0001\u0000\u0000\u0000\u0476"+ - "\u047a\u0001\u0000\u0000\u0000\u0477\u0479\u0003\u00f4r\u0000\u0478\u0477"+ - "\u0001\u0000\u0000\u0000\u0479\u047c\u0001\u0000\u0000\u0000\u047a\u0478"+ - "\u0001\u0000\u0000\u0000\u047a\u047b\u0001\u0000\u0000\u0000\u047b\u0487"+ - "\u0001\u0000\u0000\u0000\u047c\u047a\u0001\u0000\u0000\u0000\u047d\u0480"+ - "\u0003Z%\u0000\u047e\u0480\u0003T\"\u0000\u047f\u047d\u0001\u0000\u0000"+ - "\u0000\u047f\u047e\u0001\u0000\u0000\u0000\u0480\u0482\u0001\u0000\u0000"+ - "\u0000\u0481\u0483\u0003\u00f4r\u0000\u0482\u0481\u0001\u0000\u0000\u0000"+ - "\u0483\u0484\u0001\u0000\u0000\u0000\u0484\u0482\u0001\u0000\u0000\u0000"+ - "\u0484\u0485\u0001\u0000\u0000\u0000\u0485\u0487\u0001\u0000\u0000\u0000"+ - "\u0486\u0475\u0001\u0000\u0000\u0000\u0486\u047f\u0001\u0000\u0000\u0000"+ - "\u0487\u00f7\u0001\u0000\u0000\u0000\u0488\u048b\u0003\u00f6s\u0000\u0489"+ - "\u048b\u0003\u00baU\u0000\u048a\u0488\u0001\u0000\u0000\u0000\u048a\u0489"+ - "\u0001\u0000\u0000\u0000\u048b\u048c\u0001\u0000\u0000\u0000\u048c\u048a"+ - "\u0001\u0000\u0000\u0000\u048c\u048d\u0001\u0000\u0000\u0000\u048d\u00f9"+ - "\u0001\u0000\u0000\u0000\u048e\u048f\u0003B\u0019\u0000\u048f\u0490\u0001"+ - "\u0000\u0000\u0000\u0490\u0491\u0006u\u000b\u0000\u0491\u00fb\u0001\u0000"+ - "\u0000\u0000\u0492\u0493\u0003D\u001a\u0000\u0493\u0494\u0001\u0000\u0000"+ - "\u0000\u0494\u0495\u0006v\u000b\u0000\u0495\u00fd\u0001\u0000\u0000\u0000"+ - "\u0496\u0497\u0003F\u001b\u0000\u0497\u0498\u0001\u0000\u0000\u0000\u0498"+ - "\u0499\u0006w\u000b\u0000\u0499\u00ff\u0001\u0000\u0000\u0000\u049a\u049b"+ - "\u0003H\u001c\u0000\u049b\u049c\u0001\u0000\u0000\u0000\u049c\u049d\u0006"+ - "x\u0010\u0000\u049d\u049e\u0006x\f\u0000\u049e\u0101\u0001\u0000\u0000"+ - "\u0000\u049f\u04a0\u0003j-\u0000\u04a0\u04a1\u0001\u0000\u0000\u0000\u04a1"+ - "\u04a2\u0006y\u0014\u0000\u04a2\u0103\u0001\u0000\u0000\u0000\u04a3\u04a4"+ - "\u0003p0\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000\u04a5\u04a6\u0006z"+ - "\u0013\u0000\u04a6\u0105\u0001\u0000\u0000\u0000\u04a7\u04a8\u0003t2\u0000"+ - "\u04a8\u04a9\u0001\u0000\u0000\u0000\u04a9\u04aa\u0006{\u0017\u0000\u04aa"+ - "\u0107\u0001\u0000\u0000\u0000\u04ab\u04ac\u0004|\f\u0000\u04ac\u04ad"+ - "\u0003\u008c>\u0000\u04ad\u04ae\u0001\u0000\u0000\u0000\u04ae\u04af\u0006"+ - "|\u0018\u0000\u04af\u0109\u0001\u0000\u0000\u0000\u04b0\u04b1\u0004}\r"+ - "\u0000\u04b1\u04b2\u0003\u00b2Q\u0000\u04b2\u04b3\u0001\u0000\u0000\u0000"+ - "\u04b3\u04b4\u0006}\u0019\u0000\u04b4\u010b\u0001\u0000\u0000\u0000\u04b5"+ - "\u04b6\u0007\f\u0000\u0000\u04b6\u04b7\u0007\u0002\u0000\u0000\u04b7\u010d"+ - "\u0001\u0000\u0000\u0000\u04b8\u04b9\u0003\u00f8t\u0000\u04b9\u04ba\u0001"+ - "\u0000\u0000\u0000\u04ba\u04bb\u0006\u007f\u001a\u0000\u04bb\u010f\u0001"+ - "\u0000\u0000\u0000\u04bc\u04bd\u0003B\u0019\u0000\u04bd\u04be\u0001\u0000"+ - "\u0000\u0000\u04be\u04bf\u0006\u0080\u000b\u0000\u04bf\u0111\u0001\u0000"+ - "\u0000\u0000\u04c0\u04c1\u0003D\u001a\u0000\u04c1\u04c2\u0001\u0000\u0000"+ - "\u0000\u04c2\u04c3\u0006\u0081\u000b\u0000\u04c3\u0113\u0001\u0000\u0000"+ - "\u0000\u04c4\u04c5\u0003F\u001b\u0000\u04c5\u04c6\u0001\u0000\u0000\u0000"+ - "\u04c6\u04c7\u0006\u0082\u000b\u0000\u04c7\u0115\u0001\u0000\u0000\u0000"+ - 
"\u04c8\u04c9\u0003H\u001c\u0000\u04c9\u04ca\u0001\u0000\u0000\u0000\u04ca"+ - "\u04cb\u0006\u0083\u0010\u0000\u04cb\u04cc\u0006\u0083\f\u0000\u04cc\u0117"+ - "\u0001\u0000\u0000\u0000\u04cd\u04ce\u0003\u00b4R\u0000\u04ce\u04cf\u0001"+ - "\u0000\u0000\u0000\u04cf\u04d0\u0006\u0084\u000e\u0000\u04d0\u04d1\u0006"+ - "\u0084\u001b\u0000\u04d1\u0119\u0001\u0000\u0000\u0000\u04d2\u04d3\u0007"+ - "\u0007\u0000\u0000\u04d3\u04d4\u0007\t\u0000\u0000\u04d4\u04d5\u0001\u0000"+ - "\u0000\u0000\u04d5\u04d6\u0006\u0085\u001c\u0000\u04d6\u011b\u0001\u0000"+ - "\u0000\u0000\u04d7\u04d8\u0007\u0013\u0000\u0000\u04d8\u04d9\u0007\u0001"+ - "\u0000\u0000\u04d9\u04da\u0007\u0005\u0000\u0000\u04da\u04db\u0007\n\u0000"+ - "\u0000\u04db\u04dc\u0001\u0000\u0000\u0000\u04dc\u04dd\u0006\u0086\u001c"+ - "\u0000\u04dd\u011d\u0001\u0000\u0000\u0000\u04de\u04df\b#\u0000\u0000"+ - "\u04df\u011f\u0001\u0000\u0000\u0000\u04e0\u04e2\u0003\u011e\u0087\u0000"+ - "\u04e1\u04e0\u0001\u0000\u0000\u0000\u04e2\u04e3\u0001\u0000\u0000\u0000"+ - "\u04e3\u04e1\u0001\u0000\u0000\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000"+ - "\u04e4\u04e5\u0001\u0000\u0000\u0000\u04e5\u04e6\u0003n/\u0000\u04e6\u04e8"+ - "\u0001\u0000\u0000\u0000\u04e7\u04e1\u0001\u0000\u0000\u0000\u04e7\u04e8"+ - "\u0001\u0000\u0000\u0000\u04e8\u04ea\u0001\u0000\u0000\u0000\u04e9\u04eb"+ - "\u0003\u011e\u0087\u0000\u04ea\u04e9\u0001\u0000\u0000\u0000\u04eb\u04ec"+ - "\u0001\u0000\u0000\u0000\u04ec\u04ea\u0001\u0000\u0000\u0000\u04ec\u04ed"+ - "\u0001\u0000\u0000\u0000\u04ed\u0121\u0001\u0000\u0000\u0000\u04ee\u04ef"+ - "\u0003\u0120\u0088\u0000\u04ef\u04f0\u0001\u0000\u0000\u0000\u04f0\u04f1"+ - "\u0006\u0089\u001d\u0000\u04f1\u0123\u0001\u0000\u0000\u0000\u04f2\u04f3"+ - "\u0003B\u0019\u0000\u04f3\u04f4\u0001\u0000\u0000\u0000\u04f4\u04f5\u0006"+ - "\u008a\u000b\u0000\u04f5\u0125\u0001\u0000\u0000\u0000\u04f6\u04f7\u0003"+ - "D\u001a\u0000\u04f7\u04f8\u0001\u0000\u0000\u0000\u04f8\u04f9\u0006\u008b"+ - "\u000b\u0000\u04f9\u0127\u0001\u0000\u0000\u0000\u04fa\u04fb\u0003F\u001b"+ - "\u0000\u04fb\u04fc\u0001\u0000\u0000\u0000\u04fc\u04fd\u0006\u008c\u000b"+ - "\u0000\u04fd\u0129\u0001\u0000\u0000\u0000\u04fe\u04ff\u0003H\u001c\u0000"+ - "\u04ff\u0500\u0001\u0000\u0000\u0000\u0500\u0501\u0006\u008d\u0010\u0000"+ - "\u0501\u0502\u0006\u008d\f\u0000\u0502\u0503\u0006\u008d\f\u0000\u0503"+ - "\u012b\u0001\u0000\u0000\u0000\u0504\u0505\u0003j-\u0000\u0505\u0506\u0001"+ - "\u0000\u0000\u0000\u0506\u0507\u0006\u008e\u0014\u0000\u0507\u012d\u0001"+ - "\u0000\u0000\u0000\u0508\u0509\u0003p0\u0000\u0509\u050a\u0001\u0000\u0000"+ - "\u0000\u050a\u050b\u0006\u008f\u0013\u0000\u050b\u012f\u0001\u0000\u0000"+ - "\u0000\u050c\u050d\u0003t2\u0000\u050d\u050e\u0001\u0000\u0000\u0000\u050e"+ - "\u050f\u0006\u0090\u0017\u0000\u050f\u0131\u0001\u0000\u0000\u0000\u0510"+ - "\u0511\u0003\u011c\u0086\u0000\u0511\u0512\u0001\u0000\u0000\u0000\u0512"+ - "\u0513\u0006\u0091\u001e\u0000\u0513\u0133\u0001\u0000\u0000\u0000\u0514"+ - "\u0515\u0003\u00f8t\u0000\u0515\u0516\u0001\u0000\u0000\u0000\u0516\u0517"+ - "\u0006\u0092\u001a\u0000\u0517\u0135\u0001\u0000\u0000\u0000\u0518\u0519"+ - "\u0003\u00bcV\u0000\u0519\u051a\u0001\u0000\u0000\u0000\u051a\u051b\u0006"+ - "\u0093\u001f\u0000\u051b\u0137\u0001\u0000\u0000\u0000\u051c\u051d\u0004"+ - "\u0094\u000e\u0000\u051d\u051e\u0003\u008c>\u0000\u051e\u051f\u0001\u0000"+ - "\u0000\u0000\u051f\u0520\u0006\u0094\u0018\u0000\u0520\u0139\u0001\u0000"+ - "\u0000\u0000\u0521\u0522\u0004\u0095\u000f\u0000\u0522\u0523\u0003\u00b2"+ - 
"Q\u0000\u0523\u0524\u0001\u0000\u0000\u0000\u0524\u0525\u0006\u0095\u0019"+ - "\u0000\u0525\u013b\u0001\u0000\u0000\u0000\u0526\u0527\u0003B\u0019\u0000"+ - "\u0527\u0528\u0001\u0000\u0000\u0000\u0528\u0529\u0006\u0096\u000b\u0000"+ - "\u0529\u013d\u0001\u0000\u0000\u0000\u052a\u052b\u0003D\u001a\u0000\u052b"+ - "\u052c\u0001\u0000\u0000\u0000\u052c\u052d\u0006\u0097\u000b\u0000\u052d"+ - "\u013f\u0001\u0000\u0000\u0000\u052e\u052f\u0003F\u001b\u0000\u052f\u0530"+ - "\u0001\u0000\u0000\u0000\u0530\u0531\u0006\u0098\u000b\u0000\u0531\u0141"+ - "\u0001\u0000\u0000\u0000\u0532\u0533\u0003H\u001c\u0000\u0533\u0534\u0001"+ - "\u0000\u0000\u0000\u0534\u0535\u0006\u0099\u0010\u0000\u0535\u0536\u0006"+ - "\u0099\f\u0000\u0536\u0143\u0001\u0000\u0000\u0000\u0537\u0538\u0003t"+ - "2\u0000\u0538\u0539\u0001\u0000\u0000\u0000\u0539\u053a\u0006\u009a\u0017"+ - "\u0000\u053a\u0145\u0001\u0000\u0000\u0000\u053b\u053c\u0004\u009b\u0010"+ - "\u0000\u053c\u053d\u0003\u008c>\u0000\u053d\u053e\u0001\u0000\u0000\u0000"+ - "\u053e\u053f\u0006\u009b\u0018\u0000\u053f\u0147\u0001\u0000\u0000\u0000"+ - "\u0540\u0541\u0004\u009c\u0011\u0000\u0541\u0542\u0003\u00b2Q\u0000\u0542"+ - "\u0543\u0001\u0000\u0000\u0000\u0543\u0544\u0006\u009c\u0019\u0000\u0544"+ - "\u0149\u0001\u0000\u0000\u0000\u0545\u0546\u0003\u00bcV\u0000\u0546\u0547"+ - "\u0001\u0000\u0000\u0000\u0547\u0548\u0006\u009d\u001f\u0000\u0548\u014b"+ - "\u0001\u0000\u0000\u0000\u0549\u054a\u0003\u00b8T\u0000\u054a\u054b\u0001"+ - "\u0000\u0000\u0000\u054b\u054c\u0006\u009e \u0000\u054c\u014d\u0001\u0000"+ - "\u0000\u0000\u054d\u054e\u0003B\u0019\u0000\u054e\u054f\u0001\u0000\u0000"+ - "\u0000\u054f\u0550\u0006\u009f\u000b\u0000\u0550\u014f\u0001\u0000\u0000"+ - "\u0000\u0551\u0552\u0003D\u001a\u0000\u0552\u0553\u0001\u0000\u0000\u0000"+ - "\u0553\u0554\u0006\u00a0\u000b\u0000\u0554\u0151\u0001\u0000\u0000\u0000"+ - "\u0555\u0556\u0003F\u001b\u0000\u0556\u0557\u0001\u0000\u0000\u0000\u0557"+ - "\u0558\u0006\u00a1\u000b\u0000\u0558\u0153\u0001\u0000\u0000\u0000\u0559"+ - "\u055a\u0003H\u001c\u0000\u055a\u055b\u0001\u0000\u0000\u0000\u055b\u055c"+ - "\u0006\u00a2\u0010\u0000\u055c\u055d\u0006\u00a2\f\u0000\u055d\u0155\u0001"+ - "\u0000\u0000\u0000\u055e\u055f\u0007\u0001\u0000\u0000\u055f\u0560\u0007"+ - "\t\u0000\u0000\u0560\u0561\u0007\u000f\u0000\u0000\u0561\u0562\u0007\u0007"+ - "\u0000\u0000\u0562\u0157\u0001\u0000\u0000\u0000\u0563\u0564\u0003B\u0019"+ - "\u0000\u0564\u0565\u0001\u0000\u0000\u0000\u0565\u0566\u0006\u00a4\u000b"+ - "\u0000\u0566\u0159\u0001\u0000\u0000\u0000\u0567\u0568\u0003D\u001a\u0000"+ - "\u0568\u0569\u0001\u0000\u0000\u0000\u0569\u056a\u0006\u00a5\u000b\u0000"+ - "\u056a\u015b\u0001\u0000\u0000\u0000\u056b\u056c\u0003F\u001b\u0000\u056c"+ - "\u056d\u0001\u0000\u0000\u0000\u056d\u056e\u0006\u00a6\u000b\u0000\u056e"+ - "\u015d\u0001\u0000\u0000\u0000\u056f\u0570\u0003\u00b6S\u0000\u0570\u0571"+ - "\u0001\u0000\u0000\u0000\u0571\u0572\u0006\u00a7\u0011\u0000\u0572\u0573"+ - "\u0006\u00a7\f\u0000\u0573\u015f\u0001\u0000\u0000\u0000\u0574\u0575\u0003"+ - "n/\u0000\u0575\u0576\u0001\u0000\u0000\u0000\u0576\u0577\u0006\u00a8\u0012"+ - "\u0000\u0577\u0161\u0001\u0000\u0000\u0000\u0578\u057e\u0003T\"\u0000"+ - "\u0579\u057e\u0003J\u001d\u0000\u057a\u057e\u0003t2\u0000\u057b\u057e"+ - "\u0003L\u001e\u0000\u057c\u057e\u0003Z%\u0000\u057d\u0578\u0001\u0000"+ - "\u0000\u0000\u057d\u0579\u0001\u0000\u0000\u0000\u057d\u057a\u0001\u0000"+ - "\u0000\u0000\u057d\u057b\u0001\u0000\u0000\u0000\u057d\u057c\u0001\u0000"+ - 
"\u0000\u0000\u057e\u057f\u0001\u0000\u0000\u0000\u057f\u057d\u0001\u0000"+ - "\u0000\u0000\u057f\u0580\u0001\u0000\u0000\u0000\u0580\u0163\u0001\u0000"+ - "\u0000\u0000\u0581\u0582\u0003B\u0019\u0000\u0582\u0583\u0001\u0000\u0000"+ - "\u0000\u0583\u0584\u0006\u00aa\u000b\u0000\u0584\u0165\u0001\u0000\u0000"+ - "\u0000\u0585\u0586\u0003D\u001a\u0000\u0586\u0587\u0001\u0000\u0000\u0000"+ - "\u0587\u0588\u0006\u00ab\u000b\u0000\u0588\u0167\u0001\u0000\u0000\u0000"+ - "\u0589\u058a\u0003F\u001b\u0000\u058a\u058b\u0001\u0000\u0000\u0000\u058b"+ - "\u058c\u0006\u00ac\u000b\u0000\u058c\u0169\u0001\u0000\u0000\u0000\u058d"+ - "\u058e\u0003H\u001c\u0000\u058e\u058f\u0001\u0000\u0000\u0000\u058f\u0590"+ - "\u0006\u00ad\u0010\u0000\u0590\u0591\u0006\u00ad\f\u0000\u0591\u016b\u0001"+ - "\u0000\u0000\u0000\u0592\u0593\u0003n/\u0000\u0593\u0594\u0001\u0000\u0000"+ - "\u0000\u0594\u0595\u0006\u00ae\u0012\u0000\u0595\u016d\u0001\u0000\u0000"+ - "\u0000\u0596\u0597\u0003p0\u0000\u0597\u0598\u0001\u0000\u0000\u0000\u0598"+ - "\u0599\u0006\u00af\u0013\u0000\u0599\u016f\u0001\u0000\u0000\u0000\u059a"+ - "\u059b\u0003t2\u0000\u059b\u059c\u0001\u0000\u0000\u0000\u059c\u059d\u0006"+ - "\u00b0\u0017\u0000\u059d\u0171\u0001\u0000\u0000\u0000\u059e\u059f\u0003"+ - "\u011a\u0085\u0000\u059f\u05a0\u0001\u0000\u0000\u0000\u05a0\u05a1\u0006"+ - "\u00b1!\u0000\u05a1\u05a2\u0006\u00b1\"\u0000\u05a2\u0173\u0001\u0000"+ - "\u0000\u0000\u05a3\u05a4\u0003\u00deg\u0000\u05a4\u05a5\u0001\u0000\u0000"+ - "\u0000\u05a5\u05a6\u0006\u00b2\u0015\u0000\u05a6\u0175\u0001\u0000\u0000"+ - "\u0000\u05a7\u05a8\u0003^\'\u0000\u05a8\u05a9\u0001\u0000\u0000\u0000"+ - "\u05a9\u05aa\u0006\u00b3\u0016\u0000\u05aa\u0177\u0001\u0000\u0000\u0000"+ - "\u05ab\u05ac\u0003B\u0019\u0000\u05ac\u05ad\u0001\u0000\u0000\u0000\u05ad"+ - "\u05ae\u0006\u00b4\u000b\u0000\u05ae\u0179\u0001\u0000\u0000\u0000\u05af"+ - "\u05b0\u0003D\u001a\u0000\u05b0\u05b1\u0001\u0000\u0000\u0000\u05b1\u05b2"+ - "\u0006\u00b5\u000b\u0000\u05b2\u017b\u0001\u0000\u0000\u0000\u05b3\u05b4"+ - "\u0003F\u001b\u0000\u05b4\u05b5\u0001\u0000\u0000\u0000\u05b5\u05b6\u0006"+ - "\u00b6\u000b\u0000\u05b6\u017d\u0001\u0000\u0000\u0000\u05b7\u05b8\u0003"+ - "H\u001c\u0000\u05b8\u05b9\u0001\u0000\u0000\u0000\u05b9\u05ba\u0006\u00b7"+ - "\u0010\u0000\u05ba\u05bb\u0006\u00b7\f\u0000\u05bb\u05bc\u0006\u00b7\f"+ - "\u0000\u05bc\u017f\u0001\u0000\u0000\u0000\u05bd\u05be\u0003p0\u0000\u05be"+ - "\u05bf\u0001\u0000\u0000\u0000\u05bf\u05c0\u0006\u00b8\u0013\u0000\u05c0"+ - "\u0181\u0001\u0000\u0000\u0000\u05c1\u05c2\u0003t2\u0000\u05c2\u05c3\u0001"+ - "\u0000\u0000\u0000\u05c3\u05c4\u0006\u00b9\u0017\u0000\u05c4\u0183\u0001"+ - "\u0000\u0000\u0000\u05c5\u05c6\u0003\u00f8t\u0000\u05c6\u05c7\u0001\u0000"+ - "\u0000\u0000\u05c7\u05c8\u0006\u00ba\u001a\u0000\u05c8\u0185\u0001\u0000"+ - "\u0000\u0000\u05c9\u05ca\u0003B\u0019\u0000\u05ca\u05cb\u0001\u0000\u0000"+ - "\u0000\u05cb\u05cc\u0006\u00bb\u000b\u0000\u05cc\u0187\u0001\u0000\u0000"+ - "\u0000\u05cd\u05ce\u0003D\u001a\u0000\u05ce\u05cf\u0001\u0000\u0000\u0000"+ - "\u05cf\u05d0\u0006\u00bc\u000b\u0000\u05d0\u0189\u0001\u0000\u0000\u0000"+ - "\u05d1\u05d2\u0003F\u001b\u0000\u05d2\u05d3\u0001\u0000\u0000\u0000\u05d3"+ - "\u05d4\u0006\u00bd\u000b\u0000\u05d4\u018b\u0001\u0000\u0000\u0000\u05d5"+ - "\u05d6\u0003H\u001c\u0000\u05d6\u05d7\u0001\u0000\u0000\u0000\u05d7\u05d8"+ - "\u0006\u00be\u0010\u0000\u05d8\u05d9\u0006\u00be\f\u0000\u05d9\u018d\u0001"+ - "\u0000\u0000\u0000\u05da\u05db\u00036\u0013\u0000\u05db\u05dc\u0001\u0000"+ - 
"\u0000\u0000\u05dc\u05dd\u0006\u00bf#\u0000\u05dd\u018f\u0001\u0000\u0000"+ - "\u0000\u05de\u05df\u0003\u010c~\u0000\u05df\u05e0\u0001\u0000\u0000\u0000"+ - "\u05e0\u05e1\u0006\u00c0$\u0000\u05e1\u0191\u0001\u0000\u0000\u0000\u05e2"+ - "\u05e3\u0003\u011a\u0085\u0000\u05e3\u05e4\u0001\u0000\u0000\u0000\u05e4"+ - "\u05e5\u0006\u00c1!\u0000\u05e5\u05e6\u0006\u00c1\f\u0000\u05e6\u05e7"+ - "\u0006\u00c1\u0000\u0000\u05e7\u0193\u0001\u0000\u0000\u0000\u05e8\u05e9"+ - "\u0007\u0014\u0000\u0000\u05e9\u05ea\u0007\u0002\u0000\u0000\u05ea\u05eb"+ - "\u0007\u0001\u0000\u0000\u05eb\u05ec\u0007\t\u0000\u0000\u05ec\u05ed\u0007"+ - "\u0011\u0000\u0000\u05ed\u05ee\u0001\u0000\u0000\u0000\u05ee\u05ef\u0006"+ - "\u00c2\f\u0000\u05ef\u05f0\u0006\u00c2\u0000\u0000\u05f0\u0195\u0001\u0000"+ - "\u0000\u0000\u05f1\u05f2\u0003\u00deg\u0000\u05f2\u05f3\u0001\u0000\u0000"+ - "\u0000\u05f3\u05f4\u0006\u00c3\u0015\u0000\u05f4\u0197\u0001\u0000\u0000"+ - "\u0000\u05f5\u05f6\u0003^\'\u0000\u05f6\u05f7\u0001\u0000\u0000\u0000"+ - "\u05f7\u05f8\u0006\u00c4\u0016\u0000\u05f8\u0199\u0001\u0000\u0000\u0000"+ - "\u05f9\u05fa\u0003n/\u0000\u05fa\u05fb\u0001\u0000\u0000\u0000\u05fb\u05fc"+ - "\u0006\u00c5\u0012\u0000\u05fc\u019b\u0001\u0000\u0000\u0000\u05fd\u05fe"+ - "\u0003\u00b8T\u0000\u05fe\u05ff\u0001\u0000\u0000\u0000\u05ff\u0600\u0006"+ - "\u00c6 \u0000\u0600\u019d\u0001\u0000\u0000\u0000\u0601\u0602\u0003\u00bc"+ - "V\u0000\u0602\u0603\u0001\u0000\u0000\u0000\u0603\u0604\u0006\u00c7\u001f"+ - "\u0000\u0604\u019f\u0001\u0000\u0000\u0000\u0605\u0606\u0003B\u0019\u0000"+ - "\u0606\u0607\u0001\u0000\u0000\u0000\u0607\u0608\u0006\u00c8\u000b\u0000"+ - "\u0608\u01a1\u0001\u0000\u0000\u0000\u0609\u060a\u0003D\u001a\u0000\u060a"+ - "\u060b\u0001\u0000\u0000\u0000\u060b\u060c\u0006\u00c9\u000b\u0000\u060c"+ - "\u01a3\u0001\u0000\u0000\u0000\u060d\u060e\u0003F\u001b\u0000\u060e\u060f"+ - "\u0001\u0000\u0000\u0000\u060f\u0610\u0006\u00ca\u000b\u0000\u0610\u01a5"+ - "\u0001\u0000\u0000\u0000\u0611\u0612\u0003H\u001c\u0000\u0612\u0613\u0001"+ - "\u0000\u0000\u0000\u0613\u0614\u0006\u00cb\u0010\u0000\u0614\u0615\u0006"+ - "\u00cb\f\u0000\u0615\u01a7\u0001\u0000\u0000\u0000\u0616\u0617\u0003\u00de"+ - "g\u0000\u0617\u0618\u0001\u0000\u0000\u0000\u0618\u0619\u0006\u00cc\u0015"+ - "\u0000\u0619\u061a\u0006\u00cc\f\u0000\u061a\u061b\u0006\u00cc%\u0000"+ - "\u061b\u01a9\u0001\u0000\u0000\u0000\u061c\u061d\u0003^\'\u0000\u061d"+ - "\u061e\u0001\u0000\u0000\u0000\u061e\u061f\u0006\u00cd\u0016\u0000\u061f"+ - "\u0620\u0006\u00cd\f\u0000\u0620\u0621\u0006\u00cd%\u0000\u0621\u01ab"+ - "\u0001\u0000\u0000\u0000\u0622\u0623\u0003B\u0019\u0000\u0623\u0624\u0001"+ - "\u0000\u0000\u0000\u0624\u0625\u0006\u00ce\u000b\u0000\u0625\u01ad\u0001"+ - "\u0000\u0000\u0000\u0626\u0627\u0003D\u001a\u0000\u0627\u0628\u0001\u0000"+ - "\u0000\u0000\u0628\u0629\u0006\u00cf\u000b\u0000\u0629\u01af\u0001\u0000"+ - "\u0000\u0000\u062a\u062b\u0003F\u001b\u0000\u062b\u062c\u0001\u0000\u0000"+ - "\u0000\u062c\u062d\u0006\u00d0\u000b\u0000\u062d\u01b1\u0001\u0000\u0000"+ - "\u0000\u062e\u062f\u0003n/\u0000\u062f\u0630\u0001\u0000\u0000\u0000\u0630"+ - "\u0631\u0006\u00d1\u0012\u0000\u0631\u0632\u0006\u00d1\f\u0000\u0632\u0633"+ - "\u0006\u00d1\t\u0000\u0633\u01b3\u0001\u0000\u0000\u0000\u0634\u0635\u0003"+ - "p0\u0000\u0635\u0636\u0001\u0000\u0000\u0000\u0636\u0637\u0006\u00d2\u0013"+ - "\u0000\u0637\u0638\u0006\u00d2\f\u0000\u0638\u0639\u0006\u00d2\t\u0000"+ - "\u0639\u01b5\u0001\u0000\u0000\u0000\u063a\u063b\u0003B\u0019\u0000\u063b"+ - 
"\u063c\u0001\u0000\u0000\u0000\u063c\u063d\u0006\u00d3\u000b\u0000\u063d"+ - "\u01b7\u0001\u0000\u0000\u0000\u063e\u063f\u0003D\u001a\u0000\u063f\u0640"+ - "\u0001\u0000\u0000\u0000\u0640\u0641\u0006\u00d4\u000b\u0000\u0641\u01b9"+ - "\u0001\u0000\u0000\u0000\u0642\u0643\u0003F\u001b\u0000\u0643\u0644\u0001"+ - "\u0000\u0000\u0000\u0644\u0645\u0006\u00d5\u000b\u0000\u0645\u01bb\u0001"+ - "\u0000\u0000\u0000\u0646\u0647\u0003\u00bcV\u0000\u0647\u0648\u0001\u0000"+ - "\u0000\u0000\u0648\u0649\u0006\u00d6\f\u0000\u0649\u064a\u0006\u00d6\u0000"+ - "\u0000\u064a\u064b\u0006\u00d6\u001f\u0000\u064b\u01bd\u0001\u0000\u0000"+ - "\u0000\u064c\u064d\u0003\u00b8T\u0000\u064d\u064e\u0001\u0000\u0000\u0000"+ - "\u064e\u064f\u0006\u00d7\f\u0000\u064f\u0650\u0006\u00d7\u0000\u0000\u0650"+ - "\u0651\u0006\u00d7 \u0000\u0651\u01bf\u0001\u0000\u0000\u0000\u0652\u0653"+ - "\u0003d*\u0000\u0653\u0654\u0001\u0000\u0000\u0000\u0654\u0655\u0006\u00d8"+ - "\f\u0000\u0655\u0656\u0006\u00d8\u0000\u0000\u0656\u0657\u0006\u00d8&"+ - "\u0000\u0657\u01c1\u0001\u0000\u0000\u0000\u0658\u0659\u0003H\u001c\u0000"+ - "\u0659\u065a\u0001\u0000\u0000\u0000\u065a\u065b\u0006\u00d9\u0010\u0000"+ - "\u065b\u065c\u0006\u00d9\f\u0000\u065c\u01c3\u0001\u0000\u0000\u0000B"+ + "\u0001\u0000\u0000\u0000\u03b1\u03b2\u0005{\u0000\u0000\u03b2\u00ad\u0001"+ + "\u0000\u0000\u0000\u03b3\u03b4\u0005}\u0000\u0000\u03b4\u00af\u0001\u0000"+ + "\u0000\u0000\u03b5\u03b6\u0003.\u000f\u0000\u03b6\u03b7\u0001\u0000\u0000"+ + "\u0000\u03b7\u03b8\u0006P\r\u0000\u03b8\u00b1\u0001\u0000\u0000\u0000"+ + "\u03b9\u03bc\u0003\u008c>\u0000\u03ba\u03bd\u0003L\u001e\u0000\u03bb\u03bd"+ + "\u0003Z%\u0000\u03bc\u03ba\u0001\u0000\u0000\u0000\u03bc\u03bb\u0001\u0000"+ + "\u0000\u0000\u03bd\u03c1\u0001\u0000\u0000\u0000\u03be\u03c0\u0003\\&"+ + "\u0000\u03bf\u03be\u0001\u0000\u0000\u0000\u03c0\u03c3\u0001\u0000\u0000"+ + "\u0000\u03c1\u03bf\u0001\u0000\u0000\u0000\u03c1\u03c2\u0001\u0000\u0000"+ + "\u0000\u03c2\u03cb\u0001\u0000\u0000\u0000\u03c3\u03c1\u0001\u0000\u0000"+ + "\u0000\u03c4\u03c6\u0003\u008c>\u0000\u03c5\u03c7\u0003J\u001d\u0000\u03c6"+ + "\u03c5\u0001\u0000\u0000\u0000\u03c7\u03c8\u0001\u0000\u0000\u0000\u03c8"+ + "\u03c6\u0001\u0000\u0000\u0000\u03c8\u03c9\u0001\u0000\u0000\u0000\u03c9"+ + "\u03cb\u0001\u0000\u0000\u0000\u03ca\u03b9\u0001\u0000\u0000\u0000\u03ca"+ + "\u03c4\u0001\u0000\u0000\u0000\u03cb\u00b3\u0001\u0000\u0000\u0000\u03cc"+ + "\u03cd\u0005[\u0000\u0000\u03cd\u03ce\u0001\u0000\u0000\u0000\u03ce\u03cf"+ + "\u0006R\u0000\u0000\u03cf\u03d0\u0006R\u0000\u0000\u03d0\u00b5\u0001\u0000"+ + "\u0000\u0000\u03d1\u03d2\u0005]\u0000\u0000\u03d2\u03d3\u0001\u0000\u0000"+ + "\u0000\u03d3\u03d4\u0006S\f\u0000\u03d4\u03d5\u0006S\f\u0000\u03d5\u00b7"+ + "\u0001\u0000\u0000\u0000\u03d6\u03da\u0003L\u001e\u0000\u03d7\u03d9\u0003"+ + "\\&\u0000\u03d8\u03d7\u0001\u0000\u0000\u0000\u03d9\u03dc\u0001\u0000"+ + "\u0000\u0000\u03da\u03d8\u0001\u0000\u0000\u0000\u03da\u03db\u0001\u0000"+ + "\u0000\u0000\u03db\u03e7\u0001\u0000\u0000\u0000\u03dc\u03da\u0001\u0000"+ + "\u0000\u0000\u03dd\u03e0\u0003Z%\u0000\u03de\u03e0\u0003T\"\u0000\u03df"+ + "\u03dd\u0001\u0000\u0000\u0000\u03df\u03de\u0001\u0000\u0000\u0000\u03e0"+ + "\u03e2\u0001\u0000\u0000\u0000\u03e1\u03e3\u0003\\&\u0000\u03e2\u03e1"+ + "\u0001\u0000\u0000\u0000\u03e3\u03e4\u0001\u0000\u0000\u0000\u03e4\u03e2"+ + "\u0001\u0000\u0000\u0000\u03e4\u03e5\u0001\u0000\u0000\u0000\u03e5\u03e7"+ + "\u0001\u0000\u0000\u0000\u03e6\u03d6\u0001\u0000\u0000\u0000\u03e6\u03df"+ + 
"\u0001\u0000\u0000\u0000\u03e7\u00b9\u0001\u0000\u0000\u0000\u03e8\u03ea"+ + "\u0003V#\u0000\u03e9\u03eb\u0003X$\u0000\u03ea\u03e9\u0001\u0000\u0000"+ + "\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec\u03ea\u0001\u0000\u0000"+ + "\u0000\u03ec\u03ed\u0001\u0000\u0000\u0000\u03ed\u03ee\u0001\u0000\u0000"+ + "\u0000\u03ee\u03ef\u0003V#\u0000\u03ef\u00bb\u0001\u0000\u0000\u0000\u03f0"+ + "\u03f1\u0003\u00baU\u0000\u03f1\u00bd\u0001\u0000\u0000\u0000\u03f2\u03f3"+ + "\u0003B\u0019\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006"+ + "W\u000b\u0000\u03f5\u00bf\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003D\u001a"+ + "\u0000\u03f7\u03f8\u0001\u0000\u0000\u0000\u03f8\u03f9\u0006X\u000b\u0000"+ + "\u03f9\u00c1\u0001\u0000\u0000\u0000\u03fa\u03fb\u0003F\u001b\u0000\u03fb"+ + "\u03fc\u0001\u0000\u0000\u0000\u03fc\u03fd\u0006Y\u000b\u0000\u03fd\u00c3"+ + "\u0001\u0000\u0000\u0000\u03fe\u03ff\u0003\u00b4R\u0000\u03ff\u0400\u0001"+ + "\u0000\u0000\u0000\u0400\u0401\u0006Z\u000e\u0000\u0401\u0402\u0006Z\u000f"+ + "\u0000\u0402\u00c5\u0001\u0000\u0000\u0000\u0403\u0404\u0003H\u001c\u0000"+ + "\u0404\u0405\u0001\u0000\u0000\u0000\u0405\u0406\u0006[\u0010\u0000\u0406"+ + "\u0407\u0006[\f\u0000\u0407\u00c7\u0001\u0000\u0000\u0000\u0408\u0409"+ + "\u0003F\u001b\u0000\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u040b\u0006"+ + "\\\u000b\u0000\u040b\u00c9\u0001\u0000\u0000\u0000\u040c\u040d\u0003B"+ + "\u0019\u0000\u040d\u040e\u0001\u0000\u0000\u0000\u040e\u040f\u0006]\u000b"+ + "\u0000\u040f\u00cb\u0001\u0000\u0000\u0000\u0410\u0411\u0003D\u001a\u0000"+ + "\u0411\u0412\u0001\u0000\u0000\u0000\u0412\u0413\u0006^\u000b\u0000\u0413"+ + "\u00cd\u0001\u0000\u0000\u0000\u0414\u0415\u0003H\u001c\u0000\u0415\u0416"+ + "\u0001\u0000\u0000\u0000\u0416\u0417\u0006_\u0010\u0000\u0417\u0418\u0006"+ + "_\f\u0000\u0418\u00cf\u0001\u0000\u0000\u0000\u0419\u041a\u0003\u00b4"+ + "R\u0000\u041a\u041b\u0001\u0000\u0000\u0000\u041b\u041c\u0006`\u000e\u0000"+ + "\u041c\u00d1\u0001\u0000\u0000\u0000\u041d\u041e\u0003\u00b6S\u0000\u041e"+ + "\u041f\u0001\u0000\u0000\u0000\u041f\u0420\u0006a\u0011\u0000\u0420\u00d3"+ + "\u0001\u0000\u0000\u0000\u0421\u0422\u0003n/\u0000\u0422\u0423\u0001\u0000"+ + "\u0000\u0000\u0423\u0424\u0006b\u0012\u0000\u0424\u00d5\u0001\u0000\u0000"+ + "\u0000\u0425\u0426\u0003p0\u0000\u0426\u0427\u0001\u0000\u0000\u0000\u0427"+ + "\u0428\u0006c\u0013\u0000\u0428\u00d7\u0001\u0000\u0000\u0000\u0429\u042a"+ + "\u0003j-\u0000\u042a\u042b\u0001\u0000\u0000\u0000\u042b\u042c\u0006d"+ + "\u0014\u0000\u042c\u00d9\u0001\u0000\u0000\u0000\u042d\u042e\u0007\u0010"+ + "\u0000\u0000\u042e\u042f\u0007\u0003\u0000\u0000\u042f\u0430\u0007\u0005"+ + "\u0000\u0000\u0430\u0431\u0007\f\u0000\u0000\u0431\u0432\u0007\u0000\u0000"+ + "\u0000\u0432\u0433\u0007\f\u0000\u0000\u0433\u0434\u0007\u0005\u0000\u0000"+ + "\u0434\u0435\u0007\f\u0000\u0000\u0435\u00db\u0001\u0000\u0000\u0000\u0436"+ + "\u043a\b!\u0000\u0000\u0437\u0438\u0005/\u0000\u0000\u0438\u043a\b\"\u0000"+ + "\u0000\u0439\u0436\u0001\u0000\u0000\u0000\u0439\u0437\u0001\u0000\u0000"+ + "\u0000\u043a\u00dd\u0001\u0000\u0000\u0000\u043b\u043d\u0003\u00dcf\u0000"+ + "\u043c\u043b\u0001\u0000\u0000\u0000\u043d\u043e\u0001\u0000\u0000\u0000"+ + "\u043e\u043c\u0001\u0000\u0000\u0000\u043e\u043f\u0001\u0000\u0000\u0000"+ + "\u043f\u00df\u0001\u0000\u0000\u0000\u0440\u0441\u0003\u00deg\u0000\u0441"+ + "\u0442\u0001\u0000\u0000\u0000\u0442\u0443\u0006h\u0015\u0000\u0443\u00e1"+ + "\u0001\u0000\u0000\u0000\u0444\u0445\u0003^\'\u0000\u0445\u0446\u0001"+ + 
"\u0000\u0000\u0000\u0446\u0447\u0006i\u0016\u0000\u0447\u00e3\u0001\u0000"+ + "\u0000\u0000\u0448\u0449\u0003B\u0019\u0000\u0449\u044a\u0001\u0000\u0000"+ + "\u0000\u044a\u044b\u0006j\u000b\u0000\u044b\u00e5\u0001\u0000\u0000\u0000"+ + "\u044c\u044d\u0003D\u001a\u0000\u044d\u044e\u0001\u0000\u0000\u0000\u044e"+ + "\u044f\u0006k\u000b\u0000\u044f\u00e7\u0001\u0000\u0000\u0000\u0450\u0451"+ + "\u0003F\u001b\u0000\u0451\u0452\u0001\u0000\u0000\u0000\u0452\u0453\u0006"+ + "l\u000b\u0000\u0453\u00e9\u0001\u0000\u0000\u0000\u0454\u0455\u0003H\u001c"+ + "\u0000\u0455\u0456\u0001\u0000\u0000\u0000\u0456\u0457\u0006m\u0010\u0000"+ + "\u0457\u0458\u0006m\f\u0000\u0458\u00eb\u0001\u0000\u0000\u0000\u0459"+ + "\u045a\u0003t2\u0000\u045a\u045b\u0001\u0000\u0000\u0000\u045b\u045c\u0006"+ + "n\u0017\u0000\u045c\u00ed\u0001\u0000\u0000\u0000\u045d\u045e\u0003p0"+ + "\u0000\u045e\u045f\u0001\u0000\u0000\u0000\u045f\u0460\u0006o\u0013\u0000"+ + "\u0460\u00ef\u0001\u0000\u0000\u0000\u0461\u0462\u0004p\b\u0000\u0462"+ + "\u0463\u0003\u008c>\u0000\u0463\u0464\u0001\u0000\u0000\u0000\u0464\u0465"+ + "\u0006p\u0018\u0000\u0465\u00f1\u0001\u0000\u0000\u0000\u0466\u0467\u0004"+ + "q\t\u0000\u0467\u0468\u0003\u00b2Q\u0000\u0468\u0469\u0001\u0000\u0000"+ + "\u0000\u0469\u046a\u0006q\u0019\u0000\u046a\u00f3\u0001\u0000\u0000\u0000"+ + "\u046b\u0470\u0003L\u001e\u0000\u046c\u0470\u0003J\u001d\u0000\u046d\u0470"+ + "\u0003Z%\u0000\u046e\u0470\u0003\u00a6K\u0000\u046f\u046b\u0001\u0000"+ + "\u0000\u0000\u046f\u046c\u0001\u0000\u0000\u0000\u046f\u046d\u0001\u0000"+ + "\u0000\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470\u00f5\u0001\u0000"+ + "\u0000\u0000\u0471\u0474\u0003L\u001e\u0000\u0472\u0474\u0003\u00a6K\u0000"+ + "\u0473\u0471\u0001\u0000\u0000\u0000\u0473\u0472\u0001\u0000\u0000\u0000"+ + "\u0474\u0478\u0001\u0000\u0000\u0000\u0475\u0477\u0003\u00f4r\u0000\u0476"+ + "\u0475\u0001\u0000\u0000\u0000\u0477\u047a\u0001\u0000\u0000\u0000\u0478"+ + "\u0476\u0001\u0000\u0000\u0000\u0478\u0479\u0001\u0000\u0000\u0000\u0479"+ + "\u0485\u0001\u0000\u0000\u0000\u047a\u0478\u0001\u0000\u0000\u0000\u047b"+ + "\u047e\u0003Z%\u0000\u047c\u047e\u0003T\"\u0000\u047d\u047b\u0001\u0000"+ + "\u0000\u0000\u047d\u047c\u0001\u0000\u0000\u0000\u047e\u0480\u0001\u0000"+ + "\u0000\u0000\u047f\u0481\u0003\u00f4r\u0000\u0480\u047f\u0001\u0000\u0000"+ + "\u0000\u0481\u0482\u0001\u0000\u0000\u0000\u0482\u0480\u0001\u0000\u0000"+ + "\u0000\u0482\u0483\u0001\u0000\u0000\u0000\u0483\u0485\u0001\u0000\u0000"+ + "\u0000\u0484\u0473\u0001\u0000\u0000\u0000\u0484\u047d\u0001\u0000\u0000"+ + "\u0000\u0485\u00f7\u0001\u0000\u0000\u0000\u0486\u0489\u0003\u00f6s\u0000"+ + "\u0487\u0489\u0003\u00baU\u0000\u0488\u0486\u0001\u0000\u0000\u0000\u0488"+ + "\u0487\u0001\u0000\u0000\u0000\u0489\u048a\u0001\u0000\u0000\u0000\u048a"+ + "\u0488\u0001\u0000\u0000\u0000\u048a\u048b\u0001\u0000\u0000\u0000\u048b"+ + "\u00f9\u0001\u0000\u0000\u0000\u048c\u048d\u0003B\u0019\u0000\u048d\u048e"+ + "\u0001\u0000\u0000\u0000\u048e\u048f\u0006u\u000b\u0000\u048f\u00fb\u0001"+ + "\u0000\u0000\u0000\u0490\u0491\u0003D\u001a\u0000\u0491\u0492\u0001\u0000"+ + "\u0000\u0000\u0492\u0493\u0006v\u000b\u0000\u0493\u00fd\u0001\u0000\u0000"+ + "\u0000\u0494\u0495\u0003F\u001b\u0000\u0495\u0496\u0001\u0000\u0000\u0000"+ + "\u0496\u0497\u0006w\u000b\u0000\u0497\u00ff\u0001\u0000\u0000\u0000\u0498"+ + "\u0499\u0003H\u001c\u0000\u0499\u049a\u0001\u0000\u0000\u0000\u049a\u049b"+ + "\u0006x\u0010\u0000\u049b\u049c\u0006x\f\u0000\u049c\u0101\u0001\u0000"+ + 
"\u0000\u0000\u049d\u049e\u0003j-\u0000\u049e\u049f\u0001\u0000\u0000\u0000"+ + "\u049f\u04a0\u0006y\u0014\u0000\u04a0\u0103\u0001\u0000\u0000\u0000\u04a1"+ + "\u04a2\u0003p0\u0000\u04a2\u04a3\u0001\u0000\u0000\u0000\u04a3\u04a4\u0006"+ + "z\u0013\u0000\u04a4\u0105\u0001\u0000\u0000\u0000\u04a5\u04a6\u0003t2"+ + "\u0000\u04a6\u04a7\u0001\u0000\u0000\u0000\u04a7\u04a8\u0006{\u0017\u0000"+ + "\u04a8\u0107\u0001\u0000\u0000\u0000\u04a9\u04aa\u0004|\n\u0000\u04aa"+ + "\u04ab\u0003\u008c>\u0000\u04ab\u04ac\u0001\u0000\u0000\u0000\u04ac\u04ad"+ + "\u0006|\u0018\u0000\u04ad\u0109\u0001\u0000\u0000\u0000\u04ae\u04af\u0004"+ + "}\u000b\u0000\u04af\u04b0\u0003\u00b2Q\u0000\u04b0\u04b1\u0001\u0000\u0000"+ + "\u0000\u04b1\u04b2\u0006}\u0019\u0000\u04b2\u010b\u0001\u0000\u0000\u0000"+ + "\u04b3\u04b4\u0007\f\u0000\u0000\u04b4\u04b5\u0007\u0002\u0000\u0000\u04b5"+ + "\u010d\u0001\u0000\u0000\u0000\u04b6\u04b7\u0003\u00f8t\u0000\u04b7\u04b8"+ + "\u0001\u0000\u0000\u0000\u04b8\u04b9\u0006\u007f\u001a\u0000\u04b9\u010f"+ + "\u0001\u0000\u0000\u0000\u04ba\u04bb\u0003B\u0019\u0000\u04bb\u04bc\u0001"+ + "\u0000\u0000\u0000\u04bc\u04bd\u0006\u0080\u000b\u0000\u04bd\u0111\u0001"+ + "\u0000\u0000\u0000\u04be\u04bf\u0003D\u001a\u0000\u04bf\u04c0\u0001\u0000"+ + "\u0000\u0000\u04c0\u04c1\u0006\u0081\u000b\u0000\u04c1\u0113\u0001\u0000"+ + "\u0000\u0000\u04c2\u04c3\u0003F\u001b\u0000\u04c3\u04c4\u0001\u0000\u0000"+ + "\u0000\u04c4\u04c5\u0006\u0082\u000b\u0000\u04c5\u0115\u0001\u0000\u0000"+ + "\u0000\u04c6\u04c7\u0003H\u001c\u0000\u04c7\u04c8\u0001\u0000\u0000\u0000"+ + "\u04c8\u04c9\u0006\u0083\u0010\u0000\u04c9\u04ca\u0006\u0083\f\u0000\u04ca"+ + "\u0117\u0001\u0000\u0000\u0000\u04cb\u04cc\u0003\u00b4R\u0000\u04cc\u04cd"+ + "\u0001\u0000\u0000\u0000\u04cd\u04ce\u0006\u0084\u000e\u0000\u04ce\u04cf"+ + "\u0006\u0084\u001b\u0000\u04cf\u0119\u0001\u0000\u0000\u0000\u04d0\u04d1"+ + "\u0007\u0007\u0000\u0000\u04d1\u04d2\u0007\t\u0000\u0000\u04d2\u04d3\u0001"+ + "\u0000\u0000\u0000\u04d3\u04d4\u0006\u0085\u001c\u0000\u04d4\u011b\u0001"+ + "\u0000\u0000\u0000\u04d5\u04d6\u0007\u0013\u0000\u0000\u04d6\u04d7\u0007"+ + "\u0001\u0000\u0000\u04d7\u04d8\u0007\u0005\u0000\u0000\u04d8\u04d9\u0007"+ + "\n\u0000\u0000\u04d9\u04da\u0001\u0000\u0000\u0000\u04da\u04db\u0006\u0086"+ + "\u001c\u0000\u04db\u011d\u0001\u0000\u0000\u0000\u04dc\u04dd\b#\u0000"+ + "\u0000\u04dd\u011f\u0001\u0000\u0000\u0000\u04de\u04e0\u0003\u011e\u0087"+ + "\u0000\u04df\u04de\u0001\u0000\u0000\u0000\u04e0\u04e1\u0001\u0000\u0000"+ + "\u0000\u04e1\u04df\u0001\u0000\u0000\u0000\u04e1\u04e2\u0001\u0000\u0000"+ + "\u0000\u04e2\u04e3\u0001\u0000\u0000\u0000\u04e3\u04e4\u0003n/\u0000\u04e4"+ + "\u04e6\u0001\u0000\u0000\u0000\u04e5\u04df\u0001\u0000\u0000\u0000\u04e5"+ + "\u04e6\u0001\u0000\u0000\u0000\u04e6\u04e8\u0001\u0000\u0000\u0000\u04e7"+ + "\u04e9\u0003\u011e\u0087\u0000\u04e8\u04e7\u0001\u0000\u0000\u0000\u04e9"+ + "\u04ea\u0001\u0000\u0000\u0000\u04ea\u04e8\u0001\u0000\u0000\u0000\u04ea"+ + "\u04eb\u0001\u0000\u0000\u0000\u04eb\u0121\u0001\u0000\u0000\u0000\u04ec"+ + "\u04ed\u0003\u0120\u0088\u0000\u04ed\u04ee\u0001\u0000\u0000\u0000\u04ee"+ + "\u04ef\u0006\u0089\u001d\u0000\u04ef\u0123\u0001\u0000\u0000\u0000\u04f0"+ + "\u04f1\u0003B\u0019\u0000\u04f1\u04f2\u0001\u0000\u0000\u0000\u04f2\u04f3"+ + "\u0006\u008a\u000b\u0000\u04f3\u0125\u0001\u0000\u0000\u0000\u04f4\u04f5"+ + "\u0003D\u001a\u0000\u04f5\u04f6\u0001\u0000\u0000\u0000\u04f6\u04f7\u0006"+ + "\u008b\u000b\u0000\u04f7\u0127\u0001\u0000\u0000\u0000\u04f8\u04f9\u0003"+ + 
"F\u001b\u0000\u04f9\u04fa\u0001\u0000\u0000\u0000\u04fa\u04fb\u0006\u008c"+ + "\u000b\u0000\u04fb\u0129\u0001\u0000\u0000\u0000\u04fc\u04fd\u0003H\u001c"+ + "\u0000\u04fd\u04fe\u0001\u0000\u0000\u0000\u04fe\u04ff\u0006\u008d\u0010"+ + "\u0000\u04ff\u0500\u0006\u008d\f\u0000\u0500\u0501\u0006\u008d\f\u0000"+ + "\u0501\u012b\u0001\u0000\u0000\u0000\u0502\u0503\u0003j-\u0000\u0503\u0504"+ + "\u0001\u0000\u0000\u0000\u0504\u0505\u0006\u008e\u0014\u0000\u0505\u012d"+ + "\u0001\u0000\u0000\u0000\u0506\u0507\u0003p0\u0000\u0507\u0508\u0001\u0000"+ + "\u0000\u0000\u0508\u0509\u0006\u008f\u0013\u0000\u0509\u012f\u0001\u0000"+ + "\u0000\u0000\u050a\u050b\u0003t2\u0000\u050b\u050c\u0001\u0000\u0000\u0000"+ + "\u050c\u050d\u0006\u0090\u0017\u0000\u050d\u0131\u0001\u0000\u0000\u0000"+ + "\u050e\u050f\u0003\u011c\u0086\u0000\u050f\u0510\u0001\u0000\u0000\u0000"+ + "\u0510\u0511\u0006\u0091\u001e\u0000\u0511\u0133\u0001\u0000\u0000\u0000"+ + "\u0512\u0513\u0003\u00f8t\u0000\u0513\u0514\u0001\u0000\u0000\u0000\u0514"+ + "\u0515\u0006\u0092\u001a\u0000\u0515\u0135\u0001\u0000\u0000\u0000\u0516"+ + "\u0517\u0003\u00bcV\u0000\u0517\u0518\u0001\u0000\u0000\u0000\u0518\u0519"+ + "\u0006\u0093\u001f\u0000\u0519\u0137\u0001\u0000\u0000\u0000\u051a\u051b"+ + "\u0004\u0094\f\u0000\u051b\u051c\u0003\u008c>\u0000\u051c\u051d\u0001"+ + "\u0000\u0000\u0000\u051d\u051e\u0006\u0094\u0018\u0000\u051e\u0139\u0001"+ + "\u0000\u0000\u0000\u051f\u0520\u0004\u0095\r\u0000\u0520\u0521\u0003\u00b2"+ + "Q\u0000\u0521\u0522\u0001\u0000\u0000\u0000\u0522\u0523\u0006\u0095\u0019"+ + "\u0000\u0523\u013b\u0001\u0000\u0000\u0000\u0524\u0525\u0003B\u0019\u0000"+ + "\u0525\u0526\u0001\u0000\u0000\u0000\u0526\u0527\u0006\u0096\u000b\u0000"+ + "\u0527\u013d\u0001\u0000\u0000\u0000\u0528\u0529\u0003D\u001a\u0000\u0529"+ + "\u052a\u0001\u0000\u0000\u0000\u052a\u052b\u0006\u0097\u000b\u0000\u052b"+ + "\u013f\u0001\u0000\u0000\u0000\u052c\u052d\u0003F\u001b\u0000\u052d\u052e"+ + "\u0001\u0000\u0000\u0000\u052e\u052f\u0006\u0098\u000b\u0000\u052f\u0141"+ + "\u0001\u0000\u0000\u0000\u0530\u0531\u0003H\u001c\u0000\u0531\u0532\u0001"+ + "\u0000\u0000\u0000\u0532\u0533\u0006\u0099\u0010\u0000\u0533\u0534\u0006"+ + "\u0099\f\u0000\u0534\u0143\u0001\u0000\u0000\u0000\u0535\u0536\u0003t"+ + "2\u0000\u0536\u0537\u0001\u0000\u0000\u0000\u0537\u0538\u0006\u009a\u0017"+ + "\u0000\u0538\u0145\u0001\u0000\u0000\u0000\u0539\u053a\u0004\u009b\u000e"+ + "\u0000\u053a\u053b\u0003\u008c>\u0000\u053b\u053c\u0001\u0000\u0000\u0000"+ + "\u053c\u053d\u0006\u009b\u0018\u0000\u053d\u0147\u0001\u0000\u0000\u0000"+ + "\u053e\u053f\u0004\u009c\u000f\u0000\u053f\u0540\u0003\u00b2Q\u0000\u0540"+ + "\u0541\u0001\u0000\u0000\u0000\u0541\u0542\u0006\u009c\u0019\u0000\u0542"+ + "\u0149\u0001\u0000\u0000\u0000\u0543\u0544\u0003\u00bcV\u0000\u0544\u0545"+ + "\u0001\u0000\u0000\u0000\u0545\u0546\u0006\u009d\u001f\u0000\u0546\u014b"+ + "\u0001\u0000\u0000\u0000\u0547\u0548\u0003\u00b8T\u0000\u0548\u0549\u0001"+ + "\u0000\u0000\u0000\u0549\u054a\u0006\u009e \u0000\u054a\u014d\u0001\u0000"+ + "\u0000\u0000\u054b\u054c\u0003B\u0019\u0000\u054c\u054d\u0001\u0000\u0000"+ + "\u0000\u054d\u054e\u0006\u009f\u000b\u0000\u054e\u014f\u0001\u0000\u0000"+ + "\u0000\u054f\u0550\u0003D\u001a\u0000\u0550\u0551\u0001\u0000\u0000\u0000"+ + "\u0551\u0552\u0006\u00a0\u000b\u0000\u0552\u0151\u0001\u0000\u0000\u0000"+ + "\u0553\u0554\u0003F\u001b\u0000\u0554\u0555\u0001\u0000\u0000\u0000\u0555"+ + "\u0556\u0006\u00a1\u000b\u0000\u0556\u0153\u0001\u0000\u0000\u0000\u0557"+ + 
"\u0558\u0003H\u001c\u0000\u0558\u0559\u0001\u0000\u0000\u0000\u0559\u055a"+ + "\u0006\u00a2\u0010\u0000\u055a\u055b\u0006\u00a2\f\u0000\u055b\u0155\u0001"+ + "\u0000\u0000\u0000\u055c\u055d\u0007\u0001\u0000\u0000\u055d\u055e\u0007"+ + "\t\u0000\u0000\u055e\u055f\u0007\u000f\u0000\u0000\u055f\u0560\u0007\u0007"+ + "\u0000\u0000\u0560\u0157\u0001\u0000\u0000\u0000\u0561\u0562\u0003B\u0019"+ + "\u0000\u0562\u0563\u0001\u0000\u0000\u0000\u0563\u0564\u0006\u00a4\u000b"+ + "\u0000\u0564\u0159\u0001\u0000\u0000\u0000\u0565\u0566\u0003D\u001a\u0000"+ + "\u0566\u0567\u0001\u0000\u0000\u0000\u0567\u0568\u0006\u00a5\u000b\u0000"+ + "\u0568\u015b\u0001\u0000\u0000\u0000\u0569\u056a\u0003F\u001b\u0000\u056a"+ + "\u056b\u0001\u0000\u0000\u0000\u056b\u056c\u0006\u00a6\u000b\u0000\u056c"+ + "\u015d\u0001\u0000\u0000\u0000\u056d\u056e\u0003\u00b6S\u0000\u056e\u056f"+ + "\u0001\u0000\u0000\u0000\u056f\u0570\u0006\u00a7\u0011\u0000\u0570\u0571"+ + "\u0006\u00a7\f\u0000\u0571\u015f\u0001\u0000\u0000\u0000\u0572\u0573\u0003"+ + "n/\u0000\u0573\u0574\u0001\u0000\u0000\u0000\u0574\u0575\u0006\u00a8\u0012"+ + "\u0000\u0575\u0161\u0001\u0000\u0000\u0000\u0576\u057c\u0003T\"\u0000"+ + "\u0577\u057c\u0003J\u001d\u0000\u0578\u057c\u0003t2\u0000\u0579\u057c"+ + "\u0003L\u001e\u0000\u057a\u057c\u0003Z%\u0000\u057b\u0576\u0001\u0000"+ + "\u0000\u0000\u057b\u0577\u0001\u0000\u0000\u0000\u057b\u0578\u0001\u0000"+ + "\u0000\u0000\u057b\u0579\u0001\u0000\u0000\u0000\u057b\u057a\u0001\u0000"+ + "\u0000\u0000\u057c\u057d\u0001\u0000\u0000\u0000\u057d\u057b\u0001\u0000"+ + "\u0000\u0000\u057d\u057e\u0001\u0000\u0000\u0000\u057e\u0163\u0001\u0000"+ + "\u0000\u0000\u057f\u0580\u0003B\u0019\u0000\u0580\u0581\u0001\u0000\u0000"+ + "\u0000\u0581\u0582\u0006\u00aa\u000b\u0000\u0582\u0165\u0001\u0000\u0000"+ + "\u0000\u0583\u0584\u0003D\u001a\u0000\u0584\u0585\u0001\u0000\u0000\u0000"+ + "\u0585\u0586\u0006\u00ab\u000b\u0000\u0586\u0167\u0001\u0000\u0000\u0000"+ + "\u0587\u0588\u0003F\u001b\u0000\u0588\u0589\u0001\u0000\u0000\u0000\u0589"+ + "\u058a\u0006\u00ac\u000b\u0000\u058a\u0169\u0001\u0000\u0000\u0000\u058b"+ + "\u058c\u0003H\u001c\u0000\u058c\u058d\u0001\u0000\u0000\u0000\u058d\u058e"+ + "\u0006\u00ad\u0010\u0000\u058e\u058f\u0006\u00ad\f\u0000\u058f\u016b\u0001"+ + "\u0000\u0000\u0000\u0590\u0591\u0003n/\u0000\u0591\u0592\u0001\u0000\u0000"+ + "\u0000\u0592\u0593\u0006\u00ae\u0012\u0000\u0593\u016d\u0001\u0000\u0000"+ + "\u0000\u0594\u0595\u0003p0\u0000\u0595\u0596\u0001\u0000\u0000\u0000\u0596"+ + "\u0597\u0006\u00af\u0013\u0000\u0597\u016f\u0001\u0000\u0000\u0000\u0598"+ + "\u0599\u0003t2\u0000\u0599\u059a\u0001\u0000\u0000\u0000\u059a\u059b\u0006"+ + "\u00b0\u0017\u0000\u059b\u0171\u0001\u0000\u0000\u0000\u059c\u059d\u0003"+ + "\u011a\u0085\u0000\u059d\u059e\u0001\u0000\u0000\u0000\u059e\u059f\u0006"+ + "\u00b1!\u0000\u059f\u05a0\u0006\u00b1\"\u0000\u05a0\u0173\u0001\u0000"+ + "\u0000\u0000\u05a1\u05a2\u0003\u00deg\u0000\u05a2\u05a3\u0001\u0000\u0000"+ + "\u0000\u05a3\u05a4\u0006\u00b2\u0015\u0000\u05a4\u0175\u0001\u0000\u0000"+ + "\u0000\u05a5\u05a6\u0003^\'\u0000\u05a6\u05a7\u0001\u0000\u0000\u0000"+ + "\u05a7\u05a8\u0006\u00b3\u0016\u0000\u05a8\u0177\u0001\u0000\u0000\u0000"+ + "\u05a9\u05aa\u0003B\u0019\u0000\u05aa\u05ab\u0001\u0000\u0000\u0000\u05ab"+ + "\u05ac\u0006\u00b4\u000b\u0000\u05ac\u0179\u0001\u0000\u0000\u0000\u05ad"+ + "\u05ae\u0003D\u001a\u0000\u05ae\u05af\u0001\u0000\u0000\u0000\u05af\u05b0"+ + "\u0006\u00b5\u000b\u0000\u05b0\u017b\u0001\u0000\u0000\u0000\u05b1\u05b2"+ + 
"\u0003F\u001b\u0000\u05b2\u05b3\u0001\u0000\u0000\u0000\u05b3\u05b4\u0006"+ + "\u00b6\u000b\u0000\u05b4\u017d\u0001\u0000\u0000\u0000\u05b5\u05b6\u0003"+ + "H\u001c\u0000\u05b6\u05b7\u0001\u0000\u0000\u0000\u05b7\u05b8\u0006\u00b7"+ + "\u0010\u0000\u05b8\u05b9\u0006\u00b7\f\u0000\u05b9\u05ba\u0006\u00b7\f"+ + "\u0000\u05ba\u017f\u0001\u0000\u0000\u0000\u05bb\u05bc\u0003p0\u0000\u05bc"+ + "\u05bd\u0001\u0000\u0000\u0000\u05bd\u05be\u0006\u00b8\u0013\u0000\u05be"+ + "\u0181\u0001\u0000\u0000\u0000\u05bf\u05c0\u0003t2\u0000\u05c0\u05c1\u0001"+ + "\u0000\u0000\u0000\u05c1\u05c2\u0006\u00b9\u0017\u0000\u05c2\u0183\u0001"+ + "\u0000\u0000\u0000\u05c3\u05c4\u0003\u00f8t\u0000\u05c4\u05c5\u0001\u0000"+ + "\u0000\u0000\u05c5\u05c6\u0006\u00ba\u001a\u0000\u05c6\u0185\u0001\u0000"+ + "\u0000\u0000\u05c7\u05c8\u0003B\u0019\u0000\u05c8\u05c9\u0001\u0000\u0000"+ + "\u0000\u05c9\u05ca\u0006\u00bb\u000b\u0000\u05ca\u0187\u0001\u0000\u0000"+ + "\u0000\u05cb\u05cc\u0003D\u001a\u0000\u05cc\u05cd\u0001\u0000\u0000\u0000"+ + "\u05cd\u05ce\u0006\u00bc\u000b\u0000\u05ce\u0189\u0001\u0000\u0000\u0000"+ + "\u05cf\u05d0\u0003F\u001b\u0000\u05d0\u05d1\u0001\u0000\u0000\u0000\u05d1"+ + "\u05d2\u0006\u00bd\u000b\u0000\u05d2\u018b\u0001\u0000\u0000\u0000\u05d3"+ + "\u05d4\u0003H\u001c\u0000\u05d4\u05d5\u0001\u0000\u0000\u0000\u05d5\u05d6"+ + "\u0006\u00be\u0010\u0000\u05d6\u05d7\u0006\u00be\f\u0000\u05d7\u018d\u0001"+ + "\u0000\u0000\u0000\u05d8\u05d9\u00036\u0013\u0000\u05d9\u05da\u0001\u0000"+ + "\u0000\u0000\u05da\u05db\u0006\u00bf#\u0000\u05db\u018f\u0001\u0000\u0000"+ + "\u0000\u05dc\u05dd\u0003\u010c~\u0000\u05dd\u05de\u0001\u0000\u0000\u0000"+ + "\u05de\u05df\u0006\u00c0$\u0000\u05df\u0191\u0001\u0000\u0000\u0000\u05e0"+ + "\u05e1\u0003\u011a\u0085\u0000\u05e1\u05e2\u0001\u0000\u0000\u0000\u05e2"+ + "\u05e3\u0006\u00c1!\u0000\u05e3\u05e4\u0006\u00c1\f\u0000\u05e4\u05e5"+ + "\u0006\u00c1\u0000\u0000\u05e5\u0193\u0001\u0000\u0000\u0000\u05e6\u05e7"+ + "\u0007\u0014\u0000\u0000\u05e7\u05e8\u0007\u0002\u0000\u0000\u05e8\u05e9"+ + "\u0007\u0001\u0000\u0000\u05e9\u05ea\u0007\t\u0000\u0000\u05ea\u05eb\u0007"+ + "\u0011\u0000\u0000\u05eb\u05ec\u0001\u0000\u0000\u0000\u05ec\u05ed\u0006"+ + "\u00c2\f\u0000\u05ed\u05ee\u0006\u00c2\u0000\u0000\u05ee\u0195\u0001\u0000"+ + "\u0000\u0000\u05ef\u05f0\u0003\u00deg\u0000\u05f0\u05f1\u0001\u0000\u0000"+ + "\u0000\u05f1\u05f2\u0006\u00c3\u0015\u0000\u05f2\u0197\u0001\u0000\u0000"+ + "\u0000\u05f3\u05f4\u0003^\'\u0000\u05f4\u05f5\u0001\u0000\u0000\u0000"+ + "\u05f5\u05f6\u0006\u00c4\u0016\u0000\u05f6\u0199\u0001\u0000\u0000\u0000"+ + "\u05f7\u05f8\u0003n/\u0000\u05f8\u05f9\u0001\u0000\u0000\u0000\u05f9\u05fa"+ + "\u0006\u00c5\u0012\u0000\u05fa\u019b\u0001\u0000\u0000\u0000\u05fb\u05fc"+ + "\u0003\u00b8T\u0000\u05fc\u05fd\u0001\u0000\u0000\u0000\u05fd\u05fe\u0006"+ + "\u00c6 \u0000\u05fe\u019d\u0001\u0000\u0000\u0000\u05ff\u0600\u0003\u00bc"+ + "V\u0000\u0600\u0601\u0001\u0000\u0000\u0000\u0601\u0602\u0006\u00c7\u001f"+ + "\u0000\u0602\u019f\u0001\u0000\u0000\u0000\u0603\u0604\u0003B\u0019\u0000"+ + "\u0604\u0605\u0001\u0000\u0000\u0000\u0605\u0606\u0006\u00c8\u000b\u0000"+ + "\u0606\u01a1\u0001\u0000\u0000\u0000\u0607\u0608\u0003D\u001a\u0000\u0608"+ + "\u0609\u0001\u0000\u0000\u0000\u0609\u060a\u0006\u00c9\u000b\u0000\u060a"+ + "\u01a3\u0001\u0000\u0000\u0000\u060b\u060c\u0003F\u001b\u0000\u060c\u060d"+ + "\u0001\u0000\u0000\u0000\u060d\u060e\u0006\u00ca\u000b\u0000\u060e\u01a5"+ + "\u0001\u0000\u0000\u0000\u060f\u0610\u0003H\u001c\u0000\u0610\u0611\u0001"+ + 
"\u0000\u0000\u0000\u0611\u0612\u0006\u00cb\u0010\u0000\u0612\u0613\u0006"+ + "\u00cb\f\u0000\u0613\u01a7\u0001\u0000\u0000\u0000\u0614\u0615\u0003\u00de"+ + "g\u0000\u0615\u0616\u0001\u0000\u0000\u0000\u0616\u0617\u0006\u00cc\u0015"+ + "\u0000\u0617\u0618\u0006\u00cc\f\u0000\u0618\u0619\u0006\u00cc%\u0000"+ + "\u0619\u01a9\u0001\u0000\u0000\u0000\u061a\u061b\u0003^\'\u0000\u061b"+ + "\u061c\u0001\u0000\u0000\u0000\u061c\u061d\u0006\u00cd\u0016\u0000\u061d"+ + "\u061e\u0006\u00cd\f\u0000\u061e\u061f\u0006\u00cd%\u0000\u061f\u01ab"+ + "\u0001\u0000\u0000\u0000\u0620\u0621\u0003B\u0019\u0000\u0621\u0622\u0001"+ + "\u0000\u0000\u0000\u0622\u0623\u0006\u00ce\u000b\u0000\u0623\u01ad\u0001"+ + "\u0000\u0000\u0000\u0624\u0625\u0003D\u001a\u0000\u0625\u0626\u0001\u0000"+ + "\u0000\u0000\u0626\u0627\u0006\u00cf\u000b\u0000\u0627\u01af\u0001\u0000"+ + "\u0000\u0000\u0628\u0629\u0003F\u001b\u0000\u0629\u062a\u0001\u0000\u0000"+ + "\u0000\u062a\u062b\u0006\u00d0\u000b\u0000\u062b\u01b1\u0001\u0000\u0000"+ + "\u0000\u062c\u062d\u0003n/\u0000\u062d\u062e\u0001\u0000\u0000\u0000\u062e"+ + "\u062f\u0006\u00d1\u0012\u0000\u062f\u0630\u0006\u00d1\f\u0000\u0630\u0631"+ + "\u0006\u00d1\t\u0000\u0631\u01b3\u0001\u0000\u0000\u0000\u0632\u0633\u0003"+ + "p0\u0000\u0633\u0634\u0001\u0000\u0000\u0000\u0634\u0635\u0006\u00d2\u0013"+ + "\u0000\u0635\u0636\u0006\u00d2\f\u0000\u0636\u0637\u0006\u00d2\t\u0000"+ + "\u0637\u01b5\u0001\u0000\u0000\u0000\u0638\u0639\u0003B\u0019\u0000\u0639"+ + "\u063a\u0001\u0000\u0000\u0000\u063a\u063b\u0006\u00d3\u000b\u0000\u063b"+ + "\u01b7\u0001\u0000\u0000\u0000\u063c\u063d\u0003D\u001a\u0000\u063d\u063e"+ + "\u0001\u0000\u0000\u0000\u063e\u063f\u0006\u00d4\u000b\u0000\u063f\u01b9"+ + "\u0001\u0000\u0000\u0000\u0640\u0641\u0003F\u001b\u0000\u0641\u0642\u0001"+ + "\u0000\u0000\u0000\u0642\u0643\u0006\u00d5\u000b\u0000\u0643\u01bb\u0001"+ + "\u0000\u0000\u0000\u0644\u0645\u0003\u00bcV\u0000\u0645\u0646\u0001\u0000"+ + "\u0000\u0000\u0646\u0647\u0006\u00d6\f\u0000\u0647\u0648\u0006\u00d6\u0000"+ + "\u0000\u0648\u0649\u0006\u00d6\u001f\u0000\u0649\u01bd\u0001\u0000\u0000"+ + "\u0000\u064a\u064b\u0003\u00b8T\u0000\u064b\u064c\u0001\u0000\u0000\u0000"+ + "\u064c\u064d\u0006\u00d7\f\u0000\u064d\u064e\u0006\u00d7\u0000\u0000\u064e"+ + "\u064f\u0006\u00d7 \u0000\u064f\u01bf\u0001\u0000\u0000\u0000\u0650\u0651"+ + "\u0003d*\u0000\u0651\u0652\u0001\u0000\u0000\u0000\u0652\u0653\u0006\u00d8"+ + "\f\u0000\u0653\u0654\u0006\u00d8\u0000\u0000\u0654\u0655\u0006\u00d8&"+ + "\u0000\u0655\u01c1\u0001\u0000\u0000\u0000\u0656\u0657\u0003H\u001c\u0000"+ + "\u0657\u0658\u0001\u0000\u0000\u0000\u0658\u0659\u0006\u00d9\u0010\u0000"+ + "\u0659\u065a\u0006\u00d9\f\u0000\u065a\u01c3\u0001\u0000\u0000\u0000B"+ "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e"+ "\u000f\u0299\u02a3\u02a7\u02aa\u02b3\u02b5\u02c0\u02d3\u02d8\u02e1\u02e8"+ "\u02ed\u02ef\u02fa\u0302\u0305\u0307\u030c\u0311\u0317\u031e\u0323\u0329"+ - "\u032c\u0334\u0338\u03be\u03c3\u03ca\u03cc\u03dc\u03e1\u03e6\u03e8\u03ee"+ - "\u043b\u0440\u0471\u0475\u047a\u047f\u0484\u0486\u048a\u048c\u04e3\u04e7"+ - "\u04ec\u057d\u057f\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006\u0000"+ + "\u032c\u0334\u0338\u03bc\u03c1\u03c8\u03ca\u03da\u03df\u03e4\u03e6\u03ec"+ + "\u0439\u043e\u046f\u0473\u0478\u047d\u0482\u0484\u0488\u048a\u04e1\u04e5"+ + "\u04ea\u057b\u057d\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006\u0000"+ "\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005"+ 
"\t\u0000\u0005\u000b\u0000\u0005\u000e\u0000\u0005\r\u0000\u0000\u0001"+ "\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007H\u0000\u0005\u0000\u0000"+ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 9bed77ff31168..a75d7e985c1d0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -68,8 +68,8 @@ null '*' '/' '%' -null -null +'{' +'}' null null ']' @@ -334,4 +334,4 @@ joinPredicate atn: -[4, 1, 130, 651, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 207, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 215, 8, 5, 10, 5, 12, 5, 218, 9, 5, 1, 6, 1, 6, 3, 6, 222, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 229, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 234, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 239, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 249, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 255, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 263, 8, 9, 10, 9, 12, 9, 266, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 276, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 281, 8, 10, 10, 10, 12, 10, 284, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 292, 8, 11, 10, 11, 12, 11, 295, 9, 11, 1, 11, 1, 11, 3, 11, 299, 8, 11, 3, 11, 301, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 312, 8, 13, 10, 13, 12, 13, 315, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 5, 17, 331, 8, 17, 10, 17, 12, 17, 334, 9, 17, 1, 18, 1, 18, 1, 18, 3, 18, 339, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 347, 8, 19, 10, 19, 12, 19, 350, 9, 19, 1, 19, 3, 19, 353, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 358, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 370, 8, 23, 10, 23, 12, 23, 373, 9, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 379, 8, 24, 10, 24, 12, 24, 382, 9, 24, 1, 24, 3, 24, 385, 8, 24, 1, 24, 1, 24, 3, 24, 389, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 
26, 396, 8, 26, 1, 26, 1, 26, 3, 26, 400, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 405, 8, 27, 10, 27, 12, 27, 408, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 413, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 418, 8, 29, 10, 29, 12, 29, 421, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 426, 8, 30, 10, 30, 12, 30, 429, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 434, 8, 31, 10, 31, 12, 31, 437, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 444, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 459, 8, 34, 10, 34, 12, 34, 462, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 470, 8, 34, 10, 34, 12, 34, 473, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 481, 8, 34, 10, 34, 12, 34, 484, 9, 34, 1, 34, 1, 34, 3, 34, 488, 8, 34, 1, 35, 1, 35, 3, 35, 492, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 497, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 506, 8, 38, 10, 38, 12, 38, 509, 9, 38, 1, 39, 1, 39, 3, 39, 513, 8, 39, 1, 39, 1, 39, 3, 39, 517, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 529, 8, 42, 10, 42, 12, 42, 532, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 542, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 554, 8, 47, 10, 47, 12, 47, 557, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 567, 8, 50, 1, 51, 3, 51, 570, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 575, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 597, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 603, 8, 58, 10, 58, 12, 58, 606, 9, 58, 3, 58, 608, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 613, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 626, 8, 61, 1, 62, 3, 62, 629, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 638, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 644, 8, 64, 10, 64, 12, 64, 647, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 83, 83, 1, 0, 74, 75, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 678, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 206, 1, 0, 0, 0, 12, 233, 1, 0, 0, 0, 14, 235, 1, 0, 0, 0, 16, 248, 1, 0, 0, 0, 18, 254, 1, 0, 0, 0, 20, 275, 1, 0, 0, 0, 22, 285, 1, 0, 0, 0, 24, 304, 1, 0, 0, 0, 26, 306, 1, 0, 0, 0, 28, 318, 1, 0, 0, 0, 30, 322, 1, 0, 0, 0, 32, 324, 1, 0, 0, 0, 34, 327, 1, 0, 0, 0, 36, 338, 1, 0, 0, 0, 38, 342, 1, 0, 0, 0, 40, 357, 1, 0, 0, 0, 42, 361, 1, 0, 0, 0, 44, 363, 1, 0, 0, 0, 46, 365, 1, 0, 0, 0, 48, 374, 1, 0, 0, 0, 50, 390, 1, 0, 0, 0, 52, 393, 1, 0, 0, 0, 54, 401, 1, 0, 0, 0, 56, 409, 1, 0, 0, 0, 58, 414, 1, 0, 0, 0, 60, 422, 1, 0, 0, 0, 62, 430, 1, 0, 0, 0, 64, 438, 1, 0, 0, 0, 66, 443, 1, 0, 0, 0, 68, 487, 1, 0, 0, 0, 70, 491, 1, 0, 0, 0, 72, 496, 1, 0, 0, 0, 74, 498, 1, 0, 0, 0, 76, 501, 1, 0, 0, 0, 78, 510, 1, 0, 0, 0, 80, 518, 1, 0, 0, 0, 82, 521, 1, 0, 0, 0, 84, 524, 1, 0, 0, 0, 86, 533, 1, 0, 0, 0, 88, 537, 1, 0, 0, 0, 90, 543, 1, 0, 0, 0, 92, 547, 1, 0, 0, 0, 94, 550, 1, 0, 0, 0, 96, 558, 1, 0, 0, 0, 98, 562, 1, 0, 0, 0, 100, 
566, 1, 0, 0, 0, 102, 569, 1, 0, 0, 0, 104, 574, 1, 0, 0, 0, 106, 578, 1, 0, 0, 0, 108, 580, 1, 0, 0, 0, 110, 582, 1, 0, 0, 0, 112, 585, 1, 0, 0, 0, 114, 589, 1, 0, 0, 0, 116, 592, 1, 0, 0, 0, 118, 612, 1, 0, 0, 0, 120, 616, 1, 0, 0, 0, 122, 621, 1, 0, 0, 0, 124, 628, 1, 0, 0, 0, 126, 634, 1, 0, 0, 0, 128, 639, 1, 0, 0, 0, 130, 648, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 29, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 38, 19, 0, 148, 153, 3, 32, 16, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 207, 3, 10, 5, 8, 180, 207, 3, 16, 8, 0, 181, 207, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 207, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 207, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 177, 1, 0, 0, 0, 206, 180, 1, 0, 0, 0, 206, 181, 1, 0, 0, 0, 206, 182, 1, 0, 0, 0, 206, 198, 1, 0, 0, 0, 206, 205, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 5, 0, 0, 209, 210, 5, 34, 0, 0, 210, 215, 3, 10, 5, 6, 211, 212, 10, 4, 0, 0, 212, 213, 5, 52, 0, 0, 213, 215, 3, 10, 5, 5, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 221, 3, 16, 8, 0, 220, 222, 5, 49, 0, 0, 221, 220, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 5, 47, 0, 0, 224, 225, 3, 106, 53, 0, 225, 234, 1, 0, 0, 0, 226, 228, 3, 16, 8, 0, 227, 229, 5, 49, 0, 0, 228, 227, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 5, 54, 0, 0, 231, 232, 3, 106, 53, 0, 232, 234, 1, 0, 0, 0, 233, 219, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 234, 13, 1, 0, 0, 0, 235, 238, 3, 58, 29, 0, 236, 237, 5, 37, 0, 0, 237, 239, 3, 30, 15, 0, 238, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 241, 5, 38, 0, 0, 241, 
242, 3, 68, 34, 0, 242, 15, 1, 0, 0, 0, 243, 249, 3, 18, 9, 0, 244, 245, 3, 18, 9, 0, 245, 246, 3, 108, 54, 0, 246, 247, 3, 18, 9, 0, 247, 249, 1, 0, 0, 0, 248, 243, 1, 0, 0, 0, 248, 244, 1, 0, 0, 0, 249, 17, 1, 0, 0, 0, 250, 251, 6, 9, -1, 0, 251, 255, 3, 20, 10, 0, 252, 253, 7, 0, 0, 0, 253, 255, 3, 18, 9, 3, 254, 250, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 264, 1, 0, 0, 0, 256, 257, 10, 2, 0, 0, 257, 258, 7, 1, 0, 0, 258, 263, 3, 18, 9, 3, 259, 260, 10, 1, 0, 0, 260, 261, 7, 0, 0, 0, 261, 263, 3, 18, 9, 2, 262, 256, 1, 0, 0, 0, 262, 259, 1, 0, 0, 0, 263, 266, 1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 19, 1, 0, 0, 0, 266, 264, 1, 0, 0, 0, 267, 268, 6, 10, -1, 0, 268, 276, 3, 68, 34, 0, 269, 276, 3, 58, 29, 0, 270, 276, 3, 22, 11, 0, 271, 272, 5, 48, 0, 0, 272, 273, 3, 10, 5, 0, 273, 274, 5, 55, 0, 0, 274, 276, 1, 0, 0, 0, 275, 267, 1, 0, 0, 0, 275, 269, 1, 0, 0, 0, 275, 270, 1, 0, 0, 0, 275, 271, 1, 0, 0, 0, 276, 282, 1, 0, 0, 0, 277, 278, 10, 1, 0, 0, 278, 279, 5, 37, 0, 0, 279, 281, 3, 30, 15, 0, 280, 277, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 282, 283, 1, 0, 0, 0, 283, 21, 1, 0, 0, 0, 284, 282, 1, 0, 0, 0, 285, 286, 3, 24, 12, 0, 286, 300, 5, 48, 0, 0, 287, 301, 5, 66, 0, 0, 288, 293, 3, 10, 5, 0, 289, 290, 5, 39, 0, 0, 290, 292, 3, 10, 5, 0, 291, 289, 1, 0, 0, 0, 292, 295, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 298, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 296, 297, 5, 39, 0, 0, 297, 299, 3, 26, 13, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 301, 1, 0, 0, 0, 300, 287, 1, 0, 0, 0, 300, 288, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 303, 5, 55, 0, 0, 303, 23, 1, 0, 0, 0, 304, 305, 3, 72, 36, 0, 305, 25, 1, 0, 0, 0, 306, 307, 4, 13, 10, 0, 307, 308, 5, 69, 0, 0, 308, 313, 3, 28, 14, 0, 309, 310, 5, 39, 0, 0, 310, 312, 3, 28, 14, 0, 311, 309, 1, 0, 0, 0, 312, 315, 1, 0, 0, 0, 313, 311, 1, 0, 0, 0, 313, 314, 1, 0, 0, 0, 314, 316, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 316, 317, 5, 70, 0, 0, 317, 27, 1, 0, 0, 0, 318, 319, 3, 106, 53, 0, 319, 320, 5, 38, 0, 0, 320, 321, 3, 68, 34, 0, 321, 29, 1, 0, 0, 0, 322, 323, 3, 64, 32, 0, 323, 31, 1, 0, 0, 0, 324, 325, 5, 12, 0, 0, 325, 326, 3, 34, 17, 0, 326, 33, 1, 0, 0, 0, 327, 332, 3, 36, 18, 0, 328, 329, 5, 39, 0, 0, 329, 331, 3, 36, 18, 0, 330, 328, 1, 0, 0, 0, 331, 334, 1, 0, 0, 0, 332, 330, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 35, 1, 0, 0, 0, 334, 332, 1, 0, 0, 0, 335, 336, 3, 58, 29, 0, 336, 337, 5, 36, 0, 0, 337, 339, 1, 0, 0, 0, 338, 335, 1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 340, 1, 0, 0, 0, 340, 341, 3, 10, 5, 0, 341, 37, 1, 0, 0, 0, 342, 343, 5, 6, 0, 0, 343, 348, 3, 40, 20, 0, 344, 345, 5, 39, 0, 0, 345, 347, 3, 40, 20, 0, 346, 344, 1, 0, 0, 0, 347, 350, 1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 348, 349, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351, 353, 3, 46, 23, 0, 352, 351, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 39, 1, 0, 0, 0, 354, 355, 3, 42, 21, 0, 355, 356, 5, 38, 0, 0, 356, 358, 1, 0, 0, 0, 357, 354, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 360, 3, 44, 22, 0, 360, 41, 1, 0, 0, 0, 361, 362, 5, 83, 0, 0, 362, 43, 1, 0, 0, 0, 363, 364, 7, 2, 0, 0, 364, 45, 1, 0, 0, 0, 365, 366, 5, 82, 0, 0, 366, 371, 5, 83, 0, 0, 367, 368, 5, 39, 0, 0, 368, 370, 5, 83, 0, 0, 369, 367, 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 371, 372, 1, 0, 0, 0, 372, 47, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 374, 375, 5, 19, 0, 0, 375, 380, 3, 40, 20, 0, 376, 377, 5, 39, 0, 0, 377, 379, 3, 40, 20, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 
378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 385, 3, 54, 27, 0, 384, 383, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 388, 1, 0, 0, 0, 386, 387, 5, 33, 0, 0, 387, 389, 3, 34, 17, 0, 388, 386, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 49, 1, 0, 0, 0, 390, 391, 5, 4, 0, 0, 391, 392, 3, 34, 17, 0, 392, 51, 1, 0, 0, 0, 393, 395, 5, 15, 0, 0, 394, 396, 3, 54, 27, 0, 395, 394, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 399, 1, 0, 0, 0, 397, 398, 5, 33, 0, 0, 398, 400, 3, 34, 17, 0, 399, 397, 1, 0, 0, 0, 399, 400, 1, 0, 0, 0, 400, 53, 1, 0, 0, 0, 401, 406, 3, 56, 28, 0, 402, 403, 5, 39, 0, 0, 403, 405, 3, 56, 28, 0, 404, 402, 1, 0, 0, 0, 405, 408, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 406, 407, 1, 0, 0, 0, 407, 55, 1, 0, 0, 0, 408, 406, 1, 0, 0, 0, 409, 412, 3, 36, 18, 0, 410, 411, 5, 16, 0, 0, 411, 413, 3, 10, 5, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 57, 1, 0, 0, 0, 414, 419, 3, 72, 36, 0, 415, 416, 5, 41, 0, 0, 416, 418, 3, 72, 36, 0, 417, 415, 1, 0, 0, 0, 418, 421, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 419, 420, 1, 0, 0, 0, 420, 59, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 422, 427, 3, 66, 33, 0, 423, 424, 5, 41, 0, 0, 424, 426, 3, 66, 33, 0, 425, 423, 1, 0, 0, 0, 426, 429, 1, 0, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 61, 1, 0, 0, 0, 429, 427, 1, 0, 0, 0, 430, 435, 3, 60, 30, 0, 431, 432, 5, 39, 0, 0, 432, 434, 3, 60, 30, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 63, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 7, 3, 0, 0, 439, 65, 1, 0, 0, 0, 440, 444, 5, 87, 0, 0, 441, 442, 4, 33, 11, 0, 442, 444, 3, 70, 35, 0, 443, 440, 1, 0, 0, 0, 443, 441, 1, 0, 0, 0, 444, 67, 1, 0, 0, 0, 445, 488, 5, 50, 0, 0, 446, 447, 3, 104, 52, 0, 447, 448, 5, 74, 0, 0, 448, 488, 1, 0, 0, 0, 449, 488, 3, 102, 51, 0, 450, 488, 3, 104, 52, 0, 451, 488, 3, 98, 49, 0, 452, 488, 3, 70, 35, 0, 453, 488, 3, 106, 53, 0, 454, 455, 5, 72, 0, 0, 455, 460, 3, 100, 50, 0, 456, 457, 5, 39, 0, 0, 457, 459, 3, 100, 50, 0, 458, 456, 1, 0, 0, 0, 459, 462, 1, 0, 0, 0, 460, 458, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 463, 1, 0, 0, 0, 462, 460, 1, 0, 0, 0, 463, 464, 5, 73, 0, 0, 464, 488, 1, 0, 0, 0, 465, 466, 5, 72, 0, 0, 466, 471, 3, 98, 49, 0, 467, 468, 5, 39, 0, 0, 468, 470, 3, 98, 49, 0, 469, 467, 1, 0, 0, 0, 470, 473, 1, 0, 0, 0, 471, 469, 1, 0, 0, 0, 471, 472, 1, 0, 0, 0, 472, 474, 1, 0, 0, 0, 473, 471, 1, 0, 0, 0, 474, 475, 5, 73, 0, 0, 475, 488, 1, 0, 0, 0, 476, 477, 5, 72, 0, 0, 477, 482, 3, 106, 53, 0, 478, 479, 5, 39, 0, 0, 479, 481, 3, 106, 53, 0, 480, 478, 1, 0, 0, 0, 481, 484, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 485, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 486, 5, 73, 0, 0, 486, 488, 1, 0, 0, 0, 487, 445, 1, 0, 0, 0, 487, 446, 1, 0, 0, 0, 487, 449, 1, 0, 0, 0, 487, 450, 1, 0, 0, 0, 487, 451, 1, 0, 0, 0, 487, 452, 1, 0, 0, 0, 487, 453, 1, 0, 0, 0, 487, 454, 1, 0, 0, 0, 487, 465, 1, 0, 0, 0, 487, 476, 1, 0, 0, 0, 488, 69, 1, 0, 0, 0, 489, 492, 5, 53, 0, 0, 490, 492, 5, 71, 0, 0, 491, 489, 1, 0, 0, 0, 491, 490, 1, 0, 0, 0, 492, 71, 1, 0, 0, 0, 493, 497, 3, 64, 32, 0, 494, 495, 4, 36, 12, 0, 495, 497, 3, 70, 35, 0, 496, 493, 1, 0, 0, 0, 496, 494, 1, 0, 0, 0, 497, 73, 1, 0, 0, 0, 498, 499, 5, 9, 0, 0, 499, 500, 5, 31, 0, 0, 500, 75, 1, 0, 0, 0, 501, 502, 5, 14, 0, 0, 502, 507, 3, 78, 39, 0, 503, 504, 5, 39, 0, 0, 504, 506, 3, 78, 39, 0, 505, 503, 1, 0, 0, 0, 506, 509, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 77, 1, 0, 0, 0, 509, 507, 1, 0, 0, 0, 510, 512, 3, 10, 5, 0, 511, 513, 7, 4, 0, 
0, 512, 511, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 516, 1, 0, 0, 0, 514, 515, 5, 51, 0, 0, 515, 517, 7, 5, 0, 0, 516, 514, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 79, 1, 0, 0, 0, 518, 519, 5, 8, 0, 0, 519, 520, 3, 62, 31, 0, 520, 81, 1, 0, 0, 0, 521, 522, 5, 2, 0, 0, 522, 523, 3, 62, 31, 0, 523, 83, 1, 0, 0, 0, 524, 525, 5, 11, 0, 0, 525, 530, 3, 86, 43, 0, 526, 527, 5, 39, 0, 0, 527, 529, 3, 86, 43, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 85, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 534, 3, 60, 30, 0, 534, 535, 5, 91, 0, 0, 535, 536, 3, 60, 30, 0, 536, 87, 1, 0, 0, 0, 537, 538, 5, 1, 0, 0, 538, 539, 3, 20, 10, 0, 539, 541, 3, 106, 53, 0, 540, 542, 3, 94, 47, 0, 541, 540, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 89, 1, 0, 0, 0, 543, 544, 5, 7, 0, 0, 544, 545, 3, 20, 10, 0, 545, 546, 3, 106, 53, 0, 546, 91, 1, 0, 0, 0, 547, 548, 5, 10, 0, 0, 548, 549, 3, 58, 29, 0, 549, 93, 1, 0, 0, 0, 550, 555, 3, 96, 48, 0, 551, 552, 5, 39, 0, 0, 552, 554, 3, 96, 48, 0, 553, 551, 1, 0, 0, 0, 554, 557, 1, 0, 0, 0, 555, 553, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 95, 1, 0, 0, 0, 557, 555, 1, 0, 0, 0, 558, 559, 3, 64, 32, 0, 559, 560, 5, 36, 0, 0, 560, 561, 3, 68, 34, 0, 561, 97, 1, 0, 0, 0, 562, 563, 7, 6, 0, 0, 563, 99, 1, 0, 0, 0, 564, 567, 3, 102, 51, 0, 565, 567, 3, 104, 52, 0, 566, 564, 1, 0, 0, 0, 566, 565, 1, 0, 0, 0, 567, 101, 1, 0, 0, 0, 568, 570, 7, 0, 0, 0, 569, 568, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 5, 32, 0, 0, 572, 103, 1, 0, 0, 0, 573, 575, 7, 0, 0, 0, 574, 573, 1, 0, 0, 0, 574, 575, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 577, 5, 31, 0, 0, 577, 105, 1, 0, 0, 0, 578, 579, 5, 30, 0, 0, 579, 107, 1, 0, 0, 0, 580, 581, 7, 7, 0, 0, 581, 109, 1, 0, 0, 0, 582, 583, 5, 5, 0, 0, 583, 584, 3, 112, 56, 0, 584, 111, 1, 0, 0, 0, 585, 586, 5, 72, 0, 0, 586, 587, 3, 2, 1, 0, 587, 588, 5, 73, 0, 0, 588, 113, 1, 0, 0, 0, 589, 590, 5, 13, 0, 0, 590, 591, 5, 107, 0, 0, 591, 115, 1, 0, 0, 0, 592, 593, 5, 3, 0, 0, 593, 596, 5, 97, 0, 0, 594, 595, 5, 95, 0, 0, 595, 597, 3, 60, 30, 0, 596, 594, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 607, 1, 0, 0, 0, 598, 599, 5, 96, 0, 0, 599, 604, 3, 118, 59, 0, 600, 601, 5, 39, 0, 0, 601, 603, 3, 118, 59, 0, 602, 600, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 608, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 598, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 608, 117, 1, 0, 0, 0, 609, 610, 3, 60, 30, 0, 610, 611, 5, 36, 0, 0, 611, 613, 1, 0, 0, 0, 612, 609, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 3, 60, 30, 0, 615, 119, 1, 0, 0, 0, 616, 617, 5, 18, 0, 0, 617, 618, 3, 40, 20, 0, 618, 619, 5, 95, 0, 0, 619, 620, 3, 62, 31, 0, 620, 121, 1, 0, 0, 0, 621, 622, 5, 17, 0, 0, 622, 625, 3, 54, 27, 0, 623, 624, 5, 33, 0, 0, 624, 626, 3, 34, 17, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 123, 1, 0, 0, 0, 627, 629, 7, 8, 0, 0, 628, 627, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 631, 5, 20, 0, 0, 631, 632, 3, 126, 63, 0, 632, 633, 3, 128, 64, 0, 633, 125, 1, 0, 0, 0, 634, 637, 3, 40, 20, 0, 635, 636, 5, 91, 0, 0, 636, 638, 3, 64, 32, 0, 637, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 127, 1, 0, 0, 0, 639, 640, 5, 95, 0, 0, 640, 645, 3, 130, 65, 0, 641, 642, 5, 39, 0, 0, 642, 644, 3, 130, 65, 0, 643, 641, 1, 0, 0, 0, 644, 647, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 129, 1, 0, 0, 0, 647, 645, 1, 0, 0, 0, 648, 649, 3, 16, 8, 0, 649, 131, 1, 0, 0, 0, 63, 143, 152, 172, 184, 193, 201, 206, 214, 216, 221, 228, 233, 
238, 248, 254, 262, 264, 275, 282, 293, 298, 300, 313, 332, 338, 348, 352, 357, 371, 380, 384, 388, 395, 399, 406, 412, 419, 427, 435, 443, 460, 471, 482, 487, 491, 496, 507, 512, 516, 530, 541, 555, 566, 569, 574, 596, 604, 607, 612, 625, 628, 637, 645] \ No newline at end of file +[4, 1, 130, 650, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 207, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 215, 8, 5, 10, 5, 12, 5, 218, 9, 5, 1, 6, 1, 6, 3, 6, 222, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 229, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 234, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 239, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 249, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 255, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 263, 8, 9, 10, 9, 12, 9, 266, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 276, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 281, 8, 10, 10, 10, 12, 10, 284, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 292, 8, 11, 10, 11, 12, 11, 295, 9, 11, 1, 11, 1, 11, 3, 11, 299, 8, 11, 3, 11, 301, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 311, 8, 13, 10, 13, 12, 13, 314, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 5, 17, 330, 8, 17, 10, 17, 12, 17, 333, 9, 17, 1, 18, 1, 18, 1, 18, 3, 18, 338, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 346, 8, 19, 10, 19, 12, 19, 349, 9, 19, 1, 19, 3, 19, 352, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 357, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 369, 8, 23, 10, 23, 12, 23, 372, 9, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 378, 8, 24, 10, 24, 12, 24, 381, 9, 24, 1, 24, 3, 24, 384, 8, 24, 1, 24, 1, 24, 3, 24, 388, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 395, 8, 26, 1, 26, 1, 26, 3, 26, 399, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 404, 8, 27, 10, 27, 12, 27, 407, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 412, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 417, 8, 29, 10, 29, 12, 29, 420, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 425, 8, 30, 10, 30, 12, 30, 428, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 433, 8, 31, 10, 31, 12, 31, 436, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 443, 8, 33, 1, 
34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 458, 8, 34, 10, 34, 12, 34, 461, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 469, 8, 34, 10, 34, 12, 34, 472, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 480, 8, 34, 10, 34, 12, 34, 483, 9, 34, 1, 34, 1, 34, 3, 34, 487, 8, 34, 1, 35, 1, 35, 3, 35, 491, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 496, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 505, 8, 38, 10, 38, 12, 38, 508, 9, 38, 1, 39, 1, 39, 3, 39, 512, 8, 39, 1, 39, 1, 39, 3, 39, 516, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 528, 8, 42, 10, 42, 12, 42, 531, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 541, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 553, 8, 47, 10, 47, 12, 47, 556, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 566, 8, 50, 1, 51, 3, 51, 569, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 574, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 596, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 602, 8, 58, 10, 58, 12, 58, 605, 9, 58, 3, 58, 607, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 612, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 625, 8, 61, 1, 62, 3, 62, 628, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 637, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 643, 8, 64, 10, 64, 12, 64, 646, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 83, 83, 1, 0, 74, 75, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 677, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 206, 1, 0, 0, 0, 12, 233, 1, 0, 0, 0, 14, 235, 1, 0, 0, 0, 16, 248, 1, 0, 0, 0, 18, 254, 1, 0, 0, 0, 20, 275, 1, 0, 0, 0, 22, 285, 1, 0, 0, 0, 24, 304, 1, 0, 0, 0, 26, 306, 1, 0, 0, 0, 28, 317, 1, 0, 0, 0, 30, 321, 1, 0, 0, 0, 32, 323, 1, 0, 0, 0, 34, 326, 1, 0, 0, 0, 36, 337, 1, 0, 0, 0, 38, 341, 1, 0, 0, 0, 40, 356, 1, 0, 0, 0, 42, 360, 1, 0, 0, 0, 44, 362, 1, 0, 0, 0, 46, 364, 1, 0, 0, 0, 48, 373, 1, 0, 0, 0, 50, 389, 1, 0, 0, 0, 52, 392, 1, 0, 0, 0, 54, 400, 1, 0, 0, 0, 56, 408, 1, 0, 0, 0, 58, 413, 1, 0, 0, 0, 60, 421, 1, 0, 0, 0, 62, 429, 1, 0, 0, 0, 64, 437, 1, 0, 0, 0, 66, 442, 1, 0, 0, 0, 68, 486, 1, 0, 0, 0, 70, 490, 1, 0, 0, 0, 72, 495, 1, 0, 0, 0, 74, 497, 1, 0, 0, 0, 76, 500, 1, 0, 0, 0, 78, 509, 1, 0, 0, 0, 80, 517, 1, 0, 0, 0, 82, 520, 1, 0, 0, 0, 84, 523, 1, 0, 0, 0, 86, 532, 1, 0, 0, 0, 88, 536, 1, 0, 0, 0, 90, 542, 1, 0, 0, 0, 92, 546, 1, 0, 0, 0, 94, 549, 1, 0, 0, 0, 96, 557, 1, 0, 0, 0, 98, 561, 1, 0, 0, 0, 100, 565, 1, 0, 0, 0, 102, 568, 1, 0, 0, 0, 104, 573, 1, 0, 0, 0, 106, 577, 1, 0, 0, 0, 108, 579, 1, 0, 0, 0, 110, 581, 1, 0, 0, 0, 112, 584, 1, 0, 0, 0, 114, 588, 1, 0, 0, 0, 116, 591, 1, 0, 0, 0, 118, 611, 1, 0, 0, 0, 120, 615, 1, 0, 0, 0, 122, 620, 1, 0, 0, 0, 124, 627, 1, 0, 0, 0, 126, 633, 1, 0, 0, 0, 128, 638, 1, 0, 0, 0, 130, 647, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 
136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 29, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 38, 19, 0, 148, 153, 3, 32, 16, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 207, 3, 10, 5, 8, 180, 207, 3, 16, 8, 0, 181, 207, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 207, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 207, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 177, 1, 0, 0, 0, 206, 180, 1, 0, 0, 0, 206, 181, 1, 0, 0, 0, 206, 182, 1, 0, 0, 0, 206, 198, 1, 0, 0, 0, 206, 205, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 5, 0, 0, 209, 210, 5, 34, 0, 0, 210, 215, 3, 10, 5, 6, 211, 212, 10, 4, 0, 0, 212, 213, 5, 52, 0, 0, 213, 215, 3, 10, 5, 5, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 221, 3, 16, 8, 0, 220, 222, 5, 49, 0, 0, 221, 220, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 5, 47, 0, 0, 224, 225, 3, 106, 53, 0, 225, 234, 1, 0, 0, 0, 226, 228, 3, 16, 8, 0, 227, 229, 5, 49, 0, 0, 228, 227, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 5, 54, 0, 0, 231, 232, 3, 106, 53, 0, 232, 234, 1, 0, 0, 0, 233, 219, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 234, 13, 1, 0, 0, 0, 235, 238, 3, 58, 29, 0, 236, 237, 5, 37, 0, 0, 237, 239, 3, 30, 15, 0, 238, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 241, 5, 38, 0, 0, 241, 242, 3, 68, 34, 0, 242, 15, 1, 0, 0, 0, 243, 249, 3, 18, 9, 0, 244, 245, 3, 18, 9, 0, 245, 246, 3, 108, 54, 0, 246, 247, 3, 18, 9, 0, 247, 249, 1, 0, 0, 0, 248, 243, 1, 0, 0, 0, 248, 244, 1, 0, 0, 0, 249, 17, 1, 0, 0, 0, 250, 251, 6, 9, -1, 0, 251, 255, 3, 20, 10, 0, 252, 253, 7, 0, 0, 0, 253, 255, 3, 18, 9, 3, 254, 250, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 264, 1, 0, 0, 0, 256, 257, 10, 2, 0, 0, 257, 258, 7, 
1, 0, 0, 258, 263, 3, 18, 9, 3, 259, 260, 10, 1, 0, 0, 260, 261, 7, 0, 0, 0, 261, 263, 3, 18, 9, 2, 262, 256, 1, 0, 0, 0, 262, 259, 1, 0, 0, 0, 263, 266, 1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 19, 1, 0, 0, 0, 266, 264, 1, 0, 0, 0, 267, 268, 6, 10, -1, 0, 268, 276, 3, 68, 34, 0, 269, 276, 3, 58, 29, 0, 270, 276, 3, 22, 11, 0, 271, 272, 5, 48, 0, 0, 272, 273, 3, 10, 5, 0, 273, 274, 5, 55, 0, 0, 274, 276, 1, 0, 0, 0, 275, 267, 1, 0, 0, 0, 275, 269, 1, 0, 0, 0, 275, 270, 1, 0, 0, 0, 275, 271, 1, 0, 0, 0, 276, 282, 1, 0, 0, 0, 277, 278, 10, 1, 0, 0, 278, 279, 5, 37, 0, 0, 279, 281, 3, 30, 15, 0, 280, 277, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 282, 283, 1, 0, 0, 0, 283, 21, 1, 0, 0, 0, 284, 282, 1, 0, 0, 0, 285, 286, 3, 24, 12, 0, 286, 300, 5, 48, 0, 0, 287, 301, 5, 66, 0, 0, 288, 293, 3, 10, 5, 0, 289, 290, 5, 39, 0, 0, 290, 292, 3, 10, 5, 0, 291, 289, 1, 0, 0, 0, 292, 295, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 298, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 296, 297, 5, 39, 0, 0, 297, 299, 3, 26, 13, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 301, 1, 0, 0, 0, 300, 287, 1, 0, 0, 0, 300, 288, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 303, 5, 55, 0, 0, 303, 23, 1, 0, 0, 0, 304, 305, 3, 72, 36, 0, 305, 25, 1, 0, 0, 0, 306, 307, 5, 69, 0, 0, 307, 312, 3, 28, 14, 0, 308, 309, 5, 39, 0, 0, 309, 311, 3, 28, 14, 0, 310, 308, 1, 0, 0, 0, 311, 314, 1, 0, 0, 0, 312, 310, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 313, 315, 1, 0, 0, 0, 314, 312, 1, 0, 0, 0, 315, 316, 5, 70, 0, 0, 316, 27, 1, 0, 0, 0, 317, 318, 3, 106, 53, 0, 318, 319, 5, 38, 0, 0, 319, 320, 3, 68, 34, 0, 320, 29, 1, 0, 0, 0, 321, 322, 3, 64, 32, 0, 322, 31, 1, 0, 0, 0, 323, 324, 5, 12, 0, 0, 324, 325, 3, 34, 17, 0, 325, 33, 1, 0, 0, 0, 326, 331, 3, 36, 18, 0, 327, 328, 5, 39, 0, 0, 328, 330, 3, 36, 18, 0, 329, 327, 1, 0, 0, 0, 330, 333, 1, 0, 0, 0, 331, 329, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 35, 1, 0, 0, 0, 333, 331, 1, 0, 0, 0, 334, 335, 3, 58, 29, 0, 335, 336, 5, 36, 0, 0, 336, 338, 1, 0, 0, 0, 337, 334, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 340, 3, 10, 5, 0, 340, 37, 1, 0, 0, 0, 341, 342, 5, 6, 0, 0, 342, 347, 3, 40, 20, 0, 343, 344, 5, 39, 0, 0, 344, 346, 3, 40, 20, 0, 345, 343, 1, 0, 0, 0, 346, 349, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 347, 348, 1, 0, 0, 0, 348, 351, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 350, 352, 3, 46, 23, 0, 351, 350, 1, 0, 0, 0, 351, 352, 1, 0, 0, 0, 352, 39, 1, 0, 0, 0, 353, 354, 3, 42, 21, 0, 354, 355, 5, 38, 0, 0, 355, 357, 1, 0, 0, 0, 356, 353, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 359, 3, 44, 22, 0, 359, 41, 1, 0, 0, 0, 360, 361, 5, 83, 0, 0, 361, 43, 1, 0, 0, 0, 362, 363, 7, 2, 0, 0, 363, 45, 1, 0, 0, 0, 364, 365, 5, 82, 0, 0, 365, 370, 5, 83, 0, 0, 366, 367, 5, 39, 0, 0, 367, 369, 5, 83, 0, 0, 368, 366, 1, 0, 0, 0, 369, 372, 1, 0, 0, 0, 370, 368, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 47, 1, 0, 0, 0, 372, 370, 1, 0, 0, 0, 373, 374, 5, 19, 0, 0, 374, 379, 3, 40, 20, 0, 375, 376, 5, 39, 0, 0, 376, 378, 3, 40, 20, 0, 377, 375, 1, 0, 0, 0, 378, 381, 1, 0, 0, 0, 379, 377, 1, 0, 0, 0, 379, 380, 1, 0, 0, 0, 380, 383, 1, 0, 0, 0, 381, 379, 1, 0, 0, 0, 382, 384, 3, 54, 27, 0, 383, 382, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 387, 1, 0, 0, 0, 385, 386, 5, 33, 0, 0, 386, 388, 3, 34, 17, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 49, 1, 0, 0, 0, 389, 390, 5, 4, 0, 0, 390, 391, 3, 34, 17, 0, 391, 51, 1, 0, 0, 0, 392, 394, 5, 15, 0, 0, 393, 395, 3, 54, 27, 0, 394, 393, 1, 0, 0, 0, 394, 395, 1, 0, 
0, 0, 395, 398, 1, 0, 0, 0, 396, 397, 5, 33, 0, 0, 397, 399, 3, 34, 17, 0, 398, 396, 1, 0, 0, 0, 398, 399, 1, 0, 0, 0, 399, 53, 1, 0, 0, 0, 400, 405, 3, 56, 28, 0, 401, 402, 5, 39, 0, 0, 402, 404, 3, 56, 28, 0, 403, 401, 1, 0, 0, 0, 404, 407, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 405, 406, 1, 0, 0, 0, 406, 55, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 408, 411, 3, 36, 18, 0, 409, 410, 5, 16, 0, 0, 410, 412, 3, 10, 5, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 57, 1, 0, 0, 0, 413, 418, 3, 72, 36, 0, 414, 415, 5, 41, 0, 0, 415, 417, 3, 72, 36, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 59, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 426, 3, 66, 33, 0, 422, 423, 5, 41, 0, 0, 423, 425, 3, 66, 33, 0, 424, 422, 1, 0, 0, 0, 425, 428, 1, 0, 0, 0, 426, 424, 1, 0, 0, 0, 426, 427, 1, 0, 0, 0, 427, 61, 1, 0, 0, 0, 428, 426, 1, 0, 0, 0, 429, 434, 3, 60, 30, 0, 430, 431, 5, 39, 0, 0, 431, 433, 3, 60, 30, 0, 432, 430, 1, 0, 0, 0, 433, 436, 1, 0, 0, 0, 434, 432, 1, 0, 0, 0, 434, 435, 1, 0, 0, 0, 435, 63, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 437, 438, 7, 3, 0, 0, 438, 65, 1, 0, 0, 0, 439, 443, 5, 87, 0, 0, 440, 441, 4, 33, 10, 0, 441, 443, 3, 70, 35, 0, 442, 439, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 443, 67, 1, 0, 0, 0, 444, 487, 5, 50, 0, 0, 445, 446, 3, 104, 52, 0, 446, 447, 5, 74, 0, 0, 447, 487, 1, 0, 0, 0, 448, 487, 3, 102, 51, 0, 449, 487, 3, 104, 52, 0, 450, 487, 3, 98, 49, 0, 451, 487, 3, 70, 35, 0, 452, 487, 3, 106, 53, 0, 453, 454, 5, 72, 0, 0, 454, 459, 3, 100, 50, 0, 455, 456, 5, 39, 0, 0, 456, 458, 3, 100, 50, 0, 457, 455, 1, 0, 0, 0, 458, 461, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 462, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 462, 463, 5, 73, 0, 0, 463, 487, 1, 0, 0, 0, 464, 465, 5, 72, 0, 0, 465, 470, 3, 98, 49, 0, 466, 467, 5, 39, 0, 0, 467, 469, 3, 98, 49, 0, 468, 466, 1, 0, 0, 0, 469, 472, 1, 0, 0, 0, 470, 468, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 473, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 473, 474, 5, 73, 0, 0, 474, 487, 1, 0, 0, 0, 475, 476, 5, 72, 0, 0, 476, 481, 3, 106, 53, 0, 477, 478, 5, 39, 0, 0, 478, 480, 3, 106, 53, 0, 479, 477, 1, 0, 0, 0, 480, 483, 1, 0, 0, 0, 481, 479, 1, 0, 0, 0, 481, 482, 1, 0, 0, 0, 482, 484, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 484, 485, 5, 73, 0, 0, 485, 487, 1, 0, 0, 0, 486, 444, 1, 0, 0, 0, 486, 445, 1, 0, 0, 0, 486, 448, 1, 0, 0, 0, 486, 449, 1, 0, 0, 0, 486, 450, 1, 0, 0, 0, 486, 451, 1, 0, 0, 0, 486, 452, 1, 0, 0, 0, 486, 453, 1, 0, 0, 0, 486, 464, 1, 0, 0, 0, 486, 475, 1, 0, 0, 0, 487, 69, 1, 0, 0, 0, 488, 491, 5, 53, 0, 0, 489, 491, 5, 71, 0, 0, 490, 488, 1, 0, 0, 0, 490, 489, 1, 0, 0, 0, 491, 71, 1, 0, 0, 0, 492, 496, 3, 64, 32, 0, 493, 494, 4, 36, 11, 0, 494, 496, 3, 70, 35, 0, 495, 492, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 73, 1, 0, 0, 0, 497, 498, 5, 9, 0, 0, 498, 499, 5, 31, 0, 0, 499, 75, 1, 0, 0, 0, 500, 501, 5, 14, 0, 0, 501, 506, 3, 78, 39, 0, 502, 503, 5, 39, 0, 0, 503, 505, 3, 78, 39, 0, 504, 502, 1, 0, 0, 0, 505, 508, 1, 0, 0, 0, 506, 504, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 77, 1, 0, 0, 0, 508, 506, 1, 0, 0, 0, 509, 511, 3, 10, 5, 0, 510, 512, 7, 4, 0, 0, 511, 510, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 512, 515, 1, 0, 0, 0, 513, 514, 5, 51, 0, 0, 514, 516, 7, 5, 0, 0, 515, 513, 1, 0, 0, 0, 515, 516, 1, 0, 0, 0, 516, 79, 1, 0, 0, 0, 517, 518, 5, 8, 0, 0, 518, 519, 3, 62, 31, 0, 519, 81, 1, 0, 0, 0, 520, 521, 5, 2, 0, 0, 521, 522, 3, 62, 31, 0, 522, 83, 1, 0, 0, 0, 523, 524, 5, 11, 0, 0, 524, 529, 3, 86, 43, 0, 525, 526, 5, 39, 0, 0, 526, 528, 3, 86, 43, 0, 527, 525, 1, 0, 0, 0, 528, 
531, 1, 0, 0, 0, 529, 527, 1, 0, 0, 0, 529, 530, 1, 0, 0, 0, 530, 85, 1, 0, 0, 0, 531, 529, 1, 0, 0, 0, 532, 533, 3, 60, 30, 0, 533, 534, 5, 91, 0, 0, 534, 535, 3, 60, 30, 0, 535, 87, 1, 0, 0, 0, 536, 537, 5, 1, 0, 0, 537, 538, 3, 20, 10, 0, 538, 540, 3, 106, 53, 0, 539, 541, 3, 94, 47, 0, 540, 539, 1, 0, 0, 0, 540, 541, 1, 0, 0, 0, 541, 89, 1, 0, 0, 0, 542, 543, 5, 7, 0, 0, 543, 544, 3, 20, 10, 0, 544, 545, 3, 106, 53, 0, 545, 91, 1, 0, 0, 0, 546, 547, 5, 10, 0, 0, 547, 548, 3, 58, 29, 0, 548, 93, 1, 0, 0, 0, 549, 554, 3, 96, 48, 0, 550, 551, 5, 39, 0, 0, 551, 553, 3, 96, 48, 0, 552, 550, 1, 0, 0, 0, 553, 556, 1, 0, 0, 0, 554, 552, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 95, 1, 0, 0, 0, 556, 554, 1, 0, 0, 0, 557, 558, 3, 64, 32, 0, 558, 559, 5, 36, 0, 0, 559, 560, 3, 68, 34, 0, 560, 97, 1, 0, 0, 0, 561, 562, 7, 6, 0, 0, 562, 99, 1, 0, 0, 0, 563, 566, 3, 102, 51, 0, 564, 566, 3, 104, 52, 0, 565, 563, 1, 0, 0, 0, 565, 564, 1, 0, 0, 0, 566, 101, 1, 0, 0, 0, 567, 569, 7, 0, 0, 0, 568, 567, 1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 571, 5, 32, 0, 0, 571, 103, 1, 0, 0, 0, 572, 574, 7, 0, 0, 0, 573, 572, 1, 0, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 1, 0, 0, 0, 575, 576, 5, 31, 0, 0, 576, 105, 1, 0, 0, 0, 577, 578, 5, 30, 0, 0, 578, 107, 1, 0, 0, 0, 579, 580, 7, 7, 0, 0, 580, 109, 1, 0, 0, 0, 581, 582, 5, 5, 0, 0, 582, 583, 3, 112, 56, 0, 583, 111, 1, 0, 0, 0, 584, 585, 5, 72, 0, 0, 585, 586, 3, 2, 1, 0, 586, 587, 5, 73, 0, 0, 587, 113, 1, 0, 0, 0, 588, 589, 5, 13, 0, 0, 589, 590, 5, 107, 0, 0, 590, 115, 1, 0, 0, 0, 591, 592, 5, 3, 0, 0, 592, 595, 5, 97, 0, 0, 593, 594, 5, 95, 0, 0, 594, 596, 3, 60, 30, 0, 595, 593, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 606, 1, 0, 0, 0, 597, 598, 5, 96, 0, 0, 598, 603, 3, 118, 59, 0, 599, 600, 5, 39, 0, 0, 600, 602, 3, 118, 59, 0, 601, 599, 1, 0, 0, 0, 602, 605, 1, 0, 0, 0, 603, 601, 1, 0, 0, 0, 603, 604, 1, 0, 0, 0, 604, 607, 1, 0, 0, 0, 605, 603, 1, 0, 0, 0, 606, 597, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 117, 1, 0, 0, 0, 608, 609, 3, 60, 30, 0, 609, 610, 5, 36, 0, 0, 610, 612, 1, 0, 0, 0, 611, 608, 1, 0, 0, 0, 611, 612, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 3, 60, 30, 0, 614, 119, 1, 0, 0, 0, 615, 616, 5, 18, 0, 0, 616, 617, 3, 40, 20, 0, 617, 618, 5, 95, 0, 0, 618, 619, 3, 62, 31, 0, 619, 121, 1, 0, 0, 0, 620, 621, 5, 17, 0, 0, 621, 624, 3, 54, 27, 0, 622, 623, 5, 33, 0, 0, 623, 625, 3, 34, 17, 0, 624, 622, 1, 0, 0, 0, 624, 625, 1, 0, 0, 0, 625, 123, 1, 0, 0, 0, 626, 628, 7, 8, 0, 0, 627, 626, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 630, 5, 20, 0, 0, 630, 631, 3, 126, 63, 0, 631, 632, 3, 128, 64, 0, 632, 125, 1, 0, 0, 0, 633, 636, 3, 40, 20, 0, 634, 635, 5, 91, 0, 0, 635, 637, 3, 64, 32, 0, 636, 634, 1, 0, 0, 0, 636, 637, 1, 0, 0, 0, 637, 127, 1, 0, 0, 0, 638, 639, 5, 95, 0, 0, 639, 644, 3, 130, 65, 0, 640, 641, 5, 39, 0, 0, 641, 643, 3, 130, 65, 0, 642, 640, 1, 0, 0, 0, 643, 646, 1, 0, 0, 0, 644, 642, 1, 0, 0, 0, 644, 645, 1, 0, 0, 0, 645, 129, 1, 0, 0, 0, 646, 644, 1, 0, 0, 0, 647, 648, 3, 16, 8, 0, 648, 131, 1, 0, 0, 0, 63, 143, 152, 172, 184, 193, 201, 206, 214, 216, 221, 228, 233, 238, 248, 254, 262, 264, 275, 282, 293, 298, 300, 312, 331, 337, 347, 351, 356, 370, 379, 383, 387, 394, 398, 405, 411, 418, 426, 434, 442, 459, 470, 481, 486, 490, 495, 506, 511, 515, 529, 540, 554, 565, 568, 573, 595, 603, 606, 611, 624, 627, 636, 644] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 35ace5a34f73f..1e0a636d67182 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -103,7 +103,7 @@ private static String[] makeLiteralNames() { "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'{'", "'}'", null, null, "']'", null, null, null, null, null, null, null, null, "'metadata'", null, null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, @@ -1995,28 +1995,26 @@ public final MapExpressionContext mapExpression() throws RecognitionException { enterOuterAlt(_localctx, 1); { setState(306); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(307); match(LEFT_BRACES); - setState(308); + setState(307); entryExpression(); - setState(313); + setState(312); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(309); + setState(308); match(COMMA); - setState(310); + setState(309); entryExpression(); } } - setState(315); + setState(314); _errHandler.sync(this); _la = _input.LA(1); } - setState(316); + setState(315); match(RIGHT_BRACES); } } @@ -2068,11 +2066,11 @@ public final EntryExpressionContext entryExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(318); + setState(317); ((EntryExpressionContext)_localctx).key = string(); - setState(319); + setState(318); match(COLON); - setState(320); + setState(319); ((EntryExpressionContext)_localctx).value = constant(); } } @@ -2130,7 +2128,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(322); + setState(321); identifier(); } } @@ -2177,9 +2175,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(324); + setState(323); match(ROW); - setState(325); + setState(324); fields(); } } @@ -2233,23 +2231,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(327); + setState(326); field(); - setState(332); + setState(331); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(328); + setState(327); match(COMMA); - setState(329); + setState(328); field(); } } } - setState(334); + setState(333); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); } @@ -2301,19 +2299,19 @@ public final FieldContext field() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(338); + setState(337); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(335); + setState(334); qualifiedName(); - setState(336); + setState(335); match(ASSIGN); } break; } - setState(340); + setState(339); booleanExpression(0); } } @@ -2371,34 +2369,34 @@ public final 
FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(342); + setState(341); match(FROM); - setState(343); + setState(342); indexPattern(); - setState(348); + setState(347); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,25,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(344); + setState(343); match(COMMA); - setState(345); + setState(344); indexPattern(); } } } - setState(350); + setState(349); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,25,_ctx); } - setState(352); + setState(351); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: { - setState(351); + setState(350); metadata(); } break; @@ -2451,19 +2449,19 @@ public final IndexPatternContext indexPattern() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(357); + setState(356); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { case 1: { - setState(354); + setState(353); clusterString(); - setState(355); + setState(354); match(COLON); } break; } - setState(359); + setState(358); indexString(); } } @@ -2507,7 +2505,7 @@ public final ClusterStringContext clusterString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(361); + setState(360); match(UNQUOTED_SOURCE); } } @@ -2553,7 +2551,7 @@ public final IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(363); + setState(362); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2614,25 +2612,25 @@ public final MetadataContext metadata() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(365); + setState(364); match(METADATA); - setState(366); + setState(365); match(UNQUOTED_SOURCE); - setState(371); + setState(370); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(367); + setState(366); match(COMMA); - setState(368); + setState(367); match(UNQUOTED_SOURCE); } } } - setState(373); + setState(372); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } @@ -2698,46 +2696,46 @@ public final MetricsCommandContext metricsCommand() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(374); + setState(373); match(DEV_METRICS); - setState(375); + setState(374); indexPattern(); - setState(380); + setState(379); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(376); + setState(375); match(COMMA); - setState(377); + setState(376); indexPattern(); } } } - setState(382); + setState(381); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); } - setState(384); + setState(383); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(383); + setState(382); ((MetricsCommandContext)_localctx).aggregates = aggFields(); } break; } - setState(388); + setState(387); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(386); + setState(385); match(BY); - setState(387); + setState(386); 
((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2787,9 +2785,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(390); + setState(389); match(EVAL); - setState(391); + setState(390); fields(); } } @@ -2842,26 +2840,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(393); + setState(392); match(STATS); - setState(395); + setState(394); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(394); + setState(393); ((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(399); + setState(398); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(397); + setState(396); match(BY); - setState(398); + setState(397); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2918,23 +2916,23 @@ public final AggFieldsContext aggFields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(401); + setState(400); aggField(); - setState(406); + setState(405); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(402); + setState(401); match(COMMA); - setState(403); + setState(402); aggField(); } } } - setState(408); + setState(407); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -2986,16 +2984,16 @@ public final AggFieldContext aggField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(409); + setState(408); field(); - setState(412); + setState(411); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(410); + setState(409); match(WHERE); - setState(411); + setState(410); booleanExpression(0); } break; @@ -3052,23 +3050,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(414); + setState(413); identifierOrParameter(); - setState(419); + setState(418); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(415); + setState(414); match(DOT); - setState(416); + setState(415); identifierOrParameter(); } } } - setState(421); + setState(420); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } @@ -3124,23 +3122,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(422); + setState(421); identifierPattern(); - setState(427); + setState(426); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(423); + setState(422); match(DOT); - setState(424); + setState(423); identifierPattern(); } } } - setState(429); + setState(428); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } @@ -3196,23 +3194,23 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni int _alt; enterOuterAlt(_localctx, 1); { - setState(430); + setState(429); qualifiedNamePattern(); - setState(435); + setState(434); 
_errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(431); + setState(430); match(COMMA); - setState(432); + setState(431); qualifiedNamePattern(); } } } - setState(437); + setState(436); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); } @@ -3260,7 +3258,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(438); + setState(437); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3313,22 +3311,22 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(443); + setState(442); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(440); + setState(439); match(ID_PATTERN); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(441); + setState(440); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(442); + setState(441); parameter(); } break; @@ -3601,14 +3599,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(487); + setState(486); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(445); + setState(444); match(NULL); } break; @@ -3616,9 +3614,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(446); + setState(445); integerValue(); - setState(447); + setState(446); match(UNQUOTED_IDENTIFIER); } break; @@ -3626,7 +3624,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(449); + setState(448); decimalValue(); } break; @@ -3634,7 +3632,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(450); + setState(449); integerValue(); } break; @@ -3642,7 +3640,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(451); + setState(450); booleanValue(); } break; @@ -3650,7 +3648,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(452); + setState(451); parameter(); } break; @@ -3658,7 +3656,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(453); + setState(452); string(); } break; @@ -3666,27 +3664,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(454); + setState(453); match(OPENING_BRACKET); - setState(455); + setState(454); numericValue(); - setState(460); + 
setState(459); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(456); + setState(455); match(COMMA); - setState(457); + setState(456); numericValue(); } } - setState(462); + setState(461); _errHandler.sync(this); _la = _input.LA(1); } - setState(463); + setState(462); match(CLOSING_BRACKET); } break; @@ -3694,27 +3692,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(465); + setState(464); match(OPENING_BRACKET); - setState(466); + setState(465); booleanValue(); - setState(471); + setState(470); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(467); + setState(466); match(COMMA); - setState(468); + setState(467); booleanValue(); } } - setState(473); + setState(472); _errHandler.sync(this); _la = _input.LA(1); } - setState(474); + setState(473); match(CLOSING_BRACKET); } break; @@ -3722,27 +3720,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(476); + setState(475); match(OPENING_BRACKET); - setState(477); + setState(476); string(); - setState(482); + setState(481); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(478); + setState(477); match(COMMA); - setState(479); + setState(478); string(); } } - setState(484); + setState(483); _errHandler.sync(this); _la = _input.LA(1); } - setState(485); + setState(484); match(CLOSING_BRACKET); } break; @@ -3816,14 +3814,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 70, RULE_parameter); try { - setState(491); + setState(490); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(489); + setState(488); match(PARAM); } break; @@ -3831,7 +3829,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(490); + setState(489); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3882,22 +3880,22 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(496); + setState(495); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(493); + setState(492); identifier(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(494); + setState(493); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(495); + setState(494); parameter(); } break; @@ -3944,9 +3942,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(498); + setState(497); match(LIMIT); - setState(499); + setState(498); match(INTEGER_LITERAL); } } @@ -4001,25 +3999,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(501); + setState(500); match(SORT); - setState(502); + setState(501); orderExpression(); - setState(507); + setState(506); _errHandler.sync(this); _alt 
= getInterpreter().adaptivePredict(_input,46,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(503); + setState(502); match(COMMA); - setState(504); + setState(503); orderExpression(); } } } - setState(509); + setState(508); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,46,_ctx); } @@ -4075,14 +4073,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(510); + setState(509); booleanExpression(0); - setState(512); + setState(511); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(511); + setState(510); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -4096,14 +4094,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(516); + setState(515); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(514); + setState(513); match(NULLS); - setState(515); + setState(514); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4162,9 +4160,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(518); + setState(517); match(KEEP); - setState(519); + setState(518); qualifiedNamePatterns(); } } @@ -4211,9 +4209,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(521); + setState(520); match(DROP); - setState(522); + setState(521); qualifiedNamePatterns(); } } @@ -4268,25 +4266,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(524); + setState(523); match(RENAME); - setState(525); + setState(524); renameClause(); - setState(530); + setState(529); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(526); + setState(525); match(COMMA); - setState(527); + setState(526); renameClause(); } } } - setState(532); + setState(531); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } @@ -4340,11 +4338,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(533); + setState(532); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(534); + setState(533); match(AS); - setState(535); + setState(534); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4397,18 +4395,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(537); + setState(536); match(DISSECT); - setState(538); + setState(537); primaryExpression(0); - setState(539); + setState(538); string(); - setState(541); + setState(540); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: { - setState(540); + setState(539); commandOptions(); } break; @@ -4461,11 +4459,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(543); + setState(542); match(GROK); - setState(544); + 
setState(543); primaryExpression(0); - setState(545); + setState(544); string(); } } @@ -4512,9 +4510,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(547); + setState(546); match(MV_EXPAND); - setState(548); + setState(547); qualifiedName(); } } @@ -4568,23 +4566,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(550); + setState(549); commandOption(); - setState(555); + setState(554); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,51,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(551); + setState(550); match(COMMA); - setState(552); + setState(551); commandOption(); } } } - setState(557); + setState(556); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,51,_ctx); } @@ -4636,11 +4634,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(558); + setState(557); identifier(); - setState(559); + setState(558); match(ASSIGN); - setState(560); + setState(559); constant(); } } @@ -4686,7 +4684,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(562); + setState(561); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4741,20 +4739,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 100, RULE_numericValue); try { - setState(566); + setState(565); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(564); + setState(563); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(565); + setState(564); integerValue(); } break; @@ -4803,12 +4801,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(569); + setState(568); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(568); + setState(567); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4821,7 +4819,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(571); + setState(570); match(DECIMAL_LITERAL); } } @@ -4868,12 +4866,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(574); + setState(573); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(573); + setState(572); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4886,7 +4884,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(576); + setState(575); match(INTEGER_LITERAL); } } @@ -4930,7 +4928,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(578); + setState(577); match(QUOTED_STRING); } } @@ -4980,7 +4978,7 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(580); + setState(579); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << 
_la) & -432345564227567616L) != 0)) ) { _errHandler.recoverInline(this); @@ -5035,9 +5033,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(582); + setState(581); match(EXPLAIN); - setState(583); + setState(582); subqueryExpression(); } } @@ -5085,11 +5083,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(585); + setState(584); match(OPENING_BRACKET); - setState(586); + setState(585); query(0); - setState(587); + setState(586); match(CLOSING_BRACKET); } } @@ -5146,9 +5144,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(589); + setState(588); match(SHOW); - setState(590); + setState(589); match(INFO); } } @@ -5211,46 +5209,46 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(592); + setState(591); match(ENRICH); - setState(593); + setState(592); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(596); + setState(595); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(594); + setState(593); match(ON); - setState(595); + setState(594); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(607); + setState(606); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(598); + setState(597); match(WITH); - setState(599); + setState(598); enrichWithClause(); - setState(604); + setState(603); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,56,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(600); + setState(599); match(COMMA); - setState(601); + setState(600); enrichWithClause(); } } } - setState(606); + setState(605); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,56,_ctx); } @@ -5307,19 +5305,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(612); + setState(611); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { case 1: { - setState(609); + setState(608); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(610); + setState(609); match(ASSIGN); } break; } - setState(614); + setState(613); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5372,13 +5370,13 @@ public final LookupCommandContext lookupCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(616); + setState(615); match(DEV_LOOKUP); - setState(617); + setState(616); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(618); + setState(617); match(ON); - setState(619); + setState(618); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5431,18 +5429,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(621); + setState(620); match(DEV_INLINESTATS); - setState(622); + setState(621); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - setState(625); + setState(624); _errHandler.sync(this); switch ( 
getInterpreter().adaptivePredict(_input,59,_ctx) ) { case 1: { - setState(623); + setState(622); match(BY); - setState(624); + setState(623); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5500,12 +5498,12 @@ public final JoinCommandContext joinCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(628); + setState(627); _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) { { - setState(627); + setState(626); ((JoinCommandContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) ) { @@ -5519,11 +5517,11 @@ public final JoinCommandContext joinCommand() throws RecognitionException { } } - setState(630); + setState(629); match(DEV_JOIN); - setState(631); + setState(630); joinTarget(); - setState(632); + setState(631); joinCondition(); } } @@ -5576,16 +5574,16 @@ public final JoinTargetContext joinTarget() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(634); + setState(633); ((JoinTargetContext)_localctx).index = indexPattern(); - setState(637); + setState(636); _errHandler.sync(this); _la = _input.LA(1); if (_la==AS) { { - setState(635); + setState(634); match(AS); - setState(636); + setState(635); ((JoinTargetContext)_localctx).alias = identifier(); } } @@ -5643,25 +5641,25 @@ public final JoinConditionContext joinCondition() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(639); + setState(638); match(ON); - setState(640); + setState(639); joinPredicate(); - setState(645); + setState(644); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,62,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(641); + setState(640); match(COMMA); - setState(642); + setState(641); joinPredicate(); } } } - setState(647); + setState(646); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,62,_ctx); } @@ -5709,7 +5707,7 @@ public final JoinPredicateContext joinPredicate() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(648); + setState(647); valueExpression(); } } @@ -5738,8 +5736,6 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); case 10: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); - case 13: - return mapExpression_sempred((MapExpressionContext)_localctx, predIndex); case 33: return identifierPattern_sempred((IdentifierPatternContext)_localctx, predIndex); case 36: @@ -5797,30 +5793,23 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } return true; } - private boolean mapExpression_sempred(MapExpressionContext _localctx, int predIndex) { - switch (predIndex) { - case 10: - return this.isDevVersion(); - } - return true; - } private boolean identifierPattern_sempred(IdentifierPatternContext _localctx, int predIndex) { switch (predIndex) { - case 11: + case 10: return this.isDevVersion(); } return true; } private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _localctx, int predIndex) { switch (predIndex) { - case 12: + case 11: return this.isDevVersion(); } return true; } public static final String _serializedATN = - "\u0004\u0001\u0082\u028b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + 
"\u0004\u0001\u0082\u028a\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ @@ -5864,373 +5853,372 @@ private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _loca "\u000b\u0001\u000b\u0005\u000b\u0124\b\u000b\n\u000b\f\u000b\u0127\t\u000b"+ "\u0001\u000b\u0001\u000b\u0003\u000b\u012b\b\u000b\u0003\u000b\u012d\b"+ "\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r"+ - "\u0001\r\u0001\r\u0005\r\u0138\b\r\n\r\f\r\u013b\t\r\u0001\r\u0001\r\u0001"+ - "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0005"+ - "\u0011\u014b\b\u0011\n\u0011\f\u0011\u014e\t\u0011\u0001\u0012\u0001\u0012"+ - "\u0001\u0012\u0003\u0012\u0153\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013"+ - "\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u015b\b\u0013\n\u0013"+ - "\f\u0013\u015e\t\u0013\u0001\u0013\u0003\u0013\u0161\b\u0013\u0001\u0014"+ - "\u0001\u0014\u0001\u0014\u0003\u0014\u0166\b\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0005\u0017\u0172\b\u0017\n\u0017\f\u0017\u0175"+ - "\t\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u017b"+ - "\b\u0018\n\u0018\f\u0018\u017e\t\u0018\u0001\u0018\u0003\u0018\u0181\b"+ - "\u0018\u0001\u0018\u0001\u0018\u0003\u0018\u0185\b\u0018\u0001\u0019\u0001"+ - "\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u018c\b\u001a\u0001"+ - "\u001a\u0001\u001a\u0003\u001a\u0190\b\u001a\u0001\u001b\u0001\u001b\u0001"+ - "\u001b\u0005\u001b\u0195\b\u001b\n\u001b\f\u001b\u0198\t\u001b\u0001\u001c"+ - "\u0001\u001c\u0001\u001c\u0003\u001c\u019d\b\u001c\u0001\u001d\u0001\u001d"+ - "\u0001\u001d\u0005\u001d\u01a2\b\u001d\n\u001d\f\u001d\u01a5\t\u001d\u0001"+ - "\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u01aa\b\u001e\n\u001e\f\u001e"+ - "\u01ad\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01b2\b"+ - "\u001f\n\u001f\f\u001f\u01b5\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001"+ - "!\u0003!\u01bc\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01cb\b\"\n"+ - "\"\f\"\u01ce\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005"+ - "\"\u01d6\b\"\n\"\f\"\u01d9\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\""+ - "\u0001\"\u0005\"\u01e1\b\"\n\"\f\"\u01e4\t\"\u0001\"\u0001\"\u0003\"\u01e8"+ - "\b\"\u0001#\u0001#\u0003#\u01ec\b#\u0001$\u0001$\u0001$\u0003$\u01f1\b"+ - "$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01fa\b&\n&"+ - "\f&\u01fd\t&\u0001\'\u0001\'\u0003\'\u0201\b\'\u0001\'\u0001\'\u0003\'"+ - "\u0205\b\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001"+ - "*\u0001*\u0005*\u0211\b*\n*\f*\u0214\t*\u0001+\u0001+\u0001+\u0001+\u0001"+ - ",\u0001,\u0001,\u0001,\u0003,\u021e\b,\u0001-\u0001-\u0001-\u0001-\u0001"+ - ".\u0001.\u0001.\u0001/\u0001/\u0001/\u0005/\u022a\b/\n/\f/\u022d\t/\u0001"+ - "0\u00010\u00010\u00010\u00011\u00011\u00012\u00012\u00032\u0237\b2\u0001"+ - "3\u00033\u023a\b3\u00013\u00013\u00014\u00034\u023f\b4\u00014\u00014\u0001"+ - "5\u00015\u00016\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u0001"+ - "8\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0255\b:\u0001"+ - 
":\u0001:\u0001:\u0001:\u0005:\u025b\b:\n:\f:\u025e\t:\u0003:\u0260\b:"+ - "\u0001;\u0001;\u0001;\u0003;\u0265\b;\u0001;\u0001;\u0001<\u0001<\u0001"+ - "<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001=\u0003=\u0272\b=\u0001>\u0003"+ - ">\u0275\b>\u0001>\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0003?\u027e"+ - "\b?\u0001@\u0001@\u0001@\u0001@\u0005@\u0284\b@\n@\f@\u0287\t@\u0001A"+ - "\u0001A\u0001A\u0000\u0004\u0002\n\u0012\u0014B\u0000\u0002\u0004\u0006"+ - "\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,."+ - "02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000\t\u0001\u0000"+ - "@A\u0001\u0000BD\u0002\u0000\u001e\u001eSS\u0001\u0000JK\u0002\u0000#"+ - "#((\u0002\u0000++..\u0002\u0000**88\u0002\u000099;?\u0001\u0000\u0016"+ - "\u0018\u02a6\u0000\u0084\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000"+ - "\u0000\u0000\u0004\u0098\u0001\u0000\u0000\u0000\u0006\u00ac\u0001\u0000"+ - "\u0000\u0000\b\u00ae\u0001\u0000\u0000\u0000\n\u00ce\u0001\u0000\u0000"+ - "\u0000\f\u00e9\u0001\u0000\u0000\u0000\u000e\u00eb\u0001\u0000\u0000\u0000"+ - "\u0010\u00f8\u0001\u0000\u0000\u0000\u0012\u00fe\u0001\u0000\u0000\u0000"+ - "\u0014\u0113\u0001\u0000\u0000\u0000\u0016\u011d\u0001\u0000\u0000\u0000"+ - "\u0018\u0130\u0001\u0000\u0000\u0000\u001a\u0132\u0001\u0000\u0000\u0000"+ - "\u001c\u013e\u0001\u0000\u0000\u0000\u001e\u0142\u0001\u0000\u0000\u0000"+ - " \u0144\u0001\u0000\u0000\u0000\"\u0147\u0001\u0000\u0000\u0000$\u0152"+ - "\u0001\u0000\u0000\u0000&\u0156\u0001\u0000\u0000\u0000(\u0165\u0001\u0000"+ - "\u0000\u0000*\u0169\u0001\u0000\u0000\u0000,\u016b\u0001\u0000\u0000\u0000"+ - ".\u016d\u0001\u0000\u0000\u00000\u0176\u0001\u0000\u0000\u00002\u0186"+ - "\u0001\u0000\u0000\u00004\u0189\u0001\u0000\u0000\u00006\u0191\u0001\u0000"+ - "\u0000\u00008\u0199\u0001\u0000\u0000\u0000:\u019e\u0001\u0000\u0000\u0000"+ - "<\u01a6\u0001\u0000\u0000\u0000>\u01ae\u0001\u0000\u0000\u0000@\u01b6"+ - "\u0001\u0000\u0000\u0000B\u01bb\u0001\u0000\u0000\u0000D\u01e7\u0001\u0000"+ - "\u0000\u0000F\u01eb\u0001\u0000\u0000\u0000H\u01f0\u0001\u0000\u0000\u0000"+ - "J\u01f2\u0001\u0000\u0000\u0000L\u01f5\u0001\u0000\u0000\u0000N\u01fe"+ - "\u0001\u0000\u0000\u0000P\u0206\u0001\u0000\u0000\u0000R\u0209\u0001\u0000"+ - "\u0000\u0000T\u020c\u0001\u0000\u0000\u0000V\u0215\u0001\u0000\u0000\u0000"+ - "X\u0219\u0001\u0000\u0000\u0000Z\u021f\u0001\u0000\u0000\u0000\\\u0223"+ - "\u0001\u0000\u0000\u0000^\u0226\u0001\u0000\u0000\u0000`\u022e\u0001\u0000"+ - "\u0000\u0000b\u0232\u0001\u0000\u0000\u0000d\u0236\u0001\u0000\u0000\u0000"+ - "f\u0239\u0001\u0000\u0000\u0000h\u023e\u0001\u0000\u0000\u0000j\u0242"+ - "\u0001\u0000\u0000\u0000l\u0244\u0001\u0000\u0000\u0000n\u0246\u0001\u0000"+ - "\u0000\u0000p\u0249\u0001\u0000\u0000\u0000r\u024d\u0001\u0000\u0000\u0000"+ - "t\u0250\u0001\u0000\u0000\u0000v\u0264\u0001\u0000\u0000\u0000x\u0268"+ - "\u0001\u0000\u0000\u0000z\u026d\u0001\u0000\u0000\u0000|\u0274\u0001\u0000"+ - "\u0000\u0000~\u027a\u0001\u0000\u0000\u0000\u0080\u027f\u0001\u0000\u0000"+ - "\u0000\u0082\u0288\u0001\u0000\u0000\u0000\u0084\u0085\u0003\u0002\u0001"+ - "\u0000\u0085\u0086\u0005\u0000\u0000\u0001\u0086\u0001\u0001\u0000\u0000"+ - "\u0000\u0087\u0088\u0006\u0001\uffff\uffff\u0000\u0088\u0089\u0003\u0004"+ - "\u0002\u0000\u0089\u008f\u0001\u0000\u0000\u0000\u008a\u008b\n\u0001\u0000"+ - "\u0000\u008b\u008c\u0005\u001d\u0000\u0000\u008c\u008e\u0003\u0006\u0003"+ - "\u0000\u008d\u008a\u0001\u0000\u0000\u0000\u008e\u0091\u0001\u0000\u0000"+ - 
"\u0000\u008f\u008d\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000"+ - "\u0000\u0090\u0003\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000"+ - "\u0000\u0092\u0099\u0003n7\u0000\u0093\u0099\u0003&\u0013\u0000\u0094"+ - "\u0099\u0003 \u0010\u0000\u0095\u0099\u0003r9\u0000\u0096\u0097\u0004"+ - "\u0002\u0001\u0000\u0097\u0099\u00030\u0018\u0000\u0098\u0092\u0001\u0000"+ - "\u0000\u0000\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001\u0000"+ - "\u0000\u0000\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000"+ - "\u0000\u0000\u0099\u0005\u0001\u0000\u0000\u0000\u009a\u00ad\u00032\u0019"+ - "\u0000\u009b\u00ad\u0003\b\u0004\u0000\u009c\u00ad\u0003P(\u0000\u009d"+ - "\u00ad\u0003J%\u0000\u009e\u00ad\u00034\u001a\u0000\u009f\u00ad\u0003"+ - "L&\u0000\u00a0\u00ad\u0003R)\u0000\u00a1\u00ad\u0003T*\u0000\u00a2\u00ad"+ - "\u0003X,\u0000\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad\u0003t:\u0000\u00a5"+ - "\u00ad\u0003\\.\u0000\u00a6\u00a7\u0004\u0003\u0002\u0000\u00a7\u00ad"+ - "\u0003z=\u0000\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9\u00ad\u0003x"+ - "<\u0000\u00aa\u00ab\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003|>\u0000"+ - "\u00ac\u009a\u0001\u0000\u0000\u0000\u00ac\u009b\u0001\u0000\u0000\u0000"+ - "\u00ac\u009c\u0001\u0000\u0000\u0000\u00ac\u009d\u0001\u0000\u0000\u0000"+ - "\u00ac\u009e\u0001\u0000\u0000\u0000\u00ac\u009f\u0001\u0000\u0000\u0000"+ - "\u00ac\u00a0\u0001\u0000\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000\u0000"+ - "\u00ac\u00a2\u0001\u0000\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000\u0000"+ - "\u00ac\u00a4\u0001\u0000\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000\u0000"+ - "\u00ac\u00a6\u0001\u0000\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000\u0000"+ - "\u00ac\u00aa\u0001\u0000\u0000\u0000\u00ad\u0007\u0001\u0000\u0000\u0000"+ - "\u00ae\u00af\u0005\u0010\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000\u00b0"+ - "\t\u0001\u0000\u0000\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000\u00b2"+ - "\u00b3\u00051\u0000\u0000\u00b3\u00cf\u0003\n\u0005\b\u00b4\u00cf\u0003"+ - "\u0010\b\u0000\u00b5\u00cf\u0003\f\u0006\u0000\u00b6\u00b8\u0003\u0010"+ - "\b\u0000\u00b7\u00b9\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000\u0000"+ - "\u0000\u00b8\u00b9\u0001\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000"+ - "\u0000\u00ba\u00bb\u0005,\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000\u00bc"+ - "\u00c1\u0003\u0010\b\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be\u00c0"+ - "\u0003\u0010\b\u0000\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3\u0001"+ - "\u0000\u0000\u0000\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2\u0001"+ - "\u0000\u0000\u0000\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001"+ - "\u0000\u0000\u0000\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00cf\u0001\u0000"+ - "\u0000\u0000\u00c6\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-\u0000"+ - "\u0000\u00c8\u00ca\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000\u0000"+ - "\u00c9\u00ca\u0001\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000\u0000"+ - "\u00cb\u00cc\u00052\u0000\u0000\u00cc\u00cf\u0001\u0000\u0000\u0000\u00cd"+ - "\u00cf\u0003\u000e\u0007\u0000\u00ce\u00b1\u0001\u0000\u0000\u0000\u00ce"+ - "\u00b4\u0001\u0000\u0000\u0000\u00ce\u00b5\u0001\u0000\u0000\u0000\u00ce"+ - "\u00b6\u0001\u0000\u0000\u0000\u00ce\u00c6\u0001\u0000\u0000\u0000\u00ce"+ - "\u00cd\u0001\u0000\u0000\u0000\u00cf\u00d8\u0001\u0000\u0000\u0000\u00d0"+ - "\u00d1\n\u0005\u0000\u0000\u00d1\u00d2\u0005\"\u0000\u0000\u00d2\u00d7"+ - "\u0003\n\u0005\u0006\u00d3\u00d4\n\u0004\u0000\u0000\u00d4\u00d5\u0005"+ - 
"4\u0000\u0000\u00d5\u00d7\u0003\n\u0005\u0005\u00d6\u00d0\u0001\u0000"+ - "\u0000\u0000\u00d6\u00d3\u0001\u0000\u0000\u0000\u00d7\u00da\u0001\u0000"+ - "\u0000\u0000\u00d8\u00d6\u0001\u0000\u0000\u0000\u00d8\u00d9\u0001\u0000"+ - "\u0000\u0000\u00d9\u000b\u0001\u0000\u0000\u0000\u00da\u00d8\u0001\u0000"+ - "\u0000\u0000\u00db\u00dd\u0003\u0010\b\u0000\u00dc\u00de\u00051\u0000"+ - "\u0000\u00dd\u00dc\u0001\u0000\u0000\u0000\u00dd\u00de\u0001\u0000\u0000"+ - "\u0000\u00de\u00df\u0001\u0000\u0000\u0000\u00df\u00e0\u0005/\u0000\u0000"+ - "\u00e0\u00e1\u0003j5\u0000\u00e1\u00ea\u0001\u0000\u0000\u0000\u00e2\u00e4"+ - "\u0003\u0010\b\u0000\u00e3\u00e5\u00051\u0000\u0000\u00e4\u00e3\u0001"+ - "\u0000\u0000\u0000\u00e4\u00e5\u0001\u0000\u0000\u0000\u00e5\u00e6\u0001"+ - "\u0000\u0000\u0000\u00e6\u00e7\u00056\u0000\u0000\u00e7\u00e8\u0003j5"+ - "\u0000\u00e8\u00ea\u0001\u0000\u0000\u0000\u00e9\u00db\u0001\u0000\u0000"+ - "\u0000\u00e9\u00e2\u0001\u0000\u0000\u0000\u00ea\r\u0001\u0000\u0000\u0000"+ - "\u00eb\u00ee\u0003:\u001d\u0000\u00ec\u00ed\u0005%\u0000\u0000\u00ed\u00ef"+ - "\u0003\u001e\u000f\u0000\u00ee\u00ec\u0001\u0000\u0000\u0000\u00ee\u00ef"+ - "\u0001\u0000\u0000\u0000\u00ef\u00f0\u0001\u0000\u0000\u0000\u00f0\u00f1"+ - "\u0005&\u0000\u0000\u00f1\u00f2\u0003D\"\u0000\u00f2\u000f\u0001\u0000"+ - "\u0000\u0000\u00f3\u00f9\u0003\u0012\t\u0000\u00f4\u00f5\u0003\u0012\t"+ - "\u0000\u00f5\u00f6\u0003l6\u0000\u00f6\u00f7\u0003\u0012\t\u0000\u00f7"+ - "\u00f9\u0001\u0000\u0000\u0000\u00f8\u00f3\u0001\u0000\u0000\u0000\u00f8"+ - "\u00f4\u0001\u0000\u0000\u0000\u00f9\u0011\u0001\u0000\u0000\u0000\u00fa"+ - "\u00fb\u0006\t\uffff\uffff\u0000\u00fb\u00ff\u0003\u0014\n\u0000\u00fc"+ - "\u00fd\u0007\u0000\u0000\u0000\u00fd\u00ff\u0003\u0012\t\u0003\u00fe\u00fa"+ - "\u0001\u0000\u0000\u0000\u00fe\u00fc\u0001\u0000\u0000\u0000\u00ff\u0108"+ - "\u0001\u0000\u0000\u0000\u0100\u0101\n\u0002\u0000\u0000\u0101\u0102\u0007"+ - "\u0001\u0000\u0000\u0102\u0107\u0003\u0012\t\u0003\u0103\u0104\n\u0001"+ - "\u0000\u0000\u0104\u0105\u0007\u0000\u0000\u0000\u0105\u0107\u0003\u0012"+ - "\t\u0002\u0106\u0100\u0001\u0000\u0000\u0000\u0106\u0103\u0001\u0000\u0000"+ - "\u0000\u0107\u010a\u0001\u0000\u0000\u0000\u0108\u0106\u0001\u0000\u0000"+ - "\u0000\u0108\u0109\u0001\u0000\u0000\u0000\u0109\u0013\u0001\u0000\u0000"+ - "\u0000\u010a\u0108\u0001\u0000\u0000\u0000\u010b\u010c\u0006\n\uffff\uffff"+ - "\u0000\u010c\u0114\u0003D\"\u0000\u010d\u0114\u0003:\u001d\u0000\u010e"+ - "\u0114\u0003\u0016\u000b\u0000\u010f\u0110\u00050\u0000\u0000\u0110\u0111"+ - "\u0003\n\u0005\u0000\u0111\u0112\u00057\u0000\u0000\u0112\u0114\u0001"+ - "\u0000\u0000\u0000\u0113\u010b\u0001\u0000\u0000\u0000\u0113\u010d\u0001"+ - "\u0000\u0000\u0000\u0113\u010e\u0001\u0000\u0000\u0000\u0113\u010f\u0001"+ - "\u0000\u0000\u0000\u0114\u011a\u0001\u0000\u0000\u0000\u0115\u0116\n\u0001"+ - "\u0000\u0000\u0116\u0117\u0005%\u0000\u0000\u0117\u0119\u0003\u001e\u000f"+ - "\u0000\u0118\u0115\u0001\u0000\u0000\u0000\u0119\u011c\u0001\u0000\u0000"+ - "\u0000\u011a\u0118\u0001\u0000\u0000\u0000\u011a\u011b\u0001\u0000\u0000"+ - "\u0000\u011b\u0015\u0001\u0000\u0000\u0000\u011c\u011a\u0001\u0000\u0000"+ - "\u0000\u011d\u011e\u0003\u0018\f\u0000\u011e\u012c\u00050\u0000\u0000"+ - "\u011f\u012d\u0005B\u0000\u0000\u0120\u0125\u0003\n\u0005\u0000\u0121"+ - "\u0122\u0005\'\u0000\u0000\u0122\u0124\u0003\n\u0005\u0000\u0123\u0121"+ - "\u0001\u0000\u0000\u0000\u0124\u0127\u0001\u0000\u0000\u0000\u0125\u0123"+ - 
"\u0001\u0000\u0000\u0000\u0125\u0126\u0001\u0000\u0000\u0000\u0126\u012a"+ - "\u0001\u0000\u0000\u0000\u0127\u0125\u0001\u0000\u0000\u0000\u0128\u0129"+ - "\u0005\'\u0000\u0000\u0129\u012b\u0003\u001a\r\u0000\u012a\u0128\u0001"+ - "\u0000\u0000\u0000\u012a\u012b\u0001\u0000\u0000\u0000\u012b\u012d\u0001"+ - "\u0000\u0000\u0000\u012c\u011f\u0001\u0000\u0000\u0000\u012c\u0120\u0001"+ - "\u0000\u0000\u0000\u012c\u012d\u0001\u0000\u0000\u0000\u012d\u012e\u0001"+ - "\u0000\u0000\u0000\u012e\u012f\u00057\u0000\u0000\u012f\u0017\u0001\u0000"+ - "\u0000\u0000\u0130\u0131\u0003H$\u0000\u0131\u0019\u0001\u0000\u0000\u0000"+ - "\u0132\u0133\u0004\r\n\u0000\u0133\u0134\u0005E\u0000\u0000\u0134\u0139"+ - "\u0003\u001c\u000e\u0000\u0135\u0136\u0005\'\u0000\u0000\u0136\u0138\u0003"+ - "\u001c\u000e\u0000\u0137\u0135\u0001\u0000\u0000\u0000\u0138\u013b\u0001"+ - "\u0000\u0000\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u013a\u0001"+ - "\u0000\u0000\u0000\u013a\u013c\u0001\u0000\u0000\u0000\u013b\u0139\u0001"+ - "\u0000\u0000\u0000\u013c\u013d\u0005F\u0000\u0000\u013d\u001b\u0001\u0000"+ - "\u0000\u0000\u013e\u013f\u0003j5\u0000\u013f\u0140\u0005&\u0000\u0000"+ - "\u0140\u0141\u0003D\"\u0000\u0141\u001d\u0001\u0000\u0000\u0000\u0142"+ - "\u0143\u0003@ \u0000\u0143\u001f\u0001\u0000\u0000\u0000\u0144\u0145\u0005"+ - "\f\u0000\u0000\u0145\u0146\u0003\"\u0011\u0000\u0146!\u0001\u0000\u0000"+ - "\u0000\u0147\u014c\u0003$\u0012\u0000\u0148\u0149\u0005\'\u0000\u0000"+ - "\u0149\u014b\u0003$\u0012\u0000\u014a\u0148\u0001\u0000\u0000\u0000\u014b"+ - "\u014e\u0001\u0000\u0000\u0000\u014c\u014a\u0001\u0000\u0000\u0000\u014c"+ - "\u014d\u0001\u0000\u0000\u0000\u014d#\u0001\u0000\u0000\u0000\u014e\u014c"+ - "\u0001\u0000\u0000\u0000\u014f\u0150\u0003:\u001d\u0000\u0150\u0151\u0005"+ - "$\u0000\u0000\u0151\u0153\u0001\u0000\u0000\u0000\u0152\u014f\u0001\u0000"+ - "\u0000\u0000\u0152\u0153\u0001\u0000\u0000\u0000\u0153\u0154\u0001\u0000"+ - "\u0000\u0000\u0154\u0155\u0003\n\u0005\u0000\u0155%\u0001\u0000\u0000"+ - "\u0000\u0156\u0157\u0005\u0006\u0000\u0000\u0157\u015c\u0003(\u0014\u0000"+ - "\u0158\u0159\u0005\'\u0000\u0000\u0159\u015b\u0003(\u0014\u0000\u015a"+ - "\u0158\u0001\u0000\u0000\u0000\u015b\u015e\u0001\u0000\u0000\u0000\u015c"+ - "\u015a\u0001\u0000\u0000\u0000\u015c\u015d\u0001\u0000\u0000\u0000\u015d"+ - "\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000\u015f"+ - "\u0161\u0003.\u0017\u0000\u0160\u015f\u0001\u0000\u0000\u0000\u0160\u0161"+ - "\u0001\u0000\u0000\u0000\u0161\'\u0001\u0000\u0000\u0000\u0162\u0163\u0003"+ - "*\u0015\u0000\u0163\u0164\u0005&\u0000\u0000\u0164\u0166\u0001\u0000\u0000"+ - "\u0000\u0165\u0162\u0001\u0000\u0000\u0000\u0165\u0166\u0001\u0000\u0000"+ - "\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u0167\u0168\u0003,\u0016\u0000"+ - "\u0168)\u0001\u0000\u0000\u0000\u0169\u016a\u0005S\u0000\u0000\u016a+"+ - "\u0001\u0000\u0000\u0000\u016b\u016c\u0007\u0002\u0000\u0000\u016c-\u0001"+ - "\u0000\u0000\u0000\u016d\u016e\u0005R\u0000\u0000\u016e\u0173\u0005S\u0000"+ - "\u0000\u016f\u0170\u0005\'\u0000\u0000\u0170\u0172\u0005S\u0000\u0000"+ - "\u0171\u016f\u0001\u0000\u0000\u0000\u0172\u0175\u0001\u0000\u0000\u0000"+ - "\u0173\u0171\u0001\u0000\u0000\u0000\u0173\u0174\u0001\u0000\u0000\u0000"+ - "\u0174/\u0001\u0000\u0000\u0000\u0175\u0173\u0001\u0000\u0000\u0000\u0176"+ - "\u0177\u0005\u0013\u0000\u0000\u0177\u017c\u0003(\u0014\u0000\u0178\u0179"+ - "\u0005\'\u0000\u0000\u0179\u017b\u0003(\u0014\u0000\u017a\u0178\u0001"+ - 
"\u0000\u0000\u0000\u017b\u017e\u0001\u0000\u0000\u0000\u017c\u017a\u0001"+ - "\u0000\u0000\u0000\u017c\u017d\u0001\u0000\u0000\u0000\u017d\u0180\u0001"+ - "\u0000\u0000\u0000\u017e\u017c\u0001\u0000\u0000\u0000\u017f\u0181\u0003"+ - "6\u001b\u0000\u0180\u017f\u0001\u0000\u0000\u0000\u0180\u0181\u0001\u0000"+ - "\u0000\u0000\u0181\u0184\u0001\u0000\u0000\u0000\u0182\u0183\u0005!\u0000"+ - "\u0000\u0183\u0185\u0003\"\u0011\u0000\u0184\u0182\u0001\u0000\u0000\u0000"+ - "\u0184\u0185\u0001\u0000\u0000\u0000\u01851\u0001\u0000\u0000\u0000\u0186"+ - "\u0187\u0005\u0004\u0000\u0000\u0187\u0188\u0003\"\u0011\u0000\u01883"+ - "\u0001\u0000\u0000\u0000\u0189\u018b\u0005\u000f\u0000\u0000\u018a\u018c"+ - "\u00036\u001b\u0000\u018b\u018a\u0001\u0000\u0000\u0000\u018b\u018c\u0001"+ - "\u0000\u0000\u0000\u018c\u018f\u0001\u0000\u0000\u0000\u018d\u018e\u0005"+ - "!\u0000\u0000\u018e\u0190\u0003\"\u0011\u0000\u018f\u018d\u0001\u0000"+ - "\u0000\u0000\u018f\u0190\u0001\u0000\u0000\u0000\u01905\u0001\u0000\u0000"+ - "\u0000\u0191\u0196\u00038\u001c\u0000\u0192\u0193\u0005\'\u0000\u0000"+ - "\u0193\u0195\u00038\u001c\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0195"+ - "\u0198\u0001\u0000\u0000\u0000\u0196\u0194\u0001\u0000\u0000\u0000\u0196"+ - "\u0197\u0001\u0000\u0000\u0000\u01977\u0001\u0000\u0000\u0000\u0198\u0196"+ - "\u0001\u0000\u0000\u0000\u0199\u019c\u0003$\u0012\u0000\u019a\u019b\u0005"+ - "\u0010\u0000\u0000\u019b\u019d\u0003\n\u0005\u0000\u019c\u019a\u0001\u0000"+ - "\u0000\u0000\u019c\u019d\u0001\u0000\u0000\u0000\u019d9\u0001\u0000\u0000"+ - "\u0000\u019e\u01a3\u0003H$\u0000\u019f\u01a0\u0005)\u0000\u0000\u01a0"+ - "\u01a2\u0003H$\u0000\u01a1\u019f\u0001\u0000\u0000\u0000\u01a2\u01a5\u0001"+ - "\u0000\u0000\u0000\u01a3\u01a1\u0001\u0000\u0000\u0000\u01a3\u01a4\u0001"+ - "\u0000\u0000\u0000\u01a4;\u0001\u0000\u0000\u0000\u01a5\u01a3\u0001\u0000"+ - "\u0000\u0000\u01a6\u01ab\u0003B!\u0000\u01a7\u01a8\u0005)\u0000\u0000"+ - "\u01a8\u01aa\u0003B!\u0000\u01a9\u01a7\u0001\u0000\u0000\u0000\u01aa\u01ad"+ - "\u0001\u0000\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ab\u01ac"+ - "\u0001\u0000\u0000\u0000\u01ac=\u0001\u0000\u0000\u0000\u01ad\u01ab\u0001"+ - "\u0000\u0000\u0000\u01ae\u01b3\u0003<\u001e\u0000\u01af\u01b0\u0005\'"+ - "\u0000\u0000\u01b0\u01b2\u0003<\u001e\u0000\u01b1\u01af\u0001\u0000\u0000"+ - "\u0000\u01b2\u01b5\u0001\u0000\u0000\u0000\u01b3\u01b1\u0001\u0000\u0000"+ - "\u0000\u01b3\u01b4\u0001\u0000\u0000\u0000\u01b4?\u0001\u0000\u0000\u0000"+ - "\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b6\u01b7\u0007\u0003\u0000\u0000"+ - "\u01b7A\u0001\u0000\u0000\u0000\u01b8\u01bc\u0005W\u0000\u0000\u01b9\u01ba"+ - "\u0004!\u000b\u0000\u01ba\u01bc\u0003F#\u0000\u01bb\u01b8\u0001\u0000"+ - "\u0000\u0000\u01bb\u01b9\u0001\u0000\u0000\u0000\u01bcC\u0001\u0000\u0000"+ - "\u0000\u01bd\u01e8\u00052\u0000\u0000\u01be\u01bf\u0003h4\u0000\u01bf"+ - "\u01c0\u0005J\u0000\u0000\u01c0\u01e8\u0001\u0000\u0000\u0000\u01c1\u01e8"+ - "\u0003f3\u0000\u01c2\u01e8\u0003h4\u0000\u01c3\u01e8\u0003b1\u0000\u01c4"+ - "\u01e8\u0003F#\u0000\u01c5\u01e8\u0003j5\u0000\u01c6\u01c7\u0005H\u0000"+ - "\u0000\u01c7\u01cc\u0003d2\u0000\u01c8\u01c9\u0005\'\u0000\u0000\u01c9"+ - "\u01cb\u0003d2\u0000\u01ca\u01c8\u0001\u0000\u0000\u0000\u01cb\u01ce\u0001"+ - "\u0000\u0000\u0000\u01cc\u01ca\u0001\u0000\u0000\u0000\u01cc\u01cd\u0001"+ - "\u0000\u0000\u0000\u01cd\u01cf\u0001\u0000\u0000\u0000\u01ce\u01cc\u0001"+ - "\u0000\u0000\u0000\u01cf\u01d0\u0005I\u0000\u0000\u01d0\u01e8\u0001\u0000"+ - 
"\u0000\u0000\u01d1\u01d2\u0005H\u0000\u0000\u01d2\u01d7\u0003b1\u0000"+ - "\u01d3\u01d4\u0005\'\u0000\u0000\u01d4\u01d6\u0003b1\u0000\u01d5\u01d3"+ - "\u0001\u0000\u0000\u0000\u01d6\u01d9\u0001\u0000\u0000\u0000\u01d7\u01d5"+ - "\u0001\u0000\u0000\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01da"+ - "\u0001\u0000\u0000\u0000\u01d9\u01d7\u0001\u0000\u0000\u0000\u01da\u01db"+ - "\u0005I\u0000\u0000\u01db\u01e8\u0001\u0000\u0000\u0000\u01dc\u01dd\u0005"+ - "H\u0000\u0000\u01dd\u01e2\u0003j5\u0000\u01de\u01df\u0005\'\u0000\u0000"+ - "\u01df\u01e1\u0003j5\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e4"+ - "\u0001\u0000\u0000\u0000\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e2\u01e3"+ - "\u0001\u0000\u0000\u0000\u01e3\u01e5\u0001\u0000\u0000\u0000\u01e4\u01e2"+ - "\u0001\u0000\u0000\u0000\u01e5\u01e6\u0005I\u0000\u0000\u01e6\u01e8\u0001"+ - "\u0000\u0000\u0000\u01e7\u01bd\u0001\u0000\u0000\u0000\u01e7\u01be\u0001"+ - "\u0000\u0000\u0000\u01e7\u01c1\u0001\u0000\u0000\u0000\u01e7\u01c2\u0001"+ - "\u0000\u0000\u0000\u01e7\u01c3\u0001\u0000\u0000\u0000\u01e7\u01c4\u0001"+ - "\u0000\u0000\u0000\u01e7\u01c5\u0001\u0000\u0000\u0000\u01e7\u01c6\u0001"+ - "\u0000\u0000\u0000\u01e7\u01d1\u0001\u0000\u0000\u0000\u01e7\u01dc\u0001"+ - "\u0000\u0000\u0000\u01e8E\u0001\u0000\u0000\u0000\u01e9\u01ec\u00055\u0000"+ - "\u0000\u01ea\u01ec\u0005G\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000"+ - "\u01eb\u01ea\u0001\u0000\u0000\u0000\u01ecG\u0001\u0000\u0000\u0000\u01ed"+ - "\u01f1\u0003@ \u0000\u01ee\u01ef\u0004$\f\u0000\u01ef\u01f1\u0003F#\u0000"+ - "\u01f0\u01ed\u0001\u0000\u0000\u0000\u01f0\u01ee\u0001\u0000\u0000\u0000"+ - "\u01f1I\u0001\u0000\u0000\u0000\u01f2\u01f3\u0005\t\u0000\u0000\u01f3"+ - "\u01f4\u0005\u001f\u0000\u0000\u01f4K\u0001\u0000\u0000\u0000\u01f5\u01f6"+ - "\u0005\u000e\u0000\u0000\u01f6\u01fb\u0003N\'\u0000\u01f7\u01f8\u0005"+ - "\'\u0000\u0000\u01f8\u01fa\u0003N\'\u0000\u01f9\u01f7\u0001\u0000\u0000"+ - "\u0000\u01fa\u01fd\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000\u0000"+ - "\u0000\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fcM\u0001\u0000\u0000\u0000"+ - "\u01fd\u01fb\u0001\u0000\u0000\u0000\u01fe\u0200\u0003\n\u0005\u0000\u01ff"+ - "\u0201\u0007\u0004\u0000\u0000\u0200\u01ff\u0001\u0000\u0000\u0000\u0200"+ - "\u0201\u0001\u0000\u0000\u0000\u0201\u0204\u0001\u0000\u0000\u0000\u0202"+ - "\u0203\u00053\u0000\u0000\u0203\u0205\u0007\u0005\u0000\u0000\u0204\u0202"+ - "\u0001\u0000\u0000\u0000\u0204\u0205\u0001\u0000\u0000\u0000\u0205O\u0001"+ - "\u0000\u0000\u0000\u0206\u0207\u0005\b\u0000\u0000\u0207\u0208\u0003>"+ - "\u001f\u0000\u0208Q\u0001\u0000\u0000\u0000\u0209\u020a\u0005\u0002\u0000"+ - "\u0000\u020a\u020b\u0003>\u001f\u0000\u020bS\u0001\u0000\u0000\u0000\u020c"+ - "\u020d\u0005\u000b\u0000\u0000\u020d\u0212\u0003V+\u0000\u020e\u020f\u0005"+ - "\'\u0000\u0000\u020f\u0211\u0003V+\u0000\u0210\u020e\u0001\u0000\u0000"+ - "\u0000\u0211\u0214\u0001\u0000\u0000\u0000\u0212\u0210\u0001\u0000\u0000"+ - "\u0000\u0212\u0213\u0001\u0000\u0000\u0000\u0213U\u0001\u0000\u0000\u0000"+ - "\u0214\u0212\u0001\u0000\u0000\u0000\u0215\u0216\u0003<\u001e\u0000\u0216"+ - "\u0217\u0005[\u0000\u0000\u0217\u0218\u0003<\u001e\u0000\u0218W\u0001"+ - "\u0000\u0000\u0000\u0219\u021a\u0005\u0001\u0000\u0000\u021a\u021b\u0003"+ - "\u0014\n\u0000\u021b\u021d\u0003j5\u0000\u021c\u021e\u0003^/\u0000\u021d"+ - "\u021c\u0001\u0000\u0000\u0000\u021d\u021e\u0001\u0000\u0000\u0000\u021e"+ - "Y\u0001\u0000\u0000\u0000\u021f\u0220\u0005\u0007\u0000\u0000\u0220\u0221"+ - 
"\u0003\u0014\n\u0000\u0221\u0222\u0003j5\u0000\u0222[\u0001\u0000\u0000"+ - "\u0000\u0223\u0224\u0005\n\u0000\u0000\u0224\u0225\u0003:\u001d\u0000"+ - "\u0225]\u0001\u0000\u0000\u0000\u0226\u022b\u0003`0\u0000\u0227\u0228"+ - "\u0005\'\u0000\u0000\u0228\u022a\u0003`0\u0000\u0229\u0227\u0001\u0000"+ - "\u0000\u0000\u022a\u022d\u0001\u0000\u0000\u0000\u022b\u0229\u0001\u0000"+ - "\u0000\u0000\u022b\u022c\u0001\u0000\u0000\u0000\u022c_\u0001\u0000\u0000"+ - "\u0000\u022d\u022b\u0001\u0000\u0000\u0000\u022e\u022f\u0003@ \u0000\u022f"+ - "\u0230\u0005$\u0000\u0000\u0230\u0231\u0003D\"\u0000\u0231a\u0001\u0000"+ - "\u0000\u0000\u0232\u0233\u0007\u0006\u0000\u0000\u0233c\u0001\u0000\u0000"+ - "\u0000\u0234\u0237\u0003f3\u0000\u0235\u0237\u0003h4\u0000\u0236\u0234"+ - "\u0001\u0000\u0000\u0000\u0236\u0235\u0001\u0000\u0000\u0000\u0237e\u0001"+ - "\u0000\u0000\u0000\u0238\u023a\u0007\u0000\u0000\u0000\u0239\u0238\u0001"+ - "\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000\u023a\u023b\u0001"+ - "\u0000\u0000\u0000\u023b\u023c\u0005 \u0000\u0000\u023cg\u0001\u0000\u0000"+ - "\u0000\u023d\u023f\u0007\u0000\u0000\u0000\u023e\u023d\u0001\u0000\u0000"+ - "\u0000\u023e\u023f\u0001\u0000\u0000\u0000\u023f\u0240\u0001\u0000\u0000"+ - "\u0000\u0240\u0241\u0005\u001f\u0000\u0000\u0241i\u0001\u0000\u0000\u0000"+ - "\u0242\u0243\u0005\u001e\u0000\u0000\u0243k\u0001\u0000\u0000\u0000\u0244"+ - "\u0245\u0007\u0007\u0000\u0000\u0245m\u0001\u0000\u0000\u0000\u0246\u0247"+ - "\u0005\u0005\u0000\u0000\u0247\u0248\u0003p8\u0000\u0248o\u0001\u0000"+ - "\u0000\u0000\u0249\u024a\u0005H\u0000\u0000\u024a\u024b\u0003\u0002\u0001"+ - "\u0000\u024b\u024c\u0005I\u0000\u0000\u024cq\u0001\u0000\u0000\u0000\u024d"+ - "\u024e\u0005\r\u0000\u0000\u024e\u024f\u0005k\u0000\u0000\u024fs\u0001"+ - "\u0000\u0000\u0000\u0250\u0251\u0005\u0003\u0000\u0000\u0251\u0254\u0005"+ - "a\u0000\u0000\u0252\u0253\u0005_\u0000\u0000\u0253\u0255\u0003<\u001e"+ - "\u0000\u0254\u0252\u0001\u0000\u0000\u0000\u0254\u0255\u0001\u0000\u0000"+ - "\u0000\u0255\u025f\u0001\u0000\u0000\u0000\u0256\u0257\u0005`\u0000\u0000"+ - "\u0257\u025c\u0003v;\u0000\u0258\u0259\u0005\'\u0000\u0000\u0259\u025b"+ - "\u0003v;\u0000\u025a\u0258\u0001\u0000\u0000\u0000\u025b\u025e\u0001\u0000"+ - "\u0000\u0000\u025c\u025a\u0001\u0000\u0000\u0000\u025c\u025d\u0001\u0000"+ - "\u0000\u0000\u025d\u0260\u0001\u0000\u0000\u0000\u025e\u025c\u0001\u0000"+ - "\u0000\u0000\u025f\u0256\u0001\u0000\u0000\u0000\u025f\u0260\u0001\u0000"+ - "\u0000\u0000\u0260u\u0001\u0000\u0000\u0000\u0261\u0262\u0003<\u001e\u0000"+ - "\u0262\u0263\u0005$\u0000\u0000\u0263\u0265\u0001\u0000\u0000\u0000\u0264"+ - "\u0261\u0001\u0000\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265"+ - "\u0266\u0001\u0000\u0000\u0000\u0266\u0267\u0003<\u001e\u0000\u0267w\u0001"+ - "\u0000\u0000\u0000\u0268\u0269\u0005\u0012\u0000\u0000\u0269\u026a\u0003"+ - "(\u0014\u0000\u026a\u026b\u0005_\u0000\u0000\u026b\u026c\u0003>\u001f"+ - "\u0000\u026cy\u0001\u0000\u0000\u0000\u026d\u026e\u0005\u0011\u0000\u0000"+ - "\u026e\u0271\u00036\u001b\u0000\u026f\u0270\u0005!\u0000\u0000\u0270\u0272"+ - "\u0003\"\u0011\u0000\u0271\u026f\u0001\u0000\u0000\u0000\u0271\u0272\u0001"+ - "\u0000\u0000\u0000\u0272{\u0001\u0000\u0000\u0000\u0273\u0275\u0007\b"+ - "\u0000\u0000\u0274\u0273\u0001\u0000\u0000\u0000\u0274\u0275\u0001\u0000"+ - "\u0000\u0000\u0275\u0276\u0001\u0000\u0000\u0000\u0276\u0277\u0005\u0014"+ - "\u0000\u0000\u0277\u0278\u0003~?\u0000\u0278\u0279\u0003\u0080@\u0000"+ - 
"\u0279}\u0001\u0000\u0000\u0000\u027a\u027d\u0003(\u0014\u0000\u027b\u027c"+ - "\u0005[\u0000\u0000\u027c\u027e\u0003@ \u0000\u027d\u027b\u0001\u0000"+ - "\u0000\u0000\u027d\u027e\u0001\u0000\u0000\u0000\u027e\u007f\u0001\u0000"+ - "\u0000\u0000\u027f\u0280\u0005_\u0000\u0000\u0280\u0285\u0003\u0082A\u0000"+ - "\u0281\u0282\u0005\'\u0000\u0000\u0282\u0284\u0003\u0082A\u0000\u0283"+ - "\u0281\u0001\u0000\u0000\u0000\u0284\u0287\u0001\u0000\u0000\u0000\u0285"+ - "\u0283\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000\u0000\u0286"+ - "\u0081\u0001\u0000\u0000\u0000\u0287\u0285\u0001\u0000\u0000\u0000\u0288"+ - "\u0289\u0003\u0010\b\u0000\u0289\u0083\u0001\u0000\u0000\u0000?\u008f"+ + "\u0001\r\u0005\r\u0137\b\r\n\r\f\r\u013a\t\r\u0001\r\u0001\r\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u0010"+ + "\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011"+ + "\u014a\b\u0011\n\u0011\f\u0011\u014d\t\u0011\u0001\u0012\u0001\u0012\u0001"+ + "\u0012\u0003\u0012\u0152\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ + "\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u015a\b\u0013\n\u0013\f\u0013"+ + "\u015d\t\u0013\u0001\u0013\u0003\u0013\u0160\b\u0013\u0001\u0014\u0001"+ + "\u0014\u0001\u0014\u0003\u0014\u0165\b\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0005\u0017\u0171\b\u0017\n\u0017\f\u0017\u0174\t\u0017"+ + "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u017a\b\u0018"+ + "\n\u0018\f\u0018\u017d\t\u0018\u0001\u0018\u0003\u0018\u0180\b\u0018\u0001"+ + "\u0018\u0001\u0018\u0003\u0018\u0184\b\u0018\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u018b\b\u001a\u0001\u001a\u0001"+ + "\u001a\u0003\u001a\u018f\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005"+ + "\u001b\u0194\b\u001b\n\u001b\f\u001b\u0197\t\u001b\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0003\u001c\u019c\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0005\u001d\u01a1\b\u001d\n\u001d\f\u001d\u01a4\t\u001d\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0005\u001e\u01a9\b\u001e\n\u001e\f\u001e\u01ac\t\u001e"+ + "\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01b1\b\u001f\n\u001f"+ + "\f\u001f\u01b4\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u01bb"+ + "\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01ca\b\"\n\"\f\"\u01cd\t\""+ + "\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01d5\b\"\n\""+ + "\f\"\u01d8\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\""+ + "\u01e0\b\"\n\"\f\"\u01e3\t\"\u0001\"\u0001\"\u0003\"\u01e7\b\"\u0001#"+ + "\u0001#\u0003#\u01eb\b#\u0001$\u0001$\u0001$\u0003$\u01f0\b$\u0001%\u0001"+ + "%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01f9\b&\n&\f&\u01fc\t&\u0001"+ + "\'\u0001\'\u0003\'\u0200\b\'\u0001\'\u0001\'\u0003\'\u0204\b\'\u0001("+ + "\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005"+ + "*\u0210\b*\n*\f*\u0213\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ + ",\u0001,\u0003,\u021d\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001"+ + ".\u0001/\u0001/\u0001/\u0005/\u0229\b/\n/\f/\u022c\t/\u00010\u00010\u0001"+ + "0\u00010\u00011\u00011\u00012\u00012\u00032\u0236\b2\u00013\u00033\u0239"+ + "\b3\u00013\u00013\u00014\u00034\u023e\b4\u00014\u00014\u00015\u00015\u0001"+ + "6\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u0001"+ + 
"9\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0254\b:\u0001:\u0001:\u0001"+ + ":\u0001:\u0005:\u025a\b:\n:\f:\u025d\t:\u0003:\u025f\b:\u0001;\u0001;"+ + "\u0001;\u0003;\u0264\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001"+ + "<\u0001=\u0001=\u0001=\u0001=\u0003=\u0271\b=\u0001>\u0003>\u0274\b>\u0001"+ + ">\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0003?\u027d\b?\u0001@\u0001"+ + "@\u0001@\u0001@\u0005@\u0283\b@\n@\f@\u0286\t@\u0001A\u0001A\u0001A\u0000"+ + "\u0004\u0002\n\u0012\u0014B\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ + "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ + "TVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000\t\u0001\u0000@A\u0001\u0000"+ + "BD\u0002\u0000\u001e\u001eSS\u0001\u0000JK\u0002\u0000##((\u0002\u0000"+ + "++..\u0002\u0000**88\u0002\u000099;?\u0001\u0000\u0016\u0018\u02a5\u0000"+ + "\u0084\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000\u0000\u0000\u0004"+ + "\u0098\u0001\u0000\u0000\u0000\u0006\u00ac\u0001\u0000\u0000\u0000\b\u00ae"+ + "\u0001\u0000\u0000\u0000\n\u00ce\u0001\u0000\u0000\u0000\f\u00e9\u0001"+ + "\u0000\u0000\u0000\u000e\u00eb\u0001\u0000\u0000\u0000\u0010\u00f8\u0001"+ + "\u0000\u0000\u0000\u0012\u00fe\u0001\u0000\u0000\u0000\u0014\u0113\u0001"+ + "\u0000\u0000\u0000\u0016\u011d\u0001\u0000\u0000\u0000\u0018\u0130\u0001"+ + "\u0000\u0000\u0000\u001a\u0132\u0001\u0000\u0000\u0000\u001c\u013d\u0001"+ + "\u0000\u0000\u0000\u001e\u0141\u0001\u0000\u0000\u0000 \u0143\u0001\u0000"+ + "\u0000\u0000\"\u0146\u0001\u0000\u0000\u0000$\u0151\u0001\u0000\u0000"+ + "\u0000&\u0155\u0001\u0000\u0000\u0000(\u0164\u0001\u0000\u0000\u0000*"+ + "\u0168\u0001\u0000\u0000\u0000,\u016a\u0001\u0000\u0000\u0000.\u016c\u0001"+ + "\u0000\u0000\u00000\u0175\u0001\u0000\u0000\u00002\u0185\u0001\u0000\u0000"+ + "\u00004\u0188\u0001\u0000\u0000\u00006\u0190\u0001\u0000\u0000\u00008"+ + "\u0198\u0001\u0000\u0000\u0000:\u019d\u0001\u0000\u0000\u0000<\u01a5\u0001"+ + "\u0000\u0000\u0000>\u01ad\u0001\u0000\u0000\u0000@\u01b5\u0001\u0000\u0000"+ + "\u0000B\u01ba\u0001\u0000\u0000\u0000D\u01e6\u0001\u0000\u0000\u0000F"+ + "\u01ea\u0001\u0000\u0000\u0000H\u01ef\u0001\u0000\u0000\u0000J\u01f1\u0001"+ + "\u0000\u0000\u0000L\u01f4\u0001\u0000\u0000\u0000N\u01fd\u0001\u0000\u0000"+ + "\u0000P\u0205\u0001\u0000\u0000\u0000R\u0208\u0001\u0000\u0000\u0000T"+ + "\u020b\u0001\u0000\u0000\u0000V\u0214\u0001\u0000\u0000\u0000X\u0218\u0001"+ + "\u0000\u0000\u0000Z\u021e\u0001\u0000\u0000\u0000\\\u0222\u0001\u0000"+ + "\u0000\u0000^\u0225\u0001\u0000\u0000\u0000`\u022d\u0001\u0000\u0000\u0000"+ + "b\u0231\u0001\u0000\u0000\u0000d\u0235\u0001\u0000\u0000\u0000f\u0238"+ + "\u0001\u0000\u0000\u0000h\u023d\u0001\u0000\u0000\u0000j\u0241\u0001\u0000"+ + "\u0000\u0000l\u0243\u0001\u0000\u0000\u0000n\u0245\u0001\u0000\u0000\u0000"+ + "p\u0248\u0001\u0000\u0000\u0000r\u024c\u0001\u0000\u0000\u0000t\u024f"+ + "\u0001\u0000\u0000\u0000v\u0263\u0001\u0000\u0000\u0000x\u0267\u0001\u0000"+ + "\u0000\u0000z\u026c\u0001\u0000\u0000\u0000|\u0273\u0001\u0000\u0000\u0000"+ + "~\u0279\u0001\u0000\u0000\u0000\u0080\u027e\u0001\u0000\u0000\u0000\u0082"+ + "\u0287\u0001\u0000\u0000\u0000\u0084\u0085\u0003\u0002\u0001\u0000\u0085"+ + "\u0086\u0005\u0000\u0000\u0001\u0086\u0001\u0001\u0000\u0000\u0000\u0087"+ + "\u0088\u0006\u0001\uffff\uffff\u0000\u0088\u0089\u0003\u0004\u0002\u0000"+ + "\u0089\u008f\u0001\u0000\u0000\u0000\u008a\u008b\n\u0001\u0000\u0000\u008b"+ + "\u008c\u0005\u001d\u0000\u0000\u008c\u008e\u0003\u0006\u0003\u0000\u008d"+ + 
"\u008a\u0001\u0000\u0000\u0000\u008e\u0091\u0001\u0000\u0000\u0000\u008f"+ + "\u008d\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000\u0000\u0090"+ + "\u0003\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000\u0000\u0092"+ + "\u0099\u0003n7\u0000\u0093\u0099\u0003&\u0013\u0000\u0094\u0099\u0003"+ + " \u0010\u0000\u0095\u0099\u0003r9\u0000\u0096\u0097\u0004\u0002\u0001"+ + "\u0000\u0097\u0099\u00030\u0018\u0000\u0098\u0092\u0001\u0000\u0000\u0000"+ + "\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001\u0000\u0000\u0000"+ + "\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000\u0000\u0000"+ + "\u0099\u0005\u0001\u0000\u0000\u0000\u009a\u00ad\u00032\u0019\u0000\u009b"+ + "\u00ad\u0003\b\u0004\u0000\u009c\u00ad\u0003P(\u0000\u009d\u00ad\u0003"+ + "J%\u0000\u009e\u00ad\u00034\u001a\u0000\u009f\u00ad\u0003L&\u0000\u00a0"+ + "\u00ad\u0003R)\u0000\u00a1\u00ad\u0003T*\u0000\u00a2\u00ad\u0003X,\u0000"+ + "\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad\u0003t:\u0000\u00a5\u00ad\u0003"+ + "\\.\u0000\u00a6\u00a7\u0004\u0003\u0002\u0000\u00a7\u00ad\u0003z=\u0000"+ + "\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9\u00ad\u0003x<\u0000\u00aa\u00ab"+ + "\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003|>\u0000\u00ac\u009a\u0001\u0000"+ + "\u0000\u0000\u00ac\u009b\u0001\u0000\u0000\u0000\u00ac\u009c\u0001\u0000"+ + "\u0000\u0000\u00ac\u009d\u0001\u0000\u0000\u0000\u00ac\u009e\u0001\u0000"+ + "\u0000\u0000\u00ac\u009f\u0001\u0000\u0000\u0000\u00ac\u00a0\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000\u0000\u00ac\u00a2\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000\u0000\u00ac\u00a4\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000\u0000\u00ac\u00a6\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001\u0000"+ + "\u0000\u0000\u00ad\u0007\u0001\u0000\u0000\u0000\u00ae\u00af\u0005\u0010"+ + "\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000\u00b0\t\u0001\u0000\u0000"+ + "\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000\u00b2\u00b3\u00051\u0000"+ + "\u0000\u00b3\u00cf\u0003\n\u0005\b\u00b4\u00cf\u0003\u0010\b\u0000\u00b5"+ + "\u00cf\u0003\f\u0006\u0000\u00b6\u00b8\u0003\u0010\b\u0000\u00b7\u00b9"+ + "\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000\u0000\u0000\u00b8\u00b9\u0001"+ + "\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u00bb\u0005"+ + ",\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000\u00bc\u00c1\u0003\u0010\b"+ + "\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be\u00c0\u0003\u0010\b\u0000"+ + "\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3\u0001\u0000\u0000\u0000"+ + "\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2\u0001\u0000\u0000\u0000"+ + "\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000\u0000\u0000"+ + "\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00cf\u0001\u0000\u0000\u0000\u00c6"+ + "\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-\u0000\u0000\u00c8\u00ca"+ + "\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000\u0000\u00c9\u00ca\u0001"+ + "\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000\u0000\u00cb\u00cc\u0005"+ + "2\u0000\u0000\u00cc\u00cf\u0001\u0000\u0000\u0000\u00cd\u00cf\u0003\u000e"+ + "\u0007\u0000\u00ce\u00b1\u0001\u0000\u0000\u0000\u00ce\u00b4\u0001\u0000"+ + "\u0000\u0000\u00ce\u00b5\u0001\u0000\u0000\u0000\u00ce\u00b6\u0001\u0000"+ + "\u0000\u0000\u00ce\u00c6\u0001\u0000\u0000\u0000\u00ce\u00cd\u0001\u0000"+ + "\u0000\u0000\u00cf\u00d8\u0001\u0000\u0000\u0000\u00d0\u00d1\n\u0005\u0000"+ + "\u0000\u00d1\u00d2\u0005\"\u0000\u0000\u00d2\u00d7\u0003\n\u0005\u0006"+ + 
"\u00d3\u00d4\n\u0004\u0000\u0000\u00d4\u00d5\u00054\u0000\u0000\u00d5"+ + "\u00d7\u0003\n\u0005\u0005\u00d6\u00d0\u0001\u0000\u0000\u0000\u00d6\u00d3"+ + "\u0001\u0000\u0000\u0000\u00d7\u00da\u0001\u0000\u0000\u0000\u00d8\u00d6"+ + "\u0001\u0000\u0000\u0000\u00d8\u00d9\u0001\u0000\u0000\u0000\u00d9\u000b"+ + "\u0001\u0000\u0000\u0000\u00da\u00d8\u0001\u0000\u0000\u0000\u00db\u00dd"+ + "\u0003\u0010\b\u0000\u00dc\u00de\u00051\u0000\u0000\u00dd\u00dc\u0001"+ + "\u0000\u0000\u0000\u00dd\u00de\u0001\u0000\u0000\u0000\u00de\u00df\u0001"+ + "\u0000\u0000\u0000\u00df\u00e0\u0005/\u0000\u0000\u00e0\u00e1\u0003j5"+ + "\u0000\u00e1\u00ea\u0001\u0000\u0000\u0000\u00e2\u00e4\u0003\u0010\b\u0000"+ + "\u00e3\u00e5\u00051\u0000\u0000\u00e4\u00e3\u0001\u0000\u0000\u0000\u00e4"+ + "\u00e5\u0001\u0000\u0000\u0000\u00e5\u00e6\u0001\u0000\u0000\u0000\u00e6"+ + "\u00e7\u00056\u0000\u0000\u00e7\u00e8\u0003j5\u0000\u00e8\u00ea\u0001"+ + "\u0000\u0000\u0000\u00e9\u00db\u0001\u0000\u0000\u0000\u00e9\u00e2\u0001"+ + "\u0000\u0000\u0000\u00ea\r\u0001\u0000\u0000\u0000\u00eb\u00ee\u0003:"+ + "\u001d\u0000\u00ec\u00ed\u0005%\u0000\u0000\u00ed\u00ef\u0003\u001e\u000f"+ + "\u0000\u00ee\u00ec\u0001\u0000\u0000\u0000\u00ee\u00ef\u0001\u0000\u0000"+ + "\u0000\u00ef\u00f0\u0001\u0000\u0000\u0000\u00f0\u00f1\u0005&\u0000\u0000"+ + "\u00f1\u00f2\u0003D\"\u0000\u00f2\u000f\u0001\u0000\u0000\u0000\u00f3"+ + "\u00f9\u0003\u0012\t\u0000\u00f4\u00f5\u0003\u0012\t\u0000\u00f5\u00f6"+ + "\u0003l6\u0000\u00f6\u00f7\u0003\u0012\t\u0000\u00f7\u00f9\u0001\u0000"+ + "\u0000\u0000\u00f8\u00f3\u0001\u0000\u0000\u0000\u00f8\u00f4\u0001\u0000"+ + "\u0000\u0000\u00f9\u0011\u0001\u0000\u0000\u0000\u00fa\u00fb\u0006\t\uffff"+ + "\uffff\u0000\u00fb\u00ff\u0003\u0014\n\u0000\u00fc\u00fd\u0007\u0000\u0000"+ + "\u0000\u00fd\u00ff\u0003\u0012\t\u0003\u00fe\u00fa\u0001\u0000\u0000\u0000"+ + "\u00fe\u00fc\u0001\u0000\u0000\u0000\u00ff\u0108\u0001\u0000\u0000\u0000"+ + "\u0100\u0101\n\u0002\u0000\u0000\u0101\u0102\u0007\u0001\u0000\u0000\u0102"+ + "\u0107\u0003\u0012\t\u0003\u0103\u0104\n\u0001\u0000\u0000\u0104\u0105"+ + "\u0007\u0000\u0000\u0000\u0105\u0107\u0003\u0012\t\u0002\u0106\u0100\u0001"+ + "\u0000\u0000\u0000\u0106\u0103\u0001\u0000\u0000\u0000\u0107\u010a\u0001"+ + "\u0000\u0000\u0000\u0108\u0106\u0001\u0000\u0000\u0000\u0108\u0109\u0001"+ + "\u0000\u0000\u0000\u0109\u0013\u0001\u0000\u0000\u0000\u010a\u0108\u0001"+ + "\u0000\u0000\u0000\u010b\u010c\u0006\n\uffff\uffff\u0000\u010c\u0114\u0003"+ + "D\"\u0000\u010d\u0114\u0003:\u001d\u0000\u010e\u0114\u0003\u0016\u000b"+ + "\u0000\u010f\u0110\u00050\u0000\u0000\u0110\u0111\u0003\n\u0005\u0000"+ + "\u0111\u0112\u00057\u0000\u0000\u0112\u0114\u0001\u0000\u0000\u0000\u0113"+ + "\u010b\u0001\u0000\u0000\u0000\u0113\u010d\u0001\u0000\u0000\u0000\u0113"+ + "\u010e\u0001\u0000\u0000\u0000\u0113\u010f\u0001\u0000\u0000\u0000\u0114"+ + "\u011a\u0001\u0000\u0000\u0000\u0115\u0116\n\u0001\u0000\u0000\u0116\u0117"+ + "\u0005%\u0000\u0000\u0117\u0119\u0003\u001e\u000f\u0000\u0118\u0115\u0001"+ + "\u0000\u0000\u0000\u0119\u011c\u0001\u0000\u0000\u0000\u011a\u0118\u0001"+ + "\u0000\u0000\u0000\u011a\u011b\u0001\u0000\u0000\u0000\u011b\u0015\u0001"+ + "\u0000\u0000\u0000\u011c\u011a\u0001\u0000\u0000\u0000\u011d\u011e\u0003"+ + "\u0018\f\u0000\u011e\u012c\u00050\u0000\u0000\u011f\u012d\u0005B\u0000"+ + "\u0000\u0120\u0125\u0003\n\u0005\u0000\u0121\u0122\u0005\'\u0000\u0000"+ + "\u0122\u0124\u0003\n\u0005\u0000\u0123\u0121\u0001\u0000\u0000\u0000\u0124"+ + 
"\u0127\u0001\u0000\u0000\u0000\u0125\u0123\u0001\u0000\u0000\u0000\u0125"+ + "\u0126\u0001\u0000\u0000\u0000\u0126\u012a\u0001\u0000\u0000\u0000\u0127"+ + "\u0125\u0001\u0000\u0000\u0000\u0128\u0129\u0005\'\u0000\u0000\u0129\u012b"+ + "\u0003\u001a\r\u0000\u012a\u0128\u0001\u0000\u0000\u0000\u012a\u012b\u0001"+ + "\u0000\u0000\u0000\u012b\u012d\u0001\u0000\u0000\u0000\u012c\u011f\u0001"+ + "\u0000\u0000\u0000\u012c\u0120\u0001\u0000\u0000\u0000\u012c\u012d\u0001"+ + "\u0000\u0000\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e\u012f\u0005"+ + "7\u0000\u0000\u012f\u0017\u0001\u0000\u0000\u0000\u0130\u0131\u0003H$"+ + "\u0000\u0131\u0019\u0001\u0000\u0000\u0000\u0132\u0133\u0005E\u0000\u0000"+ + "\u0133\u0138\u0003\u001c\u000e\u0000\u0134\u0135\u0005\'\u0000\u0000\u0135"+ + "\u0137\u0003\u001c\u000e\u0000\u0136\u0134\u0001\u0000\u0000\u0000\u0137"+ + "\u013a\u0001\u0000\u0000\u0000\u0138\u0136\u0001\u0000\u0000\u0000\u0138"+ + "\u0139\u0001\u0000\u0000\u0000\u0139\u013b\u0001\u0000\u0000\u0000\u013a"+ + "\u0138\u0001\u0000\u0000\u0000\u013b\u013c\u0005F\u0000\u0000\u013c\u001b"+ + "\u0001\u0000\u0000\u0000\u013d\u013e\u0003j5\u0000\u013e\u013f\u0005&"+ + "\u0000\u0000\u013f\u0140\u0003D\"\u0000\u0140\u001d\u0001\u0000\u0000"+ + "\u0000\u0141\u0142\u0003@ \u0000\u0142\u001f\u0001\u0000\u0000\u0000\u0143"+ + "\u0144\u0005\f\u0000\u0000\u0144\u0145\u0003\"\u0011\u0000\u0145!\u0001"+ + "\u0000\u0000\u0000\u0146\u014b\u0003$\u0012\u0000\u0147\u0148\u0005\'"+ + "\u0000\u0000\u0148\u014a\u0003$\u0012\u0000\u0149\u0147\u0001\u0000\u0000"+ + "\u0000\u014a\u014d\u0001\u0000\u0000\u0000\u014b\u0149\u0001\u0000\u0000"+ + "\u0000\u014b\u014c\u0001\u0000\u0000\u0000\u014c#\u0001\u0000\u0000\u0000"+ + "\u014d\u014b\u0001\u0000\u0000\u0000\u014e\u014f\u0003:\u001d\u0000\u014f"+ + "\u0150\u0005$\u0000\u0000\u0150\u0152\u0001\u0000\u0000\u0000\u0151\u014e"+ + "\u0001\u0000\u0000\u0000\u0151\u0152\u0001\u0000\u0000\u0000\u0152\u0153"+ + "\u0001\u0000\u0000\u0000\u0153\u0154\u0003\n\u0005\u0000\u0154%\u0001"+ + "\u0000\u0000\u0000\u0155\u0156\u0005\u0006\u0000\u0000\u0156\u015b\u0003"+ + "(\u0014\u0000\u0157\u0158\u0005\'\u0000\u0000\u0158\u015a\u0003(\u0014"+ + "\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u015a\u015d\u0001\u0000\u0000"+ + "\u0000\u015b\u0159\u0001\u0000\u0000\u0000\u015b\u015c\u0001\u0000\u0000"+ + "\u0000\u015c\u015f\u0001\u0000\u0000\u0000\u015d\u015b\u0001\u0000\u0000"+ + "\u0000\u015e\u0160\u0003.\u0017\u0000\u015f\u015e\u0001\u0000\u0000\u0000"+ + "\u015f\u0160\u0001\u0000\u0000\u0000\u0160\'\u0001\u0000\u0000\u0000\u0161"+ + "\u0162\u0003*\u0015\u0000\u0162\u0163\u0005&\u0000\u0000\u0163\u0165\u0001"+ + "\u0000\u0000\u0000\u0164\u0161\u0001\u0000\u0000\u0000\u0164\u0165\u0001"+ + "\u0000\u0000\u0000\u0165\u0166\u0001\u0000\u0000\u0000\u0166\u0167\u0003"+ + ",\u0016\u0000\u0167)\u0001\u0000\u0000\u0000\u0168\u0169\u0005S\u0000"+ + "\u0000\u0169+\u0001\u0000\u0000\u0000\u016a\u016b\u0007\u0002\u0000\u0000"+ + "\u016b-\u0001\u0000\u0000\u0000\u016c\u016d\u0005R\u0000\u0000\u016d\u0172"+ + "\u0005S\u0000\u0000\u016e\u016f\u0005\'\u0000\u0000\u016f\u0171\u0005"+ + "S\u0000\u0000\u0170\u016e\u0001\u0000\u0000\u0000\u0171\u0174\u0001\u0000"+ + "\u0000\u0000\u0172\u0170\u0001\u0000\u0000\u0000\u0172\u0173\u0001\u0000"+ + "\u0000\u0000\u0173/\u0001\u0000\u0000\u0000\u0174\u0172\u0001\u0000\u0000"+ + "\u0000\u0175\u0176\u0005\u0013\u0000\u0000\u0176\u017b\u0003(\u0014\u0000"+ + "\u0177\u0178\u0005\'\u0000\u0000\u0178\u017a\u0003(\u0014\u0000\u0179"+ + 
"\u0177\u0001\u0000\u0000\u0000\u017a\u017d\u0001\u0000\u0000\u0000\u017b"+ + "\u0179\u0001\u0000\u0000\u0000\u017b\u017c\u0001\u0000\u0000\u0000\u017c"+ + "\u017f\u0001\u0000\u0000\u0000\u017d\u017b\u0001\u0000\u0000\u0000\u017e"+ + "\u0180\u00036\u001b\u0000\u017f\u017e\u0001\u0000\u0000\u0000\u017f\u0180"+ + "\u0001\u0000\u0000\u0000\u0180\u0183\u0001\u0000\u0000\u0000\u0181\u0182"+ + "\u0005!\u0000\u0000\u0182\u0184\u0003\"\u0011\u0000\u0183\u0181\u0001"+ + "\u0000\u0000\u0000\u0183\u0184\u0001\u0000\u0000\u0000\u01841\u0001\u0000"+ + "\u0000\u0000\u0185\u0186\u0005\u0004\u0000\u0000\u0186\u0187\u0003\"\u0011"+ + "\u0000\u01873\u0001\u0000\u0000\u0000\u0188\u018a\u0005\u000f\u0000\u0000"+ + "\u0189\u018b\u00036\u001b\u0000\u018a\u0189\u0001\u0000\u0000\u0000\u018a"+ + "\u018b\u0001\u0000\u0000\u0000\u018b\u018e\u0001\u0000\u0000\u0000\u018c"+ + "\u018d\u0005!\u0000\u0000\u018d\u018f\u0003\"\u0011\u0000\u018e\u018c"+ + "\u0001\u0000\u0000\u0000\u018e\u018f\u0001\u0000\u0000\u0000\u018f5\u0001"+ + "\u0000\u0000\u0000\u0190\u0195\u00038\u001c\u0000\u0191\u0192\u0005\'"+ + "\u0000\u0000\u0192\u0194\u00038\u001c\u0000\u0193\u0191\u0001\u0000\u0000"+ + "\u0000\u0194\u0197\u0001\u0000\u0000\u0000\u0195\u0193\u0001\u0000\u0000"+ + "\u0000\u0195\u0196\u0001\u0000\u0000\u0000\u01967\u0001\u0000\u0000\u0000"+ + "\u0197\u0195\u0001\u0000\u0000\u0000\u0198\u019b\u0003$\u0012\u0000\u0199"+ + "\u019a\u0005\u0010\u0000\u0000\u019a\u019c\u0003\n\u0005\u0000\u019b\u0199"+ + "\u0001\u0000\u0000\u0000\u019b\u019c\u0001\u0000\u0000\u0000\u019c9\u0001"+ + "\u0000\u0000\u0000\u019d\u01a2\u0003H$\u0000\u019e\u019f\u0005)\u0000"+ + "\u0000\u019f\u01a1\u0003H$\u0000\u01a0\u019e\u0001\u0000\u0000\u0000\u01a1"+ + "\u01a4\u0001\u0000\u0000\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a2"+ + "\u01a3\u0001\u0000\u0000\u0000\u01a3;\u0001\u0000\u0000\u0000\u01a4\u01a2"+ + "\u0001\u0000\u0000\u0000\u01a5\u01aa\u0003B!\u0000\u01a6\u01a7\u0005)"+ + "\u0000\u0000\u01a7\u01a9\u0003B!\u0000\u01a8\u01a6\u0001\u0000\u0000\u0000"+ + "\u01a9\u01ac\u0001\u0000\u0000\u0000\u01aa\u01a8\u0001\u0000\u0000\u0000"+ + "\u01aa\u01ab\u0001\u0000\u0000\u0000\u01ab=\u0001\u0000\u0000\u0000\u01ac"+ + "\u01aa\u0001\u0000\u0000\u0000\u01ad\u01b2\u0003<\u001e\u0000\u01ae\u01af"+ + "\u0005\'\u0000\u0000\u01af\u01b1\u0003<\u001e\u0000\u01b0\u01ae\u0001"+ + "\u0000\u0000\u0000\u01b1\u01b4\u0001\u0000\u0000\u0000\u01b2\u01b0\u0001"+ + "\u0000\u0000\u0000\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3?\u0001\u0000"+ + "\u0000\u0000\u01b4\u01b2\u0001\u0000\u0000\u0000\u01b5\u01b6\u0007\u0003"+ + "\u0000\u0000\u01b6A\u0001\u0000\u0000\u0000\u01b7\u01bb\u0005W\u0000\u0000"+ + "\u01b8\u01b9\u0004!\n\u0000\u01b9\u01bb\u0003F#\u0000\u01ba\u01b7\u0001"+ + "\u0000\u0000\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bbC\u0001\u0000"+ + "\u0000\u0000\u01bc\u01e7\u00052\u0000\u0000\u01bd\u01be\u0003h4\u0000"+ + "\u01be\u01bf\u0005J\u0000\u0000\u01bf\u01e7\u0001\u0000\u0000\u0000\u01c0"+ + "\u01e7\u0003f3\u0000\u01c1\u01e7\u0003h4\u0000\u01c2\u01e7\u0003b1\u0000"+ + "\u01c3\u01e7\u0003F#\u0000\u01c4\u01e7\u0003j5\u0000\u01c5\u01c6\u0005"+ + "H\u0000\u0000\u01c6\u01cb\u0003d2\u0000\u01c7\u01c8\u0005\'\u0000\u0000"+ + "\u01c8\u01ca\u0003d2\u0000\u01c9\u01c7\u0001\u0000\u0000\u0000\u01ca\u01cd"+ + "\u0001\u0000\u0000\u0000\u01cb\u01c9\u0001\u0000\u0000\u0000\u01cb\u01cc"+ + "\u0001\u0000\u0000\u0000\u01cc\u01ce\u0001\u0000\u0000\u0000\u01cd\u01cb"+ + "\u0001\u0000\u0000\u0000\u01ce\u01cf\u0005I\u0000\u0000\u01cf\u01e7\u0001"+ + 
"\u0000\u0000\u0000\u01d0\u01d1\u0005H\u0000\u0000\u01d1\u01d6\u0003b1"+ + "\u0000\u01d2\u01d3\u0005\'\u0000\u0000\u01d3\u01d5\u0003b1\u0000\u01d4"+ + "\u01d2\u0001\u0000\u0000\u0000\u01d5\u01d8\u0001\u0000\u0000\u0000\u01d6"+ + "\u01d4\u0001\u0000\u0000\u0000\u01d6\u01d7\u0001\u0000\u0000\u0000\u01d7"+ + "\u01d9\u0001\u0000\u0000\u0000\u01d8\u01d6\u0001\u0000\u0000\u0000\u01d9"+ + "\u01da\u0005I\u0000\u0000\u01da\u01e7\u0001\u0000\u0000\u0000\u01db\u01dc"+ + "\u0005H\u0000\u0000\u01dc\u01e1\u0003j5\u0000\u01dd\u01de\u0005\'\u0000"+ + "\u0000\u01de\u01e0\u0003j5\u0000\u01df\u01dd\u0001\u0000\u0000\u0000\u01e0"+ + "\u01e3\u0001\u0000\u0000\u0000\u01e1\u01df\u0001\u0000\u0000\u0000\u01e1"+ + "\u01e2\u0001\u0000\u0000\u0000\u01e2\u01e4\u0001\u0000\u0000\u0000\u01e3"+ + "\u01e1\u0001\u0000\u0000\u0000\u01e4\u01e5\u0005I\u0000\u0000\u01e5\u01e7"+ + "\u0001\u0000\u0000\u0000\u01e6\u01bc\u0001\u0000\u0000\u0000\u01e6\u01bd"+ + "\u0001\u0000\u0000\u0000\u01e6\u01c0\u0001\u0000\u0000\u0000\u01e6\u01c1"+ + "\u0001\u0000\u0000\u0000\u01e6\u01c2\u0001\u0000\u0000\u0000\u01e6\u01c3"+ + "\u0001\u0000\u0000\u0000\u01e6\u01c4\u0001\u0000\u0000\u0000\u01e6\u01c5"+ + "\u0001\u0000\u0000\u0000\u01e6\u01d0\u0001\u0000\u0000\u0000\u01e6\u01db"+ + "\u0001\u0000\u0000\u0000\u01e7E\u0001\u0000\u0000\u0000\u01e8\u01eb\u0005"+ + "5\u0000\u0000\u01e9\u01eb\u0005G\u0000\u0000\u01ea\u01e8\u0001\u0000\u0000"+ + "\u0000\u01ea\u01e9\u0001\u0000\u0000\u0000\u01ebG\u0001\u0000\u0000\u0000"+ + "\u01ec\u01f0\u0003@ \u0000\u01ed\u01ee\u0004$\u000b\u0000\u01ee\u01f0"+ + "\u0003F#\u0000\u01ef\u01ec\u0001\u0000\u0000\u0000\u01ef\u01ed\u0001\u0000"+ + "\u0000\u0000\u01f0I\u0001\u0000\u0000\u0000\u01f1\u01f2\u0005\t\u0000"+ + "\u0000\u01f2\u01f3\u0005\u001f\u0000\u0000\u01f3K\u0001\u0000\u0000\u0000"+ + "\u01f4\u01f5\u0005\u000e\u0000\u0000\u01f5\u01fa\u0003N\'\u0000\u01f6"+ + "\u01f7\u0005\'\u0000\u0000\u01f7\u01f9\u0003N\'\u0000\u01f8\u01f6\u0001"+ + "\u0000\u0000\u0000\u01f9\u01fc\u0001\u0000\u0000\u0000\u01fa\u01f8\u0001"+ + "\u0000\u0000\u0000\u01fa\u01fb\u0001\u0000\u0000\u0000\u01fbM\u0001\u0000"+ + "\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fd\u01ff\u0003\n\u0005"+ + "\u0000\u01fe\u0200\u0007\u0004\u0000\u0000\u01ff\u01fe\u0001\u0000\u0000"+ + "\u0000\u01ff\u0200\u0001\u0000\u0000\u0000\u0200\u0203\u0001\u0000\u0000"+ + "\u0000\u0201\u0202\u00053\u0000\u0000\u0202\u0204\u0007\u0005\u0000\u0000"+ + "\u0203\u0201\u0001\u0000\u0000\u0000\u0203\u0204\u0001\u0000\u0000\u0000"+ + "\u0204O\u0001\u0000\u0000\u0000\u0205\u0206\u0005\b\u0000\u0000\u0206"+ + "\u0207\u0003>\u001f\u0000\u0207Q\u0001\u0000\u0000\u0000\u0208\u0209\u0005"+ + "\u0002\u0000\u0000\u0209\u020a\u0003>\u001f\u0000\u020aS\u0001\u0000\u0000"+ + "\u0000\u020b\u020c\u0005\u000b\u0000\u0000\u020c\u0211\u0003V+\u0000\u020d"+ + "\u020e\u0005\'\u0000\u0000\u020e\u0210\u0003V+\u0000\u020f\u020d\u0001"+ + "\u0000\u0000\u0000\u0210\u0213\u0001\u0000\u0000\u0000\u0211\u020f\u0001"+ + "\u0000\u0000\u0000\u0211\u0212\u0001\u0000\u0000\u0000\u0212U\u0001\u0000"+ + "\u0000\u0000\u0213\u0211\u0001\u0000\u0000\u0000\u0214\u0215\u0003<\u001e"+ + "\u0000\u0215\u0216\u0005[\u0000\u0000\u0216\u0217\u0003<\u001e\u0000\u0217"+ + "W\u0001\u0000\u0000\u0000\u0218\u0219\u0005\u0001\u0000\u0000\u0219\u021a"+ + "\u0003\u0014\n\u0000\u021a\u021c\u0003j5\u0000\u021b\u021d\u0003^/\u0000"+ + "\u021c\u021b\u0001\u0000\u0000\u0000\u021c\u021d\u0001\u0000\u0000\u0000"+ + "\u021dY\u0001\u0000\u0000\u0000\u021e\u021f\u0005\u0007\u0000\u0000\u021f"+ + 
"\u0220\u0003\u0014\n\u0000\u0220\u0221\u0003j5\u0000\u0221[\u0001\u0000"+ + "\u0000\u0000\u0222\u0223\u0005\n\u0000\u0000\u0223\u0224\u0003:\u001d"+ + "\u0000\u0224]\u0001\u0000\u0000\u0000\u0225\u022a\u0003`0\u0000\u0226"+ + "\u0227\u0005\'\u0000\u0000\u0227\u0229\u0003`0\u0000\u0228\u0226\u0001"+ + "\u0000\u0000\u0000\u0229\u022c\u0001\u0000\u0000\u0000\u022a\u0228\u0001"+ + "\u0000\u0000\u0000\u022a\u022b\u0001\u0000\u0000\u0000\u022b_\u0001\u0000"+ + "\u0000\u0000\u022c\u022a\u0001\u0000\u0000\u0000\u022d\u022e\u0003@ \u0000"+ + "\u022e\u022f\u0005$\u0000\u0000\u022f\u0230\u0003D\"\u0000\u0230a\u0001"+ + "\u0000\u0000\u0000\u0231\u0232\u0007\u0006\u0000\u0000\u0232c\u0001\u0000"+ + "\u0000\u0000\u0233\u0236\u0003f3\u0000\u0234\u0236\u0003h4\u0000\u0235"+ + "\u0233\u0001\u0000\u0000\u0000\u0235\u0234\u0001\u0000\u0000\u0000\u0236"+ + "e\u0001\u0000\u0000\u0000\u0237\u0239\u0007\u0000\u0000\u0000\u0238\u0237"+ + "\u0001\u0000\u0000\u0000\u0238\u0239\u0001\u0000\u0000\u0000\u0239\u023a"+ + "\u0001\u0000\u0000\u0000\u023a\u023b\u0005 \u0000\u0000\u023bg\u0001\u0000"+ + "\u0000\u0000\u023c\u023e\u0007\u0000\u0000\u0000\u023d\u023c\u0001\u0000"+ + "\u0000\u0000\u023d\u023e\u0001\u0000\u0000\u0000\u023e\u023f\u0001\u0000"+ + "\u0000\u0000\u023f\u0240\u0005\u001f\u0000\u0000\u0240i\u0001\u0000\u0000"+ + "\u0000\u0241\u0242\u0005\u001e\u0000\u0000\u0242k\u0001\u0000\u0000\u0000"+ + "\u0243\u0244\u0007\u0007\u0000\u0000\u0244m\u0001\u0000\u0000\u0000\u0245"+ + "\u0246\u0005\u0005\u0000\u0000\u0246\u0247\u0003p8\u0000\u0247o\u0001"+ + "\u0000\u0000\u0000\u0248\u0249\u0005H\u0000\u0000\u0249\u024a\u0003\u0002"+ + "\u0001\u0000\u024a\u024b\u0005I\u0000\u0000\u024bq\u0001\u0000\u0000\u0000"+ + "\u024c\u024d\u0005\r\u0000\u0000\u024d\u024e\u0005k\u0000\u0000\u024e"+ + "s\u0001\u0000\u0000\u0000\u024f\u0250\u0005\u0003\u0000\u0000\u0250\u0253"+ + "\u0005a\u0000\u0000\u0251\u0252\u0005_\u0000\u0000\u0252\u0254\u0003<"+ + "\u001e\u0000\u0253\u0251\u0001\u0000\u0000\u0000\u0253\u0254\u0001\u0000"+ + "\u0000\u0000\u0254\u025e\u0001\u0000\u0000\u0000\u0255\u0256\u0005`\u0000"+ + "\u0000\u0256\u025b\u0003v;\u0000\u0257\u0258\u0005\'\u0000\u0000\u0258"+ + "\u025a\u0003v;\u0000\u0259\u0257\u0001\u0000\u0000\u0000\u025a\u025d\u0001"+ + "\u0000\u0000\u0000\u025b\u0259\u0001\u0000\u0000\u0000\u025b\u025c\u0001"+ + "\u0000\u0000\u0000\u025c\u025f\u0001\u0000\u0000\u0000\u025d\u025b\u0001"+ + "\u0000\u0000\u0000\u025e\u0255\u0001\u0000\u0000\u0000\u025e\u025f\u0001"+ + "\u0000\u0000\u0000\u025fu\u0001\u0000\u0000\u0000\u0260\u0261\u0003<\u001e"+ + "\u0000\u0261\u0262\u0005$\u0000\u0000\u0262\u0264\u0001\u0000\u0000\u0000"+ + "\u0263\u0260\u0001\u0000\u0000\u0000\u0263\u0264\u0001\u0000\u0000\u0000"+ + "\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0266\u0003<\u001e\u0000\u0266"+ + "w\u0001\u0000\u0000\u0000\u0267\u0268\u0005\u0012\u0000\u0000\u0268\u0269"+ + "\u0003(\u0014\u0000\u0269\u026a\u0005_\u0000\u0000\u026a\u026b\u0003>"+ + "\u001f\u0000\u026by\u0001\u0000\u0000\u0000\u026c\u026d\u0005\u0011\u0000"+ + "\u0000\u026d\u0270\u00036\u001b\u0000\u026e\u026f\u0005!\u0000\u0000\u026f"+ + "\u0271\u0003\"\u0011\u0000\u0270\u026e\u0001\u0000\u0000\u0000\u0270\u0271"+ + "\u0001\u0000\u0000\u0000\u0271{\u0001\u0000\u0000\u0000\u0272\u0274\u0007"+ + "\b\u0000\u0000\u0273\u0272\u0001\u0000\u0000\u0000\u0273\u0274\u0001\u0000"+ + "\u0000\u0000\u0274\u0275\u0001\u0000\u0000\u0000\u0275\u0276\u0005\u0014"+ + "\u0000\u0000\u0276\u0277\u0003~?\u0000\u0277\u0278\u0003\u0080@\u0000"+ + 
"\u0278}\u0001\u0000\u0000\u0000\u0279\u027c\u0003(\u0014\u0000\u027a\u027b"+ + "\u0005[\u0000\u0000\u027b\u027d\u0003@ \u0000\u027c\u027a\u0001\u0000"+ + "\u0000\u0000\u027c\u027d\u0001\u0000\u0000\u0000\u027d\u007f\u0001\u0000"+ + "\u0000\u0000\u027e\u027f\u0005_\u0000\u0000\u027f\u0284\u0003\u0082A\u0000"+ + "\u0280\u0281\u0005\'\u0000\u0000\u0281\u0283\u0003\u0082A\u0000\u0282"+ + "\u0280\u0001\u0000\u0000\u0000\u0283\u0286\u0001\u0000\u0000\u0000\u0284"+ + "\u0282\u0001\u0000\u0000\u0000\u0284\u0285\u0001\u0000\u0000\u0000\u0285"+ + "\u0081\u0001\u0000\u0000\u0000\u0286\u0284\u0001\u0000\u0000\u0000\u0287"+ + "\u0288\u0003\u0010\b\u0000\u0288\u0083\u0001\u0000\u0000\u0000?\u008f"+ "\u0098\u00ac\u00b8\u00c1\u00c9\u00ce\u00d6\u00d8\u00dd\u00e4\u00e9\u00ee"+ - "\u00f8\u00fe\u0106\u0108\u0113\u011a\u0125\u012a\u012c\u0139\u014c\u0152"+ - "\u015c\u0160\u0165\u0173\u017c\u0180\u0184\u018b\u018f\u0196\u019c\u01a3"+ - "\u01ab\u01b3\u01bb\u01cc\u01d7\u01e2\u01e7\u01eb\u01f0\u01fb\u0200\u0204"+ - "\u0212\u021d\u022b\u0236\u0239\u023e\u0254\u025c\u025f\u0264\u0271\u0274"+ - "\u027d\u0285"; + "\u00f8\u00fe\u0106\u0108\u0113\u011a\u0125\u012a\u012c\u0138\u014b\u0151"+ + "\u015b\u015f\u0164\u0172\u017b\u017f\u0183\u018a\u018e\u0195\u019b\u01a2"+ + "\u01aa\u01b2\u01ba\u01cb\u01d6\u01e1\u01e6\u01ea\u01ef\u01fa\u01ff\u0203"+ + "\u0211\u021c\u022a\u0235\u0238\u023d\u0253\u025b\u025e\u0263\u0270\u0273"+ + "\u027c\u0284"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MatchQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MatchQuery.java index 1614b4f455456..3c40db1670c15 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MatchQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/MatchQuery.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.esql.querydsl.query; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.core.Booleans; +import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilder; @@ -15,33 +15,39 @@ import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.tree.Source; -import java.util.Collections; import java.util.Map; import java.util.Objects; import java.util.function.BiConsumer; import static java.util.Map.entry; +import static org.elasticsearch.index.query.MatchQueryBuilder.ANALYZER_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.FUZZY_REWRITE_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.FUZZY_TRANSPOSITIONS_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.GENERATE_SYNONYMS_PHRASE_QUERY; +import static org.elasticsearch.index.query.MatchQueryBuilder.MAX_EXPANSIONS_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.MINIMUM_SHOULD_MATCH_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.OPERATOR_FIELD; +import static org.elasticsearch.index.query.MatchQueryBuilder.PREFIX_LENGTH_FIELD; public class MatchQuery extends Query { - private static final Map> BUILDER_APPLIERS; + private static final Map> BUILDER_APPLIERS; static { - // TODO: it'd be great if these could be constants instead of Strings, needs a core 
change to make the fields public first // TODO: add zero terms query support, I'm not sure the best way to parse it yet... // appliers.put("zero_terms_query", (qb, s) -> qb.zeroTermsQuery(s)); BUILDER_APPLIERS = Map.ofEntries( - entry("analyzer", MatchQueryBuilder::analyzer), - entry("auto_generate_synonyms_phrase_query", (qb, s) -> qb.autoGenerateSynonymsPhraseQuery(Booleans.parseBoolean(s))), - entry("fuzziness", (qb, s) -> qb.fuzziness(Fuzziness.fromString(s))), - entry("boost", (qb, s) -> qb.boost(Float.parseFloat(s))), - entry("fuzzy_transpositions", (qb, s) -> qb.fuzzyTranspositions(Booleans.parseBoolean(s))), - entry("fuzzy_rewrite", MatchQueryBuilder::fuzzyRewrite), - entry("lenient", (qb, s) -> qb.lenient(Booleans.parseBoolean(s))), - entry("max_expansions", (qb, s) -> qb.maxExpansions(Integer.valueOf(s))), - entry("minimum_should_match", MatchQueryBuilder::minimumShouldMatch), - entry("operator", (qb, s) -> qb.operator(Operator.fromString(s))), - entry("prefix_length", (qb, s) -> qb.prefixLength(Integer.valueOf(s))) + entry(ANALYZER_FIELD.getPreferredName(), (qb, s) -> qb.analyzer(s.toString())), + entry(GENERATE_SYNONYMS_PHRASE_QUERY.getPreferredName(), (qb, b) -> qb.autoGenerateSynonymsPhraseQuery((Boolean) b)), + entry(Fuzziness.FIELD.getPreferredName(), (qb, s) -> qb.fuzziness(Fuzziness.fromString(s.toString()))), + entry(AbstractQueryBuilder.BOOST_FIELD.getPreferredName(), (qb, s) -> qb.boost((Float) s)), + entry(FUZZY_TRANSPOSITIONS_FIELD.getPreferredName(), (qb, s) -> qb.fuzzyTranspositions((Boolean) s)), + entry(FUZZY_REWRITE_FIELD.getPreferredName(), (qb, s) -> qb.fuzzyRewrite(s.toString())), + entry(MatchQueryBuilder.LENIENT_FIELD.getPreferredName(), (qb, s) -> qb.lenient((Boolean) s)), + entry(MAX_EXPANSIONS_FIELD.getPreferredName(), (qb, s) -> qb.maxExpansions((Integer) s)), + entry(MINIMUM_SHOULD_MATCH_FIELD.getPreferredName(), (qb, s) -> qb.minimumShouldMatch(s.toString())), + entry(OPERATOR_FIELD.getPreferredName(), (qb, s) -> qb.operator(Operator.fromString(s.toString()))), + entry(PREFIX_LENGTH_FIELD.getPreferredName(), (qb, s) -> qb.prefixLength((Integer) s)) ); } @@ -49,13 +55,13 @@ public class MatchQuery extends Query { private final Object text; private final Double boost; private final Fuzziness fuzziness; - private final Map options; + private final Map options; public MatchQuery(Source source, String name, Object text) { this(source, name, text, Map.of()); } - public MatchQuery(Source source, String name, Object text, Map options) { + public MatchQuery(Source source, String name, Object text, Map options) { super(source); assert options != null; this.name = name; @@ -65,15 +71,6 @@ public MatchQuery(Source source, String name, Object text, Map o this.fuzziness = null; } - public MatchQuery(Source source, String name, Object text, Double boost, Fuzziness fuzziness) { - super(source); - this.name = name; - this.text = text; - this.options = Collections.emptyMap(); - this.boost = boost; - this.fuzziness = fuzziness; - } - @Override public QueryBuilder asBuilder() { final MatchQueryBuilder queryBuilder = QueryBuilders.matchQuery(name, text); @@ -125,15 +122,7 @@ protected String innerToString() { return name + ":" + text; } - public Double boost() { - return boost; - } - - public Fuzziness fuzziness() { - return fuzziness; - } - - public Map options() { + public Map options() { return options; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java 
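For readers skimming the MatchQuery.java hunk above: the reworked BUILDER_APPLIERS maps each supported option name (taken from the query builder's ParseField constants) to a BiConsumer that applies an already-typed value to the builder, so constructing the query reduces to looking up and invoking one applier per option. Below is a minimal, self-contained sketch of that dispatch pattern only; FakeBuilder and its setters are invented stand-ins for illustration and are not the real MatchQueryBuilder API.

import java.util.Map;
import java.util.function.BiConsumer;

class MatchOptionsSketch {
    // Stand-in for MatchQueryBuilder; illustration only, not the real Elasticsearch class.
    static class FakeBuilder {
        String analyzer;
        float boost = 1.0f;
        void analyzer(String a) { this.analyzer = a; }
        void boost(float b) { this.boost = b; }
    }

    // Option name -> applier, mirroring the shape of BUILDER_APPLIERS above:
    // values arrive as already-typed Objects and each applier casts as needed.
    static final Map<String, BiConsumer<FakeBuilder, Object>> APPLIERS = Map.of(
        "analyzer", (qb, v) -> qb.analyzer(v.toString()),
        "boost", (qb, v) -> qb.boost((Float) v)
    );

    static FakeBuilder build(Map<String, Object> options) {
        FakeBuilder qb = new FakeBuilder();
        options.forEach((name, value) -> {
            BiConsumer<FakeBuilder, Object> applier = APPLIERS.get(name);
            if (applier == null) {
                throw new IllegalArgumentException("illegal match option [" + name + "]");
            }
            applier.accept(qb, value);
        });
        return qb;
    }

    public static void main(String[] args) {
        FakeBuilder qb = build(Map.of("analyzer", "standard", "boost", 2.1f));
        System.out.println(qb.analyzer + " " + qb.boost); // standard 2.1
    }
}

Keying the map on the ParseField preferred names (rather than hand-written string literals, as before) keeps the ESQL-side option names aligned with the query DSL field names, which appears to be the motivation for the import changes at the top of the file.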
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index b01a82819e2ea..151a91b587c1b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; -import org.elasticsearch.xpack.esql.expression.function.scalar.map.LogWithBaseInMap; +import org.elasticsearch.xpack.esql.expression.function.fulltext.MatchOperator; import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.parser.ParsingException; @@ -2577,7 +2577,7 @@ public void testFromEnrichAndMatchColonUsage() { """, "mapping-default.json"); var limit = as(plan, Limit.class); var filter = as(limit.child(), Filter.class); - var match = as(filter.condition(), Match.class); + var match = as(filter.condition(), MatchOperator.class); var enrich = as(filter.child(), Enrich.class); assertEquals(enrich.mode(), Enrich.Mode.ANY); assertEquals(enrich.policy().getMatchField(), "language_code"); @@ -2586,50 +2586,20 @@ public void testFromEnrichAndMatchColonUsage() { assertEquals(esRelation.indexPattern(), "test"); } - public void testMapExpressionAsFunctionArgument() { - assumeTrue("MapExpression require snapshot build", EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled()); + public void testFunctionNamedParamsAsFunctionArgument() { LogicalPlan plan = analyze(""" from test - | EVAL l = log_with_base_in_map(languages, {"base":2.0}) - | KEEP l - """, "mapping-default.json"); + | WHERE MATCH(first_name, "Anna Smith", {"minimum_should_match": 2.0}) + """); Limit limit = as(plan, Limit.class); - EsqlProject proj = as(limit.child(), EsqlProject.class); - List fields = proj.projections(); - assertEquals(1, fields.size()); - ReferenceAttribute ra = as(fields.get(0), ReferenceAttribute.class); - assertEquals("l", ra.name()); - assertEquals(DataType.DOUBLE, ra.dataType()); - Eval eval = as(proj.child(), Eval.class); - assertEquals(1, eval.fields().size()); - Alias a = as(eval.fields().get(0), Alias.class); - LogWithBaseInMap l = as(a.child(), LogWithBaseInMap.class); - MapExpression me = as(l.base(), MapExpression.class); + Filter filter = as(limit.child(), Filter.class); + Match match = as(filter.condition(), Match.class); + MapExpression me = as(match.options(), MapExpression.class); assertEquals(1, me.entryExpressions().size()); EntryExpression ee = as(me.entryExpressions().get(0), EntryExpression.class); - assertEquals(new Literal(EMPTY, "base", DataType.KEYWORD), ee.key()); + assertEquals(new Literal(EMPTY, "minimum_should_match", DataType.KEYWORD), ee.key()); assertEquals(new Literal(EMPTY, 2.0, DataType.DOUBLE), ee.value()); assertEquals(DataType.DOUBLE, ee.dataType()); - EsRelation esRelation = as(eval.child(), EsRelation.class); - assertEquals(esRelation.indexPattern(), "test"); - } - - private void verifyMapExpression(MapExpression me) { - Literal option1 = new Literal(EMPTY, "option1", DataType.KEYWORD); - Literal value1 = new Literal(EMPTY, "value1", DataType.KEYWORD); - Literal option2 = new Literal(EMPTY, "option2", DataType.KEYWORD); - Literal value2 = new Literal(EMPTY, List.of(1, 2, 3), DataType.INTEGER); - - assertEquals(2, 
me.entryExpressions().size()); - EntryExpression ee = as(me.entryExpressions().get(0), EntryExpression.class); - assertEquals(option1, ee.key()); - assertEquals(value1, ee.value()); - assertEquals(value1.dataType(), ee.dataType()); - - ee = as(me.entryExpressions().get(1), EntryExpression.class); - assertEquals(option2, ee.key()); - assertEquals(value2, ee.value()); - assertEquals(value2.dataType(), ee.dataType()); } private void verifyUnsupported(String query, String errorMessage) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index c9950bfd34f2c..86166b0267258 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -12,10 +12,13 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.type.DataTypeConverter; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.type.InvalidMappedField; import org.elasticsearch.xpack.esql.core.type.UnsupportedEsField; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.parser.EsqlParser; @@ -36,6 +39,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.COUNTER_DOUBLE; import static org.elasticsearch.xpack.esql.core.type.DataType.COUNTER_INTEGER; import static org.elasticsearch.xpack.esql.core.type.DataType.COUNTER_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -1993,24 +1997,78 @@ public void testLookupJoinDataTypeMismatch() { ); } - public void testInvalidMapOption() { - assumeTrue("MapExpression require snapshot build", EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled()); - // invalid key - assertEquals( - "1:22: Invalid option key in [log_with_base_in_map(languages, {\"base\":2.0, \"invalidOption\":true})], " - + "expected base but got [\"invalidOption\"]", - error("FROM test | EVAL l = log_with_base_in_map(languages, {\"base\":2.0, \"invalidOption\":true})") - ); - // key is case-sensitive - assertEquals( - "1:22: Invalid option key in [log_with_base_in_map(languages, {\"Base\":2.0})], " + "expected base but got [\"Base\"]", - error("FROM test | EVAL l = log_with_base_in_map(languages, {\"Base\":2.0})") - ); - // invalid value - assertEquals( - "1:22: Invalid option value in [log_with_base_in_map(languages, {\"base\":\"invalid\"})], " - + "expected a numeric number but got [invalid]", - error("FROM test | EVAL l = log_with_base_in_map(languages, {\"base\":\"invalid\"})") + public void testMatchOptions() { + // Check positive cases + query("FROM test | WHERE match(first_name, \"Jean\", {\"analyzer\": \"standard\"})"); + query("FROM test | WHERE match(first_name, \"Jean\", {\"boost\": 2.1})"); + query("FROM test | WHERE match(first_name, \"Jean\", 
{\"fuzziness\": 2})"); + query("FROM test | WHERE match(first_name, \"Jean\", {\"fuzziness\": \"AUTO\"})"); + query("FROM test | WHERE match(first_name, \"Jean\", {\"fuzzy_transpositions\": false})"); + query("FROM test | WHERE match(first_name, \"Jean\", {\"lenient\": false})"); + query("FROM test | WHERE match(first_name, \"Jean\", {\"max_expansions\": 10})"); + query("FROM test | WHERE match(first_name, \"Jean\", {\"minimum_should_match\": \"2\"})"); + query("FROM test | WHERE match(first_name, \"Jean\", {\"operator\": \"AND\"})"); + query("FROM test | WHERE match(first_name, \"Jean\", {\"prefix_length\": 2})"); + query("FROM test | WHERE match(first_name, \"Jean\", {\"auto_generate_synonyms_phrase_query\": true})"); + + // Check all data types for available options + DataType[] optionTypes = new DataType[] { + DataType.INTEGER, + DataType.LONG, + DataType.FLOAT, + DataType.DOUBLE, + DataType.KEYWORD, + DataType.BOOLEAN }; + for (Map.Entry allowedOptions : Match.ALLOWED_OPTIONS.entrySet()) { + String optionName = allowedOptions.getKey(); + DataType optionType = allowedOptions.getValue(); + // Check every possible type for the option - we'll try to convert it to the expected type + for (DataType currentType : optionTypes) { + String optionValue = switch (currentType) { + case BOOLEAN -> String.valueOf(randomBoolean()); + case INTEGER -> String.valueOf(randomIntBetween(0, 100000)); + case LONG -> String.valueOf(randomLong()); + case FLOAT -> String.valueOf(randomFloat()); + case DOUBLE -> String.valueOf(randomDouble()); + case KEYWORD -> randomAlphaOfLength(10); + default -> throw new IllegalArgumentException("Unsupported option type: " + currentType); + }; + String queryOptionValue = optionValue; + if (currentType == KEYWORD) { + queryOptionValue = "\"" + optionValue + "\""; + } + + String query = "FROM test | WHERE match(first_name, \"Jean\", {\"" + optionName + "\": " + queryOptionValue + "})"; + try { + // Check conversion is possible + DataTypeConverter.convert(optionValue, optionType); + // If no exception was thrown, conversion is possible and should be done + query(query); + } catch (InvalidArgumentException e) { + // Conversion is not possible, query should fail + assertEquals( + "1:19: Invalid option [" + + optionName + + "] in [match(first_name, \"Jean\", {\"" + + optionName + + "\": " + + queryOptionValue + + "})], cannot cast [" + + optionValue + + "] to [" + + optionType.typeName() + + "]", + error(query) + ); + } + } + } + + assertThat( + error("FROM test | WHERE match(first_name, \"Jean\", {\"unknown_option\": true})"), + containsString( + "1:19: Invalid option [unknown_option] in [match(first_name, \"Jean\", {\"unknown_option\": true})]," + " expected one of " + ) ); } @@ -2047,7 +2105,11 @@ private String error(String query, Analyzer analyzer, Class throw new IllegalArgumentException("VerifierTests don't support params of type " + param.getClass()); } } - Throwable e = expectThrows(exception, () -> analyzer.analyze(parser.createStatement(query, new QueryParams(parameters)))); + Throwable e = expectThrows( + exception, + "Expected error for query [" + query + "] but no error was raised", + () -> analyzer.analyze(parser.createStatement(query, new QueryParams(parameters))) + ); assertThat(e, instanceOf(exception)); String message = e.getMessage(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index e08411240b841..f089e81621990 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -90,6 +90,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.TreeSet; import java.util.stream.Collectors; @@ -930,9 +931,17 @@ protected static void renderDocs(String name) throws IOException { renderParametersList(name, description.argNames(), description.argDescriptions()); FunctionInfo info = EsqlFunctionRegistry.functionInfo(definition); renderDescription(name, description.description(), info.detailedDescription(), info.note()); + Optional mapArgSignature = description.args() + .stream() + .filter(EsqlFunctionRegistry.ArgSignature::mapArg) + .findFirst(); + boolean hasFunctionOptions = mapArgSignature.isPresent(); + if (hasFunctionOptions) { + renderFunctionNamedParams(name, (EsqlFunctionRegistry.MapArgSignature) mapArgSignature.get()); + } boolean hasExamples = renderExamples(name, info); boolean hasAppendix = renderAppendix(name, info.appendix()); - renderFullLayout(name, info.preview(), hasExamples, hasAppendix); + renderFullLayout(name, info.preview(), hasExamples, hasAppendix, hasFunctionOptions); renderKibanaInlineDocs(name, info); renderKibanaFunctionDefinition(name, info, description.args(), description.variadic()); return; @@ -967,11 +976,13 @@ private static void renderTypes(String name, List table = new ArrayList<>(); + for (Map.Entry argSignatureEntry : mapArgSignature.mapParams().entrySet()) { + StringBuilder builder = new StringBuilder(); + EsqlFunctionRegistry.MapEntryArgSignature arg = argSignatureEntry.getValue(); + builder.append(arg.name()).append(" | ").append(arg.type()).append(" | ").append(arg.description()); + table.add(builder.toString()); + } + + String rendered = DOCS_WARNING + """ + *Supported function named parameters* + + [%header.monospaced.styled,format=dsv,separator=|] + |=== + """ + header + "\n" + table.stream().collect(Collectors.joining("\n")) + "\n|===\n"; + LogManager.getLogger(getTestClass()).info("Writing function named parameters for [{}]:\n{}", functionName(), rendered); + writeToTempDir("functionNamedParams", name, "asciidoc", rendered); + } + + private static void renderFullLayout(String name, boolean preview, boolean hasExamples, boolean hasAppendix, boolean hasFunctionOptions) + throws IOException { String rendered = DOCS_WARNING + """ [discrete] [[esql-$NAME$]] @@ -1089,6 +1122,9 @@ private static void renderFullLayout(String name, boolean preview, boolean hasEx """.replace("$NAME$", name) .replace("$UPPER_NAME$", name.toUpperCase(Locale.ROOT)) .replace("$PREVIEW_CALLOUT$", preview ? 
PREVIEW_CALLOUT : ""); + if (hasFunctionOptions) { + rendered += "include::../functionNamedParams/" + name + ".asciidoc[]\n"; + } if (hasExamples) { rendered += "include::../examples/" + name + ".asciidoc[]\n"; } @@ -1217,7 +1253,7 @@ private static void renderKibanaFunctionDefinition( builder.startObject(); builder.field("name", arg.name()); if (arg.mapArg()) { - builder.field("type", "map"); + builder.field("type", "function named parameters"); builder.field( "mapParams", arg.mapParams() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunctionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunctionTests.java index 0a80da9c60625..a3ae10d18b3f6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunctionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunctionTests.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; @@ -23,8 +21,8 @@ import static org.hamcrest.Matchers.equalTo; public abstract class AbstractMatchFullTextFunctionTests extends AbstractFunctionTestCase { - @ParametersFactory - public static Iterable parameters() { + + protected static List testCaseSuppliers() { List suppliers = new ArrayList<>(); AbstractMatchFullTextFunctionTests.addUnsignedLongCases(suppliers); @@ -32,8 +30,7 @@ public static Iterable parameters() { AbstractMatchFullTextFunctionTests.addNonNumericCases(suppliers); AbstractMatchFullTextFunctionTests.addQueryAsStringTestCases(suppliers); AbstractMatchFullTextFunctionTests.addStringTestCases(suppliers); - - return parameterSuppliersFromTypedData(suppliers); + return suppliers; } private static void addNonNumericCases(List suppliers) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchErrorTests.java index a83cb24a44a45..c63e0bd6486b5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchErrorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchErrorTests.java @@ -7,7 +7,9 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -21,17 +23,27 @@ import java.util.Locale; import java.util.Set; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.esql.planner.TranslatorHandler.TRANSLATOR_HANDLER; import static org.hamcrest.Matchers.equalTo; public class MatchErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override protected List cases() { - return 
paramsToSuppliers(AbstractMatchFullTextFunctionTests.parameters()); + return paramsToSuppliers(MatchTests.parameters()); } @Override protected Expression build(Source source, List args) { - return new Match(source, args.get(0), args.get(1)); + Match match = new Match(source, args.get(0), args.get(1), args.size() > 2 ? args.get(2) : null); + // We need to add the QueryBuilder to the match expression, as it is used to implement equals() and hashCode() and + // thus test the serialization methods. But we can only do this if the parameters make sense . + if (args.get(0) instanceof FieldAttribute && args.get(1).foldable()) { + QueryBuilder queryBuilder = TRANSLATOR_HANDLER.asQuery(match).asBuilder(); + match.replaceQueryBuilder(queryBuilder); + } + return match; } @Override @@ -46,7 +58,8 @@ private static String errorMessageStringForMatch( List signature, AbstractFunctionTestCase.PositionalErrorMessageSupplier positionalErrorMessageSupplier ) { - for (int i = 0; i < signature.size(); i++) { + boolean invalid = false; + for (int i = 0; i < signature.size() && invalid == false; i++) { // Need to check for nulls and bad parameters in order if (signature.get(i) == DataType.NULL) { return TypeResolutions.ParamOrdinal.fromIndex(i).name().toLowerCase(Locale.ROOT) @@ -55,6 +68,10 @@ private static String errorMessageStringForMatch( + "] cannot be null, received []"; } if (validPerPosition.get(i).contains(signature.get(i)) == false) { + // Map expressions have different error messages + if (i == 2) { + return format(null, "third argument of [{}] must be a map expression, received []", sourceForSignature(signature)); + } break; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorErrorTests.java new file mode 100644 index 0000000000000..f9c986527f405 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorErrorTests.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Locale; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MatchOperatorErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + + @Override + protected List cases() { + return paramsToSuppliers(MatchOperatorTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MatchOperator(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + errorMessageStringForMatch(validPerPosition, signature, (l, p) -> p == 0 ? FIELD_TYPE_ERROR_STRING : QUERY_TYPE_ERROR_STRING) + ); + } + + private static String errorMessageStringForMatch( + List> validPerPosition, + List signature, + AbstractFunctionTestCase.PositionalErrorMessageSupplier positionalErrorMessageSupplier + ) { + boolean invalid = false; + for (int i = 0; i < signature.size() && invalid == false; i++) { + // Need to check for nulls and bad parameters in order + if (signature.get(i) == DataType.NULL) { + return TypeResolutions.ParamOrdinal.fromIndex(i).name().toLowerCase(Locale.ROOT) + + " argument of [" + + sourceForSignature(signature) + + "] cannot be null, received []"; + } + if (validPerPosition.get(i).contains(signature.get(i)) == false) { + break; + } + } + + try { + return typeErrorMessage(true, validPerPosition, signature, positionalErrorMessageSupplier); + } catch (IllegalStateException e) { + // This means all the positional args were okay, so the expected error is for nulls or from the combination + return EsqlBinaryComparison.formatIncompatibleTypesMessage(signature.get(0), signature.get(1), sourceForSignature(signature)); + } + } + + private static final String FIELD_TYPE_ERROR_STRING = + "keyword, text, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version"; + + private static final String QUERY_TYPE_ERROR_STRING = + "keyword, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version"; +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java index 78ea3f5451880..0d513fa2bfaf3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java @@ -22,19 +22,24 @@ * This class is only used to generates docs for the match operator - all testing is the same as {@link MatchTests} */ @FunctionName("match_operator") -public class MatchOperatorTests extends MatchTests { +public class MatchOperatorTests 
extends AbstractMatchFullTextFunctionTests { public MatchOperatorTests(@Name("TestCase") Supplier testCaseSupplier) { - super(testCaseSupplier); + this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - return AbstractMatchFullTextFunctionTests.parameters(); + return parameterSuppliersFromTypedData(testCaseSuppliers()); } @Override protected Expression build(Source source, List args) { return new MatchOperator(source, args.get(0), args.get(1)); } + + @Override + public void testSerializationOfSimple() { + // MatchOperator is not a separate function that needs to be serialized, it's serialized via Match + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java index 4280ab487f213..5504715075265 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java @@ -10,14 +10,26 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.MapExpression; import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSUPPORTED; +import static org.elasticsearch.xpack.esql.planner.TranslatorHandler.TRANSLATOR_HANDLER; +import static org.hamcrest.Matchers.equalTo; + @FunctionName("match") public class MatchTests extends AbstractMatchFullTextFunctionTests { @@ -27,11 +39,48 @@ public MatchTests(@Name("TestCase") Supplier testCase @ParametersFactory public static Iterable parameters() { - return AbstractMatchFullTextFunctionTests.parameters(); + return parameterSuppliersFromTypedData(addFunctionNamedParams(testCaseSuppliers())); + } + + /** + * Adds function named parameters to all the test case suppliers provided + */ + private static List addFunctionNamedParams(List suppliers) { + List result = new ArrayList<>(); + for (TestCaseSupplier supplier : suppliers) { + List dataTypes = new ArrayList<>(supplier.types()); + dataTypes.add(UNSUPPORTED); + result.add(new TestCaseSupplier(supplier.name() + ", options", dataTypes, () -> { + List values = new ArrayList<>(supplier.get().getData()); + values.add( + new TestCaseSupplier.TypedData( + new MapExpression( + Source.EMPTY, + List.of( + new Literal(Source.EMPTY, "fuzziness", KEYWORD), + new Literal(Source.EMPTY, randomAlphaOfLength(10), KEYWORD) + ) + ), + UNSUPPORTED, + "options" + ).forceLiteral() + ); + + return new TestCaseSupplier.TestCase(values, equalTo("MatchEvaluator"), BOOLEAN, equalTo(true)); + })); + } + return 
result; } @Override protected Expression build(Source source, List args) { - return new Match(source, args.get(0), args.get(1)); + Match match = new Match(source, args.get(0), args.get(1), args.size() > 2 ? args.get(2) : null); + // We need to add the QueryBuilder to the match expression, as it is used to implement equals() and hashCode() and + // thus test the serialization methods. But we can only do this if the parameters make sense . + if (args.get(0) instanceof FieldAttribute && args.get(1).foldable()) { + QueryBuilder queryBuilder = TRANSLATOR_HANDLER.asQuery(match).asBuilder(); + match.replaceQueryBuilder(queryBuilder); + } + return match; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermTests.java index d1df0ed09b28e..eca246e20452c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermTests.java @@ -122,6 +122,6 @@ private static String matchTypeErrorSupplier(boolean includeOrdinal, List args) { - return new Match(source, args.get(0), args.get(1)); + return new Match(source, args.get(0), args.get(1), args.size() > 2 ? args.get(2) : null); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogWithBaseInMapSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogWithBaseInMapSerializationTests.java deleted file mode 100644 index a2a97e11bfc0f..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogWithBaseInMapSerializationTests.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.expression.function.scalar.math; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.map.LogWithBaseInMap; - -import java.io.IOException; - -public class LogWithBaseInMapSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected LogWithBaseInMap createTestInstance() { - Source source = randomSource(); - Expression number = randomChild(); - Expression base = randomBoolean() ? null : randomChild(); - return new LogWithBaseInMap(source, number, base); - } - - @Override - protected LogWithBaseInMap mutateInstance(LogWithBaseInMap instance) throws IOException { - Source source = instance.source(); - Expression number = instance.number(); - Expression base = instance.base(); - if (randomBoolean()) { - number = randomValueOtherThan(number, AbstractExpressionSerializationTests::randomChild); - } else { - base = randomValueOtherThan(base, () -> randomBoolean() ? 
null : randomChild()); - } - return new LogWithBaseInMap(source, number, base); - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/fulltext/MatchQuerySerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/fulltext/MatchQuerySerializationTests.java deleted file mode 100644 index 7781c804a6dfc..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/fulltext/MatchQuerySerializationTests.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.expression.predicate.fulltext; - -import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; - -import java.io.IOException; - -public class MatchQuerySerializationTests extends AbstractFulltextSerializationTests { - - @Override - protected final MatchQueryPredicate createTestInstance() { - return new MatchQueryPredicate(randomSource(), randomChild(), randomAlphaOfLength(randomIntBetween(1, 16)), randomOptionOrNull()); - } - - @Override - protected MatchQueryPredicate mutateInstance(MatchQueryPredicate instance) throws IOException { - var field = instance.field(); - var query = instance.query(); - var options = instance.options(); - switch (between(0, 2)) { - case 0 -> field = randomValueOtherThan(field, AbstractExpressionSerializationTests::randomChild); - case 1 -> query = randomValueOtherThan(query, () -> randomAlphaOfLength(randomIntBetween(1, 16))); - case 2 -> options = randomValueOtherThan(options, this::randomOptionOrNull); - } - return new MatchQueryPredicate(instance.source(), field, query, options); - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 1749876a0d3e3..dd4a20ea2e974 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -12,6 +12,7 @@ import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.mapper.MapperService; @@ -19,6 +20,7 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryStringQueryBuilder; @@ -1577,6 +1579,36 @@ public void testFullTextFunctionsDisjunctionWithFiltersPushdown() { assertThat(actualLuceneQuery.toString(), is(expectedLuceneQuery.toString())); } + public void testMatchOptionsPushDown() { + String query = """ + from test + | where match(first_name, "Anna", {"fuzziness": "AUTO", "prefix_length": 3, "max_expansions": 10, + 
"fuzzy_transpositions": false, "auto_generate_synonyms_phrase_query": true, "analyzer": "my_analyzer", + "boost": 2.1, "minimum_should_match": 2, "operator": "AND"}) + """; + var plan = plannerOptimizer.plan(query); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var actualLuceneQuery = as(fieldExtract.child(), EsQueryExec.class).query(); + + Source filterSource = new Source(4, 8, "emp_no > 10000"); + var expectedLuceneQuery = new MatchQueryBuilder("first_name", "Anna").fuzziness(Fuzziness.AUTO) + .prefixLength(3) + .maxExpansions(10) + .fuzzyTranspositions(false) + .autoGenerateSynonymsPhraseQuery(true) + .analyzer("my_analyzer") + .boost(2.1f) + .minimumShouldMatch("2") + .operator(Operator.AND) + .prefixLength(3) + .lenient(true); + assertThat(actualLuceneQuery.toString(), is(expectedLuceneQuery.toString())); + } + /** * Expecting * LimitExec[1000[INTEGER]] diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 06a08c2b65936..7ceaaa740b802 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -59,13 +59,13 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; -import org.elasticsearch.xpack.esql.expression.function.scalar.map.LogWithBaseInMap; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAvg; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvCount; @@ -6902,33 +6902,21 @@ public void testWhereNull() { assertThat(local.supplier(), equalTo(LocalSupplier.EMPTY)); } - public void testMapExpressionAsFunctionArgument() { - assumeTrue("MapExpression require snapshot build", EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled()); + public void testFunctionNamedParamsAsFunctionArgument() { var query = """ from test - | EVAL l = log_with_base_in_map(languages, {"base":2.0}) - | KEEP l + | WHERE MATCH(first_name, "Anna Smith", {"minimum_should_match": 2.0}) """; var plan = optimizedPlan(query); - Project proj = as(plan, EsqlProject.class); - List fields = proj.projections(); - assertEquals(1, fields.size()); - ReferenceAttribute ra = as(fields.get(0), ReferenceAttribute.class); - assertEquals("l", ra.name()); - assertEquals(DataType.DOUBLE, ra.dataType()); - Eval eval = as(proj.child(), Eval.class); - assertEquals(1, 
eval.fields().size()); - Alias a = as(eval.fields().get(0), Alias.class); - LogWithBaseInMap l = as(a.child(), LogWithBaseInMap.class); - MapExpression me = as(l.base(), MapExpression.class); + Limit limit = as(plan, Limit.class); + Filter filter = as(limit.child(), Filter.class); + Match match = as(filter.condition(), Match.class); + MapExpression me = as(match.options(), MapExpression.class); assertEquals(1, me.entryExpressions().size()); EntryExpression ee = as(me.entryExpressions().get(0), EntryExpression.class); BytesRef key = as(ee.key().fold(FoldContext.small()), BytesRef.class); - assertEquals("base", key.utf8ToString()); + assertEquals("minimum_should_match", key.utf8ToString()); assertEquals(new Literal(EMPTY, 2.0, DataType.DOUBLE), ee.value()); assertEquals(DataType.DOUBLE, ee.dataType()); - Limit limit = as(eval.child(), Limit.class); - EsRelation esRelation = as(limit.child(), EsRelation.class); - assertEquals(esRelation.indexPattern(), "test"); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index cb2df8dec9a6f..a536e310eb485 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -28,7 +28,7 @@ import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.FilteredExpression; -import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; +import org.elasticsearch.xpack.esql.expression.function.fulltext.MatchOperator; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; @@ -2303,7 +2303,7 @@ public void testMetricWithGroupKeyAsAgg() { public void testMatchOperatorConstantQueryString() { var plan = statement("FROM test | WHERE field:\"value\""); var filter = as(plan, Filter.class); - var match = (Match) filter.condition(); + var match = (MatchOperator) filter.condition(); var matchField = (UnresolvedAttribute) match.field(); assertThat(matchField.name(), equalTo("field")); assertThat(match.query().fold(FoldContext.small()), equalTo("value")); @@ -2347,7 +2347,7 @@ public void testMatchFunctionFieldCasting() { public void testMatchOperatorFieldCasting() { var plan = statement("FROM test | WHERE field::int : \"value\""); var filter = as(plan, Filter.class); - var match = (Match) filter.condition(); + var match = (MatchOperator) filter.condition(); var toInteger = (ToInteger) match.field(); var matchField = (UnresolvedAttribute) toInteger.field(); assertThat(matchField.name(), equalTo("field")); @@ -2362,10 +2362,6 @@ public void testFailingMetadataWithSquareBrackets() { } public void testNamedFunctionArgumentInMap() { - assumeTrue( - "named function arguments require snapshot build", - EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() - ); // functions can be scalar, grouping and aggregation // functions can be in eval/where/stats/sort/dissect/grok commands, commands in snapshot are not covered // positive @@ -2603,10 +2599,6 @@ by fn2(f3, 
{"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"opt } public void testNamedFunctionArgumentWithCaseSensitiveKeys() { - assumeTrue( - "named function arguments require snapshot build", - EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() - ); LinkedHashMap expectedMap1 = new LinkedHashMap<>(3); expectedMap1.put("option", "string"); expectedMap1.put("Option", 1); @@ -2648,10 +2640,6 @@ public void testNamedFunctionArgumentWithCaseSensitiveKeys() { } public void testMultipleNamedFunctionArgumentsNotAllowed() { - assumeTrue( - "named function arguments require snapshot build", - EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() - ); Map commands = Map.ofEntries( Map.entry("eval x = {}", "41"), Map.entry("where {}", "38"), @@ -2676,10 +2664,6 @@ public void testMultipleNamedFunctionArgumentsNotAllowed() { } public void testNamedFunctionArgumentNotInMap() { - assumeTrue( - "named function arguments require snapshot build", - EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() - ); Map commands = Map.ofEntries( Map.entry("eval x = {}", "38"), Map.entry("where {}", "35"), @@ -2704,10 +2688,6 @@ public void testNamedFunctionArgumentNotInMap() { } public void testNamedFunctionArgumentNotConstant() { - assumeTrue( - "named function arguments require snapshot build", - EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() - ); Map commands = Map.ofEntries( Map.entry("eval x = {}", new String[] { "31", "35" }), Map.entry("where {}", new String[] { "28", "32" }), @@ -2740,10 +2720,6 @@ public void testNamedFunctionArgumentNotConstant() { } public void testNamedFunctionArgumentEmptyMap() { - assumeTrue( - "named function arguments require snapshot build", - EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() - ); Map commands = Map.ofEntries( Map.entry("eval x = {}", "30"), Map.entry("where {}", "27"), @@ -2768,10 +2744,6 @@ public void testNamedFunctionArgumentEmptyMap() { } public void testNamedFunctionArgumentMapWithNULL() { - assumeTrue( - "named function arguments require snapshot build", - EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() - ); Map commands = Map.ofEntries( Map.entry("eval x = {}", "29"), Map.entry("where {}", "26"), @@ -2798,10 +2770,6 @@ public void testNamedFunctionArgumentMapWithNULL() { } public void testNamedFunctionArgumentMapWithEmptyKey() { - assumeTrue( - "named function arguments require snapshot build", - EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() - ); Map commands = Map.ofEntries( Map.entry("eval x = {}", "29"), Map.entry("where {}", "26"), @@ -2837,10 +2805,6 @@ public void testNamedFunctionArgumentMapWithEmptyKey() { } public void testNamedFunctionArgumentMapWithDuplicatedKey() { - assumeTrue( - "named function arguments require snapshot build", - EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() - ); Map commands = Map.ofEntries( Map.entry("eval x = {}", "29"), Map.entry("where {}", "26"), @@ -2867,10 +2831,6 @@ public void testNamedFunctionArgumentMapWithDuplicatedKey() { } public void testNamedFunctionArgumentInInvalidPositions() { - assumeTrue( - "named function arguments require snapshot build", - EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() - ); // negative, named arguments are not supported outside of a functionExpression where booleanExpression or indexPattern is supported String map = "{\"option1\":\"string\", \"option2\":1}"; 
@@ -2900,10 +2860,6 @@ public void testNamedFunctionArgumentInInvalidPositions() { } public void testNamedFunctionArgumentWithUnsupportedNamedParameterTypes() { - assumeTrue( - "named function arguments require snapshot build", - EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() - ); Map commands = Map.ofEntries( Map.entry("eval x = {}", "29"), Map.entry("where {}", "26"), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/MatchQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/MatchQueryTests.java index 49d1a9ad19d09..bf3f3baa0b634 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/MatchQueryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/MatchQueryTests.java @@ -14,10 +14,10 @@ import org.elasticsearch.xpack.esql.core.tree.SourceTests; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.StringUtils; -import org.elasticsearch.xpack.esql.expression.predicate.fulltext.MatchQueryPredicate; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.function.Function; import static java.util.Collections.emptyMap; @@ -51,33 +51,29 @@ private static MatchQuery mutate(MatchQuery query) { } public void testQueryBuilding() { - MatchQueryBuilder qb = getBuilder("lenient=true"); - assertThat(qb.lenient(), equalTo(true)); - qb = getBuilder("lenient=true;operator=AND"); + MatchQueryBuilder qb = getBuilder(Map.of("lenient", true, "operator", "AND")); assertThat(qb.lenient(), equalTo(true)); assertThat(qb.operator(), equalTo(Operator.AND)); - Exception e = expectThrows(IllegalArgumentException.class, () -> getBuilder("pizza=yummy")); + Exception e = expectThrows(IllegalArgumentException.class, () -> getBuilder(Map.of("pizza", "yummy"))); assertThat(e.getMessage(), equalTo("illegal match option [pizza]")); - e = expectThrows(IllegalArgumentException.class, () -> getBuilder("operator=aoeu")); + e = expectThrows(IllegalArgumentException.class, () -> getBuilder(Map.of("operator", "aoeu"))); assertThat(e.getMessage(), equalTo("No enum constant org.elasticsearch.index.query.Operator.AOEU")); } - private static MatchQueryBuilder getBuilder(String options) { + private static MatchQueryBuilder getBuilder(Map options) { final Source source = new Source(1, 1, StringUtils.EMPTY); FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", KEYWORD, emptyMap(), true)); - final MatchQueryPredicate mmqp = new MatchQueryPredicate(source, fa, "eggplant", options); - final MatchQuery mmq = new MatchQuery(source, "eggplant", "foo", mmqp.optionMap()); + final MatchQuery mmq = new MatchQuery(source, "eggplant", "foo", options); return (MatchQueryBuilder) mmq.asBuilder(); } public void testToString() { final Source source = new Source(1, 1, StringUtils.EMPTY); FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", KEYWORD, emptyMap(), true)); - final MatchQueryPredicate mmqp = new MatchQueryPredicate(source, fa, "eggplant", ""); - final MatchQuery mmq = new MatchQuery(source, "eggplant", "foo", mmqp.optionMap()); + final MatchQuery mmq = new MatchQuery(source, "eggplant", "foo"); assertEquals("MatchQuery@1:2[eggplant:foo]", mmq.toString()); } } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 
da8290a1e185d..4e2215e0f9300 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -30,7 +30,8 @@ setup: - method: POST path: /_query parameters: [] - capabilities: [ snapshot_test_for_telemetry, fn_byte_length ] + # A snapshot function was removed in match_function_options, it can't work on mixed cluster tests otherwise. + capabilities: [ snapshot_test_for_telemetry, fn_byte_length, match_function_options] reason: "Test that should only be executed on snapshot versions" - do: {xpack.usage: {}} @@ -92,7 +93,7 @@ setup: - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} # Testing for the entire function set isn't feasbile, so we just check that we return the correct count as an approximation. - length: {esql.functions: 134} # check the "sister" test below for a likely update to the same esql.functions length check + - length: {esql.functions: 133} # check the "sister" test below for a likely update to the same esql.functions length check --- "Basic ESQL usage output (telemetry) non-snapshot version": From 1da828542b0220ab958fb87ab0bfd30fc6f58d81 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 28 Jan 2025 09:04:25 +0100 Subject: [PATCH 098/383] Record whether data streams for logs-*-* exist for logsdb enablement in 9.x (#120708) Add LogsPatternUsageService that records whether there are data streams matching the logs-*-* pattern. This is recorded via the new logsdb.prior_logs_usage cluster setting. Upon upgrade to 9.x this can be used to determine whether logsdb should be enabled by default if cluster.logsdb.enabled hasn't been set. The recommended upgrade path to 9.x is always to go to 8.latest. This component will run in clusters with a version greater than 8.18.0, but not on 9.0 and newer.
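
For orientation, an illustrative sketch (not part of this patch): the new service reschedules its check with a doubling delay, seeded at 30 seconds (so the first run happens after roughly one minute) and capped by logsdb.usage_check.max_period (24 hours by default), until a data stream matching logs-*-* is found, at which point logsdb.prior_logs_usage is persisted and no further checks are scheduled. This keeps the periodic check cheap on clusters that never create logs-*-* data streams. The class name LogsUsageCheckSketch and the regex stand-in for Regex.simpleMatch below are hypothetical; the real logic lives in LogsPatternUsageService further down in this patch.

    import java.util.List;
    import java.util.regex.Pattern;

    public class LogsUsageCheckSketch {
        // Hypothetical stand-in for Regex.simpleMatch("logs-*-*", name).
        private static final Pattern LOGS_PATTERN = Pattern.compile("logs-.*-.*");

        public static void main(String[] args) {
            List<String> dataStreams = List.of("my-index", "logs-myapp-prod");
            long delayMillis = 30_000L;                  // USAGE_CHECK_MINIMUM (30s)
            final long maxMillis = 24L * 60 * 60 * 1000; // USAGE_CHECK_MAX_PERIOD default (24h)
            boolean priorLogsUsage = false;
            while (priorLogsUsage == false) {
                // Same doubling rule as scheduleNext(): delay doubles until the configured cap.
                delayMillis = Math.min(delayMillis * 2, maxMillis);
                System.out.println("next check scheduled in " + delayMillis + " ms");
                priorLogsUsage = dataStreams.stream().anyMatch(name -> LOGS_PATTERN.matcher(name).matches());
            }
            // The real service persists logsdb.prior_logs_usage=true via a cluster settings
            // update at this point and stops scheduling further checks.
            System.out.println("logs-*-* usage detected; would persist logsdb.prior_logs_usage=true");
        }
    }
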
--- .../ClusterUpdateSettingsResponse.java | 2 +- .../xpack/logsdb/LogsDBPlugin.java | 19 +- .../xpack/logsdb/LogsPatternUsageService.java | 166 ++++++++++++++ .../LogsdbIndexModeSettingsProvider.java | 2 +- ...gsPatternUsageServiceIntegrationTests.java | 139 ++++++++++++ .../logsdb/LogsPatternUsageServiceTests.java | 213 ++++++++++++++++++ 6 files changed, 538 insertions(+), 3 deletions(-) create mode 100644 x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageService.java create mode 100644 x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceIntegrationTests.java create mode 100644 x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceTests.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponse.java index de754260dddbf..dd0353afbbfbe 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponse.java @@ -36,7 +36,7 @@ public class ClusterUpdateSettingsResponse extends AcknowledgedResponse { persistentSettings = Settings.readSettingsFromStream(in); } - ClusterUpdateSettingsResponse(boolean acknowledged, Settings transientSettings, Settings persistentSettings) { + public ClusterUpdateSettingsResponse(boolean acknowledged, Settings transientSettings, Settings persistentSettings) { super(acknowledged); this.persistentSettings = persistentSettings; this.transientSettings = transientSettings; diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index c2515039ed8bf..4720ec87cb85c 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -11,8 +11,10 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; @@ -40,6 +42,8 @@ import java.util.function.Predicate; import java.util.function.Supplier; +import static org.elasticsearch.xpack.logsdb.LogsPatternUsageService.LOGSDB_PRIOR_LOGS_USAGE; +import static org.elasticsearch.xpack.logsdb.LogsPatternUsageService.USAGE_CHECK_MAX_PERIOD; import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseService.FALLBACK_SETTING; public class LogsDBPlugin extends Plugin implements ActionPlugin { @@ -76,6 +80,19 @@ public Collection createComponents(PluginServices services) { CLUSTER_LOGSDB_ENABLED, logsdbIndexModeSettingsProvider::updateClusterIndexModeLogsdbEnabled ); + + var clusterService = services.clusterService(); + Supplier metadataSupplier = () -> clusterService.state().metadata(); + var historicLogsUsageService = new 
LogsPatternUsageService(services.client(), settings, services.threadPool(), metadataSupplier); + clusterService.addLocalNodeMasterListener(historicLogsUsageService); + clusterService.addLifecycleListener(new LifecycleListener() { + + @Override + public void beforeStop() { + historicLogsUsageService.offMaster(); + } + }); + // Nothing to share here: return super.createComponents(services); } @@ -95,7 +112,7 @@ public Collection getAdditionalIndexSettingProviders(Index @Override public List> getSettings() { - return List.of(FALLBACK_SETTING, CLUSTER_LOGSDB_ENABLED); + return List.of(FALLBACK_SETTING, CLUSTER_LOGSDB_ENABLED, USAGE_CHECK_MAX_PERIOD, LOGSDB_PRIOR_LOGS_USAGE); } @Override diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageService.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageService.java new file mode 100644 index 0000000000000..929db16a618a0 --- /dev/null +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageService.java @@ -0,0 +1,166 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.LocalNodeMasterListener; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.logsdb.LogsdbIndexModeSettingsProvider.LOGS_PATTERN; + +/** + * A component that checks in the background whether there are data streams that match log-*-* pattern and if so records this + * as persistent setting in cluster state. If logs-*-* data stream usage has been found then this component will no longer + * run in the background. + *

+ * After {@link #onMaster()} is invoked, the first check is scheduled to run after 1 minute. If no logs-*-* data streams are + * found, then the next check runs after 2 minutes. The schedule time will double if no data streams with logs-*-* pattern + * are found up until the maximum configured period in the {@link #USAGE_CHECK_MAX_PERIOD} setting (defaults to 24 hours). + *

+ * If during a check one or more logs-*-* data streams are found, then the {@link #LOGSDB_PRIOR_LOGS_USAGE} setting gets set + * as persistent cluster setting and this component will not schedule new checks. The mentioned setting is visible in persistent settings + * of cluster state and a signal that upon upgrading to 9.x logsdb will not be enabled by default for data streams matching the + * logs-*-* pattern. It isn't recommended to manually set the {@link #LOGSDB_PRIOR_LOGS_USAGE} setting. + */ +final class LogsPatternUsageService implements LocalNodeMasterListener { + + private static final Logger LOGGER = LogManager.getLogger(LogsPatternUsageService.class); + private static final TimeValue USAGE_CHECK_MINIMUM = TimeValue.timeValueSeconds(30); + static final Setting USAGE_CHECK_MAX_PERIOD = Setting.timeSetting( + "logsdb.usage_check.max_period", + new TimeValue(24, TimeUnit.HOURS), + Setting.Property.NodeScope + ); + static final Setting LOGSDB_PRIOR_LOGS_USAGE = Setting.boolSetting( + "logsdb.prior_logs_usage", + false, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + + private final Client client; + private final Settings nodeSettings; + private final ThreadPool threadPool; + private final Supplier metadataSupplier; + + // Initializing to 30s, so first time will run with a delay of 60s: + volatile TimeValue nextWaitTime = USAGE_CHECK_MINIMUM; + volatile boolean isMaster; + volatile boolean hasPriorLogsUsage; + volatile Scheduler.Cancellable cancellable; + + LogsPatternUsageService(Client client, Settings nodeSettings, ThreadPool threadPool, Supplier metadataSupplier) { + this.client = client; + this.nodeSettings = nodeSettings; + this.threadPool = threadPool; + this.metadataSupplier = metadataSupplier; + } + + @Override + public void onMaster() { + if (cancellable == null || cancellable.isCancelled()) { + isMaster = true; + nextWaitTime = USAGE_CHECK_MINIMUM; + scheduleNext(); + } + } + + @Override + public void offMaster() { + isMaster = false; + if (cancellable != null && cancellable.isCancelled() == false) { + cancellable.cancel(); + cancellable = null; + } + } + + void scheduleNext() { + TimeValue maxWaitTime = USAGE_CHECK_MAX_PERIOD.get(nodeSettings); + nextWaitTime = TimeValue.timeValueMillis(Math.min(nextWaitTime.millis() * 2, maxWaitTime.millis())); + scheduleNext(nextWaitTime); + } + + void scheduleNext(TimeValue waitTime) { + if (isMaster && hasPriorLogsUsage == false) { + try { + cancellable = threadPool.schedule(this::check, waitTime, threadPool.generic()); + } catch (EsRejectedExecutionException e) { + if (e.isExecutorShutdown()) { + LOGGER.debug("Failed to check; Shutting down", e); + } else { + throw e; + } + } + } else { + LOGGER.debug("Skipping check, because [{}]/[{}]", isMaster, hasPriorLogsUsage); + } + } + + void check() { + LOGGER.debug("Starting logs-*-* usage check"); + if (isMaster) { + var metadata = metadataSupplier.get(); + if (LOGSDB_PRIOR_LOGS_USAGE.exists(metadata.persistentSettings())) { + LOGGER.debug("Using persistent logs-*-* usage check"); + hasPriorLogsUsage = true; + return; + } + + if (hasLogsUsage(metadata)) { + updateSetting(); + } else { + LOGGER.debug("No usage found; Skipping check"); + scheduleNext(); + } + } else { + LOGGER.debug("No longer master; Skipping check"); + } + } + + static boolean hasLogsUsage(Metadata metadata) { + for (var dataStream : metadata.dataStreams().values()) { + if (Regex.simpleMatch(LOGS_PATTERN, dataStream.getName())) { + return true; + } + } + return false; + } + + void updateSetting() { + var 
settingsToUpdate = Settings.builder().put(LOGSDB_PRIOR_LOGS_USAGE.getKey(), true).build(); + var request = new ClusterUpdateSettingsRequest(TimeValue.ONE_MINUTE, TimeValue.ONE_MINUTE); + request.persistentSettings(settingsToUpdate); + client.execute(ClusterUpdateSettingsAction.INSTANCE, request, ActionListener.wrap(resp -> { + if (resp.isAcknowledged() && LOGSDB_PRIOR_LOGS_USAGE.exists(resp.getPersistentSettings())) { + hasPriorLogsUsage = true; + cancellable = null; + } else { + LOGGER.debug(() -> "unexpected response [" + LOGSDB_PRIOR_LOGS_USAGE.getKey() + "]"); + scheduleNext(TimeValue.ONE_MINUTE); + } + }, e -> { + LOGGER.debug(() -> "Failed to update [" + LOGSDB_PRIOR_LOGS_USAGE.getKey() + "]", e); + scheduleNext(TimeValue.ONE_MINUTE); + })); + } +} diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java index 29b3a80ce2896..ac19c96f31b5c 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java @@ -46,7 +46,7 @@ final class LogsdbIndexModeSettingsProvider implements IndexSettingProvider { private static final Logger LOGGER = LogManager.getLogger(LogsdbIndexModeSettingsProvider.class); - private static final String LOGS_PATTERN = "logs-*-*"; + static final String LOGS_PATTERN = "logs-*-*"; private static final Set MAPPING_INCLUDES = Set.of("_doc._source.*", "_doc.properties.host**", "_doc.subobjects"); private final SyntheticSourceLicenseService syntheticSourceLicenseService; diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceIntegrationTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceIntegrationTests.java new file mode 100644 index 0000000000000..fcd1d311df802 --- /dev/null +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceIntegrationTests.java @@ -0,0 +1,139 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsAction; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; +import org.elasticsearch.action.datastreams.DeleteDataStreamAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.datastreams.DataStreamsPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.threadpool.ThreadPoolStats; + +import java.util.Collection; +import java.util.List; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.nullValue; + +public class LogsPatternUsageServiceIntegrationTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return List.of(LogsDBPlugin.class, DataStreamsPlugin.class); + } + + @Override + protected Settings nodeSettings() { + return Settings.builder().put("logsdb.usage_check.max_period", "1s").build(); + } + + @Override + protected boolean resetNodeAfterTest() { + return true; + } + + public void testLogsPatternUsage() throws Exception { + var template = ComposableIndexTemplate.builder() + .indexPatterns(List.of("logs-*-*")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); + assertAcked( + client().execute( + TransportPutComposableIndexTemplateAction.TYPE, + new TransportPutComposableIndexTemplateAction.Request("1").indexTemplate(template) + ).actionGet() + ); + + IndexRequest indexRequest = new IndexRequest("my-index").create(true).source("field", "value"); + var indexResponse = client().index(indexRequest).actionGet(); + assertThat(indexResponse.getResult(), equalTo(DocWriteResponse.Result.CREATED)); + + { + var response = client().execute(ClusterGetSettingsAction.INSTANCE, new ClusterGetSettingsAction.Request(TimeValue.ONE_MINUTE)) + .actionGet(); + assertThat(response.persistentSettings().get("logsdb.prior_logs_usage"), nullValue()); + } + + indexRequest = new IndexRequest("logs-myapp-prod").create(true).source("@timestamp", "2000-01-01T00:00"); + indexResponse = client().index(indexRequest).actionGet(); + assertThat(indexResponse.getResult(), equalTo(DocWriteResponse.Result.CREATED)); + + assertBusy(() -> { + var response = client().execute(ClusterGetSettingsAction.INSTANCE, new ClusterGetSettingsAction.Request(TimeValue.ONE_MINUTE)) + .actionGet(); + assertThat(response.persistentSettings().get("logsdb.prior_logs_usage"), equalTo("true")); + }); + } + + public void testLogsPatternUsageNoLogsStarDashStarUsage() throws Exception { + var template = ComposableIndexTemplate.builder() + .indexPatterns(List.of("log-*-*")) + .template(new Template(Settings.builder().put("index.number_of_replicas", 0).build(), null, null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); + assertAcked( + 
client().execute( + TransportPutComposableIndexTemplateAction.TYPE, + new TransportPutComposableIndexTemplateAction.Request("1").indexTemplate(template) + ).actionGet() + ); + + var indexRequest = new IndexRequest("log-myapp-prod").create(true).source("@timestamp", "2000-01-01T00:00"); + var indexResponse = client().index(indexRequest).actionGet(); + assertThat(indexResponse.getResult(), equalTo(DocWriteResponse.Result.CREATED)); + + ensureGreen("log-myapp-prod"); + // Check that LogsPatternUsageService checked three times by checking generic threadpool stats. + // (the LogsPatternUsageService's check is scheduled via the generic threadpool) + var threadPool = getInstanceFromNode(ThreadPool.class); + var beforeStat = getGenericThreadpoolStat(threadPool); + assertBusy(() -> { + var stat = getGenericThreadpoolStat(threadPool); + assertThat(stat.completed(), greaterThanOrEqualTo(beforeStat.completed() + 3)); + }); + var response = client().execute(ClusterGetSettingsAction.INSTANCE, new ClusterGetSettingsAction.Request(TimeValue.ONE_MINUTE)) + .actionGet(); + assertThat(response.persistentSettings().get("logsdb.prior_logs_usage"), nullValue()); + } + + private static ThreadPoolStats.Stats getGenericThreadpoolStat(ThreadPool threadPool) { + var result = threadPool.stats().stats().stream().filter(stats -> stats.name().equals(ThreadPool.Names.GENERIC)).toList(); + assertThat(result.size(), equalTo(1)); + return result.get(0); + } + + @Override + public void tearDown() throws Exception { + // Need to clean up the data stream and logsdb.prior_logs_usage setting because ESSingleNodeTestCase tests aren't allowed to leave + // persistent cluster settings around. + + var deleteDataStreamsRequest = new DeleteDataStreamAction.Request(TEST_REQUEST_TIMEOUT, "*"); + deleteDataStreamsRequest.indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN); + assertAcked(client().execute(DeleteDataStreamAction.INSTANCE, deleteDataStreamsRequest)); + + var settings = Settings.builder().put("logsdb.prior_logs_usage", (String) null).build(); + client().admin() + .cluster() + .updateSettings(new ClusterUpdateSettingsRequest(TimeValue.ONE_MINUTE, TimeValue.ONE_MINUTE).persistentSettings(settings)) + .actionGet(); + + super.tearDown(); + } +} diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceTests.java new file mode 100644 index 0000000000000..2cd2f9216aba3 --- /dev/null +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceTests.java @@ -0,0 +1,213 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.logsdb; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.List; +import java.util.function.Supplier; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; +import static org.mockito.Mockito.when; + +public class LogsPatternUsageServiceTests extends ESTestCase { + + public void testOnMaster() throws Exception { + var nodeSettings = Settings.builder().put("logsdb.usage_check.max_period", "1s").build(); + var client = mock(Client.class); + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocationOnMock + .getArguments()[2]; + var persistentSettings = Settings.builder().put("logsdb.prior_logs_usage", true).build(); + listener.onResponse(new ClusterUpdateSettingsResponse(true, Settings.EMPTY, persistentSettings)); + return null; + }).when(client).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); + + try (var threadPool = new TestThreadPool(getTestName())) { + var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1-prod", 1)), List.of()); + Supplier metadataSupplier = clusterState::metadata; + + var service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); + // pre-check: + assertFalse(service.isMaster); + assertFalse(service.hasPriorLogsUsage); + assertNull(service.cancellable); + // Trigger service: + service.onMaster(); + assertBusy(() -> { + assertTrue(service.isMaster); + assertTrue(service.hasPriorLogsUsage); + assertNull(service.cancellable); + }); + } + } + + public void testCheckHasUsage() { + var nodeSettings = Settings.EMPTY; + var client = mock(Client.class); + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocationOnMock + .getArguments()[2]; + var persistentSettings = Settings.builder().put("logsdb.prior_logs_usage", true).build(); + listener.onResponse(new ClusterUpdateSettingsResponse(true, Settings.EMPTY, persistentSettings)); + return null; + }).when(client).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); + + var threadPool = mock(ThreadPool.class); + var scheduledCancellable = mock(Scheduler.ScheduledCancellable.class); + when(threadPool.schedule(any(), any(), any())).thenReturn(scheduledCancellable); + var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1-prod", 1)), List.of()); + Supplier metadataSupplier = clusterState::metadata; + + LogsPatternUsageService 
service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); + service.onMaster(); + assertFalse(service.hasPriorLogsUsage); + assertNotNull(service.cancellable); + assertEquals(service.nextWaitTime, TimeValue.timeValueMinutes(1)); + service.check(); + assertTrue(service.hasPriorLogsUsage); + assertNull(service.cancellable); + assertEquals(service.nextWaitTime, TimeValue.timeValueMinutes(1)); + + verify(threadPool, times(1)).schedule(any(), any(), any()); + verify(client, times(1)).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); + } + + public void testCheckHasUsageNoMatch() { + var nodeSettings = Settings.EMPTY; + var client = mock(Client.class); + + var threadPool = mock(ThreadPool.class); + var scheduledCancellable = mock(Scheduler.ScheduledCancellable.class); + when(threadPool.schedule(any(), any(), any())).thenReturn(scheduledCancellable); + var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("log-app1-prod", 1)), List.of()); + Supplier metadataSupplier = clusterState::metadata; + + LogsPatternUsageService service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); + service.onMaster(); + assertFalse(service.hasPriorLogsUsage); + assertNotNull(service.cancellable); + assertEquals(service.nextWaitTime, TimeValue.timeValueMinutes(1)); + service.check(); + assertFalse(service.hasPriorLogsUsage); + assertNotNull(service.cancellable); + assertEquals(service.nextWaitTime, TimeValue.timeValueMinutes(2)); + + verify(threadPool, times(2)).schedule(any(), any(), any()); + verifyNoInteractions(client); + } + + public void testCheckPriorLogsUsageAlreadySet() { + var nodeSettings = Settings.EMPTY; + var client = mock(Client.class); + + var threadPool = mock(ThreadPool.class); + var scheduledCancellable = mock(Scheduler.ScheduledCancellable.class); + when(threadPool.schedule(any(), any(), any())).thenReturn(scheduledCancellable); + var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("log-app1-prod", 1)), List.of()); + clusterState = ClusterState.builder(clusterState) + .metadata( + Metadata.builder(clusterState.getMetadata()) + .persistentSettings(Settings.builder().put("logsdb.prior_logs_usage", true).build()) + .build() + ) + .build(); + Supplier metadataSupplier = clusterState::metadata; + + LogsPatternUsageService service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); + service.isMaster = true; + assertFalse(service.hasPriorLogsUsage); + assertNull(service.cancellable); + service.check(); + assertTrue(service.hasPriorLogsUsage); + assertNull(service.cancellable); + + verifyNoInteractions(client, threadPool); + } + + public void testCheckHasUsageUnexpectedResponse() { + var nodeSettings = Settings.EMPTY; + var client = mock(Client.class); + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocationOnMock + .getArguments()[2]; + ClusterUpdateSettingsResponse response; + if (randomBoolean()) { + var persistentSettings = Settings.builder().put("logsdb.prior_logs_usage", true).build(); + response = new ClusterUpdateSettingsResponse(false, Settings.EMPTY, persistentSettings); + } else { + response = new ClusterUpdateSettingsResponse(true, Settings.EMPTY, Settings.EMPTY); + } + listener.onResponse(response); + return null; + }).when(client).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); + + var threadPool = mock(ThreadPool.class); 
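A rough sketch of the polling contract that testCheckHasUsage, testCheckHasUsageNoMatch and testCheckPriorLogsUsageAlreadySet above assert; the production LogsPatternUsageService is not part of this hunk, so the method shape, the maxPeriod field (backed by the logsdb.usage_check.max_period setting) and the exact reset points are assumptions rather than the shipped implementation:

    // Sketch only: a hit persists the flag and stops polling; a miss doubles the wait (capped at
    // maxPeriod) and reschedules; an already-persisted flag short-circuits without any calls.
    void check() {
        cancellable = null;
        Metadata metadata = metadataSupplier.get();
        if (LOGSDB_PRIOR_LOGS_USAGE.get(metadata.persistentSettings())) {
            hasPriorLogsUsage = true;                         // testCheckPriorLogsUsageAlreadySet
        } else if (hasLogsUsage(metadata)) {
            updateSetting();                                  // testCheckHasUsage: persists logsdb.prior_logs_usage=true
        } else {
            // testCheckHasUsageNoMatch: 1m -> 2m, assumed to be capped at maxPeriod
            nextWaitTime = TimeValue.timeValueMillis(Math.min(nextWaitTime.millis() * 2, maxPeriod.millis()));
            scheduleNext(nextWaitTime);
        }
    }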
+ var scheduledCancellable = mock(Scheduler.ScheduledCancellable.class); + when(threadPool.schedule(any(), any(), any())).thenReturn(scheduledCancellable); + var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1-prod", 1)), List.of()); + Supplier metadataSupplier = clusterState::metadata; + + LogsPatternUsageService service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); + service.isMaster = true; + assertFalse(service.hasPriorLogsUsage); + assertNull(service.cancellable); + service.check(); + assertFalse(service.hasPriorLogsUsage); + assertNotNull(service.cancellable); + + verify(threadPool, times(1)).schedule(any(), any(), any()); + verify(client, times(1)).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); + } + + public void testHasLogsUsage() { + var metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(), List.of()).getMetadata(); + assertFalse(LogsPatternUsageService.hasLogsUsage(metadata)); + metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("log-app1", 1)), List.of()).getMetadata(); + assertFalse(LogsPatternUsageService.hasLogsUsage(metadata)); + metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1", 1)), List.of()).getMetadata(); + assertFalse(LogsPatternUsageService.hasLogsUsage(metadata)); + metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("log-app1-prod", 1)), List.of()).getMetadata(); + assertFalse(LogsPatternUsageService.hasLogsUsage(metadata)); + metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1-prod", 1)), List.of()).getMetadata(); + assertTrue(LogsPatternUsageService.hasLogsUsage(metadata)); + metadata = DataStreamTestHelper.getClusterStateWithDataStreams( + List.of(new Tuple<>("log-app1-prod", 1), new Tuple<>("logs-app2-prod", 1)), + List.of() + ).getMetadata(); + assertTrue(LogsPatternUsageService.hasLogsUsage(metadata)); + metadata = DataStreamTestHelper.getClusterStateWithDataStreams( + List.of(new Tuple<>("log-app1", 1), new Tuple<>("logs-app2-prod", 1)), + List.of() + ).getMetadata(); + assertTrue(LogsPatternUsageService.hasLogsUsage(metadata)); + } + +} From 974c4e6242e92854982b0d96181ee72ed6a70edf Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Tue, 28 Jan 2025 09:20:24 +0100 Subject: [PATCH 099/383] Fix pattern tests in release builds (#120879) --- muted-tests.yml | 6 ------ .../xpack/esql/parser/StatementParserTests.java | 4 ++++ 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index d8cc9fd2f0547..16bd65784fca5 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -249,12 +249,6 @@ tests: - class: org.elasticsearch.xpack.security.authc.service.ServiceAccountIT method: testAuthenticateShouldNotFallThroughInCaseOfFailure issue: https://github.com/elastic/elasticsearch/issues/120902 -- class: org.elasticsearch.xpack.esql.parser.StatementParserTests - method: testInvalidJoinPatterns - issue: https://github.com/elastic/elasticsearch/issues/120849 -- class: org.elasticsearch.xpack.esql.parser.StatementParserTests - method: testValidJoinPattern - issue: https://github.com/elastic/elasticsearch/issues/120848 - class: org.elasticsearch.packaging.test.DockerTests method: test050BasicApiTests issue: https://github.com/elastic/elasticsearch/issues/120911 diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index a536e310eb485..dcc549057b77a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -2905,6 +2905,8 @@ public void testValidFromPattern() { } public void testValidJoinPattern() { + assumeTrue("LOOKUP JOIN requires corresponding capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); + var basePattern = randomIndexPatterns(without(CROSS_CLUSTER)); var joinPattern = randomIndexPattern(without(WILDCARD_PATTERN), without(CROSS_CLUSTER)); var onField = randomIdentifier(); @@ -2922,6 +2924,8 @@ public void testValidJoinPattern() { } public void testInvalidJoinPatterns() { + assumeTrue("LOOKUP JOIN requires corresponding capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); + { // wildcard var joinPattern = randomIndexPattern(WILDCARD_PATTERN, without(CROSS_CLUSTER)); From a608f0626e2a31fc133abc626952377fa7849719 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 28 Jan 2025 09:33:22 +0100 Subject: [PATCH 100/383] Added query param `?include_source_on_error` for ingest requests (#120725) A new query parameter `?include_source_on_error` was added for create / index, update and bulk REST APIs to control if to include the document source in the error response in case of parsing errors. The default value is `true`. --- docs/changelog/120725.yaml | 7 ++ .../XContentParserConfigurationImpl.java | 41 ++++++++-- .../provider/json/JsonXContentImpl.java | 15 +++- .../xcontent/XContentParserConfiguration.java | 7 ++ .../xcontent/XContentParserTests.java | 21 +++++ .../resources/rest-api-spec/api/bulk.json | 4 + .../resources/rest-api-spec/api/create.json | 4 + .../resources/rest-api-spec/api/index.json | 4 + .../resources/rest-api-spec/api/update.json | 4 + .../action/document/RestBulkActionIT.java | 76 +++++++++++++++++++ .../action/document/RestIndexActionIT.java | 53 +++++++++++++ .../action/document/RestUpdateActionIT.java | 53 +++++++++++++ .../org/elasticsearch/TransportVersions.java | 1 + .../action/bulk/BulkRequest.java | 18 ++++- .../action/bulk/BulkRequestParser.java | 9 ++- .../action/bulk/TransportShardBulkAction.java | 1 + .../bulk/TransportSimulateBulkAction.java | 1 + .../action/index/IndexRequest.java | 18 +++++ .../index/mapper/DocumentParser.java | 6 +- .../index/mapper/SourceToParse.java | 23 +++++- .../org/elasticsearch/rest/RestRequest.java | 17 ++++- .../org/elasticsearch/rest/RestUtils.java | 17 +++++ .../rest/action/document/RestBulkAction.java | 31 ++++---- .../rest/action/document/RestIndexAction.java | 2 + .../action/document/RestUpdateAction.java | 3 +- .../action/bulk/BulkRequestParserTests.java | 28 +++---- .../index/mapper/DynamicTemplatesTests.java | 12 +-- .../index/mapper/MapperServiceTestCase.java | 10 +-- .../action/MonitoringBulkRequest.java | 2 +- .../test/CoreTestTranslater.java | 2 +- 30 files changed, 421 insertions(+), 69 deletions(-) create mode 100644 docs/changelog/120725.yaml create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/rest/action/document/RestBulkActionIT.java create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/rest/action/document/RestIndexActionIT.java create mode 100644 
server/src/internalClusterTest/java/org/elasticsearch/rest/action/document/RestUpdateActionIT.java diff --git a/docs/changelog/120725.yaml b/docs/changelog/120725.yaml new file mode 100644 index 0000000000000..71d256a559a7d --- /dev/null +++ b/docs/changelog/120725.yaml @@ -0,0 +1,7 @@ +pr: 120725 +summary: |- + A new query parameter `?include_source_on_error` was added for create / index, update and bulk REST APIs to control + if to include the document source in the error response in case of parsing errors. The default value is `true`. +area: Infra/REST API +type: enhancement +issues: [] diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/XContentParserConfigurationImpl.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/XContentParserConfigurationImpl.java index 70adc59b9c6a9..e04c640ad7461 100644 --- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/XContentParserConfigurationImpl.java +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/XContentParserConfigurationImpl.java @@ -31,7 +31,8 @@ public class XContentParserConfigurationImpl implements XContentParserConfigurat RestApiVersion.current(), null, null, - false + false, + true ); final NamedXContentRegistry registry; @@ -40,6 +41,7 @@ public class XContentParserConfigurationImpl implements XContentParserConfigurat final FilterPath[] includes; final FilterPath[] excludes; final boolean filtersMatchFieldNamesWithDots; + final boolean includeSourceOnError; private XContentParserConfigurationImpl( NamedXContentRegistry registry, @@ -47,7 +49,8 @@ private XContentParserConfigurationImpl( RestApiVersion restApiVersion, FilterPath[] includes, FilterPath[] excludes, - boolean filtersMatchFieldNamesWithDots + boolean filtersMatchFieldNamesWithDots, + boolean includeSourceOnError ) { this.registry = registry; this.deprecationHandler = deprecationHandler; @@ -55,6 +58,28 @@ private XContentParserConfigurationImpl( this.includes = includes; this.excludes = excludes; this.filtersMatchFieldNamesWithDots = filtersMatchFieldNamesWithDots; + this.includeSourceOnError = includeSourceOnError; + } + + @Override + public boolean includeSourceOnError() { + return includeSourceOnError; + } + + @Override + public XContentParserConfiguration withIncludeSourceOnError(boolean includeSourceOnError) { + if (includeSourceOnError == this.includeSourceOnError) { + return this; + } + return new XContentParserConfigurationImpl( + registry, + deprecationHandler, + restApiVersion, + includes, + excludes, + filtersMatchFieldNamesWithDots, + includeSourceOnError + ); } @Override @@ -65,7 +90,8 @@ public XContentParserConfigurationImpl withRegistry(NamedXContentRegistry regist restApiVersion, includes, excludes, - filtersMatchFieldNamesWithDots + filtersMatchFieldNamesWithDots, + includeSourceOnError ); } @@ -80,7 +106,8 @@ public XContentParserConfiguration withDeprecationHandler(DeprecationHandler dep restApiVersion, includes, excludes, - filtersMatchFieldNamesWithDots + filtersMatchFieldNamesWithDots, + includeSourceOnError ); } @@ -95,7 +122,8 @@ public XContentParserConfiguration withRestApiVersion(RestApiVersion restApiVers restApiVersion, includes, excludes, - filtersMatchFieldNamesWithDots + filtersMatchFieldNamesWithDots, + includeSourceOnError ); } @@ -143,7 +171,8 @@ public XContentParserConfiguration withFiltering( restApiVersion, includePaths, excludePaths, - filtersMatchFieldNamesWithDots + filtersMatchFieldNamesWithDots, + includeSourceOnError ); } diff 
--git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentImpl.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentImpl.java index c842e3bbc50f4..7f52467caf49b 100644 --- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentImpl.java +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentImpl.java @@ -87,23 +87,30 @@ public XContentGenerator createGenerator(OutputStream os, Set includes, return new JsonXContentGenerator(jsonFactory.createGenerator(os, JsonEncoding.UTF8), os, includes, excludes); } + private XContentParser createParser(XContentParserConfiguration config, JsonParser parser) { + if (config.includeSourceOnError() == false) { + parser.disable(JsonParser.Feature.INCLUDE_SOURCE_IN_LOCATION); // enabled by default, disable if requested + } + return new JsonXContentParser(config, parser); + } + @Override public XContentParser createParser(XContentParserConfiguration config, String content) throws IOException { - return new JsonXContentParser(config, jsonFactory.createParser(content)); + return createParser(config, jsonFactory.createParser(content)); } @Override public XContentParser createParser(XContentParserConfiguration config, InputStream is) throws IOException { - return new JsonXContentParser(config, jsonFactory.createParser(is)); + return createParser(config, jsonFactory.createParser(is)); } @Override public XContentParser createParser(XContentParserConfiguration config, byte[] data, int offset, int length) throws IOException { - return new JsonXContentParser(config, jsonFactory.createParser(data, offset, length)); + return createParser(config, jsonFactory.createParser(data, offset, length)); } @Override public XContentParser createParser(XContentParserConfiguration config, Reader reader) throws IOException { - return new JsonXContentParser(config, jsonFactory.createParser(reader)); + return createParser(config, jsonFactory.createParser(reader)); } } diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentParserConfiguration.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentParserConfiguration.java index 59e5cd5d6485c..73ebdfce222ad 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentParserConfiguration.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentParserConfiguration.java @@ -26,6 +26,13 @@ public interface XContentParserConfiguration { */ XContentParserConfiguration EMPTY = XContentProvider.provider().empty(); + /** + * Disable to not include the source in case of parsing errors (defaults to true). + */ + XContentParserConfiguration withIncludeSourceOnError(boolean includeSourceOnError); + + boolean includeSourceOnError(); + /** * Replace the registry backing {@link XContentParser#namedObject}. 
*/ diff --git a/libs/x-content/src/test/java/org/elasticsearch/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/xcontent/XContentParserTests.java index 5aff60b1a4c75..5aa3b1e140074 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/xcontent/XContentParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/xcontent/XContentParserTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -33,6 +34,7 @@ import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static org.junit.internal.matchers.ThrowableMessageMatcher.hasMessage; @@ -655,6 +657,25 @@ public void testCreateRootSubParser() throws IOException { } + public void testJsonIncludeSourceOnParserError() throws IOException { + var xContent = XContentFactory.xContent(XContentType.JSON); + var source = "{\"field\": invalid}"; // causes parse exception + var sourceEnabled = XContentParserConfiguration.EMPTY; + var sourceDisabled = XContentParserConfiguration.EMPTY.withIncludeSourceOnError(false); + + var parseException = expectThrows(XContentParseException.class, () -> createParser(xContent, sourceEnabled, source).map()); + assertThat(parseException.getMessage(), containsString(source)); + + parseException = expectThrows(XContentParseException.class, () -> createParser(xContent, sourceDisabled, source).map()); + assertThat(parseException.getMessage(), not(containsString(source))); + } + + private XContentParser createParser(XContent xContent, XContentParserConfiguration config, String content) throws IOException { + return randomBoolean() + ? xContent.createParser(config, content) + : xContent.createParser(config, content.getBytes(StandardCharsets.UTF_8)); + } + /** * Generates a random object {"first_field": "foo", "marked_field": {...random...}, "last_field": "bar} * diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json index f9c8041d7221f..490bb6fd73bbe 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/bulk.json @@ -83,6 +83,10 @@ "list_executed_pipelines": { "type": "boolean", "description": "Sets list_executed_pipelines for all incoming documents. Defaults to unset (false)" + }, + "include_source_on_error": { + "type": "boolean", + "description": "True or false if to include the document source in the error message in case of parsing errors. Defaults to true." } }, "body":{ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/create.json index 8ed4c04917d3a..65cb0da4753cc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/create.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/create.json @@ -69,6 +69,10 @@ "pipeline":{ "type":"string", "description":"The pipeline id to preprocess incoming documents with" + }, + "include_source_on_error": { + "type": "boolean", + "description": "True or false if to include the document source in the error message in case of parsing errors. Defaults to true." 
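The REST-level include_source_on_error parameter described in these specs ultimately flips the new x-content flag; a minimal caller-side sketch, mirroring testJsonIncludeSourceOnParserError earlier in this commit (the JSON literal and local variable names are illustrative):

    XContentParserConfiguration config = XContentParserConfiguration.EMPTY.withIncludeSourceOnError(false);
    try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(config, "{\"field\": invalid}")) {
        parser.map(); // throws XContentParseException; with the flag disabled the message no longer echoes the offending source
    }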
} }, "body":{ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json index 102ca4e012e85..79ecbd794024a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json @@ -105,6 +105,10 @@ "require_data_stream": { "type": "boolean", "description": "When true, requires the destination to be a data stream (existing or to-be-created). Default is false" + }, + "include_source_on_error": { + "type": "boolean", + "description": "True or false if to include the document source in the error message in case of parsing errors. Defaults to true." } }, "body":{ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json index e588777e990ec..9e47e80547e88 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json @@ -83,6 +83,10 @@ "require_alias": { "type": "boolean", "description": "When true, requires destination is an alias. Default is false" + }, + "include_source_on_error": { + "type": "boolean", + "description": "True or false if to include the document source in the error message in case of parsing errors. Defaults to true." } }, "body":{ diff --git a/server/src/internalClusterTest/java/org/elasticsearch/rest/action/document/RestBulkActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/rest/action/document/RestBulkActionIT.java new file mode 100644 index 0000000000000..d0b5ec4562903 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/rest/action/document/RestBulkActionIT.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.rest.action.document; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.InputStreamReader; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; + +public class RestBulkActionIT extends ESIntegTestCase { + @Override + protected boolean addMockHttpTransport() { + return false; + } + + public void testBulkIndexWithSourceOnErrorDisabled() throws Exception { + var source = "{\"field\": \"index\",}"; + var sourceEscaped = "{\\\"field\\\": \\\"index\\\",}"; + + var request = new Request("PUT", "/test_index/_bulk"); + request.setJsonEntity(Strings.format("{\"index\":{\"_id\":\"1\"}}\n%s\n", source)); + + Response response = getRestClient().performRequest(request); + String responseContent = Streams.copyToString(new InputStreamReader(response.getEntity().getContent(), UTF_8)); + assertThat(responseContent, containsString(sourceEscaped)); + + request.addParameter(RestUtils.INCLUDE_SOURCE_ON_ERROR_PARAMETER, "false"); + + response = getRestClient().performRequest(request); + responseContent = Streams.copyToString(new InputStreamReader(response.getEntity().getContent(), UTF_8)); + assertThat( + responseContent, + both(not(containsString(sourceEscaped))).and( + containsString("REDACTED (`StreamReadFeature.INCLUDE_SOURCE_IN_LOCATION` disabled)") + ) + ); + } + + public void testBulkUpdateWithSourceOnErrorDisabled() throws Exception { + var source = "{\"field\": \"index\",}"; + var sourceEscaped = "{\\\"field\\\": \\\"index\\\",}"; + + var request = new Request("PUT", "/test_index/_bulk"); + request.addParameter(RestUtils.INCLUDE_SOURCE_ON_ERROR_PARAMETER, "false"); + request.setJsonEntity(Strings.format("{\"update\":{\"_id\":\"1\"}}\n{\"doc\":%s}}\n", source)); + + // note: this behavior is not consistent with bulk index actions + // In case of updates by doc, the source is eagerly parsed and will fail the entire request if it cannot be parsed + var exception = assertThrows(ResponseException.class, () -> getRestClient().performRequest(request)); + String response = Streams.copyToString(new InputStreamReader(exception.getResponse().getEntity().getContent(), UTF_8)); + + assertThat( + response, + both(not(containsString(sourceEscaped))).and( + containsString("REDACTED (`StreamReadFeature.INCLUDE_SOURCE_IN_LOCATION` disabled)") + ) + ); + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/rest/action/document/RestIndexActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/rest/action/document/RestIndexActionIT.java new file mode 100644 index 0000000000000..1a27e704ad497 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/rest/action/document/RestIndexActionIT.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.rest.action.document; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.InputStreamReader; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; + +public class RestIndexActionIT extends ESIntegTestCase { + @Override + protected boolean addMockHttpTransport() { + return false; + } + + public void testIndexWithSourceOnErrorDisabled() throws Exception { + var source = "{\"field\": \"value}"; + var sourceEscaped = "{\\\"field\\\": \\\"value}"; + + var request = new Request("POST", "/test_index/_doc/1"); + request.setJsonEntity(source); + + var exception = assertThrows(ResponseException.class, () -> getRestClient().performRequest(request)); + String response = Streams.copyToString(new InputStreamReader(exception.getResponse().getEntity().getContent(), UTF_8)); + assertThat(response, containsString(sourceEscaped)); + + // disable source on error + request.addParameter(RestUtils.INCLUDE_SOURCE_ON_ERROR_PARAMETER, "false"); + exception = assertThrows(ResponseException.class, () -> getRestClient().performRequest(request)); + response = Streams.copyToString(new InputStreamReader(exception.getResponse().getEntity().getContent(), UTF_8)); + assertThat( + response, + both(not(containsString(sourceEscaped))).and( + containsString("REDACTED (`StreamReadFeature.INCLUDE_SOURCE_IN_LOCATION` disabled)") + ) + ); + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/rest/action/document/RestUpdateActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/rest/action/document/RestUpdateActionIT.java new file mode 100644 index 0000000000000..f25a2b8855c06 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/rest/action/document/RestUpdateActionIT.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.rest.action.document; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.InputStreamReader; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; + +public class RestUpdateActionIT extends ESIntegTestCase { + @Override + protected boolean addMockHttpTransport() { + return false; + } + + public void testUpdateByDocWithSourceOnErrorDisabled() throws Exception { + var updateRequest = "{\"doc\":{\"field\": \"value}}"; + var sourceEscaped = "{\\\"field\\\": \\\"value}"; + + var request = new Request("POST", "/test_index/_update/1"); + request.setJsonEntity(updateRequest); + + var exception = assertThrows(ResponseException.class, () -> getRestClient().performRequest(request)); + String response = Streams.copyToString(new InputStreamReader(exception.getResponse().getEntity().getContent(), UTF_8)); + assertThat(response, containsString(sourceEscaped)); + + // disable source on error + request.addParameter(RestUtils.INCLUDE_SOURCE_ON_ERROR_PARAMETER, "false"); + exception = assertThrows(ResponseException.class, () -> getRestClient().performRequest(request)); + response = Streams.copyToString(new InputStreamReader(exception.getResponse().getEntity().getContent(), UTF_8)); + assertThat( + response, + both(not(containsString(sourceEscaped))).and( + containsString("REDACTED (`StreamReadFeature.INCLUDE_SOURCE_IN_LOCATION` disabled)") + ) + ); + } +} diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 77aac3b2de96e..981221a1406cf 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -166,6 +166,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_RESPONSE_PARTIAL = def(8_832_00_0); public static final TransportVersion RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN = def(8_833_00_0); public static final TransportVersion ILM_ADD_SEARCHABLE_SNAPSHOT_ADD_REPLICATE_FOR = def(8_834_00_0); + public static final TransportVersion INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR = def(8_835_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index 91caebc420ffb..cd4602ead42b5 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -84,6 +84,7 @@ public class BulkRequest extends ActionRequest private String globalIndex; private Boolean globalRequireAlias; private Boolean globalRequireDatsStream; + private boolean includeSourceOnError = true; private long sizeInBytes = 0; @@ -103,6 +104,9 @@ public BulkRequest(StreamInput in) throws IOException { } else { incrementalState = BulkRequest.IncrementalState.EMPTY; } + if (in.getTransportVersion().onOrAfter(TransportVersions.INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR)) { + includeSourceOnError = in.readBoolean(); + } // else default value is true } public BulkRequest(@Nullable String globalIndex) { @@ -278,7 +282,7 @@ public BulkRequest add( String pipeline = valueOrDefault(defaultPipeline, globalPipeline); Boolean requireAlias = valueOrDefault(defaultRequireAlias, globalRequireAlias); Boolean requireDataStream = valueOrDefault(defaultRequireDataStream, globalRequireDatsStream); - new BulkRequestParser(true, restApiVersion).parse( + new BulkRequestParser(true, includeSourceOnError, restApiVersion).parse( data, defaultIndex, routing, @@ -341,6 +345,11 @@ public void incrementalState(IncrementalState incrementalState) { this.incrementalState = incrementalState; } + public final BulkRequest includeSourceOnError(boolean includeSourceOnError) { + this.includeSourceOnError = includeSourceOnError; + return this; + } + /** * Note for internal callers (NOT high level rest client), * the global parameter setting is ignored when used with: @@ -399,6 +408,10 @@ public Boolean requireDataStream() { return globalRequireDatsStream; } + public boolean includeSourceOnError() { + return includeSourceOnError; + } + /** * Note for internal callers (NOT high level rest client), * the global parameter setting is ignored when used with: @@ -457,6 +470,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { incrementalState.writeTo(out); } + if (out.getTransportVersion().onOrAfter(TransportVersions.INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR)) { + out.writeBoolean(includeSourceOnError); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java index 9be1feae5ccfe..2f336566953ba 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java @@ -81,12 +81,14 @@ public final class BulkRequestParser { * Create a new parser. 
* * @param deprecateOrErrorOnType whether to allow _type information in the index line; used by BulkMonitoring + * @param includeSourceOnError if to include the source in parser error messages * @param restApiVersion */ - public BulkRequestParser(boolean deprecateOrErrorOnType, RestApiVersion restApiVersion) { + public BulkRequestParser(boolean deprecateOrErrorOnType, boolean includeSourceOnError, RestApiVersion restApiVersion) { this.deprecateOrErrorOnType = deprecateOrErrorOnType; this.config = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE) - .withRestApiVersion(restApiVersion); + .withRestApiVersion(restApiVersion) + .withIncludeSourceOnError(includeSourceOnError); } private static int findNextMarker(byte marker, int from, BytesReference data, boolean lastData) { @@ -480,7 +482,8 @@ private boolean parseActionLine(BytesReference data, int from, int to) throws IO .setDynamicTemplates(dynamicTemplates) .setRequireAlias(requireAlias) .setRequireDataStream(requireDataStream) - .setListExecutedPipelines(currentListExecutedPipelines); + .setListExecutedPipelines(currentListExecutedPipelines) + .setIncludeSourceOnError(config.includeSourceOnError()); if ("create".equals(action)) { indexRequest = indexRequest.create(true); } else if (opType != null) { diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 89cee714a9ff2..33c73898c0394 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -376,6 +376,7 @@ static boolean executeBulkItemRequest( request.getContentType(), request.routing(), request.getDynamicTemplates(), + request.getIncludeSourceOnError(), meteringParserDecorator ); result = primary.applyIndexOperationOnPrimary( diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index 18c420d99f525..106e40771ced5 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -187,6 +187,7 @@ private Tuple, Exception> validateMappings( request.getContentType(), request.routing(), request.getDynamicTemplates(), + request.getIncludeSourceOnError(), XContentMeteringParserDecorator.NOOP ); diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index bcb8a7fb78bf3..e774384f87343 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -116,6 +116,8 @@ public class IndexRequest extends ReplicatedWriteRequest implement private boolean requireDataStream; + private boolean includeSourceOnError = true; + /** * Transient flag denoting that the local request should be routed to a failure store. Not persisted across the wire. 
*/ @@ -210,6 +212,10 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio in.readBoolean(); // obsolete originatesFromUpdateByDoc } } + + if (in.getTransportVersion().onOrAfter(TransportVersions.INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR)) { + includeSourceOnError = in.readBoolean(); + } // else default value is true } public IndexRequest() { @@ -806,6 +812,9 @@ private void writeBody(StreamOutput out) throws IOException { out.writeBoolean(false); // obsolete originatesFromUpdateByDoc } } + if (out.getTransportVersion().onOrAfter(TransportVersions.INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR)) { + out.writeBoolean(includeSourceOnError); + } } @Override @@ -874,6 +883,15 @@ public IndexRequest setRequireDataStream(boolean requireDataStream) { return this; } + public boolean getIncludeSourceOnError() { + return includeSourceOnError; + } + + public IndexRequest setIncludeSourceOnError(boolean includeSourceOnError) { + this.includeSourceOnError = includeSourceOnError; + return this; + } + @Override public Index getConcreteWriteIndex(IndexAbstraction ia, Metadata metadata) { if (DataStream.isFailureStoreFeatureFlagEnabled() && writeToFailureStore) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index d45cafd252428..c12a350057694 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -85,7 +85,11 @@ public ParsedDocument parseDocument(SourceToParse source, MappingLookup mappingL XContentMeteringParserDecorator meteringParserDecorator = source.getMeteringParserDecorator(); try ( XContentParser parser = meteringParserDecorator.decorate( - XContentHelper.createParser(parserConfiguration, source.source(), xContentType) + XContentHelper.createParser( + parserConfiguration.withIncludeSourceOnError(source.getIncludeSourceOnError()), + source.source(), + xContentType + ) ) ) { context = new RootDocumentParserContext(mappingLookup, mappingParserContext, source, parser); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java index 879e0fe785df2..5396fdef0f041 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java @@ -29,6 +29,9 @@ public class SourceToParse { private final XContentType xContentType; private final Map dynamicTemplates; + + private final boolean includeSourceOnError; + private final XContentMeteringParserDecorator meteringParserDecorator; public SourceToParse( @@ -37,6 +40,7 @@ public SourceToParse( XContentType xContentType, @Nullable String routing, Map dynamicTemplates, + boolean includeSourceOnError, XContentMeteringParserDecorator meteringParserDecorator ) { this.id = id; @@ -46,15 +50,26 @@ public SourceToParse( this.xContentType = Objects.requireNonNull(xContentType); this.routing = routing; this.dynamicTemplates = Objects.requireNonNull(dynamicTemplates); + this.includeSourceOnError = includeSourceOnError; this.meteringParserDecorator = meteringParserDecorator; } public SourceToParse(String id, BytesReference source, XContentType xContentType) { - this(id, source, xContentType, null, Map.of(), XContentMeteringParserDecorator.NOOP); + this(id, source, xContentType, null, Map.of(), true, XContentMeteringParserDecorator.NOOP); } public 
SourceToParse(String id, BytesReference source, XContentType xContentType, String routing) { - this(id, source, xContentType, routing, Map.of(), XContentMeteringParserDecorator.NOOP); + this(id, source, xContentType, routing, Map.of(), true, XContentMeteringParserDecorator.NOOP); + } + + public SourceToParse( + String id, + BytesReference source, + XContentType xContentType, + String routing, + Map dynamicTemplates + ) { + this(id, source, xContentType, routing, dynamicTemplates, true, XContentMeteringParserDecorator.NOOP); } public BytesReference source() { @@ -94,4 +109,8 @@ public XContentType getXContentType() { public XContentMeteringParserDecorator getMeteringParserDecorator() { return meteringParserDecorator; } + + public boolean getIncludeSourceOnError() { + return includeSourceOnError; + } } diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index a04bdcb32f2b4..fb8a8b44d8ec3 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -543,9 +543,24 @@ public XContentParserConfiguration contentParserConfig() { * {@link #contentOrSourceParamParser()} for requests that support specifying the request body in the {@code source} param. */ public final XContentParser contentParser() throws IOException { + return contentParser(parserConfig); + } + + private XContentParser contentParser(XContentParserConfiguration parserConfig) throws IOException { BytesReference content = requiredContent(); // will throw exception if body or content type missing return XContentHelper.createParserNotCompressed(parserConfig, content, xContentType.get()); + } + /** + * If there is any content then call {@code applyParser} with the parser modified by {@code includeSourceOnError}, otherwise do nothing. + */ + public final void applyContentParser(boolean includeSourceOnError, CheckedConsumer applyParser) + throws IOException { + if (hasContent()) { + try (XContentParser parser = contentParser(parserConfig.withIncludeSourceOnError(includeSourceOnError))) { + applyParser.accept(parser); + } + } } /** @@ -553,7 +568,7 @@ public final XContentParser contentParser() throws IOException { */ public final void applyContentParser(CheckedConsumer applyParser) throws IOException { if (hasContent()) { - try (XContentParser parser = contentParser()) { + try (XContentParser parser = contentParser(parserConfig)) { applyParser.accept(parser); } } diff --git a/server/src/main/java/org/elasticsearch/rest/RestUtils.java b/server/src/main/java/org/elasticsearch/rest/RestUtils.java index 10e72035cf1f5..68ec7a758e73c 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestUtils.java +++ b/server/src/main/java/org/elasticsearch/rest/RestUtils.java @@ -291,6 +291,12 @@ public static Optional extractTraceId(String traceparent) { */ public static final String REST_TIMEOUT_PARAM = "timeout"; + /** + * The name of the common {@code ?include_source_on_error} query parameter. + * By default, the document source is included in the error response in case of parsing errors. This parameter allows to disable this. + */ + public static final String INCLUDE_SOURCE_ON_ERROR_PARAMETER = "include_source_on_error"; + /** * Extract the {@code ?master_timeout} parameter from the request, imposing the common default of {@code 30s} in case the parameter is * missing. 
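Taken together, the hunks in this commit thread the flag from the REST layer down to document parsing; the helper below is illustrative glue (its name and the surrounding wiring are assumptions), while the individual calls are the ones added here:

    // Sketch of the data flow: REST parameter -> IndexRequest -> SourceToParse -> DocumentParser,
    // which then applies parserConfiguration.withIncludeSourceOnError(source.getIncludeSourceOnError()).
    static SourceToParse sourceToParse(RestRequest restRequest, IndexRequest indexRequest) {
        // ?include_source_on_error, defaults to true (see RestUtils.getIncludeSourceOnError below)
        indexRequest.setIncludeSourceOnError(RestUtils.getIncludeSourceOnError(restRequest));
        return new SourceToParse(
            indexRequest.id(),
            indexRequest.source(),
            indexRequest.getContentType(),
            indexRequest.routing(),
            indexRequest.getDynamicTemplates(),
            indexRequest.getIncludeSourceOnError(),
            XContentMeteringParserDecorator.NOOP
        );
    }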
@@ -329,6 +335,17 @@ public static TimeValue getTimeout(RestRequest restRequest) { return restRequest.paramAsTime(REST_TIMEOUT_PARAM, null); } + /** + * Extract the {@code ?include_source_on_error} parameter from the request, returning {@code true} in case the parameter is missing. + * + * @param restRequest The request from which to extract the {@code ?include_source_on_error} parameter + * @return the value of the {@code ?include_source_on_error} parameter from the request, with a default of {@code true} if the request + */ + public static boolean getIncludeSourceOnError(RestRequest restRequest) { + assert restRequest != null; + return restRequest.paramAsBoolean(INCLUDE_SOURCE_ON_ERROR_PARAMETER, true); + } + // Remove the BWC support for the deprecated ?local parameter. // NOTE: ensure each usage of this method has been deprecated for long enough to remove it. @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index dea7b7138d0d0..944edc2e5e1f8 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener; @@ -103,6 +104,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC boolean defaultRequireDataStream = request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, false); bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.setRefreshPolicy(request.param("refresh")); + bulkRequest.includeSourceOnError(RestUtils.getIncludeSourceOnError(request)); ReleasableBytesReference content = request.requiredContent(); try { @@ -160,20 +162,21 @@ static class ChunkHandler implements BaseRestHandler.RequestBodyChunkConsumer { ChunkHandler(boolean allowExplicitIndex, RestRequest request, Supplier handlerSupplier) { this.request = request; this.handlerSupplier = handlerSupplier; - this.parser = new BulkRequestParser(true, request.getRestApiVersion()).incrementalParser( - request.param("index"), - request.param("routing"), - FetchSourceContext.parseFromRestRequest(request), - request.param("pipeline"), - request.paramAsBoolean(DocWriteRequest.REQUIRE_ALIAS, false), - request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, false), - request.paramAsBoolean("list_executed_pipelines", false), - allowExplicitIndex, - request.getXContentType(), - (indexRequest, type) -> items.add(indexRequest), - items::add, - items::add - ); + this.parser = new BulkRequestParser(true, RestUtils.getIncludeSourceOnError(request), request.getRestApiVersion()) + .incrementalParser( + request.param("index"), + request.param("routing"), + FetchSourceContext.parseFromRestRequest(request), + request.param("pipeline"), + request.paramAsBoolean(DocWriteRequest.REQUIRE_ALIAS, false), + request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, false), + request.paramAsBoolean("list_executed_pipelines", false), + allowExplicitIndex, + request.getXContentType(), + (indexRequest, type) -> 
items.add(indexRequest), + items::add, + items::add + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index d40c6225cc7b4..14c2428d29081 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestActions; @@ -120,6 +121,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC indexRequest.setIfPrimaryTerm(request.paramAsLong("if_primary_term", indexRequest.ifPrimaryTerm())); indexRequest.setRequireAlias(request.paramAsBoolean(DocWriteRequest.REQUIRE_ALIAS, indexRequest.isRequireAlias())); indexRequest.setRequireDataStream(request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, indexRequest.isRequireDataStream())); + indexRequest.setIncludeSourceOnError(RestUtils.getIncludeSourceOnError(request)); String sOpType = request.param("op_type"); String waitForActiveShards = request.param("wait_for_active_shards"); if (waitForActiveShards != null) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java index 57b3a89b2303b..a61be96b37caa 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestActions; @@ -72,7 +73,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC updateRequest.setIfPrimaryTerm(request.paramAsLong("if_primary_term", updateRequest.ifPrimaryTerm())); updateRequest.setRequireAlias(request.paramAsBoolean(DocWriteRequest.REQUIRE_ALIAS, updateRequest.isRequireAlias())); - request.applyContentParser(parser -> { + request.applyContentParser(RestUtils.getIncludeSourceOnError(request), parser -> { updateRequest.fromXContent(parser); IndexRequest upsertRequest = updateRequest.upsertRequest(); if (upsertRequest != null) { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestParserTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestParserTests.java index 9d944d43f4c36..de2dc8f9ea0b5 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestParserTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestParserTests.java @@ -39,7 +39,7 @@ public void testParserCannotBeReusedAfterFailure() { {} """); - BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current()); + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), true, RestApiVersion.current()); BulkRequestParser.IncrementalParser incrementalParser = parser.incrementalParser( null, null, @@ -70,7 +70,7 @@ public void 
testIncrementalParsing() throws IOException { ArrayList> updateRequests = new ArrayList<>(); ArrayList> deleteRequests = new ArrayList<>(); - BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current()); + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), true, RestApiVersion.current()); BulkRequestParser.IncrementalParser incrementalParser = parser.incrementalParser( null, null, @@ -116,7 +116,7 @@ public void testIndexRequest() throws IOException { { "index":{ "_id": "bar" } } {} """); - BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current()); + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), true, RestApiVersion.current()); final AtomicBoolean parsed = new AtomicBoolean(); parser.parse(request, "foo", null, null, null, null, null, null, false, XContentType.JSON, (indexRequest, type) -> { assertFalse(parsed.get()); @@ -152,7 +152,7 @@ public void testDeleteRequest() throws IOException { BytesArray request = new BytesArray(""" { "delete":{ "_id": "bar" } } """); - BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current()); + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), true, RestApiVersion.current()); final AtomicBoolean parsed = new AtomicBoolean(); parser.parse( request, @@ -182,7 +182,7 @@ public void testUpdateRequest() throws IOException { { "update":{ "_id": "bar" } } {} """); - BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current()); + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), true, RestApiVersion.current()); final AtomicBoolean parsed = new AtomicBoolean(); parser.parse(request, "foo", null, null, null, null, null, null, false, XContentType.JSON, (req, type) -> fail(), updateRequest -> { assertFalse(parsed.get()); @@ -218,7 +218,7 @@ public void testBarfOnLackOfTrailingNewline() throws IOException { BytesArray request = new BytesArray(""" { "index":{ "_id": "bar" } } {}"""); - BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current()); + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), true, RestApiVersion.current()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> parser.parse( @@ -266,7 +266,7 @@ public void testFailOnExplicitIndex() { { "index":{ "_index": "foo", "_id": "bar" } } {} """); - BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current()); + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), true, RestApiVersion.current()); IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, @@ -294,7 +294,7 @@ public void testTypesStillParsedForBulkMonitoring() throws IOException { { "index":{ "_type": "quux", "_id": "bar" } } {} """); - BulkRequestParser parser = new BulkRequestParser(false, RestApiVersion.current()); + BulkRequestParser parser = new BulkRequestParser(false, true, RestApiVersion.current()); final AtomicBoolean parsed = new AtomicBoolean(); parser.parse(request, "foo", null, null, null, null, null, null, false, XContentType.JSON, (indexRequest, type) -> { assertFalse(parsed.get()); @@ -313,7 +313,7 @@ public void testParseDeduplicatesParameterStrings() throws IOException { { "index":{ "_index": "bar", "pipeline": "foo", "routing": "blub" } } {} """); - BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current()); + BulkRequestParser parser = new 
BulkRequestParser(randomBoolean(), true, RestApiVersion.current()); final List indexRequests = new ArrayList<>(); parser.parse( request, @@ -343,7 +343,7 @@ public void testFailOnInvalidAction() { { "invalidaction":{ } } {} """); - BulkRequestParser parser = new BulkRequestParser(randomBoolean(), randomFrom(RestApiVersion.values())); + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), true, randomFrom(RestApiVersion.values())); IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, @@ -374,7 +374,7 @@ public void testFailMissingCloseBrace() { { "index":{ } {} """); - BulkRequestParser parser = new BulkRequestParser(randomBoolean(), randomFrom(REST_API_VERSIONS_POST_V8)); + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), true, randomFrom(REST_API_VERSIONS_POST_V8)); IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, @@ -402,7 +402,7 @@ public void testFailExtraKeys() { { "index":{ }, "something": "unexpected" } {} """); - BulkRequestParser parser = new BulkRequestParser(randomBoolean(), randomFrom(REST_API_VERSIONS_POST_V8)); + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), true, randomFrom(REST_API_VERSIONS_POST_V8)); IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, @@ -430,7 +430,7 @@ public void testFailContentAfterClosingBrace() { { "index":{ } } { "something": "unexpected" } {} """); - BulkRequestParser parser = new BulkRequestParser(randomBoolean(), randomFrom(REST_API_VERSIONS_POST_V8)); + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), true, randomFrom(REST_API_VERSIONS_POST_V8)); IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, @@ -458,7 +458,7 @@ public void testListExecutedPipelines() throws IOException { { "index":{ "_id": "bar" } } {} """); - BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current()); + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), true, RestApiVersion.current()); parser.parse(request, "foo", null, null, null, null, null, null, false, XContentType.JSON, (indexRequest, type) -> { assertFalse(indexRequest.getListExecutedPipelines()); }, req -> fail(), req -> fail()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index 78643a2d581cc..0c68ba5ca0837 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; -import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; @@ -668,16 +667,7 @@ public void testTemplateWithoutMatchPredicates() throws Exception { {"foo": "41.12,-71.34", "bar": "41.12,-71.34"} """; ParsedDocument doc = mapperService.documentMapper() - .parse( - new SourceToParse( - "1", - new BytesArray(json), - XContentType.JSON, - null, - Map.of("foo", "geo_point"), - XContentMeteringParserDecorator.NOOP - ) - ); + .parse(new SourceToParse("1", new BytesArray(json), XContentType.JSON, null, Map.of("foo", "geo_point"))); assertThat(doc.rootDoc().getFields("foo"), 
hasSize(1)); assertThat(doc.rootDoc().getFields("bar"), hasSize(1)); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index 8c44b49f36357..459480d1d7316 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -61,7 +61,6 @@ import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.TelemetryPlugin; -import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.script.ScriptContext; @@ -389,14 +388,7 @@ protected static SourceToParse source( XContentBuilder builder = JsonXContent.contentBuilder().startObject(); build.accept(builder); builder.endObject(); - return new SourceToParse( - id, - BytesReference.bytes(builder), - XContentType.JSON, - routing, - dynamicTemplates, - XContentMeteringParserDecorator.NOOP - ); + return new SourceToParse(id, BytesReference.bytes(builder), XContentType.JSON, routing, dynamicTemplates); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequest.java index 638e57207fbeb..36bc2db95932d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequest.java @@ -83,7 +83,7 @@ public MonitoringBulkRequest add( ) throws IOException { // MonitoringBulkRequest accepts a body request that has the same format as the BulkRequest - new BulkRequestParser(false, RestApiVersion.current()).parse( + new BulkRequestParser(false, true, RestApiVersion.current()).parse( content, null, null, diff --git a/x-pack/qa/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/test/CoreTestTranslater.java b/x-pack/qa/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/test/CoreTestTranslater.java index d34303ea803d6..51c9e35c95a3d 100644 --- a/x-pack/qa/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/test/CoreTestTranslater.java +++ b/x-pack/qa/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/test/CoreTestTranslater.java @@ -366,7 +366,7 @@ private boolean handleBulk(ApiCallSection bulk) { bos.write(JsonXContent.jsonXContent.bulkSeparator()); } List indexRequests = new ArrayList<>(); - new BulkRequestParser(false, RestApiVersion.current()).parse( + new BulkRequestParser(false, true, RestApiVersion.current()).parse( bos.bytes(), defaultIndex, defaultRouting, From 776ebda7c597909591bc9bde5382a47475a64699 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 19:57:53 +1100 Subject: [PATCH 101/383] Mute org.elasticsearch.action.search.SearchProgressActionListenerIT testSearchProgressWithQuery #120994 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 16bd65784fca5..7556f724c861f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -277,6 +277,9 @@ tests: issue: 
https://github.com/elastic/elasticsearch/issues/120925 - class: org.elasticsearch.xpack.inference.InferenceGetServicesIT issue: https://github.com/elastic/elasticsearch/issues/120986 +- class: org.elasticsearch.action.search.SearchProgressActionListenerIT + method: testSearchProgressWithQuery + issue: https://github.com/elastic/elasticsearch/issues/120994 # Examples: # From 2ebbad406bcf6bbbeb4835f98048a0d44593c1ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Fern=C3=A1ndez=20Casta=C3=B1o?= Date: Tue, 28 Jan 2025 10:32:27 +0100 Subject: [PATCH 102/383] Defer unpromotable shard refreshes until index refresh blocks are cleared (#120642) This update postpones unpromotable refreshes for indices with an active INDEX_REFRESH_BLOCK until the block is cleared. This ensures refresh operations proceed only when the index is no longer blocked. To avoid indefinite delays, the maximum wait time is governed by the bulk request timeout whereas for explicit refreshes it relies on the fact that the block will be removed eventually. Closes ES-10134 --- docs/changelog/120642.yaml | 5 + .../refresh/TransportShardRefreshAction.java | 9 +- ...ansportUnpromotableShardRefreshAction.java | 83 +++++- .../UnpromotableShardRefreshRequest.java | 21 ++ .../support/replication/PostWriteRefresh.java | 5 +- .../metadata/MetadataCreateIndexService.java | 2 +- .../routing/IndexShardRoutingTable.java | 22 +- ...rtUnpromotableShardRefreshActionTests.java | 249 +++++++++++++++++- .../replication/PostWriteRefreshTests.java | 6 +- .../cluster/routing/TestShardRouting.java | 12 +- 10 files changed, 393 insertions(+), 21 deletions(-) create mode 100644 docs/changelog/120642.yaml diff --git a/docs/changelog/120642.yaml b/docs/changelog/120642.yaml new file mode 100644 index 0000000000000..4709298a0ebb6 --- /dev/null +++ b/docs/changelog/120642.yaml @@ -0,0 +1,5 @@ +pr: 120642 +summary: Defer unpromotable shard refreshes until index refresh blocks are cleared +area: Engine +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index cb667400240f0..6b77a39c32ffe 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -74,7 +74,14 @@ public TransportShardRefreshAction( ReplicaActionExecution.SubjectToCircuitBreaker ); // registers the unpromotable version of shard refresh action - new TransportUnpromotableShardRefreshAction(clusterService, transportService, shardStateAction, actionFilters, indicesService); + new TransportUnpromotableShardRefreshAction( + clusterService, + transportService, + shardStateAction, + actionFilters, + indicesService, + threadPool + ); this.refreshExecutor = transportService.getThreadPool().executor(ThreadPool.Names.REFRESH); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java index 6c24ec2d17604..dd4fbedad98a6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java @@ -9,20 
+9,31 @@ package org.elasticsearch.action.admin.indices.refresh; +import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.broadcast.unpromotable.TransportBroadcastUnpromotableAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.action.shard.ShardStateAction; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_REFRESH_BLOCK; public class TransportUnpromotableShardRefreshAction extends TransportBroadcastUnpromotableAction< UnpromotableShardRefreshRequest, @@ -36,6 +47,8 @@ public class TransportUnpromotableShardRefreshAction extends TransportBroadcastU } private final IndicesService indicesService; + private final ThreadPool threadPool; + private final boolean useRefreshBlock; @Inject public TransportUnpromotableShardRefreshAction( @@ -43,7 +56,28 @@ public TransportUnpromotableShardRefreshAction( TransportService transportService, ShardStateAction shardStateAction, ActionFilters actionFilters, - IndicesService indicesService + IndicesService indicesService, + ThreadPool threadPool + ) { + this( + clusterService, + transportService, + shardStateAction, + actionFilters, + indicesService, + threadPool, + MetadataCreateIndexService.useRefreshBlock(clusterService.getSettings()) + ); + } + + public TransportUnpromotableShardRefreshAction( + ClusterService clusterService, + TransportService transportService, + ShardStateAction shardStateAction, + ActionFilters actionFilters, + IndicesService indicesService, + ThreadPool threadPool, + boolean useRefreshBlock ) { super( NAME, @@ -55,6 +89,53 @@ public TransportUnpromotableShardRefreshAction( transportService.getThreadPool().executor(ThreadPool.Names.REFRESH) ); this.indicesService = indicesService; + this.threadPool = threadPool; + this.useRefreshBlock = useRefreshBlock; + } + + @Override + protected void doExecute(Task task, UnpromotableShardRefreshRequest request, ActionListener listener) { + beforeDispatchingRequestToUnpromotableShards(request, listener.delegateFailure((l, unused) -> super.doExecute(task, request, l))); + } + + private void beforeDispatchingRequestToUnpromotableShards(UnpromotableShardRefreshRequest request, ActionListener listener) { + if (useRefreshBlock == false) { + listener.onResponse(null); + return; + } + + var clusterStateObserver = new ClusterStateObserver(clusterService, request.getTimeout(), logger, threadPool.getThreadContext()); + + if (isIndexBlockedForRefresh(request.shardId().getIndexName(), clusterStateObserver.setAndGetObservedState()) == false) { + listener.onResponse(null); + return; + } + + clusterStateObserver.waitForNextChange(new ClusterStateObserver.Listener() { + @Override + 
public void onNewClusterState(ClusterState state) { + listener.onResponse(null); + } + + @Override + public void onClusterServiceClose() { + listener.onFailure(new NodeClosedException(clusterService.localNode())); + } + + @Override + public void onTimeout(TimeValue timeout) { + listener.onFailure( + new ElasticsearchTimeoutException( + "shard refresh timed out waiting for index block to be removed", + new ClusterBlockException(Map.of(request.shardId().getIndexName(), Set.of(INDEX_REFRESH_BLOCK))) + ) + ); + } + }, clusterState -> isIndexBlockedForRefresh(request.shardId().getIndexName(), clusterState) == false); + } + + private static boolean isIndexBlockedForRefresh(String index, ClusterState state) { + return state.blocks().hasIndexBlock(index, INDEX_REFRESH_BLOCK); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/UnpromotableShardRefreshRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/UnpromotableShardRefreshRequest.java index f0629bee5f72f..07b2bc1fcf7c1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/UnpromotableShardRefreshRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/UnpromotableShardRefreshRequest.java @@ -16,6 +16,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.engine.Engine; import java.io.IOException; @@ -26,22 +28,36 @@ public class UnpromotableShardRefreshRequest extends BroadcastUnpromotableReques private final long primaryTerm; private final long segmentGeneration; + private final TimeValue timeout; public UnpromotableShardRefreshRequest( IndexShardRoutingTable indexShardRoutingTable, long primaryTerm, long segmentGeneration, boolean failShardOnError + ) { + this(indexShardRoutingTable, primaryTerm, segmentGeneration, failShardOnError, null); + } + + public UnpromotableShardRefreshRequest( + IndexShardRoutingTable indexShardRoutingTable, + long primaryTerm, + long segmentGeneration, + boolean failShardOnError, + @Nullable TimeValue timeout ) { super(indexShardRoutingTable, failShardOnError); this.primaryTerm = primaryTerm; this.segmentGeneration = segmentGeneration; + this.timeout = timeout; } public UnpromotableShardRefreshRequest(StreamInput in) throws IOException { super(in); segmentGeneration = in.readVLong(); primaryTerm = in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0) ? 
in.readVLong() : Engine.UNKNOWN_PRIMARY_TERM; + // The timeout is only used by the request sender, therefore we don't write it over the wire + timeout = null; } @Override @@ -70,6 +86,11 @@ public long getPrimaryTerm() { return primaryTerm; } + @Nullable + public TimeValue getTimeout() { + return timeout; + } + @Override public String toString() { return Strings.format( diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java index 7414aeeb2c405..997d859ec35a2 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java @@ -65,7 +65,7 @@ public void onFailure(Exception e) { } }); case IMMEDIATE -> immediate(indexShard, listener.delegateFailureAndWrap((l, r) -> { - if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0) { + if (indexShard.getReplicationGroup().getRoutingTable().allUnpromotableShards().size() > 0) { sendUnpromotableRequests(indexShard, r.generation(), true, l, postWriteRefreshTimeout); } else { l.onResponse(true); @@ -136,7 +136,8 @@ private void sendUnpromotableRequests( indexShard.getReplicationGroup().getRoutingTable(), indexShard.getOperationPrimaryTerm(), generation, - true + true, + postWriteRefreshTimeout ); transportService.sendRequest( transportService.getLocalNode(), diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index bfbdbc5c91aa4..97b83b1dc1562 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -1761,7 +1761,7 @@ public static void validateStoreTypeSetting(Settings indexSettings) { } } - private static boolean useRefreshBlock(Settings settings) { + public static boolean useRefreshBlock(Settings settings) { return DiscoveryNode.isStateless(settings) && settings.getAsBoolean(USE_INDEX_REFRESH_BLOCK_SETTING_NAME, false); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index 39e31ea70fa32..74c2c1d14b77c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -52,6 +52,7 @@ public class IndexShardRoutingTable { final List replicas; final List activeShards; final List assignedShards; + private final List assignedUnpromotableShards; private final List unpromotableShards; /** * The initializing list, including ones that are initializing on a target node because of relocation. 
@@ -71,8 +72,9 @@ public class IndexShardRoutingTable { List replicas = new ArrayList<>(); List activeShards = new ArrayList<>(); List assignedShards = new ArrayList<>(); - List unpromotableShards = new ArrayList<>(); + List assignedUnpromotableShards = new ArrayList<>(); List allInitializingShards = new ArrayList<>(); + List unpromotableShards = new ArrayList<>(); boolean allShardsStarted = true; int activeSearchShardCount = 0; int totalSearchShardCount = 0; @@ -95,6 +97,9 @@ public class IndexShardRoutingTable { if (shard.initializing()) { allInitializingShards.add(shard); } + if (shard.isPromotableToPrimary() == false) { + unpromotableShards.add(shard); + } if (shard.relocating()) { // create the target initializing shard routing on the node the shard is relocating to allInitializingShards.add(shard.getTargetRelocatingShard()); @@ -102,13 +107,14 @@ public class IndexShardRoutingTable { assert shard.getTargetRelocatingShard().assignedToNode() : "relocating to unassigned " + shard.getTargetRelocatingShard(); assignedShards.add(shard.getTargetRelocatingShard()); if (shard.getTargetRelocatingShard().isPromotableToPrimary() == false) { + assignedUnpromotableShards.add(shard.getTargetRelocatingShard()); unpromotableShards.add(shard.getTargetRelocatingShard()); } } if (shard.assignedToNode()) { assignedShards.add(shard); if (shard.isPromotableToPrimary() == false) { - unpromotableShards.add(shard); + assignedUnpromotableShards.add(shard); } } if (shard.state() != ShardRoutingState.STARTED) { @@ -117,10 +123,13 @@ public class IndexShardRoutingTable { } assert shards.isEmpty() == false : "cannot have an empty shard routing table"; assert primary != null : shards; + assert unpromotableShards.containsAll(assignedUnpromotableShards) + : unpromotableShards + " does not contain all assigned unpromotable shards " + assignedUnpromotableShards; this.primary = primary; this.replicas = CollectionUtils.wrapUnmodifiableOrEmptySingleton(replicas); this.activeShards = CollectionUtils.wrapUnmodifiableOrEmptySingleton(activeShards); this.assignedShards = CollectionUtils.wrapUnmodifiableOrEmptySingleton(assignedShards); + this.assignedUnpromotableShards = CollectionUtils.wrapUnmodifiableOrEmptySingleton(assignedUnpromotableShards); this.unpromotableShards = CollectionUtils.wrapUnmodifiableOrEmptySingleton(unpromotableShards); this.allInitializingShards = CollectionUtils.wrapUnmodifiableOrEmptySingleton(allInitializingShards); this.allShardsStarted = allShardsStarted; @@ -185,6 +194,15 @@ public List assignedShards() { * @return a {@link List} of shards */ public List unpromotableShards() { + return this.assignedUnpromotableShards; + } + + /** + * Returns a {@link List} of all unpromotable shards, including unassigned shards + * + * @return a {@link List} of shards + */ + public List allUnpromotableShards() { return this.unpromotableShards; } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshActionTests.java index 4ad6a9a7a972d..4b038949e9ca3 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshActionTests.java @@ -9,27 +9,53 @@ package org.elasticsearch.action.admin.indices.refresh; +import org.elasticsearch.ElasticsearchTimeoutException; +import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.action.shard.ShardStateAction; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.tasks.Task; -import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.transport.MockTransport; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; +import static java.util.Collections.emptySet; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -37,25 +63,40 @@ public class TransportUnpromotableShardRefreshActionTests extends ESTestCase { private ThreadPool threadPool; private ClusterService clusterService; + private TransportService transportService; + private DiscoveryNode localNode; @Override public void setUp() throws Exception { super.setUp(); threadPool = new TestThreadPool("TransportUnpromotableShardRefreshActionTests"); - clusterService = ClusterServiceUtils.createClusterService(threadPool); + localNode = DiscoveryNodeUtils.create("local"); + clusterService = createClusterService(threadPool, localNode); + final MockTransport transport = new MockTransport(); + transportService = transport.createTransportService( + Settings.EMPTY, + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + boundTransportAddress -> localNode, + null, + emptySet() + ); + + transportService.start(); + transportService.acceptIncomingRequests(); } @Override public void tearDown() throws Exception { super.tearDown(); clusterService.close(); + transportService.stop(); ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); } public void 
testRespondOKToRefreshRequestBeforeShardIsCreated() { final var shardId = new ShardId(new Index(randomIdentifier(), randomUUID()), between(0, 3)); - final var shardRouting = TestShardRouting.newShardRouting(shardId, randomUUID(), true, ShardRoutingState.STARTED); - final var indexShardRoutingTable = new IndexShardRoutingTable.Builder(shardId).addShard(shardRouting).build(); + final var indexShardRoutingTable = createShardRoutingTableWithPrimaryAndSearchShards(shardId, true); final var request = new UnpromotableShardRefreshRequest( indexShardRoutingTable, @@ -64,8 +105,6 @@ public void testRespondOKToRefreshRequestBeforeShardIsCreated() { randomBoolean() ); - final TransportService transportService = mock(TransportService.class); - when(transportService.getThreadPool()).thenReturn(threadPool); final IndicesService indicesService = mock(IndicesService.class); if (randomBoolean()) { when(indicesService.indexService(shardId.getIndex())).thenReturn(null); @@ -75,16 +114,206 @@ public void testRespondOKToRefreshRequestBeforeShardIsCreated() { when(indexService.hasShard(shardId.id())).thenReturn(false); } - final var action = new TransportUnpromotableShardRefreshAction( + // Register the action + new TransportUnpromotableShardRefreshAction( clusterService, transportService, mock(ShardStateAction.class), - mock(ActionFilters.class), - indicesService + new ActionFilters(Set.of()), + indicesService, + mock(ThreadPool.class) ); final PlainActionFuture future = new PlainActionFuture<>(); - action.unpromotableShardOperation(mock(Task.class), request, future); + transportService.sendRequest(localNode, TransportUnpromotableShardRefreshAction.NAME, request, expectSuccess(future::onResponse)); + assertThat(safeGet(future), sameInstance(ActionResponse.Empty.INSTANCE)); + } + + public void testActionWaitsUntilIndexRefreshBlocksAreCleared() { + final var shardId = new ShardId(new Index(randomIdentifier(), randomUUID()), between(0, 3)); + final var withSearchShards = randomBoolean(); + final var indexShardRoutingTable = createShardRoutingTableWithPrimaryAndSearchShards(shardId, withSearchShards); + + final var indicesService = mock(IndicesService.class); + final var unpromotableShardOperationExecuted = new AtomicBoolean(false); + final var waitForBlocks = randomBoolean(); + // Register the action + new TransportUnpromotableShardRefreshAction( + clusterService, + transportService, + mock(ShardStateAction.class), + new ActionFilters(Set.of()), + indicesService, + threadPool, + waitForBlocks + ) { + @Override + protected void unpromotableShardOperation( + Task task, + UnpromotableShardRefreshRequest request, + ActionListener responseListener + ) { + unpromotableShardOperationExecuted.set(true); + ActionListener.completeWith(responseListener, () -> ActionResponse.Empty.INSTANCE); + } + }; + + var withRefreshBlock = randomBoolean(); + if (withRefreshBlock) { + setState( + clusterService, + ClusterState.builder(clusterService.state()) + .blocks(ClusterBlocks.builder().addIndexBlock(shardId.getIndexName(), IndexMetadata.INDEX_REFRESH_BLOCK)) + ); + } + + final var future = new PlainActionFuture(); + final var request = new UnpromotableShardRefreshRequest( + indexShardRoutingTable, + randomNonNegativeLong(), + randomNonNegativeLong(), + randomBoolean(), + // Ensure that the request doesn't timeout + TimeValue.timeValueSeconds(15) + ); + transportService.sendRequest(localNode, TransportUnpromotableShardRefreshAction.NAME, request, expectSuccess(future::onResponse)); + + // If the index is not blocked for refreshes, or 
if the node is not configured to wait for blocked refreshes, + // the action should return a response immediately. + if (withRefreshBlock && waitForBlocks) { + assertThat(future.isDone(), is(false)); + assertThat(unpromotableShardOperationExecuted.get(), is(false)); + + if (randomBoolean()) { + setState(clusterService, ClusterState.builder(clusterService.state()).version(clusterService.state().version() + 1)); + assertThat(future.isDone(), is(false)); + assertThat(unpromotableShardOperationExecuted.get(), is(false)); + } + + setState( + clusterService, + ClusterState.builder(clusterService.state()) + .blocks(ClusterBlocks.builder().removeIndexBlock(shardId.getIndexName(), IndexMetadata.INDEX_REFRESH_BLOCK)) + ); + } + assertThat(safeGet(future), sameInstance(ActionResponse.Empty.INSTANCE)); + assertThat(unpromotableShardOperationExecuted.get(), is(withSearchShards)); + } + + public void testActionWaitsUntilShardRefreshBlocksAreClearedMightTimeout() { + final var shardId = new ShardId(new Index(randomIdentifier(), randomUUID()), between(0, 3)); + final var indexShardRoutingTable = createShardRoutingTableWithPrimaryAndSearchShards(shardId, true); + + final IndicesService indicesService = mock(IndicesService.class); + // Register the action + new TransportUnpromotableShardRefreshAction( + clusterService, + transportService, + mock(ShardStateAction.class), + new ActionFilters(Set.of()), + indicesService, + threadPool, + true + ) { + @Override + protected void unpromotableShardOperation( + Task task, + UnpromotableShardRefreshRequest request, + ActionListener responseListener + ) { + assert false : "Unexpected call"; + throw new AssertionError("Unexpected call"); + } + }; + + setState( + clusterService, + ClusterState.builder(clusterService.state()) + .blocks(ClusterBlocks.builder().addIndexBlock(shardId.getIndexName(), IndexMetadata.INDEX_REFRESH_BLOCK)) + ); + + final var countDownLatch = new CountDownLatch(1); + final var request = new UnpromotableShardRefreshRequest( + indexShardRoutingTable, + randomNonNegativeLong(), + randomNonNegativeLong(), + randomBoolean(), + TimeValue.timeValueSeconds(5) + ); + transportService.sendRequest(localNode, TransportUnpromotableShardRefreshAction.NAME, request, expectError(e -> { + assertThat(e.getCause(), instanceOf(ElasticsearchTimeoutException.class)); + assertThat(e.getCause().getMessage(), containsString("shard refresh timed out waiting for index block to be removed")); + assertThat(e.getRootCause(), instanceOf(ClusterBlockException.class)); + countDownLatch.countDown(); + })); + + assertThat(countDownLatch.getCount(), is(equalTo(1L))); + + if (randomBoolean()) { + setState(clusterService, ClusterState.builder(clusterService.state()).version(clusterService.state().version() + 1)); + assertThat(countDownLatch.getCount(), is(equalTo(1L))); + } + + safeAwait(countDownLatch); + } + + private IndexShardRoutingTable createShardRoutingTableWithPrimaryAndSearchShards(ShardId shardId, boolean withSearchShards) { + final var shardRouting = TestShardRouting.newShardRouting( + shardId, + randomUUID(), + true, + ShardRoutingState.STARTED, + ShardRouting.Role.INDEX_ONLY + ); + final var indexShardRoutingTableBuilder = new IndexShardRoutingTable.Builder(shardId).addShard(shardRouting); + + if (withSearchShards) { + final var unpromotableShardRouting = TestShardRouting.newShardRouting( + shardId, + localNode.getId(), + false, + ShardRoutingState.INITIALIZING, + ShardRouting.Role.SEARCH_ONLY + ); + indexShardRoutingTableBuilder.addShard(unpromotableShardRouting); + } 
+ return indexShardRoutingTableBuilder.build(); + } + + private TransportResponseHandler expectSuccess(Consumer onResponse) { + return responseHandler(onResponse, ESTestCase::fail); + } + + private TransportResponseHandler expectError(Consumer onException) { + return responseHandler(r -> { assert false : r; }, onException); + } + + private TransportResponseHandler responseHandler( + Consumer onResponse, + Consumer onException + ) { + return new TransportResponseHandler<>() { + + @Override + public ActionResponse.Empty read(StreamInput in) { + return ActionResponse.Empty.INSTANCE; + } + + @Override + public Executor executor() { + return TransportResponseHandler.TRANSPORT_WORKER; + } + + @Override + public void handleResponse(ActionResponse.Empty response) { + onResponse.accept(response); + } + + @Override + public void handleException(TransportException exp) { + onException.accept(exp); + } + }; } } diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/PostWriteRefreshTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/PostWriteRefreshTests.java index 4337d4c3d9e99..9897fe9a42547 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/PostWriteRefreshTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/PostWriteRefreshTests.java @@ -162,7 +162,7 @@ public void testPrimaryWithUnpromotables() throws IOException { new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "message"), ShardRouting.Role.SEARCH_ONLY ); - when(routingTable.unpromotableShards()).thenReturn(List.of(shardRouting)); + when(routingTable.allUnpromotableShards()).thenReturn(List.of(shardRouting)); when(routingTable.shardId()).thenReturn(shardId); WriteRequest.RefreshPolicy policy = randomFrom(WriteRequest.RefreshPolicy.IMMEDIATE, WriteRequest.RefreshPolicy.WAIT_UNTIL); postWriteRefresh.refreshShard(policy, primary, result.getTranslogLocation(), f, postWriteRefreshTimeout); @@ -238,9 +238,9 @@ public void testWaitForWithNullLocationCompletedImmediately() throws IOException ); // Randomly test scenarios with and without unpromotables if (randomBoolean()) { - when(routingTable.unpromotableShards()).thenReturn(Collections.emptyList()); + when(routingTable.allUnpromotableShards()).thenReturn(Collections.emptyList()); } else { - when(routingTable.unpromotableShards()).thenReturn(List.of(shardRouting)); + when(routingTable.allUnpromotableShards()).thenReturn(List.of(shardRouting)); } WriteRequest.RefreshPolicy policy = WriteRequest.RefreshPolicy.WAIT_UNTIL; postWriteRefresh.refreshShard(policy, primary, null, f, postWriteRefreshTimeout); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java index 04725eedcdfdb..6d3fe05620f42 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java @@ -128,6 +128,16 @@ public static ShardRouting newShardRouting(String index, int shardId, String cur } public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId, boolean primary, ShardRoutingState state) { + return newShardRouting(shardId, currentNodeId, primary, state, ShardRouting.Role.DEFAULT); + } + + public static ShardRouting newShardRouting( + ShardId shardId, + String currentNodeId, + boolean primary, + ShardRoutingState state, + ShardRouting.Role role + ) { 
assertNotEquals(ShardRoutingState.RELOCATING, state); return new ShardRouting( shardId, @@ -140,7 +150,7 @@ public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId buildRelocationFailureInfo(state), buildAllocationId(state), ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE, - ShardRouting.Role.DEFAULT + role ); } From ee0ad557e63a69f62dbe6571ddce8ac0dba5df83 Mon Sep 17 00:00:00 2001 From: Charlotte Hoblik <116336412+charlotte-hoblik@users.noreply.github.com> Date: Tue, 28 Jan 2025 10:54:20 +0100 Subject: [PATCH 103/383] Fix typo in tutorial (#120928) --- docs/reference/quickstart/full-text-filtering-tutorial.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/quickstart/full-text-filtering-tutorial.asciidoc b/docs/reference/quickstart/full-text-filtering-tutorial.asciidoc index b602ee5076434..05c48813c110f 100644 --- a/docs/reference/quickstart/full-text-filtering-tutorial.asciidoc +++ b/docs/reference/quickstart/full-text-filtering-tutorial.asciidoc @@ -107,7 +107,7 @@ PUT /cooking_blog/_mapping <1> The `standard` analyzer is used by default for `text` fields if an `analyzer` isn't specified. It's included here for demonstration purposes. <2> <> are used here to index `text` fields as both `text` and `keyword` <>. This enables both full-text search and exact matching/filtering on the same field. Note that if you used <>, these multi-fields would be created automatically. -<3> The <> prevents indexing values longer than 256 characters in the `keyword` field. Again this is the default value, but it's included here for for demonstration purposes. +<3> The <> prevents indexing values longer than 256 characters in the `keyword` field. Again this is the default value, but it's included here for demonstration purposes. It helps to save disk space and avoid potential issues with Lucene's term byte-length limit. [TIP] From e18baa12faccadcc4000a1984d1405af91ceb6e4 Mon Sep 17 00:00:00 2001 From: Sylvain Morin <56630013+sylvain-morin@users.noreply.github.com> Date: Tue, 28 Jan 2025 10:56:33 +0100 Subject: [PATCH 104/383] Minor fix in documentation (#119385) Co-authored-by: Iraklis Psaroudakis --- docs/reference/setup/advanced-configuration.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/setup/advanced-configuration.asciidoc b/docs/reference/setup/advanced-configuration.asciidoc index ff80b51f0408b..0c60b1893a860 100644 --- a/docs/reference/setup/advanced-configuration.asciidoc +++ b/docs/reference/setup/advanced-configuration.asciidoc @@ -53,7 +53,7 @@ or a range of versions followed by a colon. + To apply a setting to a specific version and any later versions, omit the upper bound of the range. 
-For example, this setting applies to Java 8 and later: +For example, this setting applies to Java 17 and later: + [source,text] ------------------------------------- From 7837a96ce5115220274d3b80fd29969f8ab46c46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Tue, 28 Jan 2025 11:02:28 +0100 Subject: [PATCH 105/383] [DOCS] Adds EIS reference docs (#120706) --- .../inference/elastic-infer-service.asciidoc | 124 ++++++++++++++++++ .../inference/inference-apis.asciidoc | 1 + .../inference/put-inference.asciidoc | 1 + 3 files changed, 126 insertions(+) create mode 100644 docs/reference/inference/elastic-infer-service.asciidoc diff --git a/docs/reference/inference/elastic-infer-service.asciidoc b/docs/reference/inference/elastic-infer-service.asciidoc new file mode 100644 index 0000000000000..f78bfa967cceb --- /dev/null +++ b/docs/reference/inference/elastic-infer-service.asciidoc @@ -0,0 +1,124 @@ +[[infer-service-elastic]] +=== Elastic {infer-cap} Service (EIS) + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-inference[{infer-cap} APIs]. +-- + +Creates an {infer} endpoint to perform an {infer} task with the `elastic` service. + + +[discrete] +[[infer-service-elastic-api-request]] +==== {api-request-title} + + +`PUT /_inference//` + +[discrete] +[[infer-service-elastic-api-path-params]] +==== {api-path-parms-title} + + +``:: +(Required, string) +include::inference-shared.asciidoc[tag=inference-id] + +``:: +(Required, string) +include::inference-shared.asciidoc[tag=task-type] ++ +-- +Available task types: + +* `chat_completion`, +* `sparse_embedding`. +-- + +[NOTE] +==== +The `chat_completion` task type only supports streaming and only through the `_unified` API. + +include::inference-shared.asciidoc[tag=chat-completion-docs] +==== + +[discrete] +[[infer-service-elastic-api-request-body]] +==== {api-request-body-title} + + +`max_chunking_size`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] + +`overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-overlap] + +`sentence_overlap`::: +(Optional, integer) +include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap] + +`strategy`::: +(Optional, string) +include::inference-shared.asciidoc[tag=chunking-settings-strategy] + +`service`:: +(Required, string) +The type of service supported for the specified task type. In this case, +`elastic`. + +`service_settings`:: +(Required, object) +include::inference-shared.asciidoc[tag=service-settings] + +`model_id`::: +(Required, string) +The name of the model to use for the {infer} task. + +`rate_limit`::: +(Optional, object) +By default, the `elastic` service sets the number of requests allowed per minute to `1000` in case of `sparse_embedding` and `240` in case of `chat_completion`. +This helps to minimize the number of rate limit errors returned. +To modify this, set the `requests_per_minute` setting of this object in your service settings: ++ +-- +include::inference-shared.asciidoc[tag=request-per-minute-example] +-- + + +[discrete] +[[inference-example-elastic]] +==== Elastic {infer-cap} Service example + + +The following example shows how to create an {infer} endpoint called `elser-model-eis` to perform a `text_embedding` task type. 
+ +[source,console] +------------------------------------------------------------ +PUT _inference/sparse_embedding/elser-model-eis +{ + "service": "elastic", + "service_settings": { + "model_name": "elser" + } +} + +------------------------------------------------------------ +// TEST[skip:TBD] + +The following example shows how to create an {infer} endpoint called `chat-completion-endpoint` to perform a `chat_completion` task type. + +[source,console] +------------------------------------------------------------ +PUT /_inference/chat_completion/chat-completion-endpoint +{ + "service": "elastic", + "service_settings": { + "model_id": "model-1" + } +} +------------------------------------------------------------ +// TEST[skip:TBD] \ No newline at end of file diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index 6c97f388788f7..aa1d54de60391 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -136,6 +136,7 @@ include::chat-completion-inference.asciidoc[] include::put-inference.asciidoc[] include::stream-inference.asciidoc[] include::update-inference.asciidoc[] +include::elastic-infer-service.asciidoc[] include::service-alibabacloud-ai-search.asciidoc[] include::service-amazon-bedrock.asciidoc[] include::service-anthropic.asciidoc[] diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 4e149667d6298..6e33619c11e59 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -59,6 +59,7 @@ The create {infer} API enables you to create an {infer} endpoint and configure a * Avoid creating multiple endpoints for the same model unless required, as each endpoint consumes significant resources. ==== +You can create an {infer} endpoint that uses the <> to perform {infer} tasks as a service without the need of deploying a model in your environment. The following integrations are available through the {infer} API. You can find the available task types next to the integration name. From 636e3645acedde0ff3e19b643fc84ccfb63a582c Mon Sep 17 00:00:00 2001 From: Sean Story Date: Tue, 28 Jan 2025 04:09:15 -0600 Subject: [PATCH 106/383] Clarify need to submit for authorization (#119460) --- .../reference/connector/docs/connectors-box.asciidoc | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/reference/connector/docs/connectors-box.asciidoc b/docs/reference/connector/docs/connectors-box.asciidoc index 3e95f15d16ccd..265ccd52059f9 100644 --- a/docs/reference/connector/docs/connectors-box.asciidoc +++ b/docs/reference/connector/docs/connectors-box.asciidoc @@ -105,7 +105,7 @@ Save the refresh token from the response. You'll need this for the connector con * "Write all files and folders stored in Box" in Application Scopes * "Make API calls using the as-user header" in Advanced Features 3. Select `App + Enterprise Access` in App Access Level. -4. Authorize your application from the admin console. +4. Authorize your application from the admin console. If you do not have permission, you may need to submit the application for authorization. Save the *Client Credentials* and *Enterprise ID*. You'll need these to configure the connector. [discrete#es-connectors-box-configuration] @@ -121,7 +121,7 @@ The Client ID to authenticate with Box instance. The Client Secret to authenticate with Box instance. 
`Refresh Token` (required if Box Account is Box Free):: -The Refresh Token to generate Access Token. +The Refresh Token to generate Access Token. *NOTE:* If the process terminates, you'll need to generate a new refresh token. `Enterprise ID` (required if Box Account is Box Enterprise):: @@ -179,7 +179,7 @@ See <>. See <>. -// Closing the collapsible section +// Closing the collapsible section =============== @@ -275,7 +275,7 @@ Save the refresh token from the response. You'll need this for the connector con * "Write all files and folders stored in Box" in Application Scopes * "Make API calls using the as-user header" in Advanced Features 3. Select `App + Enterprise Access` in App Access Level. -4. Authorize your application from the admin console. +4. Authorize your application from the admin console. If you do not have permission, you may need to submit the application for authorization. Save the *Client Credentials* and *Enterprise ID*. You'll need these to configure the connector. [discrete#es-connectors-box-client-configuration] @@ -291,7 +291,7 @@ The Client ID to authenticate with Box instance. The Client Secret to authenticate with Box instance. `Refresh Token` (required if Box Account is Box Free):: -The Refresh Token to generate Access Token. +The Refresh Token to generate Access Token. *NOTE:* If the process terminates, you'll need to generate a new refresh token. `Enterprise ID` (required if Box Account is Box Enterprise):: @@ -375,5 +375,5 @@ See <>. See <>. -// Closing the collapsible section +// Closing the collapsible section =============== From 38b0e925f5467e4b4b5180606b0199a3cf2ae386 Mon Sep 17 00:00:00 2001 From: Pius Fung Date: Tue, 28 Jan 2025 02:10:43 -0800 Subject: [PATCH 107/383] Add warning on scripted metric aggregation's intermediate state memory usage (#119379) --- .../aggregations/metrics/scripted-metric-aggregation.asciidoc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc index 16879450c65d8..4e20d01f32555 100644 --- a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -9,7 +9,9 @@ A metric aggregation that executes using scripts to provide a metric output. WARNING: `scripted_metric` is not available in {serverless-full}. WARNING: Using scripts can result in slower search speeds. See -<>. +<>. When using a scripted metric aggregation, its intermediate state is serialized +into an in-memory byte array for transmission to other nodes during the aggregation process. +Consequently, a complex scripted metric aggregation may also encounter the 2GB limitation imposed on Java arrays. Example: From b94a20e2cd286b149e0a881d7671888bf782ee80 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Tue, 28 Jan 2025 11:15:25 +0100 Subject: [PATCH 108/383] Reset relocation/allocation failure counter on node join/shutdown (#119968) We prevent retries of allocations/relocations once they see index.allocation.max_retries failed attempts (default 5). In #108987, we added reseting the allocation failure counters when a node joins the cluster. As discussed in the linked discussion, it would make sense to extend this reset also to relocations AND also consider node shutdown events. With this change we reset both allocation/relocation failures if a new node joins the cluster or a shutdown metadata is applied. 
The subset of shutdown events that we consider and how we track them is more or less copied from what was done for #106998. To me the logic seemed to make sense here too. Closes ES-10492 --- docs/changelog/119968.yaml | 5 + .../allocation/AllocationFailuresResetIT.java | 85 ++++- .../cluster/routing/RoutingNodes.java | 78 ++++- .../routing/allocation/AllocationService.java | 62 +++- .../allocation/allocator/ShardsAllocator.java | 2 +- .../AllocationFailuresResetOnShutdownIT.java | 327 ++++++++++++++++++ 6 files changed, 548 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/119968.yaml create mode 100644 x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/AllocationFailuresResetOnShutdownIT.java diff --git a/docs/changelog/119968.yaml b/docs/changelog/119968.yaml new file mode 100644 index 0000000000000..6d308ae853112 --- /dev/null +++ b/docs/changelog/119968.yaml @@ -0,0 +1,5 @@ +pr: 119968 +summary: Reset relocation/allocation failure counter on node join/shutdown +area: Allocation +type: enhancement +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/AllocationFailuresResetIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/AllocationFailuresResetIT.java index d8834dede2db3..391829c7caddd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/AllocationFailuresResetIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/AllocationFailuresResetIT.java @@ -9,17 +9,29 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.Level; +import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.MockIndexEventListener; +import org.elasticsearch.test.MockLog; import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_PREFIX; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.notNullValue; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class AllocationFailuresResetIT extends ESIntegTestCase { @@ -49,7 +61,7 @@ private void removeAllocationFailuresInjection(String node) { private void awaitShardAllocMaxRetries() throws Exception { var maxRetries = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(internalCluster().getDefaultSettings()); assertBusy(() -> { - var state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); + var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); var index = state.getRoutingTable().index(INDEX); assertNotNull(index); var shard = index.shard(SHARD).primaryShard(); @@ -62,7 +74,7 @@ private void awaitShardAllocMaxRetries() throws Exception { private 
void awaitShardAllocSucceed() throws Exception { assertBusy(() -> { - var state = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState(); + var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); var index = state.getRoutingTable().index(INDEX); assertNotNull(index); var shard = index.shard(SHARD).primaryShard(); @@ -72,14 +84,77 @@ private void awaitShardAllocSucceed() throws Exception { }); } - public void testResetFailuresOnNodeJoin() throws Exception { + public void testResetAllocationFailuresOnNodeJoin() throws Exception { var node1 = internalCluster().startNode(); injectAllocationFailures(node1); prepareCreate(INDEX, indexSettings(1, 0)).execute(); awaitShardAllocMaxRetries(); removeAllocationFailuresInjection(node1); - internalCluster().startNode(); - awaitShardAllocSucceed(); + try (var mockLog = MockLog.capture(RoutingNodes.class)) { + var shardId = internalCluster().clusterService().state().routingTable().index(INDEX).shard(SHARD).shardId(); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "log resetting failed allocations", + RoutingNodes.class.getName(), + Level.INFO, + Strings.format(RoutingNodes.RESET_FAILED_ALLOCATION_COUNTER_LOG_MSG, 1, List.of(shardId)) + ) + ); + internalCluster().startNode(); + awaitShardAllocSucceed(); + mockLog.assertAllExpectationsMatched(); + } } + public void testResetRelocationFailuresOnNodeJoin() throws Exception { + String node1 = internalCluster().startNode(); + createIndex(INDEX, 1, 0); + ensureGreen(INDEX); + final var failRelocation = new AtomicBoolean(true); + String node2 = internalCluster().startNode(); + internalCluster().getInstance(MockIndexEventListener.TestEventListener.class, node2).setNewDelegate(new IndexEventListener() { + @Override + public void beforeIndexCreated(Index index, Settings indexSettings) { + if (failRelocation.get()) { + throw new RuntimeException("FAIL"); + } + } + }); + updateIndexSettings(Settings.builder().put(INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + "._name", node1), INDEX); + ensureGreen(INDEX); + // await all relocation attempts are exhausted + var maxAttempts = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); + assertBusy(() -> { + var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); + var shard = state.routingTable().index(INDEX).shard(SHARD).primaryShard(); + assertThat(shard, notNullValue()); + assertThat(shard.relocationFailureInfo().failedRelocations(), equalTo(maxAttempts)); + }); + // ensure the shard remain started + var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); + var shard = state.routingTable().index(INDEX).shard(SHARD).primaryShard(); + assertThat(shard, notNullValue()); + assertThat(shard.state(), equalTo(ShardRoutingState.STARTED)); + assertThat(state.nodes().get(shard.currentNodeId()).getName(), equalTo(node1)); + failRelocation.set(false); + // A new node joining should reset the counter and allow more relocation retries + try (var mockLog = MockLog.capture(RoutingNodes.class)) { + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "log resetting failed relocations", + RoutingNodes.class.getName(), + Level.INFO, + Strings.format(RoutingNodes.RESET_FAILED_RELOCATION_COUNTER_LOG_MSG, 1, List.of(shard.shardId())) + ) + ); + internalCluster().startNode(); + assertBusy(() -> { + var stateAfterNodeJoin = internalCluster().clusterService().state(); + var relocatedShard = 
stateAfterNodeJoin.routingTable().index(INDEX).shard(SHARD).primaryShard(); + assertThat(relocatedShard, notNullValue()); + assertThat(stateAfterNodeJoin.nodes().get(relocatedShard.currentNodeId()).getName(), not(equalTo(node1))); + }); + mockLog.assertAllExpectationsMatched(); + } + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java index fb5393c1961f8..3f69ff8a5dc73 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java @@ -16,14 +16,19 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus; import org.elasticsearch.cluster.routing.allocation.ExistingShardsAllocator; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceMetrics; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Assertions; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import java.util.ArrayDeque; import java.util.ArrayList; @@ -44,6 +49,8 @@ import java.util.stream.Stream; import java.util.stream.StreamSupport; +import static org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY; + /** * {@link RoutingNodes} represents a copy the routing information contained in the {@link ClusterState cluster state}. * It can be either initialized as mutable or immutable allowing or disallowing changes to its elements. 
@@ -60,6 +67,13 @@ */ public class RoutingNodes implements Iterable { + private static final Logger logger = LogManager.getLogger(RoutingNodes.class); + public static final String RESET_FAILED_ALLOCATION_COUNTER_LOG_MSG = + "Resetting failure counter for %d shard(s) that have reached their max allocation retires (%s)"; + public static final String RESET_FAILED_RELOCATION_COUNTER_LOG_MSG = + "Resetting failure counter for %d shard(s) that have reached their max relocation retries (%s)"; + private static final int MAX_SHARDS_IN_LOG_MSG = 20; + private final Map nodesToShards; private final UnassignedShards unassignedShards; @@ -1298,14 +1312,47 @@ public boolean hasAllocationFailures() { })); } - public void resetFailedCounter(RoutingChangesObserver routingChangesObserver) { + public boolean hasRelocationFailures() { + for (var shardRoutings : assignedShards.values()) { + for (var routing : shardRoutings) { + if (routing.relocationFailureInfo() != null && routing.relocationFailureInfo().failedRelocations() > 0) { + return true; + } + } + } + return false; + } + + public void resetFailedCounter(RoutingAllocation allocation) { + final var observer = allocation.changes(); + int shardsWithMaxFailedAllocations = 0; + int shardsWithMaxFailedRelocations = 0; + List topShardIdsWithFailedAllocations = new ArrayList<>(); + List topShardIdsWithFailedRelocations = new ArrayList<>(); + final var unassignedIterator = unassigned().iterator(); while (unassignedIterator.hasNext()) { ShardRouting shardRouting = unassignedIterator.next(); UnassignedInfo unassignedInfo = shardRouting.unassignedInfo(); + int failedAllocations = unassignedInfo.failedAllocations(); + if (failedAllocations > 0) { + try { + final var maxRetry = SETTING_ALLOCATION_MAX_RETRY.get( + allocation.metadata().getIndexSafe(shardRouting.index()).getSettings() + ); + if (failedAllocations >= maxRetry) { + shardsWithMaxFailedAllocations++; + if (topShardIdsWithFailedAllocations.size() <= MAX_SHARDS_IN_LOG_MSG) { + topShardIdsWithFailedAllocations.add(shardRouting.shardId()); + } + } + } catch (IndexNotFoundException e) { + // ignore + } + } unassignedIterator.updateUnassigned( new UnassignedInfo( - unassignedInfo.failedAllocations() > 0 ? UnassignedInfo.Reason.MANUAL_ALLOCATION : unassignedInfo.reason(), + failedAllocations > 0 ? 
UnassignedInfo.Reason.MANUAL_ALLOCATION : unassignedInfo.reason(), unassignedInfo.message(), unassignedInfo.failure(), 0, @@ -1317,7 +1364,7 @@ public void resetFailedCounter(RoutingChangesObserver routingChangesObserver) { unassignedInfo.lastAllocatedNodeId() ), shardRouting.recoverySource(), - routingChangesObserver + observer ); } @@ -1326,6 +1373,20 @@ public void resetFailedCounter(RoutingChangesObserver routingChangesObserver) { for (ShardRouting shardRouting : routingNode) { if (shardRouting.relocationFailureInfo() != null && shardRouting.relocationFailureInfo().failedRelocations() > 0) { shardsWithRelocationFailures.add(shardRouting); + try { + int failedRelocations = shardRouting.relocationFailureInfo().failedRelocations(); + final var maxRetry = SETTING_ALLOCATION_MAX_RETRY.get( + allocation.metadata().getIndexSafe(shardRouting.index()).getSettings() + ); + if (failedRelocations >= maxRetry) { + shardsWithMaxFailedRelocations++; + if (topShardIdsWithFailedRelocations.size() <= MAX_SHARDS_IN_LOG_MSG) { + topShardIdsWithFailedRelocations.add(shardRouting.shardId()); + } + } + } catch (IndexNotFoundException e) { + // ignore + } } } @@ -1336,6 +1397,17 @@ public void resetFailedCounter(RoutingChangesObserver routingChangesObserver) { assignedShardsAdd(updated); } } + + if (shardsWithMaxFailedAllocations > 0) { + logger.info( + Strings.format(RESET_FAILED_ALLOCATION_COUNTER_LOG_MSG, shardsWithMaxFailedAllocations, topShardIdsWithFailedAllocations) + ); + } + if (shardsWithMaxFailedRelocations > 0) { + logger.info( + Strings.format(RESET_FAILED_RELOCATION_COUNTER_LOG_MSG, shardsWithMaxFailedRelocations, topShardIdsWithFailedRelocations) + ); + } } /** diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index 2f2fd4ef453f6..be4f54630a0be 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.RestoreInProgress; @@ -19,6 +20,7 @@ import org.elasticsearch.cluster.metadata.AutoExpandReplicas; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata.Type; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; @@ -573,15 +575,71 @@ public void addAllocFailuresResetListenerTo(ClusterService clusterService) { }); clusterService.addListener((changeEvent) -> { - if (changeEvent.nodesAdded() && changeEvent.state().getRoutingNodes().hasAllocationFailures()) { + if (shouldResetAllocationFailures(changeEvent)) { taskQueue.submitTask("reset-allocation-failures", (e) -> { assert MasterService.isPublishFailureException(e); }, null); } }); } + /** + * We should reset allocation/relocation failure count to allow further retries when: + * + * 1. A new node joins the cluster. + * 2. 
A node shutdown metadata is added that could lead to a node being removed or replaced in the cluster. + * + * Note that removing a non-RESTART shutdown metadata from a node that is still in the cluster is treated similarly and + * will cause resetting the allocation/relocation failures. + */ + private boolean shouldResetAllocationFailures(ClusterChangedEvent changeEvent) { + final var clusterState = changeEvent.state(); + + if (clusterState.getRoutingNodes().hasAllocationFailures() == false + && clusterState.getRoutingNodes().hasRelocationFailures() == false) { + return false; + } + if (changeEvent.nodesAdded()) { + return true; + } + + final var currentNodeShutdowns = clusterState.metadata().nodeShutdowns(); + final var previousNodeShutdowns = changeEvent.previousState().metadata().nodeShutdowns(); + + if (currentNodeShutdowns.equals(previousNodeShutdowns)) { + return false; + } + + for (var currentShutdown : currentNodeShutdowns.getAll().entrySet()) { + var previousNodeShutdown = previousNodeShutdowns.get(currentShutdown.getKey()); + if (currentShutdown.equals(previousNodeShutdown)) { + continue; + } + // A RESTART doesn't necessarily move around shards, so no need to consider it for a reset. + // Furthermore, once the node rejoins after restarting, there will be a reset if necessary. + if (currentShutdown.getValue().getType() == SingleNodeShutdownMetadata.Type.RESTART) { + continue; + } + // A node with no shutdown marker or a RESTART marker receives a non-RESTART shutdown marker + if (previousNodeShutdown == null || previousNodeShutdown.getType() == Type.RESTART) { + return true; + } + } + + for (var previousShutdown : previousNodeShutdowns.getAll().entrySet()) { + var nodeId = previousShutdown.getKey(); + // A non-RESTART marker is removed but the node is still in the cluster. We could re-attempt failed relocations/allocations. 
+ if (currentNodeShutdowns.get(nodeId) == null + && previousShutdown.getValue().getType() != SingleNodeShutdownMetadata.Type.RESTART + && clusterState.nodes().get(nodeId) != null) { + return true; + } + } + + return false; + } + private ClusterState rerouteWithResetFailedCounter(ClusterState clusterState) { RoutingAllocation allocation = createRoutingAllocation(clusterState, currentNanoTime()); - allocation.routingNodes().resetFailedCounter(allocation.changes()); + allocation.routingNodes().resetFailedCounter(allocation); reroute(allocation, routingAllocation -> shardsAllocator.allocate(routingAllocation, ActionListener.noop())); return buildResultAndLogHealthChange(clusterState, allocation, "reroute with reset failed counter"); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocator.java index 095e2c36ef6be..47a277a639c01 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocator.java @@ -62,7 +62,7 @@ default RoutingExplanations execute(RoutingAllocation allocation, AllocationComm try { if (retryFailed) { - allocation.routingNodes().resetFailedCounter(allocation.changes()); + allocation.routingNodes().resetFailedCounter(allocation); } return commands.execute(allocation, explain); } finally { diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/AllocationFailuresResetOnShutdownIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/AllocationFailuresResetOnShutdownIT.java new file mode 100644 index 0000000000000..b6c6843d766f9 --- /dev/null +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/AllocationFailuresResetOnShutdownIT.java @@ -0,0 +1,327 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.shutdown; + +import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.shard.IndexEventListener; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.MockIndexEventListener; +import org.hamcrest.Matchers; + +import java.util.Arrays; +import java.util.Collection; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_PREFIX; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.notNullValue; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +public class AllocationFailuresResetOnShutdownIT extends ESIntegTestCase { + @Override + protected Collection> nodePlugins() { + return Arrays.asList(MockIndexEventListener.TestPlugin.class, ShutdownPlugin.class); + } + + public void testResetRelocationFailuresOnNodeShutdown() throws Exception { + String node1 = internalCluster().startNode(); + createIndex("index1", 1, 0); + ensureGreen("index1"); + final var failRelocation = new AtomicBoolean(true); + String node2 = internalCluster().startNode(); + internalCluster().getInstances(MockIndexEventListener.TestEventListener.class) + .forEach(testEventListener -> testEventListener.setNewDelegate(new IndexEventListener() { + @Override + public void beforeIndexShardCreated(ShardRouting routing, Settings indexSettings) { + if (failRelocation.get()) { + throw new RuntimeException("FAIL"); + } + } + })); + updateIndexSettings(Settings.builder().put(INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + "._name", node1), "index1"); + ensureGreen("index1"); + // await all relocation attempts are exhausted + var maxAttempts = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); + assertBusy(() -> { + var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); + var shard = state.routingTable().index("index1").shard(0).primaryShard(); + assertThat(shard, notNullValue()); + assertThat(shard.relocationFailureInfo().failedRelocations(), equalTo(maxAttempts)); + }); + // ensure the shard remain started + var stateAfterFailures = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); + var shardAfterFailures = stateAfterFailures.routingTable().index("index1").shard(0).primaryShard(); + assertThat(shardAfterFailures, notNullValue()); + assertThat(shardAfterFailures.state(), equalTo(ShardRoutingState.STARTED)); + assertThat(stateAfterFailures.nodes().get(shardAfterFailures.currentNodeId()).getName(), equalTo(node1)); + failRelocation.set(false); + if (randomBoolean()) { + // A RESTART marker shouldn't cause a reset of failures + final var request = createRequest(SingleNodeShutdownMetadata.Type.RESTART, node1, null); + safeGet(client().execute(PutShutdownNodeAction.INSTANCE, request)); + var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); + var shard = state.routingTable().index("index1").shard(0).primaryShard(); + assertThat(shard, notNullValue()); + assertThat(shard.relocationFailureInfo().failedRelocations(), 
equalTo(maxAttempts)); + assertThat(state.nodes().get(shard.currentNodeId()).getName(), equalTo(node1)); + } + // A non-RESTART node shutdown should reset the counter and allow more relocation retries + final var request = createRequest( + randomFrom( + SingleNodeShutdownMetadata.Type.REPLACE, + SingleNodeShutdownMetadata.Type.SIGTERM, + SingleNodeShutdownMetadata.Type.REMOVE + ), + node1, + node2 + ); + safeGet(client().execute(PutShutdownNodeAction.INSTANCE, request)); + assertBusy(() -> { + var stateAfterNodeJoin = internalCluster().clusterService().state(); + var relocatedShard = stateAfterNodeJoin.routingTable().index("index1").shard(0).primaryShard(); + assertThat(relocatedShard.relocationFailureInfo().failedRelocations(), Matchers.lessThan(maxAttempts)); + assertThat(relocatedShard, notNullValue()); + assertThat(stateAfterNodeJoin.nodes().get(relocatedShard.currentNodeId()).getName(), not(equalTo(node1))); + }); + } + + public void testResetRelocationFailuresOnNodeShutdownRemovalOfExistingNode() throws Exception { + String node1 = internalCluster().startNode(); + createIndex("index1", 1, 0); + ensureGreen("index1"); + final var failRelocation = new AtomicBoolean(true); + String node2 = internalCluster().startNode(); + internalCluster().startNode(); + internalCluster().getInstances(MockIndexEventListener.TestEventListener.class) + .forEach(testEventListener -> testEventListener.setNewDelegate(new IndexEventListener() { + @Override + public void beforeIndexShardCreated(ShardRouting routing, Settings indexSettings) { + if (failRelocation.get()) { + throw new RuntimeException("FAIL"); + } + } + })); + // add shutdown to the new node + final var request = createRequest(randomFrom(SingleNodeShutdownMetadata.Type.values()), node2, randomIdentifier()); + safeGet(client().execute(PutShutdownNodeAction.INSTANCE, request)); + + updateIndexSettings(Settings.builder().put(INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + "._name", node1), "index1"); + ensureGreen("index1"); + // await all relocation attempts are exhausted + var maxAttempts = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); + assertBusy(() -> { + var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); + var shard = state.routingTable().index("index1").shard(0).primaryShard(); + assertThat(shard, notNullValue()); + assertThat(shard.relocationFailureInfo().failedRelocations(), equalTo(maxAttempts)); + }); + // ensure the shard remain started + var stateAfterFailures = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); + var shardAfterFailures = stateAfterFailures.routingTable().index("index1").shard(0).primaryShard(); + assertThat(shardAfterFailures, notNullValue()); + assertThat(shardAfterFailures.state(), equalTo(ShardRoutingState.STARTED)); + assertThat(stateAfterFailures.nodes().get(shardAfterFailures.currentNodeId()).getName(), equalTo(node1)); + failRelocation.set(false); + // Removing the non-RESTART shutdown marker should reset the counter and allow more relocation retries + safeGet( + client().execute( + DeleteShutdownNodeAction.INSTANCE, + new DeleteShutdownNodeAction.Request(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, getNodeId(node2)) + ) + ); + if (request.getType() != SingleNodeShutdownMetadata.Type.RESTART) { + assertBusy(() -> { + var stateAfterNodeJoin = internalCluster().clusterService().state(); + var relocatedShard = stateAfterNodeJoin.routingTable().index("index1").shard(0).primaryShard(); + 
assertThat(relocatedShard.relocationFailureInfo().failedRelocations(), Matchers.lessThan(maxAttempts)); + assertThat(relocatedShard, notNullValue()); + assertThat(stateAfterNodeJoin.nodes().get(relocatedShard.currentNodeId()).getName(), not(equalTo(node1))); + }); + } else { + var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); + var shard = state.routingTable().index("index1").shard(0).primaryShard(); + assertThat(shard, notNullValue()); + assertThat(shard.relocationFailureInfo().failedRelocations(), equalTo(maxAttempts)); + assertThat(state.nodes().get(shard.currentNodeId()).getName(), equalTo(node1)); + } + } + + public void testResetAllocationFailuresOnNodeShutdown() throws Exception { + String node1 = internalCluster().startNode(); + String node2 = internalCluster().startNode(); + internalCluster().startNode(); + + final var failAllocation = new AtomicBoolean(true); + internalCluster().getInstances(MockIndexEventListener.TestEventListener.class) + .forEach(testEventListener -> testEventListener.setNewDelegate(new IndexEventListener() { + @Override + public void beforeIndexShardCreated(ShardRouting routing, Settings indexSettings) { + if (failAllocation.get()) { + throw new RuntimeException("FAIL"); + } + } + })); + + prepareCreate("index1", indexSettings(1, 0)).execute(); + + // await all relocation attempts are exhausted + var maxAttempts = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); + ensureRed("index1"); + { + var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); + var index = state.getRoutingTable().index("index1"); + assertNotNull(index); + var shard = index.shard(0).primaryShard(); + assertNotNull(shard); + assertNotNull(shard.unassignedInfo()); + assertThat(maxAttempts, equalTo(shard.unassignedInfo().failedAllocations())); + } + + failAllocation.set(false); + + if (randomBoolean()) { + // A RESTART marker shouldn't cause a reset of failures + final var request = createRequest(randomFrom(SingleNodeShutdownMetadata.Type.RESTART), node1, null); + safeGet(client().execute(PutShutdownNodeAction.INSTANCE, request)); + ensureRed("index1"); + var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); + var shard = state.routingTable().index("index1").shard(0).primaryShard(); + assertThat(shard, notNullValue()); + assertNotNull(shard.unassignedInfo()); + assertThat(shard.unassignedInfo().failedAllocations(), equalTo(maxAttempts)); + } + + // A non-RESTART node shutdown should reset the counter and allow more relocation retries + final var request = createRequest( + randomFrom( + SingleNodeShutdownMetadata.Type.REPLACE, + SingleNodeShutdownMetadata.Type.SIGTERM, + SingleNodeShutdownMetadata.Type.REMOVE + ), + node1, + node2 + ); + safeGet(client().execute(PutShutdownNodeAction.INSTANCE, request)); + ensureGreen("index1"); + } + + public void testResetAllocationFailuresOnNodeShutdownRemovalOfExistingNode() throws Exception { + String node1 = internalCluster().startNode(); + String node2 = internalCluster().startNode(); + internalCluster().startNode(); + + final var failAllocation = new AtomicBoolean(true); + internalCluster().getInstances(MockIndexEventListener.TestEventListener.class) + .forEach(testEventListener -> testEventListener.setNewDelegate(new IndexEventListener() { + @Override + public void beforeIndexShardCreated(ShardRouting routing, Settings indexSettings) { + if (failAllocation.get()) { + throw new RuntimeException("FAIL"); + } + 
} + })); + + final var request = createRequest(randomFrom(SingleNodeShutdownMetadata.Type.values()), node1, node2); + safeGet(client().execute(PutShutdownNodeAction.INSTANCE, request)); + + prepareCreate("index1", indexSettings(1, 0)).execute(); + + // await all allocation attempts are exhausted + var maxAttempts = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); + ensureRed("index1"); + { + var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); + var shard = state.getRoutingTable().index("index1").shard(0).primaryShard(); + assertNotNull(shard); + assertNotNull(shard.unassignedInfo()); + assertThat(maxAttempts, equalTo(shard.unassignedInfo().failedAllocations())); + } + + failAllocation.set(false); + + // A none-RESTART node shutdown should reset the counter and allow more allocation retries + safeGet( + client().execute( + DeleteShutdownNodeAction.INSTANCE, + new DeleteShutdownNodeAction.Request(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, getNodeId(node1)) + ) + ); + + if (request.getType() != SingleNodeShutdownMetadata.Type.RESTART) { + ensureGreen("index1"); + } else { + ensureRed("index1"); + var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState(); + var shard = state.routingTable().index("index1").shard(0).primaryShard(); + assertThat(shard, notNullValue()); + assertNotNull(shard.unassignedInfo()); + assertThat(shard.unassignedInfo().failedAllocations(), equalTo(maxAttempts)); + } + } + + private PutShutdownNodeAction.Request createRequest(SingleNodeShutdownMetadata.Type type, String nodeName, String targetNodeName) { + var nodeId = getNodeId(nodeName); + switch (type) { + case REMOVE -> { + return new PutShutdownNodeAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + nodeId, + SingleNodeShutdownMetadata.Type.REMOVE, + "test", + null, + null, + null + ); + } + case REPLACE -> { + return new PutShutdownNodeAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + nodeId, + SingleNodeShutdownMetadata.Type.REPLACE, + "test", + null, + targetNodeName, + null + ); + } + case RESTART -> { + return new PutShutdownNodeAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + nodeId, + SingleNodeShutdownMetadata.Type.RESTART, + "test", + null, + null, + null + ); + } + case SIGTERM -> { + return new PutShutdownNodeAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + nodeId, + SingleNodeShutdownMetadata.Type.SIGTERM, + "test", + null, + null, + randomTimeValue() + ); + } + default -> throw new AssertionError("unknown shutdown metadata type: " + type); + } + } +} From ddc23625926ad8f1adad976090b0199cb786ca33 Mon Sep 17 00:00:00 2001 From: Roberto Seldner Date: Tue, 28 Jan 2025 03:39:10 -0700 Subject: [PATCH 109/383] Update async-search.asciidoc - Indicating `search.max_async_search_response_size` is a Dynamic (#112758) Indicating `search.max_async_search_response_size` is a Dynamic setting here as it does not appear to be documented elsewhere. --- docs/reference/search/async-search.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/search/async-search.asciidoc b/docs/reference/search/async-search.asciidoc index 9a9e9ca45e817..d397186d255ce 100644 --- a/docs/reference/search/async-search.asciidoc +++ b/docs/reference/search/async-search.asciidoc @@ -143,7 +143,7 @@ nor search requests that only include the <>. 
WARNING: By default, {es} doesn't allow to store an async search response larger than 10Mb, and an attempt to do this results in an error. The maximum allowed size for a stored async search response can be set by changing the -`search.max_async_search_response_size` cluster level setting. +`search.max_async_search_response_size` (<>) cluster level setting. [[get-async-search]] ==== Get async search From 453db3fd7121472a056a83a5d41c559e3ee6cfe8 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 28 Jan 2025 11:50:47 +0100 Subject: [PATCH 110/383] Optimize InternalAggregations construction a little (#120868) We can streamline and optimize this logic a little to see less copying and more compact results. --- .../aggregations/TermsReduceBenchmark.java | 4 +- .../StringTermsSerializationBenchmark.java | 4 +- .../histogram/InternalAutoDateHistogram.java | 2 +- .../bucket/timeseries/InternalTimeSeries.java | 2 +- .../DerivativePipelineAggregator.java | 17 +++--- .../pipeline/MovFnPipelineAggregator.java | 13 ++-- .../aggregations/InternalAggregations.java | 61 +++++++++++++------ .../histogram/InternalDateHistogram.java | 2 +- .../bucket/histogram/InternalHistogram.java | 5 +- .../BucketScriptPipelineAggregator.java | 12 ++-- .../CumulativeSumPipelineAggregator.java | 15 +++-- .../SerialDiffPipelineAggregator.java | 12 ++-- .../pipeline/SiblingPipelineAggregator.java | 12 ++-- .../search/SearchResponseUtils.java | 2 +- ...mulativeCardinalityPipelineAggregator.java | 18 +++--- .../MovingPercentilesPipelineAggregator.java | 23 ++++--- .../NormalizePipelineAggregator.java | 16 ++--- .../InferencePipelineAggregator.java | 15 +++-- 18 files changed, 134 insertions(+), 101 deletions(-) diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java index 9fd319f9e9b1c..672f2db7c29e3 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java @@ -111,7 +111,7 @@ public void setup() { dict[i] = new BytesRef(Long.toString(rand.nextLong())); } for (int i = 0; i < numShards; i++) { - aggsList.add(InternalAggregations.from(Collections.singletonList(newTerms(rand, dict, true)))); + aggsList.add(InternalAggregations.from(newTerms(rand, dict, true))); } } @@ -124,7 +124,7 @@ private StringTerms newTerms(Random rand, BytesRef[] dict, boolean withNested) { for (BytesRef term : randomTerms) { InternalAggregations subAggs; if (withNested) { - subAggs = InternalAggregations.from(Collections.singletonList(newTerms(rand, dict, false))); + subAggs = InternalAggregations.from(newTerms(rand, dict, false)); } else { subAggs = InternalAggregations.EMPTY; } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/bucket/terms/StringTermsSerializationBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/bucket/terms/StringTermsSerializationBenchmark.java index 6065dedc8de4f..1e2846f9ba23b 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/bucket/terms/StringTermsSerializationBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/bucket/terms/StringTermsSerializationBenchmark.java @@ -50,13 +50,13 @@ public class StringTermsSerializationBenchmark { @Setup public void initResults() { - 
results = DelayableWriteable.referencing(InternalAggregations.from(List.of(newTerms(true)))); + results = DelayableWriteable.referencing(InternalAggregations.from(newTerms(true))); } private StringTerms newTerms(boolean withNested) { List resultBuckets = new ArrayList<>(buckets); for (int i = 0; i < buckets; i++) { - InternalAggregations inner = withNested ? InternalAggregations.from(List.of(newTerms(false))) : InternalAggregations.EMPTY; + InternalAggregations inner = withNested ? InternalAggregations.from(newTerms(false)) : InternalAggregations.EMPTY; resultBuckets.add(new StringTerms.Bucket(new BytesRef("test" + i), i, inner, false, 0, DocValueFormat.RAW)); } return new StringTerms( diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java index edb7ec4cffce7..c6fceb330f498 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -414,7 +414,7 @@ private BucketReduceResult addEmptyBuckets(BucketReduceResult current, Aggregati Bucket lastBucket = null; ListIterator iter = list.listIterator(); - InternalAggregations reducedEmptySubAggs = InternalAggregations.reduce(List.of(bucketInfo.emptySubAggregations), reduceContext); + InternalAggregations reducedEmptySubAggs = InternalAggregations.reduce(bucketInfo.emptySubAggregations, reduceContext); // Add the empty buckets within the data, // e.g. if the data series is [1,2,3,7] there're 3 empty buckets that will be created for 4,5,6 diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java index c4669b1c25224..ac472cbdd0c38 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java @@ -222,7 +222,7 @@ public InternalAggregation get() { InternalBucket reducedBucket; if (bucketsWithSameKey.size() == 1) { reducedBucket = bucketsWithSameKey.get(0); - reducedBucket.aggregations = InternalAggregations.reduce(List.of(reducedBucket.aggregations), reduceContext); + reducedBucket.aggregations = InternalAggregations.reduce(reducedBucket.aggregations, reduceContext); } else { reducedBucket = reduceBucket(bucketsWithSameKey, reduceContext); } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregator.java index ed92d6dffd9fb..d9661ccb90b0e 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregator.java @@ -22,8 +22,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue; @@ -69,11 +67,16 @@ public InternalAggregation 
reduce(InternalAggregation aggregation, AggregationRe if (xAxisUnits != null) { xDiff = (thisBucketKey.doubleValue() - lastBucketKey.doubleValue()) / xAxisUnits; } - final List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .collect(Collectors.toCollection(ArrayList::new)); - aggs.add(new Derivative(name(), gradient, xDiff, formatter, metadata())); - Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); - newBuckets.add(newBucket); + newBuckets.add( + factory.createBucket( + factory.getKey(bucket), + bucket.getDocCount(), + InternalAggregations.append( + bucket.getAggregations(), + new Derivative(name(), gradient, xDiff, formatter, metadata()) + ) + ) + ); } else { newBuckets.add(bucket); } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/MovFnPipelineAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/MovFnPipelineAggregator.java index 626d9c675af3f..ad21d149e9337 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/MovFnPipelineAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/MovFnPipelineAggregator.java @@ -25,8 +25,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue; @@ -117,12 +115,11 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe vars, values.subList(fromIndex, toIndex).stream().mapToDouble(Double::doubleValue).toArray() ); - - List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .map(InternalAggregation.class::cast) - .collect(Collectors.toCollection(ArrayList::new)); - aggs.add(new InternalSimpleValue(name(), result, formatter, metadata())); - newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); + newBucket = factory.createBucket( + factory.getKey(bucket), + bucket.getDocCount(), + InternalAggregations.append(bucket.getAggregations(), new InternalSimpleValue(name(), result, formatter, metadata())) + ); index++; } newBuckets.add(newBucket); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java index 9ed62add775c0..5f81913be32fa 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; @@ -47,7 +48,7 @@ public final class InternalAggregations implements Iterable /** * Constructs a new aggregation. 
*/ - public InternalAggregations(List aggregations) { + private InternalAggregations(List aggregations) { this.aggregations = aggregations; if (aggregations.isEmpty()) { aggregationsAsMap = Map.of(); @@ -70,14 +71,15 @@ public List asList() { } private Map asMap() { - if (aggregationsAsMap == null) { + var res = aggregationsAsMap; + if (res == null) { Map newAggregationsAsMap = Maps.newMapWithExpectedSize(aggregations.size()); for (InternalAggregation aggregation : aggregations) { newAggregationsAsMap.put(aggregation.getName(), aggregation); } - this.aggregationsAsMap = unmodifiableMap(newAggregationsAsMap); + res = this.aggregationsAsMap = unmodifiableMap(newAggregationsAsMap); } - return aggregationsAsMap; + return res; } /** @@ -121,13 +123,27 @@ public XContentBuilder toXContentInternal(XContentBuilder builder, Params params return builder; } + public static InternalAggregations from(InternalAggregation aggregation) { + return new InternalAggregations(List.of(aggregation)); + } + public static InternalAggregations from(List aggregations) { if (aggregations.isEmpty()) { return EMPTY; } + if (aggregations.size() == 1) { + return from(aggregations.getFirst()); + } return new InternalAggregations(aggregations); } + public static InternalAggregations append(InternalAggregations aggs, InternalAggregation toAppend) { + if (aggs.aggregations.isEmpty()) { + return from(toAppend); + } + return new InternalAggregations(CollectionUtils.appendToCopyNoNullElements(aggs.aggregations, toAppend)); + } + public static InternalAggregations readFrom(StreamInput in) throws IOException { return from(in.readNamedWriteableCollectionAsList(InternalAggregation.class)); } @@ -227,19 +243,7 @@ public static InternalAggregations reduce(List aggregation } // handle special case when there is just one aggregation if (aggregationsList.size() == 1) { - final List internalAggregations = aggregationsList.get(0).asList(); - final List reduced = new ArrayList<>(internalAggregations.size()); - for (InternalAggregation aggregation : internalAggregations) { - if (aggregation.mustReduceOnSingleInternalAgg()) { - try (AggregatorReducer aggregatorReducer = aggregation.getReducer(context.forAgg(aggregation.getName()), 1)) { - aggregatorReducer.accept(aggregation); - reduced.add(aggregatorReducer.get()); - } - } else { - reduced.add(aggregation); - } - } - return from(reduced); + return reduce(aggregationsList.getFirst(), context); } // general case try (AggregatorsReducer reducer = new AggregatorsReducer(aggregationsList.get(0), context, aggregationsList.size())) { @@ -250,6 +254,29 @@ public static InternalAggregations reduce(List aggregation } } + public static InternalAggregations reduce(InternalAggregations aggregations, AggregationReduceContext context) { + final List internalAggregations = aggregations.asList(); + int size = internalAggregations.size(); + if (size == 0) { + return EMPTY; + } + boolean noneReduced = true; + final List reduced = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + InternalAggregation aggregation = internalAggregations.get(i); + if (aggregation.mustReduceOnSingleInternalAgg()) { + noneReduced = false; + try (AggregatorReducer aggregatorReducer = aggregation.getReducer(context.forAgg(aggregation.getName()), 1)) { + aggregatorReducer.accept(aggregation); + reduced.add(aggregatorReducer.get()); + } + } else { + reduced.add(aggregation); + } + } + return noneReduced ? 
aggregations : from(reduced); + } + /** * Finalizes the sampling for all the internal aggregations * @param samplingContext the sampling context diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index d2badbeec4622..8ce7cc7571264 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -377,7 +377,7 @@ public void accept(long key) { iterateEmptyBuckets(list, list.listIterator(), counter); reduceContext.consumeBucketsAndMaybeBreak(counter.size); - InternalAggregations reducedEmptySubAggs = InternalAggregations.reduce(List.of(emptyBucketInfo.subAggregations), reduceContext); + InternalAggregations reducedEmptySubAggs = InternalAggregations.reduce(emptyBucketInfo.subAggregations, reduceContext); ListIterator iter = list.listIterator(); iterateEmptyBuckets(list, iter, new LongConsumer() { private int size = 0; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index cb21507363740..73602ac024b98 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -349,10 +349,7 @@ public void accept(double key) { /* * Now that we're sure we have space we allocate all the buckets. */ - InternalAggregations reducedEmptySubAggs = InternalAggregations.reduce( - Collections.singletonList(emptyBucketInfo.subAggregations), - reduceContext - ); + InternalAggregations reducedEmptySubAggs = InternalAggregations.reduce(emptyBucketInfo.subAggregations, reduceContext); ListIterator iter = list.listIterator(); iterateEmptyBuckets(list, iter, new DoubleConsumer() { private int size; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java index 95709f2787475..224a39bb73d05 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java @@ -22,8 +22,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue; @@ -80,13 +78,11 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe if (returned == null) { newBuckets.add(bucket); } else { - final List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .collect(Collectors.toCollection(ArrayList::new)); - - InternalSimpleValue simpleValue = new InternalSimpleValue(name(), returned.doubleValue(), formatter, metadata()); - aggs.add(simpleValue); InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket( - InternalAggregations.from(aggs), + InternalAggregations.append( + bucket.getAggregations(), + new InternalSimpleValue(name(), 
returned.doubleValue(), formatter, metadata()) + ), bucket ); newBuckets.add(newBucket); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java index 670479ab2f0a5..1b14ae6f5f118 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java @@ -21,8 +21,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue; @@ -53,12 +51,13 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe if (thisBucketValue != null && thisBucketValue.isInfinite() == false && thisBucketValue.isNaN() == false) { sum += thisBucketValue; } - - List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .collect(Collectors.toCollection(ArrayList::new)); - aggs.add(new InternalSimpleValue(name(), sum, formatter, metadata())); - Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); - newBuckets.add(newBucket); + newBuckets.add( + factory.createBucket( + factory.getKey(bucket), + bucket.getDocCount(), + InternalAggregations.append(bucket.getAggregations(), new InternalSimpleValue(name(), sum, formatter, metadata())) + ) + ); } return factory.createAggregation(newBuckets); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java index 91c64c87c331d..839d71559885a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java @@ -23,8 +23,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue; @@ -84,11 +82,11 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe // Both have values, calculate diff and replace the "empty" bucket if (Double.isNaN(thisBucketValue) == false && Double.isNaN(lagValue) == false) { double diff = thisBucketValue - lagValue; - - List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .collect(Collectors.toCollection(ArrayList::new)); - aggs.add(new InternalSimpleValue(name(), diff, formatter, metadata())); - newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); + newBucket = factory.createBucket( + factory.getKey(bucket), + bucket.getDocCount(), + InternalAggregations.append(bucket.getAggregations(), new InternalSimpleValue(name(), diff, formatter, metadata())) + ); } newBuckets.add(newBucket); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java index 6852356c1e45d..fe38e1fa62942 100644 
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java @@ -9,11 +9,11 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; -import java.util.List; import java.util.Map; public abstract class SiblingPipelineAggregator extends PipelineAggregator { @@ -23,11 +23,11 @@ protected SiblingPipelineAggregator(String name, String[] bucketsPaths, Map { - List aggs = aggregations.copyResults(); - aggs.add(doReduce(aggregations, reduceContext)); - return InternalAggregations.from(aggs); - }); + return aggregation.copyWithRewritenBuckets( + aggregations -> InternalAggregations.from( + CollectionUtils.appendToCopyNoNullElements(aggregations.copyResults(), doReduce(aggregations, reduceContext)) + ) + ); } public abstract InternalAggregation doReduce(InternalAggregations aggregations, AggregationReduceContext context); diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index 330058b16a811..21b43636222f9 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -986,7 +986,7 @@ private static InternalAggregations parseInternalAggregations(XContentParser par } } } - return new InternalAggregations(aggregations); + return InternalAggregations.from(aggregations); } private static final InstantiatingObjectParser PROFILE_RESULT_PARSER; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java index e71cedf381886..b4682ecc9be6a 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java @@ -25,8 +25,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; public class CumulativeCardinalityPipelineAggregator extends PipelineAggregator { private final DocValueFormat formatter; @@ -57,12 +55,16 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe hll.merge(0, bucketHll, 0); cardinality = hll.cardinality(0); } - - List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .collect(Collectors.toList()); - aggs.add(new InternalSimpleLongValue(name(), cardinality, formatter, metadata())); - Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); - newBuckets.add(newBucket); + newBuckets.add( + factory.createBucket( + factory.getKey(bucket), + bucket.getDocCount(), + InternalAggregations.append( + bucket.getAggregations(), + new InternalSimpleLongValue(name(), cardinality, formatter, metadata()) 
+ ) + ) + ); } return factory.createAggregation(newBuckets); } finally { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java index 663299df54f8b..15abd65fcc089 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java @@ -27,7 +27,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.stream.Collectors; public class MovingPercentilesPipelineAggregator extends PipelineAggregator { @@ -101,9 +100,14 @@ private void reduceTDigest( } if (state != null) { - List aggs = bucket.getAggregations().asList().stream().collect(Collectors.toList()); - aggs.add(new InternalTDigestPercentiles(name(), config.keys, state, config.keyed, config.formatter, metadata())); - newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); + newBucket = factory.createBucket( + factory.getKey(bucket), + bucket.getDocCount(), + InternalAggregations.append( + bucket.getAggregations(), + new InternalTDigestPercentiles(name(), config.keys, state, config.keyed, config.formatter, metadata()) + ) + ); } newBuckets.add(newBucket); index++; @@ -147,9 +151,14 @@ private void reduceHDR( } if (state != null) { - List aggs = new ArrayList<>(bucket.getAggregations().asList()); - aggs.add(new InternalHDRPercentiles(name(), config.keys, state, config.keyed, config.formatter, metadata())); - newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); + newBucket = factory.createBucket( + factory.getKey(bucket), + bucket.getDocCount(), + InternalAggregations.append( + bucket.getAggregations(), + new InternalHDRPercentiles(name(), config.keys, state, config.keyed, config.formatter, metadata()) + ) + ); } newBuckets.add(newBucket); index++; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java index adb8b691a83ea..a338b8d98d214 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java @@ -21,8 +21,6 @@ import java.util.Map; import java.util.function.DoubleUnaryOperator; import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue; @@ -70,11 +68,15 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe normalizedBucketValue = method.applyAsDouble(values[i]); } - List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) - .collect(Collectors.toList()); - aggs.add(new InternalSimpleValue(name(), normalizedBucketValue, formatter, metadata())); - InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(InternalAggregations.from(aggs), bucket); - 
newBuckets.add(newBucket); + newBuckets.add( + originalAgg.createBucket( + InternalAggregations.append( + bucket.getAggregations(), + new InternalSimpleValue(name(), normalizedBucketValue, formatter, metadata()) + ), + bucket + ) + ); } return originalAgg.create(newBuckets); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregator.java index fd5c66399c72d..14b1aacf549eb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregator.java @@ -100,12 +100,15 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe } catch (Exception e) { inference = new WarningInferenceResults(e.getMessage()); } - - final List aggs = new ArrayList<>(bucket.getAggregations().asList()); - InternalInferenceAggregation aggResult = new InternalInferenceAggregation(name(), metadata(), inference); - aggs.add(aggResult); - InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(InternalAggregations.from(aggs), bucket); - newBuckets.add(newBucket); + newBuckets.add( + originalAgg.createBucket( + InternalAggregations.append( + bucket.getAggregations(), + new InternalInferenceAggregation(name(), metadata(), inference) + ), + bucket + ) + ); } // the model is released at the end of this block. From 7322015761a37c75ebf103c1eae043545b036e91 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Tue, 28 Jan 2025 13:00:27 +0200 Subject: [PATCH 111/383] [Deprecation API] Refactor resource deprecation checkers and add new resources. 
(#120505) --- docs/changelog/120505.yaml | 5 + .../upgrades/SourceModeRollingUpgradeIT.java | 44 +-- .../org/elasticsearch/TransportVersions.java | 1 + .../core/deprecation/DeprecationIssue.java | 23 +- ...java => DataStreamDeprecationChecker.java} | 55 ++- .../xpack/deprecation/DeprecationChecks.java | 21 +- .../deprecation/DeprecationInfoAction.java | 178 +++++++--- .../IlmPolicyDeprecationChecker.java | 108 ++++++ ...ecks.java => IndexDeprecationChecker.java} | 68 +++- ...LegacyIndexTemplateDeprecationChecker.java | 77 ++++ .../deprecation/LegacyTiersDetection.java | 64 ++++ .../deprecation/NodeDeprecationChecks.java | 42 --- .../ResourceDeprecationChecker.java | 34 ++ .../TemplateDeprecationChecker.java | 139 ++++++++ .../TransportDeprecationInfoAction.java | 14 +- ...=> DataStreamDeprecationCheckerTests.java} | 52 ++- .../DeprecationInfoActionResponseTests.java | 332 ++++++++++++------ .../IlmPolicyDeprecationCheckerTests.java | 151 ++++++++ ...java => IndexDeprecationCheckerTests.java} | 138 +++++--- .../NodeDeprecationChecksTests.java | 44 --- .../TemplateDeprecationCheckerTests.java | 193 ++++++++++ .../test/deprecation/10_basic.yml | 10 + 22 files changed, 1411 insertions(+), 382 deletions(-) create mode 100644 docs/changelog/120505.yaml rename x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/{DataStreamDeprecationChecks.java => DataStreamDeprecationChecker.java} (60%) create mode 100644 x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationChecker.java rename x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/{IndexDeprecationChecks.java => IndexDeprecationChecker.java} (80%) create mode 100644 x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyIndexTemplateDeprecationChecker.java create mode 100644 x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyTiersDetection.java create mode 100644 x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ResourceDeprecationChecker.java create mode 100644 x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationChecker.java rename x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/{DataStreamDeprecationChecksTests.java => DataStreamDeprecationCheckerTests.java} (84%) create mode 100644 x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationCheckerTests.java rename x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/{IndexDeprecationChecksTests.java => IndexDeprecationCheckerTests.java} (68%) create mode 100644 x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationCheckerTests.java diff --git a/docs/changelog/120505.yaml b/docs/changelog/120505.yaml new file mode 100644 index 0000000000000..85dc71420c426 --- /dev/null +++ b/docs/changelog/120505.yaml @@ -0,0 +1,5 @@ +pr: 120505 +summary: "introduce new categories for deprecated resources in deprecation API" +area: Indices APIs +type: enhancement +issues: [] diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java index f6a8b86f27bec..98572de6618ee 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java +++ 
b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java @@ -50,22 +50,9 @@ public void testConfigureStoredSourceBeforeIndexCreationLegacy() throws IOExcept putComponentTemplateRequest.setOptions(expectWarnings(SourceFieldMapper.DEPRECATION_WARNING)); putComponentTemplateRequest.setJsonEntity(storedSourceMapping); assertOK(client().performRequest(putComponentTemplateRequest)); - - var request = new Request("GET", "/_migration/deprecations"); - var nodeSettings = (Map) ((List) entityAsMap(client().performRequest(request)).get("node_settings")).getFirst(); - assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); - assertThat( - (String) nodeSettings.get("details"), - containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]") - ); + assertDeprecationWarningForTemplate(templateName); } else if (isUpgradedCluster()) { - var request = new Request("GET", "/_migration/deprecations"); - var nodeSettings = (Map) ((List) entityAsMap(client().performRequest(request)).get("node_settings")).getFirst(); - assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); - assertThat( - (String) nodeSettings.get("details"), - containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]") - ); + assertDeprecationWarningForTemplate(templateName); } } @@ -87,17 +74,24 @@ public void testConfigureStoredSourceWhenIndexIsCreatedLegacy() throws IOExcepti putComponentTemplateRequest.setOptions(expectWarnings(SourceFieldMapper.DEPRECATION_WARNING)); putComponentTemplateRequest.setJsonEntity(storedSourceMapping); assertOK(client().performRequest(putComponentTemplateRequest)); - - var request = new Request("GET", "/_migration/deprecations"); - var nodeSettings = (Map) ((List) entityAsMap(client().performRequest(request)).get("node_settings")).getFirst(); - assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); - assertThat( - (String) nodeSettings.get("details"), - containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]") - ); + assertDeprecationWarningForTemplate(templateName); } else if (isUpgradedCluster()) { - var request = new Request("GET", "/_migration/deprecations"); - var nodeSettings = (Map) ((List) entityAsMap(client().performRequest(request)).get("node_settings")).getFirst(); + assertDeprecationWarningForTemplate(templateName); + } + } + + private void assertDeprecationWarningForTemplate(String templateName) throws IOException { + var request = new Request("GET", "/_migration/deprecations"); + var response = entityAsMap(client().performRequest(request)); + if (response.containsKey("templates")) { + // Check the newer version of the deprecation API that contains the templates section + Map issuesByTemplate = (Map) response.get("templates"); + assertThat(issuesByTemplate.containsKey(templateName), equalTo(true)); + var templateIssues = (List) issuesByTemplate.get(templateName); + assertThat(((Map) templateIssues.getFirst()).get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); + } else { + // Bwc version with 8.18 until https://github.com/elastic/elasticsearch/pull/120505/ gets backported, clean up after backport + var nodeSettings = (Map) ((List) response.get("node_settings")).getFirst(); assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); assertThat( (String) 
nodeSettings.get("details"), diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 981221a1406cf..05c2071ad8d5f 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -167,6 +167,7 @@ static TransportVersion def(int id) { public static final TransportVersion RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN = def(8_833_00_0); public static final TransportVersion ILM_ADD_SEARCHABLE_SNAPSHOT_ADD_REPLICATE_FOR = def(8_834_00_0); public static final TransportVersion INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR = def(8_835_00_0); + public static final TransportVersion RESOURCE_DEPRECATION_CHECKS = def(8_836_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationIssue.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationIssue.java index 2a888506598d7..4f15a9e771ad7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationIssue.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationIssue.java @@ -240,14 +240,7 @@ public static DeprecationIssue getIntersectionOfRemovableSettings(List actions; - private final Map nonActionMetadata; - - Meta(List actions, Map nonActionMetadata) { - this.actions = actions; - this.nonActionMetadata = nonActionMetadata; - } + private record Meta(List actions, Map nonActionMetadata) { private static Meta fromRemovableSettings(List removableSettings) { List actions; @@ -358,12 +351,7 @@ private interface Action { /* * This class a represents remove_settings action within the actions list in a meta Map. */ - private static final class RemovalAction implements Action { - private final List removableSettings; - - RemovalAction(List removableSettings) { - this.removableSettings = removableSettings; - } + private record RemovalAction(List removableSettings) implements Action { @SuppressWarnings("unchecked") private static RemovalAction fromActionMap(Map actionMap) { @@ -398,12 +386,7 @@ public Map toActionMap() { /* * This represents an action within the actions list in a meta Map that is *not* a removal_action. 
*/ - private static class UnknownAction implements Action { - private final Map actionMap; - - private UnknownAction(Map actionMap) { - this.actionMap = actionMap; - } + private record UnknownAction(Map actionMap) implements Action { private static Action fromActionMap(Map actionMap) { return new UnknownAction(actionMap); diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java similarity index 60% rename from x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecks.java rename to x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java index 8af4868f94514..c867092f7bc19 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java @@ -7,20 +7,65 @@ package org.elasticsearch.xpack.deprecation; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.index.Index; import org.elasticsearch.xpack.core.deprecation.DeprecatedIndexPredicate; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.function.BiFunction; import java.util.stream.Collectors; import static java.util.Map.entry; import static java.util.Map.ofEntries; +import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; + +/** + * Checks the data streams for deprecation warnings. + */ +public class DataStreamDeprecationChecker implements ResourceDeprecationChecker { + + public static final String NAME = "data_streams"; + private static final List> DATA_STREAM_CHECKS = List.of( + DataStreamDeprecationChecker::oldIndicesCheck, + DataStreamDeprecationChecker::ignoredOldIndicesCheck + ); + private final IndexNameExpressionResolver indexNameExpressionResolver; + + public DataStreamDeprecationChecker(IndexNameExpressionResolver indexNameExpressionResolver) { + this.indexNameExpressionResolver = indexNameExpressionResolver; + } + + /** + * @param clusterState The cluster state provided for the checker + * @return the name of the data streams that have violated the checks with their respective warnings. + */ + @Override + public Map> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + List dataStreamNames = indexNameExpressionResolver.dataStreamNames( + clusterState, + IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN + ); + if (dataStreamNames.isEmpty()) { + return Map.of(); + } + Map> dataStreamIssues = new HashMap<>(); + for (String dataStreamName : dataStreamNames) { + DataStream dataStream = clusterState.metadata().dataStreams().get(dataStreamName); + List issuesForSingleDataStream = filterChecks(DATA_STREAM_CHECKS, c -> c.apply(dataStream, clusterState)); + if (issuesForSingleDataStream.isEmpty() == false) { + dataStreamIssues.put(dataStreamName, issuesForSingleDataStream); + } + } + return dataStreamIssues.isEmpty() ? 
Map.of() : dataStreamIssues; + } -public class DataStreamDeprecationChecks { static DeprecationIssue oldIndicesCheck(DataStream dataStream, ClusterState clusterState) { List backingIndices = dataStream.getIndices(); @@ -47,9 +92,7 @@ static DeprecationIssue oldIndicesCheck(DataStream dataStream, ClusterState clus static DeprecationIssue ignoredOldIndicesCheck(DataStream dataStream, ClusterState clusterState) { List backingIndices = dataStream.getIndices(); - Set ignoredIndices = getReindexRequiredIndices(backingIndices, clusterState, true); - if (ignoredIndices.isEmpty() == false) { return new DeprecationIssue( DeprecationIssue.Level.WARNING, @@ -66,7 +109,6 @@ static DeprecationIssue ignoredOldIndicesCheck(DataStream dataStream, ClusterSta ) ); } - return null; } @@ -80,4 +122,9 @@ private static Set getReindexRequiredIndices( .map(Index::getName) .collect(Collectors.toUnmodifiableSet()); } + + @Override + public String getName() { + return NAME; + } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java index f7a26597e07fc..039a75f51f030 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -8,8 +8,6 @@ import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.DataStream; -import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; @@ -17,7 +15,6 @@ import java.util.List; import java.util.Objects; -import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; @@ -88,25 +85,9 @@ private DeprecationChecks() {} NodeDeprecationChecks::checkEqlEnabledSetting, NodeDeprecationChecks::checkNodeAttrData, NodeDeprecationChecks::checkWatcherBulkConcurrentRequestsSetting, - NodeDeprecationChecks::checkTracingApmSettings, - NodeDeprecationChecks::checkSourceModeInComponentTemplates + NodeDeprecationChecks::checkTracingApmSettings ); - static List> INDEX_SETTINGS_CHECKS = List.of( - IndexDeprecationChecks::oldIndicesCheck, - IndexDeprecationChecks::ignoredOldIndicesCheck, - IndexDeprecationChecks::translogRetentionSettingCheck, - IndexDeprecationChecks::checkIndexDataPath, - IndexDeprecationChecks::storeTypeSettingCheck, - IndexDeprecationChecks::frozenIndexSettingCheck, - IndexDeprecationChecks::deprecatedCamelCasePattern - ); - - static List> DATA_STREAM_CHECKS = List.of( - DataStreamDeprecationChecks::oldIndicesCheck, - DataStreamDeprecationChecks::ignoredOldIndicesCheck - ); - /** * helper utility function to reduce repeat of running a specific {@link List} of checks. 
* diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index 809870730292a..c89e61fbcf24d 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -16,10 +16,12 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.regex.Regex; @@ -43,7 +45,6 @@ import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; @@ -99,7 +100,7 @@ private static List mergeNodeIssues(NodesDeprecationCheckRespo } /* - * This method pulls all of the DeprecationIssues from the given nodeResponses, and buckets them into lists of DeprecationIssues that + * This method pulls all the DeprecationIssues from the given nodeResponses, and buckets them into lists of DeprecationIssues that * differ at most by meta values (if that). The returned tuples also contain the node name the deprecation issue was found on. If all * nodes in the cluster were configured identically then all tuples in a list will differ only by the node name. */ @@ -146,22 +147,32 @@ private static Map> getMergedIssuesToNodesMap( } public static class Response extends ActionResponse implements ToXContentObject { - static final Set RESERVED_NAMES = Set.of("cluster_settings", "node_settings", "index_settings", "data_streams"); + static final Set RESERVED_NAMES = Set.of( + "cluster_settings", + "node_settings", + "index_settings", + "data_streams", + "templates", + "ilm_policies" + ); + static final Set RESOURCE_CHECKER_FIELD_NAMES = Set.of("index_settings", "data_streams", "templates", "ilm_policies"); private final List clusterSettingsIssues; private final List nodeSettingsIssues; - private final Map> indexSettingsIssues; - private final Map> dataStreamIssues; + private final Map>> resourceDeprecationIssues; private final Map> pluginSettingsIssues; public Response(StreamInput in) throws IOException { super(in); clusterSettingsIssues = in.readCollectionAsList(DeprecationIssue::new); nodeSettingsIssues = in.readCollectionAsList(DeprecationIssue::new); - indexSettingsIssues = in.readMapOfLists(DeprecationIssue::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK)) { - dataStreamIssues = in.readMapOfLists(DeprecationIssue::new); - } else { - dataStreamIssues = Map.of(); + Map>> mutableResourceDeprecations = in.getTransportVersion() + .before(TransportVersions.RESOURCE_DEPRECATION_CHECKS) ? 
new HashMap<>() : Map.of(); + if (in.getTransportVersion().before(TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { + mutableResourceDeprecations.put(IndexDeprecationChecker.NAME, in.readMapOfLists(DeprecationIssue::new)); + } + if (in.getTransportVersion() + .between(TransportVersions.DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK, TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { + mutableResourceDeprecations.put(DataStreamDeprecationChecker.NAME, in.readMapOfLists(DeprecationIssue::new)); } if (in.getTransportVersion().before(TransportVersions.V_7_11_0)) { List mlIssues = in.readCollectionAsList(DeprecationIssue::new); @@ -170,19 +181,22 @@ public Response(StreamInput in) throws IOException { } else { pluginSettingsIssues = in.readMapOfLists(DeprecationIssue::new); } + if (in.getTransportVersion().onOrAfter(TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { + resourceDeprecationIssues = in.readMap(in2 -> in2.readMapOfLists(DeprecationIssue::new)); + } else { + resourceDeprecationIssues = Collections.unmodifiableMap(mutableResourceDeprecations); + } } public Response( List clusterSettingsIssues, List nodeSettingsIssues, - Map> indexSettingsIssues, - Map> dataStreamIssues, + Map>> resourceDeprecationIssues, Map> pluginSettingsIssues ) { this.clusterSettingsIssues = clusterSettingsIssues; this.nodeSettingsIssues = nodeSettingsIssues; - this.indexSettingsIssues = indexSettingsIssues; - this.dataStreamIssues = dataStreamIssues; + this.resourceDeprecationIssues = resourceDeprecationIssues; Set intersection = Sets.intersection(RESERVED_NAMES, pluginSettingsIssues.keySet()); if (intersection.isEmpty() == false) { throw new ElasticsearchStatusException( @@ -203,39 +217,60 @@ public List getNodeSettingsIssues() { } public Map> getIndexSettingsIssues() { - return indexSettingsIssues; + return resourceDeprecationIssues.getOrDefault(IndexDeprecationChecker.NAME, Map.of()); } public Map> getPluginSettingsIssues() { return pluginSettingsIssues; } + public Map> getDataStreamDeprecationIssues() { + return resourceDeprecationIssues.getOrDefault(DataStreamDeprecationChecker.NAME, Map.of()); + } + + public Map> getTemplateDeprecationIssues() { + return resourceDeprecationIssues.getOrDefault(TemplateDeprecationChecker.NAME, Map.of()); + } + + public Map> getIlmPolicyDeprecationIssues() { + return resourceDeprecationIssues.getOrDefault(IlmPolicyDeprecationChecker.NAME, Map.of()); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(clusterSettingsIssues); out.writeCollection(nodeSettingsIssues); - out.writeMap(indexSettingsIssues, StreamOutput::writeCollection); - if (out.getTransportVersion().onOrAfter(TransportVersions.DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK)) { - out.writeMap(dataStreamIssues, StreamOutput::writeCollection); + if (out.getTransportVersion().before(TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { + out.writeMap(getIndexSettingsIssues(), StreamOutput::writeCollection); + } + if (out.getTransportVersion() + .between(TransportVersions.DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK, TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { + out.writeMap(getDataStreamDeprecationIssues(), StreamOutput::writeCollection); } if (out.getTransportVersion().before(TransportVersions.V_7_11_0)) { out.writeCollection(pluginSettingsIssues.getOrDefault("ml_settings", Collections.emptyList())); } else { out.writeMap(pluginSettingsIssues, StreamOutput::writeCollection); } + if (out.getTransportVersion().onOrAfter(TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { + 
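+                // When the receiving node understands RESOURCE_DEPRECATION_CHECKS, all resource checker results are written as one nested map: checker name -> resource name -> issues.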
out.writeMap(resourceDeprecationIssues, (o, v) -> o.writeMap(v, StreamOutput::writeCollection)); + } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject() + builder.startObject() .array("cluster_settings", clusterSettingsIssues.toArray()) .array("node_settings", nodeSettingsIssues.toArray()) - .field("index_settings") - .map(indexSettingsIssues) - .field("data_streams") - .map(dataStreamIssues) - .mapContents(pluginSettingsIssues) - .endObject(); + .mapContents(resourceDeprecationIssues) + .mapContents(pluginSettingsIssues); + // Ensure that all the required fields are present in the response. + for (String fieldName : RESOURCE_CHECKER_FIELD_NAMES) { + if (resourceDeprecationIssues.containsKey(fieldName) == false) { + builder.startObject(fieldName).endObject(); + } + } + return builder.endObject(); } @Override @@ -245,13 +280,13 @@ public boolean equals(Object o) { Response response = (Response) o; return Objects.equals(clusterSettingsIssues, response.clusterSettingsIssues) && Objects.equals(nodeSettingsIssues, response.nodeSettingsIssues) - && Objects.equals(indexSettingsIssues, response.indexSettingsIssues) + && Objects.equals(resourceDeprecationIssues, response.resourceDeprecationIssues) && Objects.equals(pluginSettingsIssues, response.pluginSettingsIssues); } @Override public int hashCode() { - return Objects.hash(clusterSettingsIssues, nodeSettingsIssues, indexSettingsIssues, pluginSettingsIssues); + return Objects.hash(clusterSettingsIssues, nodeSettingsIssues, resourceDeprecationIssues, pluginSettingsIssues); } /** @@ -264,9 +299,12 @@ public int hashCode() { * @param indexNameExpressionResolver Used to resolve indices into their concrete names * @param request The originating request containing the index expressions to evaluate * @param nodeDeprecationResponse The response containing the deprecation issues found on each node - * @param indexSettingsChecks The list of index-level checks that will be run across all specified - * concrete indices * @param clusterSettingsChecks The list of cluster-level checks + * @param pluginSettingIssues this map gets modified to move transform deprecation issues into cluster_settings + * @param skipTheseDeprecatedSettings the settings that will be removed from cluster metadata and the index metadata of all the + * indexes specified by indexNames + * @param resourceDeprecationCheckers these are checkers that take as input the cluster state and return a map from resource type + * to issues grouped by the resource name. 
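+     *                                    For example, the index checker contributes a map from concrete index name to its issues under the "index_settings" key.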
* @return The list of deprecation issues found in the cluster */ public static DeprecationInfoAction.Response from( @@ -274,11 +312,10 @@ public static DeprecationInfoAction.Response from( IndexNameExpressionResolver indexNameExpressionResolver, Request request, NodesDeprecationCheckResponse nodeDeprecationResponse, - List> indexSettingsChecks, - List> dataStreamChecks, List> clusterSettingsChecks, Map> pluginSettingIssues, - List skipTheseDeprecatedSettings + List skipTheseDeprecatedSettings, + List resourceDeprecationCheckers ) { assert Transports.assertNotTransportThread("walking mappings in indexSettingsChecks is expensive"); // Allow system index access here to prevent deprecation warnings when we call this API @@ -290,28 +327,11 @@ public static DeprecationInfoAction.Response from( ); List nodeSettingsIssues = mergeNodeIssues(nodeDeprecationResponse); - Map> indexSettingsIssues = new HashMap<>(); - for (String concreteIndex : concreteIndexNames) { - IndexMetadata indexMetadata = stateWithSkippedSettingsRemoved.getMetadata().index(concreteIndex); - List singleIndexIssues = filterChecks( - indexSettingsChecks, - c -> c.apply(indexMetadata, stateWithSkippedSettingsRemoved) - ); - if (singleIndexIssues.size() > 0) { - indexSettingsIssues.put(concreteIndex, singleIndexIssues); - } - } - - List dataStreamNames = indexNameExpressionResolver.dataStreamNames( - state, - IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN_NO_SELECTOR - ); - Map> dataStreamIssues = new HashMap<>(); - for (String dataStreamName : dataStreamNames) { - DataStream dataStream = stateWithSkippedSettingsRemoved.metadata().dataStreams().get(dataStreamName); - List issuesForSingleDataStream = filterChecks(dataStreamChecks, c -> c.apply(dataStream, state)); - if (issuesForSingleDataStream.isEmpty() == false) { - dataStreamIssues.put(dataStreamName, issuesForSingleDataStream); + Map>> resourceDeprecationIssues = new HashMap<>(); + for (ResourceDeprecationChecker resourceDeprecationChecker : resourceDeprecationCheckers) { + Map> issues = resourceDeprecationChecker.check(stateWithSkippedSettingsRemoved, request); + if (issues.isEmpty() == false) { + resourceDeprecationIssues.put(resourceDeprecationChecker.getName(), issues); } } @@ -326,8 +346,7 @@ public static DeprecationInfoAction.Response from( return new DeprecationInfoAction.Response( clusterSettingsIssues, nodeSettingsIssues, - indexSettingsIssues, - dataStreamIssues, + resourceDeprecationIssues, pluginSettingIssues ); } @@ -342,6 +361,10 @@ public static DeprecationInfoAction.Response from( * @return A modified cluster state with the given settings removed */ private static ClusterState removeSkippedSettings(ClusterState state, String[] indexNames, List skipTheseDeprecatedSettings) { + // Short-circuit, no need to reconstruct the cluster state if there are no settings to remove + if (skipTheseDeprecatedSettings == null || skipTheseDeprecatedSettings.isEmpty()) { + return state; + } ClusterState.Builder clusterStateBuilder = new ClusterState.Builder(state); Metadata.Builder metadataBuilder = Metadata.builder(state.metadata()); metadataBuilder.transientSettings( @@ -359,6 +382,47 @@ private static ClusterState removeSkippedSettings(ClusterState state, String[] i filteredIndexMetadataBuilder.settings(filteredSettings); indicesBuilder.put(indexName, filteredIndexMetadataBuilder.build()); } + metadataBuilder.componentTemplates(state.metadata().componentTemplates().entrySet().stream().map(entry -> { + String templateName = entry.getKey(); + ComponentTemplate 
componentTemplate = entry.getValue(); + Template template = componentTemplate.template(); + if (template.settings() == null || template.settings().isEmpty()) { + return Tuple.tuple(templateName, componentTemplate); + } + return Tuple.tuple( + templateName, + new ComponentTemplate( + Template.builder(template) + .settings(template.settings().filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false)) + .build(), + componentTemplate.version(), + componentTemplate.metadata(), + componentTemplate.deprecated() + ) + ); + }).collect(Collectors.toMap(Tuple::v1, Tuple::v2))); + metadataBuilder.indexTemplates(state.metadata().templatesV2().entrySet().stream().map(entry -> { + String templateName = entry.getKey(); + ComposableIndexTemplate indexTemplate = entry.getValue(); + Template template = indexTemplate.template(); + if (templateName == null || template.settings() == null || template.settings().isEmpty()) { + return Tuple.tuple(templateName, indexTemplate); + } + return Tuple.tuple( + templateName, + indexTemplate.toBuilder() + .template( + Template.builder(indexTemplate.template()) + .settings( + indexTemplate.template() + .settings() + .filter(setting -> Regex.simpleMatch(skipTheseDeprecatedSettings, setting) == false) + ) + ) + .build() + ); + }).collect(Collectors.toMap(Tuple::v1, Tuple::v2))); + metadataBuilder.indices(indicesBuilder); clusterStateBuilder.metadata(metadataBuilder); return clusterStateBuilder.build(); diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationChecker.java new file mode 100644 index 0000000000000..6d7f860f645f1 --- /dev/null +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationChecker.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.deprecation; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; +import org.elasticsearch.xpack.core.ilm.AllocateAction; +import org.elasticsearch.xpack.core.ilm.FreezeAction; +import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata; +import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; +import org.elasticsearch.xpack.core.ilm.LifecyclePolicyMetadata; +import org.elasticsearch.xpack.core.ilm.Phase; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; +import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL; +import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL; +import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_MESSAGE; +import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.containsDeprecatedFilteredAllocationConfig; + +/** + * Checks the ILM policies for deprecation warnings. 
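+ * Issues are reported per ILM policy, keyed by the policy name.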
+ */ +public class IlmPolicyDeprecationChecker implements ResourceDeprecationChecker { + + public static final String NAME = "ilm_policies"; + private static final List> CHECKS = List.of( + IlmPolicyDeprecationChecker::checkLegacyTiers, + IlmPolicyDeprecationChecker::checkFrozenAction + ); + + /** + * @param clusterState The cluster state provided for the checker + * @return the name of the data streams that have violated the checks with their respective warnings. + */ + @Override + public Map> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + IndexLifecycleMetadata lifecycleMetadata = clusterState.metadata().custom(IndexLifecycleMetadata.TYPE); + if (lifecycleMetadata == null || lifecycleMetadata.getPolicyMetadatas().isEmpty()) { + return Map.of(); + } + Map> issues = new HashMap<>(); + for (Map.Entry entry : lifecycleMetadata.getPolicyMetadatas().entrySet()) { + String name = entry.getKey(); + LifecyclePolicyMetadata policyMetadata = entry.getValue(); + + List issuesForSinglePolicy = filterChecks(CHECKS, c -> c.apply(policyMetadata.getPolicy())); + if (issuesForSinglePolicy.isEmpty() == false) { + issues.put(name, issuesForSinglePolicy); + } + } + return issues.isEmpty() ? Map.of() : issues; + } + + static DeprecationIssue checkLegacyTiers(LifecyclePolicy policy) { + for (Phase phase : policy.getPhases().values()) { + AllocateAction allocateAction = (AllocateAction) phase.getActions().get(AllocateAction.NAME); + if (allocateAction != null) { + if (containsDeprecatedFilteredAllocationConfig(allocateAction.getExclude()) + || containsDeprecatedFilteredAllocationConfig(allocateAction.getInclude()) + || containsDeprecatedFilteredAllocationConfig(allocateAction.getRequire())) { + return new DeprecationIssue( + DeprecationIssue.Level.WARNING, + DEPRECATION_MESSAGE, + DEPRECATION_HELP_URL, + "One or more of your ILM policies is configuring tiers via the 'data' node attribute. " + DEPRECATION_COMMON_DETAIL, + false, + null + ); + } + } + } + return null; + } + + static DeprecationIssue checkFrozenAction(LifecyclePolicy policy) { + for (Phase phase : policy.getPhases().values()) { + if (phase.getActions().containsKey(FreezeAction.NAME)) { + return new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "ILM policy [" + + policy.getName() + + "] contains the action 'freeze' that is deprecated and will be removed in a future version.", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/frozen-indices.html", + "This action is already a noop so it can be safely removed, because frozen indices no longer offer any advantages." 
+ + " Consider cold or frozen tiers in place of frozen indices.", + false, + null + ); + } + } + return null; + } + + @Override + public String getName() { + return NAME; + } +} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java similarity index 80% rename from x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java rename to x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java index 5a9d6771e5f45..e941ebfc05e49 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java @@ -8,6 +8,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.LegacyFormatNames; @@ -20,6 +21,7 @@ import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; @@ -27,10 +29,54 @@ import java.util.function.BiFunction; import java.util.function.Function; +import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; +import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL; +import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL; + /** * Index-specific deprecation checks */ -public class IndexDeprecationChecks { +public class IndexDeprecationChecker implements ResourceDeprecationChecker { + + public static final String NAME = "index_settings"; + private static final List> INDEX_SETTINGS_CHECKS = List.of( + IndexDeprecationChecker::oldIndicesCheck, + IndexDeprecationChecker::ignoredOldIndicesCheck, + IndexDeprecationChecker::translogRetentionSettingCheck, + IndexDeprecationChecker::checkIndexDataPath, + IndexDeprecationChecker::storeTypeSettingCheck, + IndexDeprecationChecker::frozenIndexSettingCheck, + IndexDeprecationChecker::deprecatedCamelCasePattern, + IndexDeprecationChecker::legacyRoutingSettingCheck + ); + + private final IndexNameExpressionResolver indexNameExpressionResolver; + + public IndexDeprecationChecker(IndexNameExpressionResolver indexNameExpressionResolver) { + this.indexNameExpressionResolver = indexNameExpressionResolver; + } + + @Override + public Map> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + Map> indexSettingsIssues = new HashMap<>(); + String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(clusterState, request); + for (String concreteIndex : concreteIndexNames) { + IndexMetadata indexMetadata = clusterState.getMetadata().index(concreteIndex); + List singleIndexIssues = filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetadata, clusterState)); + if (singleIndexIssues.isEmpty() == false) { + indexSettingsIssues.put(concreteIndex, singleIndexIssues); + } + } + if (indexSettingsIssues.isEmpty()) { + return Map.of(); + } + return indexSettingsIssues; + } + + @Override + public String getName() { + return NAME; + } static DeprecationIssue 
oldIndicesCheck(IndexMetadata indexMetadata, ClusterState clusterState) { // TODO: this check needs to be revised. It's trivially true right now. @@ -148,6 +194,22 @@ static DeprecationIssue frozenIndexSettingCheck(IndexMetadata indexMetadata, Clu return null; } + static DeprecationIssue legacyRoutingSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + List deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(indexMetadata.getSettings()); + if (deprecatedSettings.isEmpty()) { + return null; + } + String indexName = indexMetadata.getIndex().getName(); + return new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "index [" + indexName + "] is configuring tiers via filtered allocation which is not recommended.", + DEPRECATION_HELP_URL, + "One or more of your indices is configured with 'index.routing.allocation.*.data' settings. " + DEPRECATION_COMMON_DETAIL, + false, + DeprecationIssue.createMetaMapForRemovableSettings(deprecatedSettings) + ); + } + private static void fieldLevelMappingIssue(IndexMetadata indexMetadata, BiConsumer> checker) { if (indexMetadata.mapping() != null) { Map sourceAsMap = indexMetadata.mapping().sourceAsMap(); @@ -228,8 +290,8 @@ static DeprecationIssue deprecatedCamelCasePattern(IndexMetadata indexMetadata, findInPropertiesRecursively( mappingMetadata.type(), sourceAsMap, - IndexDeprecationChecks::isDateFieldWithCamelCasePattern, - IndexDeprecationChecks::changeFormatToSnakeCase, + IndexDeprecationChecker::isDateFieldWithCamelCasePattern, + IndexDeprecationChecker::changeFormatToSnakeCase, "", "" ) diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyIndexTemplateDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyIndexTemplateDeprecationChecker.java new file mode 100644 index 0000000000000..f7aba6491dfd2 --- /dev/null +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyIndexTemplateDeprecationChecker.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.deprecation; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; +import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; +import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL; +import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL; +import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_MESSAGE; + +/** + * Checks the legacy index templates for deprecation warnings. 
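+ * Issues are reported per legacy index template, keyed by the template name.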
+ */ +public class LegacyIndexTemplateDeprecationChecker implements ResourceDeprecationChecker { + + public static final String NAME = "legacy_templates"; + private static final List> CHECKS = List.of( + LegacyIndexTemplateDeprecationChecker::checkIndexTemplates + ); + + /** + * @param clusterState The cluster state provided for the checker + * @return the name of the data streams that have violated the checks with their respective warnings. + */ + @Override + public Map> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + var templates = clusterState.metadata().templates().entrySet(); + if (templates.isEmpty()) { + return Map.of(); + } + Map> issues = new HashMap<>(); + for (Map.Entry entry : templates) { + String name = entry.getKey(); + IndexTemplateMetadata template = entry.getValue(); + + List issuesForSingleIndexTemplate = filterChecks(CHECKS, c -> c.apply(template)); + if (issuesForSingleIndexTemplate.isEmpty() == false) { + issues.put(name, issuesForSingleIndexTemplate); + } + } + return issues.isEmpty() ? Map.of() : issues; + } + + static DeprecationIssue checkIndexTemplates(IndexTemplateMetadata indexTemplateMetadata) { + List deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(indexTemplateMetadata.settings()); + if (deprecatedSettings.isEmpty()) { + return null; + } + return new DeprecationIssue( + DeprecationIssue.Level.WARNING, + DEPRECATION_MESSAGE, + DEPRECATION_HELP_URL, + "One or more of your legacy index templates is configured with 'index.routing.allocation.*.data' settings. " + + DEPRECATION_COMMON_DETAIL, + false, + DeprecationIssue.createMetaMapForRemovableSettings(deprecatedSettings) + ); + } + + @Override + public String getName() { + return NAME; + } +} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyTiersDetection.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyTiersDetection.java new file mode 100644 index 0000000000000..2455ba8e7487a --- /dev/null +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/LegacyTiersDetection.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.deprecation; + +import org.elasticsearch.common.settings.Settings; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_SETTING; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING; + +/** + * Contains conctants and helper methods that help detect when the settings or the ILM policy are + * implementing tiers via filtered allocation. + */ +public class LegacyTiersDetection { + + public static final String DEPRECATION_MESSAGE = "Configuring tiers via filtered allocation is not recommended."; + public static final String DEPRECATION_COMMON_DETAIL = + "This is typically used to create a hot/warm or tiered architecture, based on legacy guidelines." 
+ + " Data tiers are a recommended replacement for tiered architecture clusters."; + public static final String DEPRECATION_HELP_URL = "https://ela.st/migrate-to-tiers"; + + public static final String NODE_ATTRIBUTE = "data"; + private static final Set DEPRECATED_SETTINGS = Set.of( + INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + NODE_ATTRIBUTE, + INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + NODE_ATTRIBUTE, + INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + NODE_ATTRIBUTE + ); + + private static final Set TIER_VALUES = Set.of("hot", "warm", "cold", "frozen"); + + /** + * @return the deprecated filtered allocation settings that are used as tier implementation + */ + public static List getDeprecatedFilteredAllocationSettings(Settings settings) { + if (settings == null || settings.isEmpty()) { + return List.of(); + } + + return DEPRECATED_SETTINGS.stream().filter(setting -> { + String value = settings.get(setting); + return value != null && TIER_VALUES.contains(value); + }).toList(); + } + + /** + * @param nodeAttributeConfiguration a map from node attribute to value + * @return + */ + public static boolean containsDeprecatedFilteredAllocationConfig(Map nodeAttributeConfiguration) { + return nodeAttributeConfiguration != null + && nodeAttributeConfiguration.containsKey(NODE_ATTRIBUTE) + && TIER_VALUES.contains(nodeAttributeConfiguration.get(NODE_ATTRIBUTE)); + } +} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java index f1a1f91ba35a0..b6fff5a82f0cd 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java @@ -9,15 +9,12 @@ import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.script.ScriptService; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; @@ -1015,43 +1012,4 @@ static DeprecationIssue checkTracingApmSettings( DeprecationIssue.Level.CRITICAL ); } - - static DeprecationIssue checkSourceModeInComponentTemplates( - final Settings settings, - final PluginsAndModules pluginsAndModules, - final ClusterState clusterState, - final XPackLicenseState licenseState - ) { - List templates = new ArrayList<>(); - var templateNames = clusterState.metadata().componentTemplates().keySet(); - for (String templateName : templateNames) { - ComponentTemplate template = clusterState.metadata().componentTemplates().get(templateName); - if (template.template().mappings() != null) { - var sourceAsMap = (Map) XContentHelper.convertToMap(template.template().mappings().uncompressed(), true) - .v2() - .get("_doc"); - if (sourceAsMap != null) { - Object source = sourceAsMap.get("_source"); - if (source instanceof Map sourceMap) { - if 
(sourceMap.containsKey("mode")) { - templates.add(templateName); - } - } - } - } - - } - if (templates.isEmpty()) { - return null; - } - Collections.sort(templates); - return new DeprecationIssue( - DeprecationIssue.Level.CRITICAL, - SourceFieldMapper.DEPRECATION_WARNING, - "https://github.com/elastic/elasticsearch/pull/117172", - SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + String.join(", ", templates) + "]", - false, - null - ); - } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ResourceDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ResourceDeprecationChecker.java new file mode 100644 index 0000000000000..71b9903f69f86 --- /dev/null +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ResourceDeprecationChecker.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.deprecation; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; + +import java.util.List; +import java.util.Map; + +/** + * The resource deprecation checker allows us to extend the deprecation API by adding deprecation checks for resources. It differs + * from the {@link DeprecationChecker} because it groups the deprecation issues by a single resource, not only by type. For example, + * the "data_streams" checker will contain a map from data stream name its deprecation issues. + */ +public interface ResourceDeprecationChecker { + + /** + * This runs the checks for the current deprecation checker. + * + * @param clusterState The cluster state provided for the checker + */ + Map> check(ClusterState clusterState, DeprecationInfoAction.Request request); + + /** + * @return The name of the checker + */ + String getName(); +} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationChecker.java new file mode 100644 index 0000000000000..5a451a9613797 --- /dev/null +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationChecker.java @@ -0,0 +1,139 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.deprecation; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.filterChecks; +import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_COMMON_DETAIL; +import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_HELP_URL; +import static org.elasticsearch.xpack.deprecation.LegacyTiersDetection.DEPRECATION_MESSAGE; + +/** + * Checks the index and component templates for deprecation warnings. + */ +public class TemplateDeprecationChecker implements ResourceDeprecationChecker { + + public static final String NAME = "templates"; + private static final List> INDEX_TEMPLATE_CHECKS = List.of( + TemplateDeprecationChecker::checkLegacyTiersInIndexTemplate + ); + private static final List> COMPONENT_TEMPLATE_CHECKS = List.of( + TemplateDeprecationChecker::checkSourceModeInComponentTemplates, + TemplateDeprecationChecker::checkLegacyTiersInComponentTemplates + ); + + /** + * @param clusterState The cluster state provided for the checker + * @return the name of the data streams that have violated the checks with their respective warnings. + */ + @Override + public Map> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + var indexTemplates = clusterState.metadata().templatesV2().entrySet(); + var componentTemplates = clusterState.metadata().componentTemplates().entrySet(); + if (indexTemplates.isEmpty() && componentTemplates.isEmpty()) { + return Map.of(); + } + Map> issues = new HashMap<>(); + for (Map.Entry entry : indexTemplates) { + String name = entry.getKey(); + ComposableIndexTemplate template = entry.getValue(); + + List issuesForSingleIndexTemplate = filterChecks(INDEX_TEMPLATE_CHECKS, c -> c.apply(template)); + if (issuesForSingleIndexTemplate.isEmpty() == false) { + issues.computeIfAbsent(name, ignored -> new ArrayList<>()).addAll(issuesForSingleIndexTemplate); + } + } + for (Map.Entry entry : componentTemplates) { + String name = entry.getKey(); + ComponentTemplate template = entry.getValue(); + + List issuesForSingleIndexTemplate = filterChecks(COMPONENT_TEMPLATE_CHECKS, c -> c.apply(template)); + if (issuesForSingleIndexTemplate.isEmpty() == false) { + issues.computeIfAbsent(name, ignored -> new ArrayList<>()).addAll(issuesForSingleIndexTemplate); + } + } + return issues.isEmpty() ? Map.of() : issues; + } + + static DeprecationIssue checkLegacyTiersInIndexTemplate(ComposableIndexTemplate composableIndexTemplate) { + Template template = composableIndexTemplate.template(); + if (template != null) { + List deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(template.settings()); + if (deprecatedSettings.isEmpty()) { + return null; + } + return new DeprecationIssue( + DeprecationIssue.Level.WARNING, + DEPRECATION_MESSAGE, + DEPRECATION_HELP_URL, + "One or more of your index templates is configured with 'index.routing.allocation.*.data' settings. 
" + + DEPRECATION_COMMON_DETAIL, + false, + DeprecationIssue.createMetaMapForRemovableSettings(deprecatedSettings) + ); + } + return null; + } + + static DeprecationIssue checkSourceModeInComponentTemplates(ComponentTemplate template) { + if (template.template().mappings() != null) { + var sourceAsMap = (Map) XContentHelper.convertToMap(template.template().mappings().uncompressed(), true).v2().get("_doc"); + if (sourceAsMap != null) { + Object source = sourceAsMap.get("_source"); + if (source instanceof Map sourceMap) { + if (sourceMap.containsKey("mode")) { + return new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + SourceFieldMapper.DEPRECATION_WARNING, + "https://github.com/elastic/elasticsearch/pull/117172", + null, + false, + null + ); + } + } + } + } + return null; + } + + static DeprecationIssue checkLegacyTiersInComponentTemplates(ComponentTemplate componentTemplate) { + Template template = componentTemplate.template(); + List deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(template.settings()); + if (deprecatedSettings.isEmpty()) { + return null; + } + return new DeprecationIssue( + DeprecationIssue.Level.WARNING, + DEPRECATION_MESSAGE, + DEPRECATION_HELP_URL, + "One or more of your component templates is configured with 'index.routing.allocation.*.data' settings. " + + DEPRECATION_COMMON_DETAIL, + false, + DeprecationIssue.createMetaMapForRemovableSettings(deprecatedSettings) + ); + } + + @Override + public String getName() { + return NAME; + } +} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java index f700530adef81..5ff1acf2c0e24 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java @@ -36,8 +36,6 @@ import java.util.stream.Collectors; import static org.elasticsearch.xpack.deprecation.DeprecationChecks.CLUSTER_SETTINGS_CHECKS; -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.DATA_STREAM_CHECKS; -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.INDEX_SETTINGS_CHECKS; public class TransportDeprecationInfoAction extends TransportMasterNodeReadAction< DeprecationInfoAction.Request, @@ -50,6 +48,7 @@ public class TransportDeprecationInfoAction extends TransportMasterNodeReadActio private final Settings settings; private final NamedXContentRegistry xContentRegistry; private volatile List skipTheseDeprecations; + private final List resourceDeprecationCheckers; @Inject public TransportDeprecationInfoAction( @@ -76,6 +75,12 @@ public TransportDeprecationInfoAction( this.indexNameExpressionResolver = indexNameExpressionResolver; this.settings = settings; this.xContentRegistry = xContentRegistry; + this.resourceDeprecationCheckers = List.of( + new IndexDeprecationChecker(indexNameExpressionResolver), + new DataStreamDeprecationChecker(indexNameExpressionResolver), + new TemplateDeprecationChecker(), + new IlmPolicyDeprecationChecker() + ); skipTheseDeprecations = DeprecationChecks.SKIP_DEPRECATIONS_SETTING.get(settings); // Safe to register this here because it happens synchronously before the cluster service is started: clusterService.getClusterSettings() @@ -133,11 +138,10 @@ protected final void masterOperation( 
indexNameExpressionResolver, request, response, - INDEX_SETTINGS_CHECKS, - DATA_STREAM_CHECKS, CLUSTER_SETTINGS_CHECKS, deprecationIssues, - skipTheseDeprecations + skipTheseDeprecations, + resourceDeprecationCheckers ) ) ) diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java similarity index 84% rename from x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecksTests.java rename to x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java index edc7ea03823da..e3c205ff8c740 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.snapshots.SearchableSnapshotsSettings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; @@ -29,14 +30,14 @@ import java.util.Map; import java.util.Set; -import static java.util.Collections.singletonList; import static java.util.Map.entry; import static java.util.Map.ofEntries; import static org.elasticsearch.index.IndexModule.INDEX_STORE_TYPE_SETTING; -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.DATA_STREAM_CHECKS; import static org.hamcrest.Matchers.equalTo; -public class DataStreamDeprecationChecksTests extends ESTestCase { +public class DataStreamDeprecationCheckerTests extends ESTestCase { + + private final DataStreamDeprecationChecker checker = new DataStreamDeprecationChecker(TestIndexNameExpressionResolver.newInstance()); public void testOldIndicesCheck() { int oldIndexCount = randomIntBetween(1, 100); @@ -47,7 +48,10 @@ public void testOldIndicesCheck() { DataStream dataStream = createTestDataStream(oldIndexCount, 0, newIndexCount, 0, nameToIndexMetadata, expectedIndices); - Metadata metadata = Metadata.builder().indices(nameToIndexMetadata).build(); + Metadata metadata = Metadata.builder() + .indices(nameToIndexMetadata) + .dataStreams(Map.of(dataStream.getName(), dataStream), Map.of()) + .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); DeprecationIssue expected = new DeprecationIssue( @@ -64,9 +68,11 @@ public void testOldIndicesCheck() { ) ); - List issues = DeprecationChecks.filterChecks(DATA_STREAM_CHECKS, c -> c.apply(dataStream, clusterState)); - - assertThat(issues, equalTo(singletonList(expected))); + // We know that the data stream checks ignore the request. 
+ Map> issuesByDataStream = checker.check(clusterState, null); + assertThat(issuesByDataStream.size(), equalTo(1)); + assertThat(issuesByDataStream.containsKey(dataStream.getName()), equalTo(true)); + assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected))); } public void testOldIndicesCheckWithOnlyClosedOrNewIndices() { @@ -87,12 +93,14 @@ public void testOldIndicesCheckWithOnlyClosedOrNewIndices() { expectedIndices ); - Metadata metadata = Metadata.builder().indices(nameToIndexMetadata).build(); + Metadata metadata = Metadata.builder() + .indices(nameToIndexMetadata) + .dataStreams(Map.of(dataStream.getName(), dataStream), Map.of()) + .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); - List issues = DeprecationChecks.filterChecks(DATA_STREAM_CHECKS, c -> c.apply(dataStream, clusterState)); - - assertThat(issues.size(), equalTo(0)); + Map> issuesByDataStream = checker.check(clusterState, null); + assertThat(issuesByDataStream.size(), equalTo(0)); } public void testOldIndicesCheckWithClosedAndOpenIndices() { @@ -117,7 +125,10 @@ public void testOldIndicesCheckWithClosedAndOpenIndices() { expectedIndices ); - Metadata metadata = Metadata.builder().indices(nameToIndexMetadata).build(); + Metadata metadata = Metadata.builder() + .indices(nameToIndexMetadata) + .dataStreams(Map.of(dataStream.getName(), dataStream), Map.of()) + .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); DeprecationIssue expected = new DeprecationIssue( @@ -134,9 +145,9 @@ public void testOldIndicesCheckWithClosedAndOpenIndices() { ) ); - List issues = DeprecationChecks.filterChecks(DATA_STREAM_CHECKS, c -> c.apply(dataStream, clusterState)); - - assertThat(issues, equalTo(singletonList(expected))); + Map> issuesByDataStream = checker.check(clusterState, null); + assertThat(issuesByDataStream.containsKey(dataStream.getName()), equalTo(true)); + assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected))); } /* @@ -273,7 +284,10 @@ public void testOldIndicesIgnoredWarningCheck() { null ); - Metadata metadata = Metadata.builder().indices(nameToIndexMetadata).build(); + Metadata metadata = Metadata.builder() + .indices(nameToIndexMetadata) + .dataStreams(Map.of(dataStream.getName(), dataStream), Map.of()) + .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); DeprecationIssue expected = new DeprecationIssue( @@ -291,9 +305,9 @@ public void testOldIndicesIgnoredWarningCheck() { ) ); - List issues = DeprecationChecks.filterChecks(DATA_STREAM_CHECKS, c -> c.apply(dataStream, clusterState)); - - assertThat(issues, equalTo(singletonList(expected))); + Map> issuesByDataStream = checker.check(clusterState, null); + assertThat(issuesByDataStream.containsKey(dataStream.getName()), equalTo(true)); + assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected))); } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java index 67950f3b9f623..28fd14abecbc1 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java @@ 
-9,10 +9,13 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.common.Strings; @@ -31,7 +34,6 @@ import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -44,7 +46,6 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.deprecation.DeprecationInfoAction.Response.RESERVED_NAMES; import static org.hamcrest.Matchers.empty; import static org.hamcrest.core.IsEqual.equalTo; @@ -53,39 +54,98 @@ public class DeprecationInfoActionResponseTests extends AbstractWireSerializingT @Override protected DeprecationInfoAction.Response createTestInstance() { - List clusterIssues = Stream.generate(DeprecationInfoActionResponseTests::createTestDeprecationIssue) - .limit(randomIntBetween(0, 10)) - .collect(Collectors.toList()); - List nodeIssues = Stream.generate(DeprecationInfoActionResponseTests::createTestDeprecationIssue) - .limit(randomIntBetween(0, 10)) - .collect(Collectors.toList()); - Map> indexIssues = new HashMap<>(); - for (int i = 0; i < randomIntBetween(0, 10); i++) { - List perIndexIssues = Stream.generate(DeprecationInfoActionResponseTests::createTestDeprecationIssue) - .limit(randomIntBetween(0, 10)) - .collect(Collectors.toList()); - indexIssues.put(randomAlphaOfLength(10), perIndexIssues); - } - Map> dataStreamIssues = new HashMap<>(); - for (int i = 0; i < randomIntBetween(0, 10); i++) { - List perDataStreamIssues = Stream.generate(DeprecationInfoActionResponseTests::createTestDeprecationIssue) - .limit(randomIntBetween(0, 10)) - .collect(Collectors.toList()); - dataStreamIssues.put(randomAlphaOfLength(10), perDataStreamIssues); - } - Map> pluginIssues = new HashMap<>(); - for (int i = 0; i < randomIntBetween(0, 10); i++) { - List perPluginIssues = Stream.generate(DeprecationInfoActionResponseTests::createTestDeprecationIssue) - .limit(randomIntBetween(0, 10)) - .collect(Collectors.toList()); - pluginIssues.put(randomAlphaOfLength(10), perPluginIssues); - } - return new DeprecationInfoAction.Response(clusterIssues, nodeIssues, indexIssues, dataStreamIssues, pluginIssues); + List clusterIssues = randomDeprecationIssues(); + List nodeIssues = randomDeprecationIssues(); + Map> indexIssues = randomMap( + 0, + 10, + () -> Tuple.tuple(randomAlphaOfLength(10), randomDeprecationIssues()) + ); + Map> dataStreamIssues = randomMap( + 0, + 10, + () -> Tuple.tuple(randomAlphaOfLength(10), randomDeprecationIssues()) + ); + Map> templateIssues = randomMap( + 0, + 10, + () -> Tuple.tuple(randomAlphaOfLength(10), randomDeprecationIssues()) + ); + Map> ilmPolicyIssues = randomMap( + 0, + 10, + () -> Tuple.tuple(randomAlphaOfLength(10), randomDeprecationIssues()) + ); + Map> pluginIssues = randomMap( + 0, + 10, + () -> Tuple.tuple(randomAlphaOfLength(10), 
randomDeprecationIssues()) + ); + return new DeprecationInfoAction.Response( + clusterIssues, + nodeIssues, + Map.of( + "data_streams", + dataStreamIssues, + "index_settings", + indexIssues, + "templates", + templateIssues, + "ilm_policies", + ilmPolicyIssues + ), + pluginIssues + ); } @Override protected DeprecationInfoAction.Response mutateInstance(DeprecationInfoAction.Response instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 + List clusterIssues = instance.getClusterSettingsIssues(); + List nodeIssues = instance.getNodeSettingsIssues(); + Map> indexIssues = instance.getIndexSettingsIssues(); + Map> dataStreamIssues = instance.getDataStreamDeprecationIssues(); + Map> templateIssues = instance.getTemplateDeprecationIssues(); + Map> ilmPolicyIssues = instance.getIlmPolicyDeprecationIssues(); + Map> pluginIssues = instance.getPluginSettingsIssues(); + switch (randomIntBetween(1, 7)) { + case 1 -> clusterIssues = randomValueOtherThan(clusterIssues, DeprecationInfoActionResponseTests::randomDeprecationIssues); + case 2 -> nodeIssues = randomValueOtherThan(nodeIssues, DeprecationInfoActionResponseTests::randomDeprecationIssues); + case 3 -> indexIssues = randomValueOtherThan( + indexIssues, + () -> randomMap(0, 10, () -> Tuple.tuple(randomAlphaOfLength(10), randomDeprecationIssues())) + ); + case 4 -> dataStreamIssues = randomValueOtherThan( + dataStreamIssues, + () -> randomMap(0, 10, () -> Tuple.tuple(randomAlphaOfLength(10), randomDeprecationIssues())) + ); + case 5 -> templateIssues = randomValueOtherThan( + templateIssues, + () -> randomMap(0, 10, () -> Tuple.tuple(randomAlphaOfLength(10), randomDeprecationIssues())) + ); + case 6 -> ilmPolicyIssues = randomValueOtherThan( + ilmPolicyIssues, + () -> randomMap(0, 10, () -> Tuple.tuple(randomAlphaOfLength(10), randomDeprecationIssues())) + ); + case 7 -> pluginIssues = randomValueOtherThan( + pluginIssues, + () -> randomMap(0, 10, () -> Tuple.tuple(randomAlphaOfLength(10), randomDeprecationIssues())) + ); + } + return new DeprecationInfoAction.Response( + clusterIssues, + nodeIssues, + Map.of( + "data_streams", + dataStreamIssues, + "index_settings", + indexIssues, + "templates", + templateIssues, + "ilm_policies", + ilmPolicyIssues + ), + pluginIssues + ); } @Override @@ -115,23 +175,41 @@ public void testFrom() throws IOException { boolean nodeIssueFound = randomBoolean(); boolean indexIssueFound = randomBoolean(); boolean dataStreamIssueFound = randomBoolean(); + boolean indexTemplateIssueFound = randomBoolean(); + boolean componentTemplateIssueFound = randomBoolean(); + boolean ilmPolicyIssueFound = randomBoolean(); DeprecationIssue foundIssue = createTestDeprecationIssue(); List> clusterSettingsChecks = List.of((s) -> clusterIssueFound ? foundIssue : null); - List> indexSettingsChecks = List.of( - (idx, cs) -> indexIssueFound ? foundIssue : null - ); - List> dataStreamChecks = List.of( - (ds, cs) -> dataStreamIssueFound ? 
foundIssue : null - ); + List resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> { + if (indexIssueFound) { + return Map.of("test", List.of(foundIssue)); + } + return Map.of(); + }), createResourceChecker("data_streams", (cs, req) -> { + if (dataStreamIssueFound) { + return Map.of("my-ds", List.of(foundIssue)); + } + return Map.of(); + }), createResourceChecker("templates", (cs, req) -> { + Map> issues = new HashMap<>(); + if (componentTemplateIssueFound) { + issues.put("my-component-template", List.of(foundIssue)); + } + if (indexTemplateIssueFound) { + issues.put("my-index-template", List.of(foundIssue)); + } + return issues; + }), createResourceChecker("ilm_policies", (cs, req) -> { + if (ilmPolicyIssueFound) { + return Map.of("my-policy", List.of(foundIssue)); + } + return Map.of(); + })); NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse( new ClusterName(randomAlphaOfLength(5)), - nodeIssueFound - ? Collections.singletonList( - new NodesDeprecationCheckAction.NodeResponse(discoveryNode, Collections.singletonList(foundIssue)) - ) - : emptyList(), - emptyList() + nodeIssueFound ? List.of(new NodesDeprecationCheckAction.NodeResponse(discoveryNode, List.of(foundIssue))) : List.of(), + List.of() ); DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY); @@ -140,15 +218,14 @@ public void testFrom() throws IOException { resolver, request, nodeDeprecationIssues, - indexSettingsChecks, - dataStreamChecks, clusterSettingsChecks, - Collections.emptyMap(), - Collections.emptyList() + new HashMap<>(), // modified in the method to move transform deprecation issues into cluster_settings + List.of(), + resourceCheckers ); if (clusterIssueFound) { - assertThat(response.getClusterSettingsIssues(), equalTo(Collections.singletonList(foundIssue))); + assertThat(response.getClusterSettingsIssues(), equalTo(List.of(foundIssue))); } else { assertThat(response.getClusterSettingsIssues(), empty()); } @@ -163,16 +240,37 @@ public void testFrom() throws IOException { foundIssue.isResolveDuringRollingUpgrade(), foundIssue.getMeta() ); - assertThat(response.getNodeSettingsIssues(), equalTo(Collections.singletonList(mergedFoundIssue))); + assertThat(response.getNodeSettingsIssues(), equalTo(List.of(mergedFoundIssue))); } else { assertTrue(response.getNodeSettingsIssues().isEmpty()); } if (indexIssueFound) { - assertThat(response.getIndexSettingsIssues(), equalTo(Collections.singletonMap("test", Collections.singletonList(foundIssue)))); + assertThat(response.getIndexSettingsIssues(), equalTo(Map.of("test", List.of(foundIssue)))); } else { assertTrue(response.getIndexSettingsIssues().isEmpty()); } + if (dataStreamIssueFound) { + assertThat(response.getDataStreamDeprecationIssues(), equalTo(Map.of("my-ds", List.of(foundIssue)))); + } else { + assertTrue(response.getDataStreamDeprecationIssues().isEmpty()); + } + if (ilmPolicyIssueFound) { + assertThat(response.getIlmPolicyDeprecationIssues(), equalTo(Map.of("my-policy", List.of(foundIssue)))); + } else { + assertTrue(response.getIlmPolicyDeprecationIssues().isEmpty()); + } + if (componentTemplateIssueFound == false && indexTemplateIssueFound == false) { + assertTrue(response.getTemplateDeprecationIssues().isEmpty()); + } else { + if (componentTemplateIssueFound) { + assertThat(response.getTemplateDeprecationIssues().get("my-component-template"), equalTo(List.of(foundIssue))); + } + if (indexTemplateIssueFound) { + 
assertThat(response.getTemplateDeprecationIssues().get("my-index-template"), equalTo(List.of(foundIssue))); + } + + } } public void testFromWithMergeableNodeIssues() throws IOException { @@ -194,35 +292,30 @@ public void testFromWithMergeableNodeIssues() throws IOException { .name("node1") .ephemeralId("ephemeralId1") .address("hostName1", "hostAddress1", new TransportAddress(TransportAddress.META_ADDRESS, 9300)) - .roles(Collections.emptySet()) + .roles(Set.of()) .build(); DiscoveryNode node2 = DiscoveryNodeUtils.builder("nodeId2") .name("node2") .ephemeralId("ephemeralId2") .address("hostName2", "hostAddress2", new TransportAddress(TransportAddress.META_ADDRESS, 9500)) - .roles(Collections.emptySet()) + .roles(Set.of()) .build(); ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance(); - Map metaMap1 = DeprecationIssue.createMetaMapForRemovableSettings( - Collections.unmodifiableList(Arrays.asList("setting.1", "setting.2", "setting.3")) - ); - Map metaMap2 = DeprecationIssue.createMetaMapForRemovableSettings( - Collections.unmodifiableList(Arrays.asList("setting.2", "setting.3")) - ); + Map metaMap1 = DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.1", "setting.2", "setting.3")); + Map metaMap2 = DeprecationIssue.createMetaMapForRemovableSettings(List.of("setting.2", "setting.3")); DeprecationIssue foundIssue1 = createTestDeprecationIssue(metaMap1); DeprecationIssue foundIssue2 = createTestDeprecationIssue(foundIssue1, metaMap2); - List> clusterSettingsChecks = Collections.emptyList(); - List> indexSettingsChecks = List.of((idx, cs) -> null); - List> dataStreamChecks = List.of((ds, cs) -> null); + List> clusterSettingsChecks = List.of(); + List resourceCheckers = List.of(); NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse( new ClusterName(randomAlphaOfLength(5)), Arrays.asList( - new NodesDeprecationCheckAction.NodeResponse(node1, Collections.singletonList(foundIssue1)), - new NodesDeprecationCheckAction.NodeResponse(node2, Collections.singletonList(foundIssue2)) + new NodesDeprecationCheckAction.NodeResponse(node1, List.of(foundIssue1)), + new NodesDeprecationCheckAction.NodeResponse(node2, List.of(foundIssue2)) ), - emptyList() + List.of() ); DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY); @@ -231,11 +324,10 @@ public void testFromWithMergeableNodeIssues() throws IOException { resolver, request, nodeDeprecationIssues, - indexSettingsChecks, - dataStreamChecks, clusterSettingsChecks, - Collections.emptyMap(), - Collections.emptyList() + new HashMap<>(), // modified in the method to move transform deprecation issues into cluster_settings + List.of(), + resourceCheckers ); String details = foundIssue1.getDetails() != null ? 
foundIssue1.getDetails() + " " : ""; @@ -247,11 +339,10 @@ public void testFromWithMergeableNodeIssues() throws IOException { foundIssue1.isResolveDuringRollingUpgrade(), foundIssue2.getMeta() ); - assertThat(response.getNodeSettingsIssues(), equalTo(Collections.singletonList(mergedFoundIssue))); + assertThat(response.getNodeSettingsIssues(), equalTo(List.of(mergedFoundIssue))); } - public void testRemoveSkippedSettings() throws IOException { - + public void testRemoveSkippedSettings() { Settings.Builder settingsBuilder = settings(IndexVersion.current()); settingsBuilder.put("some.deprecated.property", "someValue1"); settingsBuilder.put("some.other.bad.deprecated.property", "someValue2"); @@ -263,39 +354,51 @@ public void testRemoveSkippedSettings() throws IOException { .numberOfShards(1) .numberOfReplicas(0) .build(); + ComponentTemplate componentTemplate = new ComponentTemplate(Template.builder().settings(inputSettings).build(), null, null); + ComposableIndexTemplate indexTemplate = ComposableIndexTemplate.builder() + .template(Template.builder().settings(inputSettings)) + .build(); Metadata metadata = Metadata.builder() .put(IndexMetadata.builder("test").settings(inputSettings).numberOfShards(1).numberOfReplicas(0)) .put(dataStreamIndexMetadata, true) .put(DataStream.builder("ds-test", List.of(dataStreamIndexMetadata.getIndex())).build()) + .indexTemplates(Map.of("my-index-template", indexTemplate)) + .componentTemplates(Map.of("my-component-template", componentTemplate)) .persistentSettings(inputSettings) .build(); ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); IndexNameExpressionResolver resolver = TestIndexNameExpressionResolver.newInstance(); AtomicReference visibleClusterSettings = new AtomicReference<>(); - List> clusterSettingsChecks = Collections.unmodifiableList(Arrays.asList((s) -> { + List> clusterSettingsChecks = List.of((s) -> { visibleClusterSettings.set(s.getMetadata().settings()); return null; - })); + }); AtomicReference visibleIndexSettings = new AtomicReference<>(); - List> indexSettingsChecks = Collections.unmodifiableList( - Arrays.asList((idx, cs) -> { - visibleIndexSettings.set(idx.getSettings()); - return null; - }) - ); + AtomicReference visibleComponentTemplateSettings = new AtomicReference<>(); + AtomicReference visibleIndexTemplateSettings = new AtomicReference<>(); AtomicInteger backingIndicesCount = new AtomicInteger(0); - List> dataStreamChecks = Collections.unmodifiableList( - Arrays.asList((ds, cs) -> { - backingIndicesCount.set(ds.getIndices().size()); - return null; - }) - ); + List resourceCheckers = List.of(createResourceChecker("index_settings", (cs, req) -> { + for (String indexName : resolver.concreteIndexNames(cs, req)) { + visibleIndexSettings.set(cs.metadata().index(indexName).getSettings()); + } + return Map.of(); + }), createResourceChecker("data_streams", (cs, req) -> { + cs.metadata().dataStreams().values().forEach(ds -> backingIndicesCount.set(ds.getIndices().size())); + return Map.of(); + }), createResourceChecker("templates", (cs, req) -> { + cs.metadata() + .componentTemplates() + .values() + .forEach(template -> visibleComponentTemplateSettings.set(template.template().settings())); + cs.metadata().templatesV2().values().forEach(template -> visibleIndexTemplateSettings.set(template.template().settings())); + return Map.of(); + })); NodesDeprecationCheckResponse nodeDeprecationIssues = new NodesDeprecationCheckResponse( new ClusterName(randomAlphaOfLength(5)), - emptyList(), - emptyList() + 
List.of(), + List.of() ); DeprecationInfoAction.Request request = new DeprecationInfoAction.Request(randomTimeValue(), Strings.EMPTY_ARRAY); @@ -304,48 +407,53 @@ public void testRemoveSkippedSettings() throws IOException { resolver, request, nodeDeprecationIssues, - indexSettingsChecks, - dataStreamChecks, clusterSettingsChecks, - Collections.emptyMap(), - List.of("some.deprecated.property", "some.other.*.deprecated.property") + new HashMap<>(), // modified in the method to move transform deprecation issues into cluster_settings + List.of("some.deprecated.property", "some.other.*.deprecated.property"), + resourceCheckers ); settingsBuilder = settings(IndexVersion.current()); settingsBuilder.put("some.undeprecated.property", "someValue3"); settingsBuilder.putList("some.undeprecated.list.property", List.of("someValue4", "someValue5")); + Settings expectedSettings = settingsBuilder.build(); Settings resultClusterSettings = visibleClusterSettings.get(); Assert.assertNotNull(resultClusterSettings); Assert.assertEquals(expectedSettings, visibleClusterSettings.get()); + Settings resultIndexSettings = visibleIndexSettings.get(); Assert.assertNotNull(resultIndexSettings); - Assert.assertTrue(resultIndexSettings.get("some.undeprecated.property").equals("someValue3")); - Assert.assertTrue(resultIndexSettings.getAsList("some.undeprecated.list.property").equals(List.of("someValue4", "someValue5"))); + Assert.assertEquals("someValue3", resultIndexSettings.get("some.undeprecated.property")); + Assert.assertEquals(resultIndexSettings.getAsList("some.undeprecated.list.property"), List.of("someValue4", "someValue5")); Assert.assertFalse(resultIndexSettings.hasValue("some.deprecated.property")); Assert.assertFalse(resultIndexSettings.hasValue("some.other.bad.deprecated.property")); assertThat(backingIndicesCount.get(), equalTo(1)); + + Assert.assertNotNull(visibleComponentTemplateSettings.get()); + Assert.assertEquals(expectedSettings, visibleComponentTemplateSettings.get()); + Assert.assertNotNull(visibleIndexTemplateSettings.get()); + Assert.assertEquals(expectedSettings, visibleIndexTemplateSettings.get()); } public void testCtorFailure() { Map> indexNames = Stream.generate(() -> randomAlphaOfLength(10)) .limit(10) - .collect(Collectors.toMap(Function.identity(), (_k) -> Collections.emptyList())); + .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); Map> dataStreamNames = Stream.generate(() -> randomAlphaOfLength(10)) .limit(10) - .collect(Collectors.toMap(Function.identity(), (_k) -> Collections.emptyList())); + .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); Set shouldCauseFailure = new HashSet<>(RESERVED_NAMES); for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) { Map> pluginSettingsIssues = randomSubsetOf(3, shouldCauseFailure).stream() - .collect(Collectors.toMap(Function.identity(), (_k) -> Collections.emptyList())); + .collect(Collectors.toMap(Function.identity(), (_k) -> List.of())); expectThrows( ElasticsearchStatusException.class, () -> new DeprecationInfoAction.Response( - Collections.emptyList(), - Collections.emptyList(), - indexNames, - dataStreamNames, + List.of(), + List.of(), + Map.of("data_streams", dataStreamNames, "index_settings", indexNames), pluginSettingsIssues ) ); @@ -378,4 +486,28 @@ private static DeprecationIssue createTestDeprecationIssue(DeprecationIssue seed metaMap ); } + + private static List randomDeprecationIssues() { + return Stream.generate(DeprecationInfoActionResponseTests::createTestDeprecationIssue) + .limit(randomIntBetween(0, 
10)) + .collect(Collectors.toList()); + } + + private static ResourceDeprecationChecker createResourceChecker( + String name, + BiFunction>> check + ) { + return new ResourceDeprecationChecker() { + + @Override + public Map> check(ClusterState clusterState, DeprecationInfoAction.Request request) { + return check.apply(clusterState, request); + } + + @Override + public String getName() { + return name; + } + }; + } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationCheckerTests.java new file mode 100644 index 0000000000000..2032a6faedc92 --- /dev/null +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IlmPolicyDeprecationCheckerTests.java @@ -0,0 +1,151 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.deprecation; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; +import org.elasticsearch.xpack.core.ilm.AllocateAction; +import org.elasticsearch.xpack.core.ilm.FreezeAction; +import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata; +import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; +import org.elasticsearch.xpack.core.ilm.LifecyclePolicyMetadata; +import org.elasticsearch.xpack.core.ilm.OperationMode; +import org.elasticsearch.xpack.core.ilm.Phase; +import org.elasticsearch.xpack.core.ilm.TimeseriesLifecycleType; + +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.is; + +public class IlmPolicyDeprecationCheckerTests extends ESTestCase { + + private final IlmPolicyDeprecationChecker checker = new IlmPolicyDeprecationChecker(); + + public void testLegacyTierSettings() { + + LifecyclePolicy deprecatedTiersPolicy = new LifecyclePolicy( + TimeseriesLifecycleType.INSTANCE, + "deprecated-tiers", + Map.of( + "warm", + new Phase( + "warm", + TimeValue.ONE_MINUTE, + Map.of(AllocateAction.NAME, new AllocateAction(null, null, Map.of("data", "hot"), null, null)) + ) + ), + Map.of(), + randomOptionalBoolean() + ); + LifecyclePolicy otherAttributePolicy = new LifecyclePolicy( + TimeseriesLifecycleType.INSTANCE, + "other-attribute", + Map.of( + "warm", + new Phase( + "warm", + TimeValue.ONE_MINUTE, + Map.of(AllocateAction.NAME, new AllocateAction(null, null, Map.of("other", "hot"), null, null)) + ) + ), + Map.of(), + randomOptionalBoolean() + ); + + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata( + Metadata.builder() + .putCustom( + IndexLifecycleMetadata.TYPE, + new IndexLifecycleMetadata( + Map.of( + "deprecated-tiers", + new LifecyclePolicyMetadata( + deprecatedTiersPolicy, + Map.of(), + randomNonNegativeLong(), + randomNonNegativeLong() + ), + "other-attribute", + new LifecyclePolicyMetadata( + otherAttributePolicy, + Map.of(), + randomNonNegativeLong(), + randomNonNegativeLong() + ) + ), + OperationMode.RUNNING + ) + ) + ) + .build(); + + Map> issuesByComponentTemplate = checker.check(clusterState, null); + final 
DeprecationIssue expected = new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "Configuring tiers via filtered allocation is not recommended.", + "https://ela.st/migrate-to-tiers", + "One or more of your ILM policies is configuring tiers via the 'data' node attribute." + + " This is typically used to create a hot/warm or tiered architecture, based on legacy guidelines." + + " Data tiers are a recommended replacement for tiered architecture clusters.", + false, + null + ); + assertThat(issuesByComponentTemplate.get("deprecated-tiers"), hasItem(expected)); + assertThat(issuesByComponentTemplate.containsKey("other-attribute"), is(false)); + } + + public void testFrozenAction() { + + LifecyclePolicy deprecatedTiersPolicy = new LifecyclePolicy( + TimeseriesLifecycleType.INSTANCE, + "deprecated-action", + Map.of("cold", new Phase("cold", TimeValue.ONE_MINUTE, Map.of(FreezeAction.NAME, FreezeAction.INSTANCE))), + Map.of(), + randomOptionalBoolean() + ); + + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata( + Metadata.builder() + .putCustom( + IndexLifecycleMetadata.TYPE, + new IndexLifecycleMetadata( + Map.of( + "deprecated-action", + new LifecyclePolicyMetadata( + deprecatedTiersPolicy, + Map.of(), + randomNonNegativeLong(), + randomNonNegativeLong() + ) + ), + OperationMode.RUNNING + ) + ) + ) + .build(); + + Map> issuesByComponentTemplate = checker.check(clusterState, null); + final DeprecationIssue expected = new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "ILM policy [deprecated-action] contains the action 'freeze' that is deprecated and will be removed in a future version.", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/frozen-indices.html", + "This action is already a noop so it can be safely removed, because frozen indices no longer offer any advantages." 
+ + " Consider cold or frozen tiers in place of frozen indices.", + false, + null + ); + assertThat(issuesByComponentTemplate.get("deprecated-action"), hasItem(expected)); + } +} diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java similarity index 68% rename from x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java rename to x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java index ed119634427ec..dae7970d4a2e7 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java @@ -16,29 +16,30 @@ import org.elasticsearch.cluster.metadata.MetadataIndexStateService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexMode; -import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.engine.frozen.FrozenEngine; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.snapshots.SearchableSnapshotsSettings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; -import java.io.IOException; import java.util.List; import java.util.Map; import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; import static org.elasticsearch.index.IndexModule.INDEX_STORE_TYPE_SETTING; -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.INDEX_SETTINGS_CHECKS; -import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; -import static org.hamcrest.collection.IsIterableContainingInOrder.contains; -public class IndexDeprecationChecksTests extends ESTestCase { +public class IndexDeprecationCheckerTests extends ESTestCase { + + private final IndexDeprecationChecker checker = new IndexDeprecationChecker(TestIndexNameExpressionResolver.newInstance()); + public void testOldIndicesCheck() { IndexVersion createdWith = IndexVersion.fromId(7170099); IndexMetadata indexMetadata = IndexMetadata.builder("test") @@ -57,7 +58,11 @@ public void testOldIndicesCheck() { false, singletonMap("reindex_required", true) ); - List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetadata, clusterState)); + Map> issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + ); + List issues = issuesByIndex.get("test"); assertEquals(singletonList(expected), issues); } @@ -99,8 +104,11 @@ public void testOldIndicesCheckDataStreamIndex() { ) ) .build(); - List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetadata, clusterState)); - assertThat(issues.size(), equalTo(0)); + Map> issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + ); + assertThat(issuesByIndex.size(), equalTo(0)); } public void 
testOldIndicesCheckSnapshotIgnored() { @@ -112,9 +120,11 @@ public void testOldIndicesCheckSnapshotIgnored() { .metadata(Metadata.builder().put(indexMetadata, true)) .build(); - List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetadata, clusterState)); - - assertThat(issues, empty()); + Map> issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + ); + assertThat(issuesByIndex.size(), equalTo(0)); } public void testOldIndicesCheckClosedIgnored() { @@ -129,8 +139,11 @@ public void testOldIndicesCheckClosedIgnored() { ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder().put(indexMetadata, true)) .build(); - List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetadata, clusterState)); - assertThat(issues, empty()); + Map> issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + ); + assertThat(issuesByIndex.size(), equalTo(0)); } public void testOldIndicesIgnoredWarningCheck() { @@ -148,8 +161,12 @@ public void testOldIndicesIgnoredWarningCheck() { false, singletonMap("reindex_required", true) ); - List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetadata, clusterState)); - assertEquals(singletonList(expected), issues); + Map> issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + ); + assertTrue(issuesByIndex.containsKey("test")); + assertEquals(List.of(expected), issuesByIndex.get("test")); } public void testTranslogRetentionSettings() { @@ -157,10 +174,12 @@ public void testTranslogRetentionSettings() { settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), randomPositiveTimeValue()); settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), between(1, 1024) + "b"); IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - List issues = DeprecationChecks.filterChecks( - INDEX_SETTINGS_CHECKS, - c -> c.apply(indexMetadata, ClusterState.EMPTY_STATE) + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + Map> issuesByIndex = checker.check( + state, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) ); + List issues = issuesByIndex.get("test"); assertThat( issues, contains( @@ -190,25 +209,27 @@ public void testDefaultTranslogRetentionSettings() { settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false); } IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - List issues = DeprecationChecks.filterChecks( - INDEX_SETTINGS_CHECKS, - c -> c.apply(indexMetadata, ClusterState.EMPTY_STATE) + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + Map> issuesByIndex = checker.check( + state, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) ); - assertThat(issues, empty()); + assertThat(issuesByIndex.size(), equalTo(0)); } public void testIndexDataPathSetting() { Settings.Builder settings = settings(IndexVersion.current()); settings.put(IndexMetadata.INDEX_DATA_PATH_SETTING.getKey(), createTempDir()); IndexMetadata indexMetadata = 
IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - List issues = DeprecationChecks.filterChecks( - INDEX_SETTINGS_CHECKS, - c -> c.apply(indexMetadata, ClusterState.EMPTY_STATE) + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + Map> issuesByIndex = checker.check( + state, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) ); final String expectedUrl = "https://www.elastic.co/guide/en/elasticsearch/reference/7.13/breaking-changes-7.13.html#deprecate-shared-data-path-setting"; assertThat( - issues, + issuesByIndex.get("test"), contains( new DeprecationIssue( DeprecationIssue.Level.WARNING, @@ -224,14 +245,15 @@ public void testIndexDataPathSetting() { public void testSimpleFSSetting() { Settings.Builder settings = settings(IndexVersion.current()); - settings.put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "simplefs"); + settings.put(INDEX_STORE_TYPE_SETTING.getKey(), "simplefs"); IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - List issues = DeprecationChecks.filterChecks( - INDEX_SETTINGS_CHECKS, - c -> c.apply(indexMetadata, ClusterState.EMPTY_STATE) + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + Map> issuesByIndex = checker.check( + state, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) ); assertThat( - issues, + issuesByIndex.get("test"), contains( new DeprecationIssue( DeprecationIssue.Level.WARNING, @@ -251,12 +273,13 @@ public void testFrozenIndex() { Settings.Builder settings = settings(IndexVersion.current()); settings.put(FrozenEngine.INDEX_FROZEN.getKey(), true); IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - List issues = DeprecationChecks.filterChecks( - INDEX_SETTINGS_CHECKS, - c -> c.apply(indexMetadata, ClusterState.EMPTY_STATE) + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + Map> issuesByIndex = checker.check( + state, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) ); assertThat( - issues, + issuesByIndex.get("test"), contains( new DeprecationIssue( DeprecationIssue.Level.WARNING, @@ -270,7 +293,7 @@ public void testFrozenIndex() { ); } - public void testCamelCaseDeprecation() throws IOException { + public void testCamelCaseDeprecation() { String simpleMapping = "{\n\"_doc\": {" + "\"properties\" : {\n" + " \"date_time_field\" : {\n" @@ -280,13 +303,18 @@ public void testCamelCaseDeprecation() throws IOException { + " }" + "} }"; - IndexMetadata simpleIndex = IndexMetadata.builder(randomAlphaOfLengthBetween(5, 10)) + String indexName = randomAlphaOfLengthBetween(5, 10); + IndexMetadata simpleIndex = IndexMetadata.builder(indexName) .settings(settings(IndexVersion.current())) .numberOfShards(1) .numberOfReplicas(1) .putMapping(simpleMapping) .build(); - + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(simpleIndex, true)).build(); + Map> issuesByIndex = checker.check( + state, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + ); DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.CRITICAL, "Date fields use deprecated camel case formats", @@ -296,10 +324,34 @@ public void 
testCamelCaseDeprecation() throws IOException { false, null ); - List issues = DeprecationChecks.filterChecks( - INDEX_SETTINGS_CHECKS, - c -> c.apply(simpleIndex, ClusterState.EMPTY_STATE) + assertThat(issuesByIndex.get(indexName), hasItem(expected)); + } + + public void testLegacyTierIndex() { + Settings.Builder settings = settings(IndexVersion.current()); + String filter = randomFrom("include", "exclude", "require"); + String tier = randomFrom("hot", "warm", "cold", "frozen"); + settings.put("index.routing.allocation." + filter + ".data", tier); + IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + Map> issuesByIndex = checker.check( + state, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) + ); + assertThat( + issuesByIndex.get("test"), + contains( + new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "index [test] is configuring tiers via filtered allocation which is not recommended.", + "https://ela.st/migrate-to-tiers", + "One or more of your indices is configured with 'index.routing.allocation.*.data' settings." + + " This is typically used to create a hot/warm or tiered architecture, based on legacy guidelines." + + " Data tiers are a recommended replacement for tiered architecture clusters.", + false, + DeprecationIssue.createMetaMapForRemovableSettings(List.of("index.routing.allocation." + filter + ".data")) + ) + ) ); - assertThat(issues, hasItem(expected)); } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java index 7fe2be2736ea8..3aaee0e5cdb52 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java @@ -11,29 +11,23 @@ import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import org.elasticsearch.xpack.core.ilm.LifecycleSettings; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -838,42 +832,4 @@ public void testCheckNodeAttrData() { ); assertThat(issues, hasItem(expected)); } - - public void 
testCheckSourceModeInComponentTemplates() throws IOException { - Template template = Template.builder().mappings(CompressedXContent.fromJSON(""" - { "_doc": { "_source": { "mode": "stored"} } }""")).build(); - ComponentTemplate componentTemplate = new ComponentTemplate(template, 1L, new HashMap<>()); - - Template template2 = Template.builder().mappings(CompressedXContent.fromJSON(""" - { "_doc": { "_source": { "enabled": false} } }""")).build(); - ComponentTemplate componentTemplate2 = new ComponentTemplate(template2, 1L, new HashMap<>()); - - ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) - .metadata( - Metadata.builder() - .componentTemplates( - Map.of("my-template-1", componentTemplate, "my-template-2", componentTemplate, "my-template-3", componentTemplate2) - ) - ) - .build(); - - final List issues = DeprecationChecks.filterChecks( - DeprecationChecks.NODE_SETTINGS_CHECKS, - c -> c.apply( - Settings.EMPTY, - new PluginsAndModules(Collections.emptyList(), Collections.emptyList()), - clusterState, - new XPackLicenseState(() -> 0) - ) - ); - final DeprecationIssue expected = new DeprecationIssue( - DeprecationIssue.Level.CRITICAL, - SourceFieldMapper.DEPRECATION_WARNING, - "https://github.com/elastic/elasticsearch/pull/117172", - SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [my-template-1, my-template-2]", - false, - null - ); - assertThat(issues, hasItem(expected)); - } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationCheckerTests.java new file mode 100644 index 0000000000000..81c0d1c7dc918 --- /dev/null +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/TemplateDeprecationCheckerTests.java @@ -0,0 +1,193 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.deprecation; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.is; + +public class TemplateDeprecationCheckerTests extends ESTestCase { + + private final TemplateDeprecationChecker checker = new TemplateDeprecationChecker(); + + public void testCheckSourceModeInComponentTemplates() throws IOException { + Template template = Template.builder().mappings(CompressedXContent.fromJSON(""" + { "_doc": { "_source": { "mode": "stored"} } }""")).build(); + ComponentTemplate componentTemplate = new ComponentTemplate(template, 1L, new HashMap<>()); + + Template template2 = Template.builder().mappings(CompressedXContent.fromJSON(""" + { "_doc": { "_source": { "enabled": false} } }""")).build(); + ComponentTemplate componentTemplate2 = new ComponentTemplate(template2, 1L, new HashMap<>()); + + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata( + Metadata.builder() + .componentTemplates( + Map.of("my-template-1", componentTemplate, "my-template-2", componentTemplate, "my-template-3", componentTemplate2) + ) + ) + .build(); + + Map> issuesByComponentTemplate = checker.check(clusterState, null); + final DeprecationIssue expected = new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + SourceFieldMapper.DEPRECATION_WARNING, + "https://github.com/elastic/elasticsearch/pull/117172", + null, + false, + null + ); + assertThat(issuesByComponentTemplate.get("my-template-1"), hasItem(expected)); + assertThat(issuesByComponentTemplate.get("my-template-2"), hasItem(expected)); + assertThat(issuesByComponentTemplate.containsKey("my-template-3"), is(false)); + } + + public void testCheckLegacyTiersInComponentTemplates() { + String setting = "index.routing.allocation." 
+ randomFrom("include", "require", "exclude") + ".data"; + Template template = Template.builder().settings(Settings.builder().put(setting, "hot").build()).build(); + ComponentTemplate componentTemplate = new ComponentTemplate(template, 1L, new HashMap<>()); + + Template template2 = Template.builder() + .settings(Settings.builder().put("index.routing.allocation.require.data", randomAlphaOfLength(10)).build()) + .build(); + ComponentTemplate componentTemplate2 = new ComponentTemplate(template2, 1L, new HashMap<>()); + + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata( + Metadata.builder() + .componentTemplates( + Map.of("my-template-1", componentTemplate, "my-template-2", componentTemplate, "my-template-3", componentTemplate2) + ) + ) + .build(); + + Map> issuesByComponentTemplate = checker.check(clusterState, null); + final DeprecationIssue expected = new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "Configuring tiers via filtered allocation is not recommended.", + "https://ela.st/migrate-to-tiers", + "One or more of your component templates is configured with 'index.routing.allocation.*.data' settings." + + " This is typically used to create a hot/warm or tiered architecture, based on legacy guidelines." + + " Data tiers are a recommended replacement for tiered architecture clusters.", + false, + DeprecationIssue.createMetaMapForRemovableSettings(List.of(setting)) + ); + assertThat(issuesByComponentTemplate.get("my-template-1"), hasItem(expected)); + assertThat(issuesByComponentTemplate.get("my-template-2"), hasItem(expected)); + assertThat(issuesByComponentTemplate.containsKey("my-template-3"), is(false)); + } + + public void testCheckLegacyTierSettings() { + String setting = "index.routing.allocation." + randomFrom("include", "require", "exclude") + ".data"; + Template template = Template.builder().settings(Settings.builder().put(setting, "hot").build()).build(); + + Template template2 = Template.builder() + .settings(Settings.builder().put("index.routing.allocation.require.data", randomAlphaOfLength(10)).build()) + .build(); + + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata( + Metadata.builder() + .indexTemplates( + Map.of( + "my-template-1", + ComposableIndexTemplate.builder().template(template).indexPatterns(List.of(randomAlphaOfLength(10))).build(), + "my-template-2", + ComposableIndexTemplate.builder().template(template).indexPatterns(List.of(randomAlphaOfLength(10))).build(), + "my-template-3", + ComposableIndexTemplate.builder().template(template2).indexPatterns(List.of(randomAlphaOfLength(10))).build() + ) + ) + ) + .build(); + + Map> issuesByComponentTemplate = checker.check(clusterState, null); + final DeprecationIssue expected = new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "Configuring tiers via filtered allocation is not recommended.", + "https://ela.st/migrate-to-tiers", + "One or more of your index templates is configured with 'index.routing.allocation.*.data' settings." + + " This is typically used to create a hot/warm or tiered architecture, based on legacy guidelines." 
+ + " Data tiers are a recommended replacement for tiered architecture clusters.", + false, + DeprecationIssue.createMetaMapForRemovableSettings(List.of(setting)) + ); + assertThat(issuesByComponentTemplate.get("my-template-1"), hasItem(expected)); + assertThat(issuesByComponentTemplate.get("my-template-2"), hasItem(expected)); + assertThat(issuesByComponentTemplate.containsKey("my-template-3"), is(false)); + } + + public void testComponentAndComposableTemplateWithSameName() { + String setting = "index.routing.allocation." + randomFrom("include", "require", "exclude") + ".data"; + Template template = Template.builder().settings(Settings.builder().put(setting, "hot").build()).build(); + + Template template2 = Template.builder() + .settings(Settings.builder().put("index.routing.allocation.require.data", randomAlphaOfLength(10)).build()) + .build(); + + ComponentTemplate componentTemplate = new ComponentTemplate(template, 1L, new HashMap<>()); + + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata( + Metadata.builder() + .componentTemplates(Map.of("my-template-1", componentTemplate)) + .indexTemplates( + Map.of( + "my-template-1", + ComposableIndexTemplate.builder().template(template).indexPatterns(List.of(randomAlphaOfLength(10))).build(), + "my-template-2", + ComposableIndexTemplate.builder().template(template).indexPatterns(List.of(randomAlphaOfLength(10))).build(), + "my-template-3", + ComposableIndexTemplate.builder().template(template2).indexPatterns(List.of(randomAlphaOfLength(10))).build() + ) + ) + ) + .build(); + + Map> issuesByComponentTemplate = checker.check(clusterState, null); + final DeprecationIssue expectedIndexTemplateIssue = new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "Configuring tiers via filtered allocation is not recommended.", + "https://ela.st/migrate-to-tiers", + "One or more of your index templates is configured with 'index.routing.allocation.*.data' settings." + + " This is typically used to create a hot/warm or tiered architecture, based on legacy guidelines." + + " Data tiers are a recommended replacement for tiered architecture clusters.", + false, + DeprecationIssue.createMetaMapForRemovableSettings(List.of(setting)) + ); + final DeprecationIssue expectedComponentTemplateIssue = new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "Configuring tiers via filtered allocation is not recommended.", + "https://ela.st/migrate-to-tiers", + "One or more of your component templates is configured with 'index.routing.allocation.*.data' settings." + + " This is typically used to create a hot/warm or tiered architecture, based on legacy guidelines." 
+ + " Data tiers are a recommended replacement for tiered architecture clusters.", + false, + DeprecationIssue.createMetaMapForRemovableSettings(List.of(setting)) + ); + assertThat(issuesByComponentTemplate.get("my-template-1"), hasItem(expectedIndexTemplateIssue)); + assertThat(issuesByComponentTemplate.get("my-template-1"), hasItem(expectedComponentTemplateIssue)); + assertThat(issuesByComponentTemplate.get("my-template-2"), hasItem(expectedIndexTemplateIssue)); + assertThat(issuesByComponentTemplate.containsKey("my-template-3"), is(false)); + } +} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/deprecation/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/deprecation/10_basic.yml index b93241c09302b..78015468ba673 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/deprecation/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/deprecation/10_basic.yml @@ -6,6 +6,13 @@ setup: --- "Test Deprecations": + - requires: + capabilities: + - method: GET + path: /_migration/deprecations + capabilities: [ data_streams, ilm_policies, templates ] + test_runner_features: capabilities + reason: "Support for data streams, ILM policies and templates" - do: migration.deprecations: index: "*" @@ -13,6 +20,9 @@ setup: - length: { node_settings: 0 } - length: { index_settings: 0 } - length: { ml_settings: 0 } + - length: { data_streams: 0 } + - length: { templates: 0 } + - length: { ilm_policies: 0 } --- "Test ml": From 5e953dc7c3881690c61ef39bbef0ae88af35647b Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Tue, 28 Jan 2025 12:00:30 +0100 Subject: [PATCH 112/383] Fix NPE on disabled API key auth cache (#120483) Currently, when `xpack.security.authc.api_key.cache.ttl` is set to `0d` API key creation (and invalidation) fail with NPEs. This PR adds null checks to fix this. 
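As a minimal sketch of the failure mode and of the guard applied here (plain Java; the nullable map stands in for the real cache, and the names are illustrative rather than the actual `ApiKeyService` fields):

    // When the TTL setting is zero the auth cache is never built, so the field stays
    // null and any unguarded cache access throws a NullPointerException.
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    class ApiKeyCacheSketch {
        private final Map<String, String> authCache; // null when the cache TTL is 0

        ApiKeyCacheSketch(long ttlMillis) {
            this.authCache = ttlMillis > 0 ? new ConcurrentHashMap<>() : null;
        }

        void invalidate(String apiKeyId) {
            if (authCache != null) { // the null check this change adds around cache access
                authCache.remove(apiKeyId);
            }
        }
    }

The actual change applies the same guard to the cache invalidation paths and to the post-creation cache population in `ApiKeyService`.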
--- docs/changelog/120483.yaml | 5 +++ .../xpack/security/authc/ApiKeyService.java | 16 ++++--- .../security/authc/ApiKeyServiceTests.java | 44 +++++++++++++++++++ 3 files changed, 60 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/120483.yaml diff --git a/docs/changelog/120483.yaml b/docs/changelog/120483.yaml new file mode 100644 index 0000000000000..20da3b9ab4e8d --- /dev/null +++ b/docs/changelog/120483.yaml @@ -0,0 +1,5 @@ +pr: 120483 +summary: Fix NPE on disabled API auth key cache +area: Authentication +type: bug +issues: [] diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index c1be25b27c51e..c2d1370c2cbf3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -266,7 +266,9 @@ public void invalidate(Collection keys) { if (apiKeyDocCache != null) { apiKeyDocCache.invalidate(keys); } - keys.forEach(apiKeyAuthCache::invalidate); + if (apiKeyAuthCache != null) { + keys.forEach(apiKeyAuthCache::invalidate); + } } @Override @@ -274,7 +276,9 @@ public void invalidateAll() { if (apiKeyDocCache != null) { apiKeyDocCache.invalidateAll(); } - apiKeyAuthCache.invalidateAll(); + if (apiKeyAuthCache != null) { + apiKeyAuthCache.invalidateAll(); + } } }); cacheInvalidatorRegistry.registerCacheInvalidator("api_key_doc", new CacheInvalidatorRegistry.CacheInvalidator() { @@ -589,9 +593,11 @@ private void createApiKeyAndIndexIt( + "])"; assert indexResponse.getResult() == DocWriteResponse.Result.CREATED : "Index response was [" + indexResponse.getResult() + "]"; - final ListenableFuture listenableFuture = new ListenableFuture<>(); - listenableFuture.onResponse(new CachedApiKeyHashResult(true, apiKey)); - apiKeyAuthCache.put(request.getId(), listenableFuture); + if (apiKeyAuthCache != null) { + final ListenableFuture listenableFuture = new ListenableFuture<>(); + listenableFuture.onResponse(new CachedApiKeyHashResult(true, apiKey)); + apiKeyAuthCache.put(request.getId(), listenableFuture); + } listener.onResponse(new CreateApiKeyResponse(request.getName(), request.getId(), apiKey, expiration)); }, listener::onFailure)) ) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 996291c52c71f..185669a6a203b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -2367,6 +2367,50 @@ public void testWillInvalidateAuthCacheWhenDocNotFound() { assertNull(service.getApiKeyAuthCache().get(docId)); } + public void testCanCreateApiKeyWithAuthCacheDisabled() { + final ApiKeyService service = createApiKeyService( + Settings.builder() + .put(XPackSettings.API_KEY_SERVICE_ENABLED_SETTING.getKey(), true) + .put("xpack.security.authc.api_key.cache.ttl", "0s") + .build() + ); + final Authentication authentication = AuthenticationTestHelper.builder() + .user(new User(randomAlphaOfLengthBetween(8, 16), "superuser")) + .realmRef(new RealmRef(randomAlphaOfLengthBetween(3, 8), randomAlphaOfLengthBetween(3, 8), randomAlphaOfLengthBetween(3, 8))) + .build(false); + final 
CreateApiKeyRequest createApiKeyRequest = new CreateApiKeyRequest(randomAlphaOfLengthBetween(3, 8), null, null); + when(client.prepareIndex(anyString())).thenReturn(new IndexRequestBuilder(client)); + when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client)); + when(client.threadPool()).thenReturn(threadPool); + doAnswer(inv -> { + final Object[] args = inv.getArguments(); + @SuppressWarnings("unchecked") + final ActionListener listener = (ActionListener) args[2]; + final IndexResponse indexResponse = new IndexResponse( + new ShardId(INTERNAL_SECURITY_MAIN_INDEX_7, randomAlphaOfLength(22), randomIntBetween(0, 1)), + createApiKeyRequest.getId(), + randomLongBetween(1, 99), + randomLongBetween(1, 99), + randomIntBetween(1, 99), + true + ); + listener.onResponse( + new BulkResponse( + new BulkItemResponse[] { BulkItemResponse.success(randomInt(), DocWriteRequest.OpType.INDEX, indexResponse) }, + randomLongBetween(0, 100) + ) + ); + return null; + }).when(client).execute(eq(TransportBulkAction.TYPE), any(BulkRequest.class), any()); + + assertThat(service.getFromCache(createApiKeyRequest.getId()), is(nullValue())); + final PlainActionFuture listener = new PlainActionFuture<>(); + service.createApiKey(authentication, createApiKeyRequest, Set.of(), listener); + final CreateApiKeyResponse createApiKeyResponse = listener.actionGet(); + assertThat(createApiKeyResponse.getId(), equalTo(createApiKeyRequest.getId())); + assertThat(service.getFromCache(createApiKeyResponse.getId()), is(nullValue())); + } + public void testGetCreatorRealm() { final User user = AuthenticationTests.randomUser(); From 8f70713dd7b02ced51c0bdd3a8b083bd3a6b4ba3 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 28 Jan 2025 11:14:57 +0000 Subject: [PATCH 113/383] Add `?master_timeout` to `POST /_ilm/migrate_to_data_tiers` (#120883) Relates #107984 --- docs/changelog/120883.yaml | 5 ++++ .../api/ilm.migrate_to_data_tiers.json | 4 +++ .../action/MigrateToDataTiersRequest.java | 26 +++++++++++-------- .../MigrateToDataTiersRequestTests.java | 2 +- .../action/RestMigrateToDataTiersAction.java | 15 ++++++++--- 5 files changed, 37 insertions(+), 15 deletions(-) create mode 100644 docs/changelog/120883.yaml diff --git a/docs/changelog/120883.yaml b/docs/changelog/120883.yaml new file mode 100644 index 0000000000000..00a54a8b3ad8c --- /dev/null +++ b/docs/changelog/120883.yaml @@ -0,0 +1,5 @@ +pr: 120883 +summary: Add `?master_timeout` to `POST /_ilm/migrate_to_data_tiers` +area: Indices APIs +type: bug +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.migrate_to_data_tiers.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.migrate_to_data_tiers.json index 8d7e4509b68cd..78bcb182ec74f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.migrate_to_data_tiers.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ilm.migrate_to_data_tiers.json @@ -21,6 +21,10 @@ ] }, "params": { + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + }, "dry_run": { "type": "boolean", "description": "If set to true it will simulate the migration, providing a way to retrieve the ILM policies and indices that need to be migrated. 
The default is false" diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java index 6584dcc279e85..06546a04e8256 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -23,11 +24,14 @@ public class MigrateToDataTiersRequest extends AcknowledgedRequest PARSER = new ConstructingObjectParser<>( + public interface Factory { + MigrateToDataTiersRequest create(@Nullable String legacyTemplateToDelete, @Nullable String nodeAttributeName); + } + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "index_template", false, - a -> new MigrateToDataTiersRequest((String) a[0], (String) a[1]) + (a, factory) -> factory.create((String) a[0], (String) a[1]) ); static { @@ -48,20 +52,20 @@ public class MigrateToDataTiersRequest extends AcknowledgedRequest instanceReader() { @Override protected MigrateToDataTiersRequest createTestInstance() { - return new MigrateToDataTiersRequest(randomAlphaOfLength(10), randomAlphaOfLength(10)); + return new MigrateToDataTiersRequest(TEST_REQUEST_TIMEOUT, randomAlphaOfLength(10), randomAlphaOfLength(10)); } @Override diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMigrateToDataTiersAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMigrateToDataTiersAction.java index 095cb212be558..d1e370b531025 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMigrateToDataTiersAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMigrateToDataTiersAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.cluster.action.MigrateToDataTiersAction; import org.elasticsearch.xpack.cluster.action.MigrateToDataTiersRequest; @@ -33,13 +34,21 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - MigrateToDataTiersRequest migrateRequest; + final var masterNodeTimeout = RestUtils.getMasterNodeTimeout(request); + final MigrateToDataTiersRequest migrateRequest; if (request.hasContent()) { try (var parser = request.contentParser()) { - migrateRequest = MigrateToDataTiersRequest.parse(parser); + migrateRequest = MigrateToDataTiersRequest.parse( + (legacyTemplateToDelete, nodeAttributeName) -> new MigrateToDataTiersRequest( + masterNodeTimeout, + legacyTemplateToDelete, + nodeAttributeName + ), + parser + ); } } else { - migrateRequest = new MigrateToDataTiersRequest(); + migrateRequest = new MigrateToDataTiersRequest(masterNodeTimeout, null, null); } 
migrateRequest.setDryRun(request.paramAsBoolean("dry_run", false)); return channel -> client.execute(MigrateToDataTiersAction.INSTANCE, migrateRequest, new RestToXContentListener<>(channel)); From 4fc006e130b8346dd910751072f9380711131c25 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 28 Jan 2025 11:16:09 +0000 Subject: [PATCH 114/383] Async repo contents iterator (#120819) Reifies the iterator over each repository's contents so that we can (in follow-ups, and under certain combinations of request options) pre-process this iterator to avoid loading `SnapshotInfo` blobs that we can determine to be unnecessary up front. This is just a refactoring, it doesn't change behaviour, and this area is well-covered by tests such as `GetSnapshotsIT#testAllFeatures` so no need for any test changes here. --- .../get/TransportGetSnapshotsAction.java | 109 +++++++++++------- 1 file changed, 66 insertions(+), 43 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index d9fef7e0af8af..896b336d54d7b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -276,55 +276,67 @@ void runOperation(ActionListener listener) { */ private void populateResults(ActionListener listener) { try (var listeners = new RefCountingListener(listener)) { - for (final RepositoryMetadata repository : repositories) { - final String repositoryName = repository.name(); - if (skipRepository(repositoryName)) { - continue; - } - if (listeners.isFailing()) { - return; - } + final BooleanSupplier failFastSupplier = () -> cancellableTask.isCancelled() || listeners.isFailing(); + + final Iterator asyncSnapshotInfoIterators = Iterators.failFast( + Iterators.map( + Iterators.filter( + Iterators.map(repositories.iterator(), RepositoryMetadata::name), + repositoryName -> skipRepository(repositoryName) == false + ), + repositoryName -> asyncRepositoryContentsListener -> SubscribableListener + + .newForked(l -> maybeGetRepositoryData(repositoryName, l)) + .andThenApply(repositoryData -> { + assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); + cancellableTask.ensureNotCancelled(); + ensureRequiredNamesPresent(repositoryName, repositoryData); + return getAsyncSnapshotInfoIterator(repositoriesService.repository(repositoryName), repositoryData); + }) + .addListener(asyncRepositoryContentsListener) + ), + failFastSupplier + ); - maybeGetRepositoryData(repositoryName, listeners.acquire(repositoryData -> { - assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); - cancellableTask.ensureNotCancelled(); - ensureRequiredNamesPresent(repositoryName, repositoryData); - ThrottledIterator.run( - Iterators.failFast( - getAsyncSnapshotInfoIterator(repositoriesService.repository(repositoryName), repositoryData), - () -> cancellableTask.isCancelled() || listeners.isFailing() - ), - (ref, asyncSnapshotInfo) -> ActionListener.run( - ActionListener.runBefore(listeners.acquire(), ref::close), - refListener -> asyncSnapshotInfo.getSnapshotInfo(new ActionListener<>() { - @Override - public void onResponse(SnapshotInfo snapshotInfo) { - if (matchesPredicates(snapshotInfo)) { - totalCount.incrementAndGet(); - if (afterPredicate.test(snapshotInfo)) { - 
allSnapshotInfos.add(snapshotInfo.maybeWithoutIndices(indices)); + // TODO if the request parameters allow it, modify asyncSnapshotInfoIterators to skip unnecessary GET calls here + + asyncSnapshotInfoIterators.forEachRemaining( + asyncSnapshotInfoIteratorSupplier -> asyncSnapshotInfoIteratorSupplier.getAsyncSnapshotInfoIterator( + listeners.acquire( + asyncSnapshotInfoIterator -> ThrottledIterator.run( + Iterators.failFast(asyncSnapshotInfoIterator, failFastSupplier), + (ref, asyncSnapshotInfo) -> ActionListener.run( + ActionListener.runBefore(listeners.acquire(), ref::close), + refListener -> asyncSnapshotInfo.getSnapshotInfo(new ActionListener<>() { + @Override + public void onResponse(SnapshotInfo snapshotInfo) { + if (matchesPredicates(snapshotInfo)) { + totalCount.incrementAndGet(); + if (afterPredicate.test(snapshotInfo)) { + allSnapshotInfos.add(snapshotInfo.maybeWithoutIndices(indices)); + } } + refListener.onResponse(null); } - refListener.onResponse(null); - } - @Override - public void onFailure(Exception e) { - if (ignoreUnavailable) { - logger.warn(Strings.format("failed to fetch snapshot info for [%s]", asyncSnapshotInfo), e); - refListener.onResponse(null); - } else { - refListener.onFailure(e); + @Override + public void onFailure(Exception e) { + if (ignoreUnavailable) { + logger.warn(Strings.format("failed to fetch snapshot info for [%s]", asyncSnapshotInfo), e); + refListener.onResponse(null); + } else { + refListener.onFailure(e); + } } - } - }) - ), - getSnapshotInfoExecutor.getMaxRunningTasks(), - () -> {} - ); - })); - } + }) + ), + getSnapshotInfoExecutor.getMaxRunningTasks(), + () -> {} + ) + ) + ) + ); } } @@ -383,6 +395,17 @@ private interface AsyncSnapshotInfo { void getSnapshotInfo(ActionListener listener); } + /** + * An asynchronous supplier of the collection of snapshots contained in a repository, as an iterator over snapshots each represented + * as an {@link AsyncSnapshotInfo}. + */ + private interface AsyncSnapshotInfoIterator { + /** + * @param listener completed, possibly asynchronously, with the appropriate iterator over {@link AsyncSnapshotInfo} instances. + */ + void getAsyncSnapshotInfoIterator(ActionListener> listener); + } + /** * @return an {@link AsyncSnapshotInfo} for the given in-progress snapshot entry. 
*/ From d60e6fac85dafb944ef1b54bc2950f4b22561106 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 22:26:16 +1100 Subject: [PATCH 115/383] Mute org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT test {p0=data_stream/140_data_stream_aliases/Create data stream alias with filter} #121014 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 7556f724c861f..1b3327124e9a4 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -280,6 +280,9 @@ tests: - class: org.elasticsearch.action.search.SearchProgressActionListenerIT method: testSearchProgressWithQuery issue: https://github.com/elastic/elasticsearch/issues/120994 +- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT + method: test {p0=data_stream/140_data_stream_aliases/Create data stream alias with filter} + issue: https://github.com/elastic/elasticsearch/issues/121014 # Examples: # From cbb62c2f66ebbd4a69fd0bfd2528abe2e6c40433 Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Tue, 28 Jan 2025 11:38:25 +0000 Subject: [PATCH 116/383] Fix SearchProgressActionListenerIT (#120888) This fixes SearchProgressActionListenerIT by notifying the search progress listener before search operation terminates. --- muted-tests.yml | 6 ------ .../elasticsearch/action/search/FetchSearchPhase.java | 9 +++++++-- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 1b3327124e9a4..c55a65bb8ae47 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -213,9 +213,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/120482 - class: org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeToCharProcessorTests issue: https://github.com/elastic/elasticsearch/issues/120575 -- class: org.elasticsearch.action.search.SearchProgressActionListenerIT - method: testSearchProgressWithHitsAndAggs - issue: https://github.com/elastic/elasticsearch/issues/120583 - class: org.elasticsearch.index.reindex.BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests method: testReindex issue: https://github.com/elastic/elasticsearch/issues/120605 @@ -232,9 +229,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/120767 - class: org.elasticsearch.search.fieldcaps.FieldCapabilitiesIT issue: https://github.com/elastic/elasticsearch/issues/120772 -- class: org.elasticsearch.action.search.SearchProgressActionListenerIT - method: testSearchProgressWithHits - issue: https://github.com/elastic/elasticsearch/issues/120671 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/3rd_party_deployment/Test start deployment fails while model download in progress} issue: https://github.com/elastic/elasticsearch/issues/120810 diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index 22ad670c6204d..080295210fced 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -158,15 +158,21 @@ private void innerRunFetch(ScoreDoc[] scoreDocs, int numShards, SearchPhaseContr ); for (int i = 0; i < docIdsToLoad.length; i++) { List entry = docIdsToLoad[i]; + SearchPhaseResult shardPhaseResult = searchPhaseShardResults.get(i); if (entry == null) { // no results for this shard ID // if we got some hits from this shard we 
have to release the context // we do this below after sending out the fetch requests relevant to the search to give priority to those requests // that contribute to the final search response // in any case we count down this result since we don't talk to this shard anymore + if (shardPhaseResult != null) { + // notifying the listener here as otherwise the search operation might finish before we + // get a chance to notify the progress listener for some fetch results + progressListener.notifyFetchResult(i); + } counter.countDown(); } else { executeFetch( - searchPhaseShardResults.get(i), + shardPhaseResult, counter, entry, rankDocsPerShard == null || rankDocsPerShard.get(i).isEmpty() ? null : new RankDocShardInfo(rankDocsPerShard.get(i)), @@ -179,7 +185,6 @@ private void innerRunFetch(ScoreDoc[] scoreDocs, int numShards, SearchPhaseContr SearchPhaseResult shardPhaseResult = searchPhaseShardResults.get(i); if (shardPhaseResult != null) { releaseIrrelevantSearchContext(shardPhaseResult, context); - progressListener.notifyFetchResult(i); } } } From 723387a9c43c213f9dfd4611020c3b8e90b84cfb Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Tue, 28 Jan 2025 12:54:28 +0100 Subject: [PATCH 117/383] Mute org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT test {p0=data_stream/80_resolve_index_data_streams/Resolve index with hidden and closed indices} #120965 (#121013) --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index c55a65bb8ae47..c42bf0969211a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -274,6 +274,9 @@ tests: - class: org.elasticsearch.action.search.SearchProgressActionListenerIT method: testSearchProgressWithQuery issue: https://github.com/elastic/elasticsearch/issues/120994 +- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT + method: test {p0=data_stream/80_resolve_index_data_streams/Resolve index with hidden and closed indices} + issue: https://github.com/elastic/elasticsearch/issues/120965 - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT method: test {p0=data_stream/140_data_stream_aliases/Create data stream alias with filter} issue: https://github.com/elastic/elasticsearch/issues/121014 From 436e604b904ae506331d25ba1c2da8171b11a896 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Tue, 28 Jan 2025 13:08:05 +0100 Subject: [PATCH 118/383] [Entitlements] Add checks for native libraries restricted methods (#120775) --- .../bridge/EntitlementChecker.java | 66 ++++++++++ .../qa/test/RestEntitlementsCheckAction.java | 16 ++- .../qa/test/VersionSpecificNativeChecks.java | 33 +++++ .../qa/test/VersionSpecificNativeChecks.java | 118 ++++++++++++++++++ .../EntitlementInitialization.java | 4 +- .../api/ElasticsearchEntitlementChecker.java | 99 +++++++++++++++ 6 files changed, 334 insertions(+), 2 deletions(-) create mode 100644 libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java create mode 100644 libs/entitlement/qa/entitlement-test-plugin/src/main22/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index d2c9541742d0a..d509763b3541d 100644 --- 
a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -12,6 +12,13 @@ import java.io.InputStream; import java.io.PrintStream; import java.io.PrintWriter; +import java.lang.foreign.AddressLayout; +import java.lang.foreign.Arena; +import java.lang.foreign.FunctionDescriptor; +import java.lang.foreign.Linker; +import java.lang.foreign.MemoryLayout; +import java.lang.foreign.MemorySegment; +import java.lang.invoke.MethodHandle; import java.net.ContentHandlerFactory; import java.net.DatagramPacket; import java.net.DatagramSocket; @@ -40,11 +47,13 @@ import java.nio.channels.DatagramChannel; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; +import java.nio.file.Path; import java.security.cert.CertStoreParameters; import java.util.List; import java.util.Locale; import java.util.Properties; import java.util.TimeZone; +import java.util.function.Consumer; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HttpsURLConnection; @@ -411,6 +420,7 @@ public interface EntitlementChecker { // // Load native libraries // + // Using the list of restricted methods from https://download.java.net/java/early_access/jdk24/docs/api/restricted-list.html void check$java_lang_Runtime$load(Class callerClass, Runtime that, String filename); void check$java_lang_Runtime$loadLibrary(Class callerClass, Runtime that, String libname); @@ -418,4 +428,60 @@ public interface EntitlementChecker { void check$java_lang_System$$load(Class callerClass, String filename); void check$java_lang_System$$loadLibrary(Class callerClass, String libname); + + // Sealed implementation of java.lang.foreign.AddressLayout + void check$jdk_internal_foreign_layout_ValueLayouts$OfAddressImpl$withTargetLayout( + Class callerClass, + AddressLayout that, + MemoryLayout memoryLayout + ); + + // Sealed implementation of java.lang.foreign.Linker + void check$jdk_internal_foreign_abi_AbstractLinker$downcallHandle( + Class callerClass, + Linker that, + FunctionDescriptor function, + Linker.Option... options + ); + + void check$jdk_internal_foreign_abi_AbstractLinker$downcallHandle( + Class callerClass, + Linker that, + MemorySegment address, + FunctionDescriptor function, + Linker.Option... options + ); + + void check$jdk_internal_foreign_abi_AbstractLinker$upcallStub( + Class callerClass, + Linker that, + MethodHandle target, + FunctionDescriptor function, + Arena arena, + Linker.Option... 
options + ); + + // Sealed implementation for java.lang.foreign.MemorySegment.reinterpret(long) + void check$jdk_internal_foreign_AbstractMemorySegmentImpl$reinterpret(Class callerClass, MemorySegment that, long newSize); + + void check$jdk_internal_foreign_AbstractMemorySegmentImpl$reinterpret( + Class callerClass, + MemorySegment that, + long newSize, + Arena arena, + Consumer cleanup + ); + + void check$jdk_internal_foreign_AbstractMemorySegmentImpl$reinterpret( + Class callerClass, + MemorySegment that, + Arena arena, + Consumer cleanup + ); + + void check$java_lang_foreign_SymbolLookup$$libraryLookup(Class callerClass, String name, Arena arena); + + void check$java_lang_foreign_SymbolLookup$$libraryLookup(Class callerClass, Path path, Arena arena); + + void check$java_lang_ModuleLayer$Controller$enableNativeAccess(Class callerClass, ModuleLayer.Controller that, Module target); } diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java index 1e754f657e260..c2b6478e561a8 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java @@ -199,7 +199,21 @@ static CheckAction alwaysDenied(CheckedRunnable action) { entry("runtime_load", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoad)), entry("runtime_load_library", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoadLibrary)), entry("system_load", forPlugins(LoadNativeLibrariesCheckActions::systemLoad)), - entry("system_load_library", forPlugins(LoadNativeLibrariesCheckActions::systemLoadLibrary)) + entry("system_load_library", forPlugins(LoadNativeLibrariesCheckActions::systemLoadLibrary)), + + entry("enable_native_access", new CheckAction(VersionSpecificNativeChecks::enableNativeAccess, false, 22)), + entry("address_target_layout", new CheckAction(VersionSpecificNativeChecks::addressLayoutWithTargetLayout, false, 22)), + entry("donwncall_handle", new CheckAction(VersionSpecificNativeChecks::linkerDowncallHandle, false, 22)), + entry("donwncall_handle_with_address", new CheckAction(VersionSpecificNativeChecks::linkerDowncallHandleWithAddress, false, 22)), + entry("upcall_stub", new CheckAction(VersionSpecificNativeChecks::linkerUpcallStub, false, 22)), + entry("reinterpret", new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpret, false, 22)), + entry("reinterpret_cleanup", new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpretWithCleanup, false, 22)), + entry( + "reinterpret_size_cleanup", + new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpretWithSizeAndCleanup, false, 22) + ), + entry("symbol_lookup_name", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithName, false, 22)), + entry("symbol_lookup_path", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithPath, false, 22)) ) .filter(entry -> entry.getValue().fromJavaVersion() == null || Runtime.version().feature() >= entry.getValue().fromJavaVersion()) .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java 
b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java new file mode 100644 index 0000000000000..cb84c9bd9042d --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.test; + +class VersionSpecificNativeChecks { + + static void enableNativeAccess() throws Exception {} + + static void addressLayoutWithTargetLayout() {} + + static void linkerDowncallHandle() {} + + static void linkerDowncallHandleWithAddress() {} + + static void linkerUpcallStub() throws NoSuchMethodException {} + + static void memorySegmentReinterpret() {} + + static void memorySegmentReinterpretWithCleanup() {} + + static void memorySegmentReinterpretWithSizeAndCleanup() {} + + static void symbolLookupWithPath() {} + + static void symbolLookupWithName() {} +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main22/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java b/libs/entitlement/qa/entitlement-test-plugin/src/main22/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java new file mode 100644 index 0000000000000..0a69f7255a200 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main22/java/org/elasticsearch/entitlement/qa/test/VersionSpecificNativeChecks.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.qa.test; + +import org.elasticsearch.entitlement.qa.entitled.EntitledPlugin; + +import java.lang.foreign.AddressLayout; +import java.lang.foreign.Arena; +import java.lang.foreign.FunctionDescriptor; +import java.lang.foreign.Linker; +import java.lang.foreign.MemoryLayout; +import java.lang.foreign.MemorySegment; +import java.lang.foreign.SymbolLookup; +import java.lang.foreign.ValueLayout; +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodHandles; +import java.lang.invoke.MethodType; +import java.lang.module.Configuration; +import java.lang.module.ModuleFinder; +import java.nio.file.Path; +import java.util.List; +import java.util.Set; + +import static java.lang.foreign.ValueLayout.ADDRESS; +import static java.lang.foreign.ValueLayout.JAVA_LONG; + +class VersionSpecificNativeChecks { + + static void enableNativeAccess() throws Exception { + ModuleLayer parent = ModuleLayer.boot(); + + var location = EntitledPlugin.class.getProtectionDomain().getCodeSource().getLocation(); + + // We create a layer for our own module, so we have a controller to try and call enableNativeAccess on it. + // This works in both the modular and non-modular case: the target module has to be present in the new layer, but its entitlements + // and policies do not matter to us: we are checking that the caller is (or isn't) entitled to use enableNativeAccess + Configuration cf = parent.configuration() + .resolve(ModuleFinder.of(Path.of(location.toURI())), ModuleFinder.of(), Set.of("org.elasticsearch.entitlement.qa.entitled")); + var controller = ModuleLayer.defineModulesWithOneLoader(cf, List.of(parent), ClassLoader.getSystemClassLoader()); + var targetModule = controller.layer().findModule("org.elasticsearch.entitlement.qa.entitled"); + + controller.enableNativeAccess(targetModule.get()); + } + + static void addressLayoutWithTargetLayout() { + AddressLayout addressLayout = ADDRESS.withoutTargetLayout(); + addressLayout.withTargetLayout(MemoryLayout.sequenceLayout(Long.MAX_VALUE, ValueLayout.JAVA_BYTE)); + } + + static void linkerDowncallHandle() { + Linker linker = Linker.nativeLinker(); + linker.downcallHandle(FunctionDescriptor.of(JAVA_LONG, ADDRESS)); + } + + static void linkerDowncallHandleWithAddress() { + Linker linker = Linker.nativeLinker(); + linker.downcallHandle(linker.defaultLookup().find("strlen").get(), FunctionDescriptor.of(JAVA_LONG, ADDRESS)); + } + + static int callback() { + return 0; + } + + static void linkerUpcallStub() throws NoSuchMethodException { + Linker linker = Linker.nativeLinker(); + + MethodHandle mh = null; + try { + mh = MethodHandles.lookup().findStatic(VersionSpecificNativeChecks.class, "callback", MethodType.methodType(int.class)); + } catch (IllegalAccessException e) { + assert false; + } + + FunctionDescriptor callbackDescriptor = FunctionDescriptor.of(ValueLayout.JAVA_INT); + linker.upcallStub(mh, callbackDescriptor, Arena.ofAuto()); + } + + static void memorySegmentReinterpret() { + Arena arena = Arena.ofAuto(); + MemorySegment segment = arena.allocate(100); + segment.reinterpret(50); + } + + static void memorySegmentReinterpretWithCleanup() { + Arena arena = Arena.ofAuto(); + MemorySegment segment = arena.allocate(100); + segment.reinterpret(Arena.ofAuto(), s -> {}); + } + + static void memorySegmentReinterpretWithSizeAndCleanup() { + Arena arena = Arena.ofAuto(); + MemorySegment segment = arena.allocate(100); + segment.reinterpret(50, Arena.ofAuto(), s -> {}); + } + + static void symbolLookupWithPath() { + try { 
+ SymbolLookup.libraryLookup(Path.of("/foo/bar/libFoo.so"), Arena.ofAuto()); + } catch (IllegalArgumentException e) { + // IllegalArgumentException is thrown if path does not point to a valid library (and it does not) + } + } + + static void symbolLookupWithName() { + try { + SymbolLookup.libraryLookup("foo", Arena.ofAuto()); + } catch (IllegalArgumentException e) { + // IllegalArgumentException is thrown if path does not point to a valid library (and it does not) + } + } +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 4ee9fd1bbca25..a8938c16955a7 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -98,7 +98,9 @@ private static PolicyManager createPolicyManager() { ) ), new Scope("org.apache.httpcomponents.httpclient", List.of(new OutboundNetworkEntitlement())), - new Scope("io.netty.transport", List.of(new InboundNetworkEntitlement(), new OutboundNetworkEntitlement())) + new Scope("io.netty.transport", List.of(new InboundNetworkEntitlement(), new OutboundNetworkEntitlement())), + new Scope("org.apache.lucene.core", List.of(new LoadNativeLibrariesEntitlement())), + new Scope("org.elasticsearch.nativeaccess", List.of(new LoadNativeLibrariesEntitlement())) ) ); // agents run without a module, so this is a special hack for the apm agent diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 55adbf45699ab..8600dd357c384 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -15,6 +15,13 @@ import java.io.InputStream; import java.io.PrintStream; import java.io.PrintWriter; +import java.lang.foreign.AddressLayout; +import java.lang.foreign.Arena; +import java.lang.foreign.FunctionDescriptor; +import java.lang.foreign.Linker; +import java.lang.foreign.MemoryLayout; +import java.lang.foreign.MemorySegment; +import java.lang.invoke.MethodHandle; import java.net.ContentHandlerFactory; import java.net.DatagramPacket; import java.net.DatagramSocket; @@ -44,11 +51,13 @@ import java.nio.channels.DatagramChannel; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; +import java.nio.file.Path; import java.security.cert.CertStoreParameters; import java.util.List; import java.util.Locale; import java.util.Properties; import java.util.TimeZone; +import java.util.function.Consumer; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HttpsURLConnection; @@ -752,6 +761,7 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { @Override public void check$java_lang_Runtime$load(Class callerClass, Runtime that, String filename) { + // TODO: check filesystem entitlement READ policyManager.checkLoadingNativeLibraries(callerClass); } @@ -762,6 +772,7 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { @Override public void check$java_lang_System$$load(Class callerClass, String filename) { + // TODO: check 
filesystem entitlement READ policyManager.checkLoadingNativeLibraries(callerClass); } @@ -769,4 +780,92 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { public void check$java_lang_System$$loadLibrary(Class callerClass, String libname) { policyManager.checkLoadingNativeLibraries(callerClass); } + + @Override + public void check$jdk_internal_foreign_layout_ValueLayouts$OfAddressImpl$withTargetLayout( + Class callerClass, + AddressLayout that, + MemoryLayout memoryLayout + ) { + policyManager.checkLoadingNativeLibraries(callerClass); + } + + @Override + public void check$jdk_internal_foreign_abi_AbstractLinker$downcallHandle( + Class callerClass, + Linker that, + FunctionDescriptor function, + Linker.Option... options + ) { + policyManager.checkLoadingNativeLibraries(callerClass); + } + + @Override + public void check$jdk_internal_foreign_abi_AbstractLinker$downcallHandle( + Class callerClass, + Linker that, + MemorySegment address, + FunctionDescriptor function, + Linker.Option... options + ) { + policyManager.checkLoadingNativeLibraries(callerClass); + } + + @Override + public void check$jdk_internal_foreign_abi_AbstractLinker$upcallStub( + Class callerClass, + Linker that, + MethodHandle target, + FunctionDescriptor function, + Arena arena, + Linker.Option... options + ) { + policyManager.checkLoadingNativeLibraries(callerClass); + } + + @Override + public void check$jdk_internal_foreign_AbstractMemorySegmentImpl$reinterpret(Class callerClass, MemorySegment that, long newSize) { + policyManager.checkLoadingNativeLibraries(callerClass); + } + + @Override + public void check$jdk_internal_foreign_AbstractMemorySegmentImpl$reinterpret( + Class callerClass, + MemorySegment that, + long newSize, + Arena arena, + Consumer cleanup + ) { + policyManager.checkLoadingNativeLibraries(callerClass); + } + + @Override + public void check$jdk_internal_foreign_AbstractMemorySegmentImpl$reinterpret( + Class callerClass, + MemorySegment that, + Arena arena, + Consumer cleanup + ) { + policyManager.checkLoadingNativeLibraries(callerClass); + } + + @Override + public void check$java_lang_foreign_SymbolLookup$$libraryLookup(Class callerClass, String name, Arena arena) { + policyManager.checkLoadingNativeLibraries(callerClass); + } + + @Override + public void check$java_lang_foreign_SymbolLookup$$libraryLookup(Class callerClass, Path path, Arena arena) { + // TODO: check filesystem entitlement READ + policyManager.checkLoadingNativeLibraries(callerClass); + } + + @Override + public void check$java_lang_ModuleLayer$Controller$enableNativeAccess( + Class callerClass, + ModuleLayer.Controller that, + Module target + ) { + policyManager.checkLoadingNativeLibraries(callerClass); + } } From 655fec2a93a60f387e05132ee312175ed099c63e Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Tue, 28 Jan 2025 12:14:21 +0000 Subject: [PATCH 119/383] Simplify some chunked xcontent implementations (#120714) Remove the use of `ChunkedXContentHelper.field`, as that can easily capture variables that shouldn't be captured --- .../rest/StreamingXContentResponseIT.java | 2 +- .../xcontent/ChunkedToXContentHelper.java | 47 +++++------- .../org/elasticsearch/health/Diagnosis.java | 33 ++++---- .../health/HealthIndicatorResult.java | 35 +++++---- .../script/ScriptCacheStats.java | 38 ++++++---- .../StreamingChatCompletionResults.java | 6 +- ...StreamingUnifiedChatCompletionResults.java | 76 +++++++++---------- .../xpack/core/watcher/WatcherMetadata.java | 5 +- .../compute/operator/DriverProfile.java | 2 +- 
.../esql/action/CrossClusterAsyncQueryIT.java | 4 +- .../xpack/esql/action/EsqlExecutionInfo.java | 30 +++++--- .../xpack/esql/action/EsqlQueryResponse.java | 15 ++-- .../xpack/esql/session/EsqlSession.java | 5 +- .../esql/action/EsqlQueryResponseTests.java | 2 +- ...stStreamingCompletionServiceExtension.java | 39 ++++------ ...rverSentEventsRestActionListenerTests.java | 9 +-- .../action/GetFlamegraphResponse.java | 10 +-- 17 files changed, 178 insertions(+), 180 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/rest/StreamingXContentResponseIT.java b/server/src/internalClusterTest/java/org/elasticsearch/rest/StreamingXContentResponseIT.java index 13f0abcd2f8c4..fce1a24970836 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/rest/StreamingXContentResponseIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/rest/StreamingXContentResponseIT.java @@ -186,7 +186,7 @@ private static void handleStreamingXContentRestRequest( ActionRunnable.run(ActionListener.releaseAfter(refs.acquireListener(), ref), () -> { Thread.yield(); streamingXContentResponse.writeFragment( - p -> ChunkedToXContentHelper.field(fragment.getKey(), fragment.getValue()), + p -> ChunkedToXContentHelper.chunk((b, xp) -> b.field(fragment.getKey(), fragment.getValue())), refs.acquire() ); }) diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java index 7c7a1ea4a1891..7afc33ff265bb 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.xcontent.ToXContent; -import java.util.Collections; import java.util.Iterator; import java.util.Map; import java.util.function.Function; @@ -51,6 +50,13 @@ public static Iterator object(String name, Map map) { return object(name, map, e -> (b, p) -> b.field(e.getKey(), e.getValue())); } + /** + * Defines an object named {@code name}, with the contents set by calling {@code toXContent} on each entry in {@code map} + */ + public static Iterator object(String name, Map map, Function, ToXContent> toXContent) { + return object(name, Iterators.map(map.entrySet().iterator(), toXContent)); + } + /** * Defines an object named {@code name}, with the contents of each field created from each entry in {@code map} */ @@ -65,26 +71,6 @@ public static Iterator xContentObjectFieldObjects(String name, Map (b, p) -> e.getValue().toXContent(b.startObject(e.getKey()), p).endObject()); } - public static Iterator field(String name, boolean value) { - return Iterators.single((b, p) -> b.field(name, value)); - } - - public static Iterator field(String name, long value) { - return Iterators.single((b, p) -> b.field(name, value)); - } - - public static Iterator field(String name, String value) { - return Iterators.single((b, p) -> b.field(name, value)); - } - - public static Iterator optionalField(String name, String value) { - if (value == null) { - return Collections.emptyIterator(); - } else { - return field(name, value); - } - } - /** * Creates an Iterator to serialize a named field where the value is represented by a {@link ChunkedToXContentObject}. 
* Chunked equivalent for {@code XContentBuilder field(String name, ToXContent value)} @@ -97,10 +83,22 @@ public static Iterator field(String name, ChunkedToXContentObject va return Iterators.concat(Iterators.single((builder, innerParam) -> builder.field(name)), value.toXContentChunked(params)); } + public static Iterator array(Iterator contents) { + return Iterators.concat(startArray(), contents, endArray()); + } + public static Iterator array(String name, Iterator contents) { return Iterators.concat(startArray(name), contents, endArray()); } + public static Iterator array(Iterator items, Function toXContent) { + return Iterators.concat(startArray(), Iterators.map(items, toXContent), endArray()); + } + + public static Iterator array(String name, Iterator items, Function toXContent) { + return Iterators.concat(startArray(name), Iterators.map(items, toXContent), endArray()); + } + /** * Creates an Iterator to serialize a named field where the value is represented by an iterator of {@link ChunkedToXContentObject}. * Chunked equivalent for {@code XContentBuilder array(String name, ToXContent value)} @@ -120,13 +118,6 @@ public static Iterator object(String name, Iterator Iterator object(String name, Map map, Function, ToXContent> toXContent) { - return object(name, Iterators.map(map.entrySet().iterator(), toXContent)); - } - /** * Creates an Iterator of a single ToXContent object that serializes the given object as a single chunk. Just wraps {@link * Iterators#single}, but still useful because it avoids any type ambiguity. diff --git a/server/src/main/java/org/elasticsearch/health/Diagnosis.java b/server/src/main/java/org/elasticsearch/health/Diagnosis.java index 41301e2d52a53..7dfbf2d98eb5e 100644 --- a/server/src/main/java/org/elasticsearch/health/Diagnosis.java +++ b/server/src/main/java/org/elasticsearch/health/Diagnosis.java @@ -143,31 +143,34 @@ public String getUniqueId() { } } + private boolean hasResources() { + return affectedResources != null && affectedResources.isEmpty() == false; + } + @Override public Iterator toXContentChunked(ToXContent.Params outerParams) { - final Iterator resourcesIterator; - if (affectedResources == null) { - resourcesIterator = Collections.emptyIterator(); - } else { - resourcesIterator = Iterators.flatMap(affectedResources.iterator(), s -> s.toXContentChunked(outerParams)); - } - return Iterators.concat(Iterators.single((ToXContent) (builder, params) -> { + return Iterators.concat(ChunkedToXContentHelper.chunk((builder, params) -> { builder.startObject(); builder.field("id", definition.getUniqueId()); builder.field("cause", definition.cause); builder.field("action", definition.action); builder.field("help_url", definition.helpURL); - if (affectedResources != null && affectedResources.size() > 0) { + if (hasResources()) { + // don't want to have a new chunk & nested iterator for this, so we start the object here builder.startObject("affected_resources"); } return builder; - }), resourcesIterator, Iterators.single((builder, params) -> { - if (affectedResources != null && affectedResources.size() > 0) { - builder.endObject(); - } - builder.endObject(); - return builder; - })); + }), + hasResources() + ? 
Iterators.flatMap(affectedResources.iterator(), s -> s.toXContentChunked(outerParams)) + : Collections.emptyIterator(), + ChunkedToXContentHelper.chunk((b, p) -> { + if (hasResources()) { + b.endObject(); + } + return b.endObject(); + }) + ); } } diff --git a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java index 6944ac74c8115..fcffe624a9a6f 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java +++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java @@ -10,6 +10,7 @@ package org.elasticsearch.health; import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.xcontent.ToXContent; @@ -25,15 +26,14 @@ public record HealthIndicatorResult( List impacts, List diagnosisList ) implements ChunkedToXContentObject { + + private boolean hasDiagnosis() { + return diagnosisList != null && diagnosisList.isEmpty() == false; + } + @Override public Iterator toXContentChunked(ToXContent.Params outerParams) { - final Iterator diagnosisIterator; - if (diagnosisList == null) { - diagnosisIterator = Collections.emptyIterator(); - } else { - diagnosisIterator = Iterators.flatMap(diagnosisList.iterator(), s -> s.toXContentChunked(outerParams)); - } - return Iterators.concat(Iterators.single((ToXContent) (builder, params) -> { + return Iterators.concat(ChunkedToXContentHelper.chunk((builder, params) -> { builder.startObject(); builder.field("status", status.xContentValue()); builder.field("symptom", symptom); @@ -43,16 +43,21 @@ public Iterator toXContentChunked(ToXContent.Params outerP if (impacts != null && impacts.isEmpty() == false) { builder.field("impacts", impacts); } - if (diagnosisList != null && diagnosisList.isEmpty() == false) { + if (hasDiagnosis()) { + // don't want to have a new chunk & nested iterator for this, so we start the object here builder.startArray("diagnosis"); } return builder; - }), diagnosisIterator, Iterators.single((builder, params) -> { - if (diagnosisList != null && diagnosisList.isEmpty() == false) { - builder.endArray(); - } - builder.endObject(); - return builder; - })); + }), + hasDiagnosis() + ? 
Iterators.flatMap(diagnosisList.iterator(), s -> s.toXContentChunked(outerParams)) + : Collections.emptyIterator(), + ChunkedToXContentHelper.chunk((b, p) -> { + if (hasDiagnosis()) { + b.endArray(); + } + return b.endObject(); + }) + ); } } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptCacheStats.java b/server/src/main/java/org/elasticsearch/script/ScriptCacheStats.java index e752f5f811a43..f593a62d8a37c 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptCacheStats.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptCacheStats.java @@ -17,6 +17,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Arrays; import java.util.Collections; import java.util.Map; import java.util.Objects; @@ -47,6 +48,13 @@ public static ScriptCacheStats read(StreamInput in) throws IOException { return new ScriptCacheStats(context); } + private Map.Entry[] sortedContextStats() { + @SuppressWarnings("unchecked") + Map.Entry[] stats = context.entrySet().toArray(Map.Entry[]::new); + Arrays.sort(stats, Map.Entry.comparingByKey()); + return stats; + } + @Override public void writeTo(StreamOutput out) throws IOException { if (general != null) { @@ -57,38 +65,36 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(true); out.writeInt(context.size()); - for (String name : context.keySet().stream().sorted().toList()) { - out.writeString(name); - context.get(name).writeTo(out); + for (Map.Entry stats : sortedContextStats()) { + out.writeString(stats.getKey()); + stats.getValue().writeTo(out); } } + private static void scriptStatsToXContent(ScriptStats s, XContentBuilder builder) throws IOException { + builder.field(ScriptStats.Fields.COMPILATIONS, s.getCompilations()); + builder.field(ScriptStats.Fields.CACHE_EVICTIONS, s.getCacheEvictions()); + builder.field(ScriptStats.Fields.COMPILATION_LIMIT_TRIGGERED, s.getCompilationLimitTriggered()); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(Fields.SCRIPT_CACHE_STATS); builder.startObject(Fields.SUM); if (general != null) { - builder.field(ScriptStats.Fields.COMPILATIONS, general.getCompilations()); - builder.field(ScriptStats.Fields.CACHE_EVICTIONS, general.getCacheEvictions()); - builder.field(ScriptStats.Fields.COMPILATION_LIMIT_TRIGGERED, general.getCompilationLimitTriggered()); + scriptStatsToXContent(general, builder); builder.endObject().endObject(); return builder; } - ScriptStats sum = sum(); - builder.field(ScriptStats.Fields.COMPILATIONS, sum.getCompilations()); - builder.field(ScriptStats.Fields.CACHE_EVICTIONS, sum.getCacheEvictions()); - builder.field(ScriptStats.Fields.COMPILATION_LIMIT_TRIGGERED, sum.getCompilationLimitTriggered()); + scriptStatsToXContent(sum(), builder); builder.endObject(); builder.startArray(Fields.CONTEXTS); - for (String name : context.keySet().stream().sorted().toList()) { - ScriptStats stats = context.get(name); + for (Map.Entry stats : sortedContextStats()) { builder.startObject(); - builder.field(Fields.CONTEXT, name); - builder.field(ScriptStats.Fields.COMPILATIONS, stats.getCompilations()); - builder.field(ScriptStats.Fields.CACHE_EVICTIONS, stats.getCacheEvictions()); - builder.field(ScriptStats.Fields.COMPILATION_LIMIT_TRIGGERED, stats.getCompilationLimitTriggered()); + builder.field(Fields.CONTEXT, stats.getKey()); + scriptStatsToXContent(stats.getValue(), builder); builder.endObject(); } builder.endArray(); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/StreamingChatCompletionResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/StreamingChatCompletionResults.java index 05a181d3fc5b6..59778b83953ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/StreamingChatCompletionResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/StreamingChatCompletionResults.java @@ -82,11 +82,7 @@ public record Result(String delta) implements ChunkedToXContent { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return Iterators.concat( - ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.field(RESULT, delta), - ChunkedToXContentHelper.endObject() - ); + return ChunkedToXContentHelper.chunk((b, p) -> b.startObject().field(RESULT, delta).endObject()); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/StreamingUnifiedChatCompletionResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/StreamingUnifiedChatCompletionResults.java index 90038c67036c4..515c366b5ed13 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/StreamingUnifiedChatCompletionResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/StreamingUnifiedChatCompletionResults.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xcontent.ToXContent; @@ -23,6 +24,8 @@ import java.util.Map; import java.util.concurrent.Flow; +import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.chunk; + /** * Chat Completion results that only contain a Flow.Publisher. */ @@ -131,39 +134,25 @@ public ChatCompletionChunk(String id, List choices, String model, String @Override public Iterator toXContentChunked(ToXContent.Params params) { - - Iterator choicesIterator = Collections.emptyIterator(); - if (choices != null) { - choicesIterator = Iterators.concat( - ChunkedToXContentHelper.startArray(CHOICES_FIELD), - Iterators.flatMap(choices.iterator(), c -> c.toXContentChunked(params)), - ChunkedToXContentHelper.endArray() - ); - } - - Iterator usageIterator = Collections.emptyIterator(); - if (usage != null) { - usageIterator = Iterators.concat( - ChunkedToXContentHelper.startObject(USAGE_FIELD), - ChunkedToXContentHelper.field(COMPLETION_TOKENS_FIELD, usage.completionTokens()), - ChunkedToXContentHelper.field(PROMPT_TOKENS_FIELD, usage.promptTokens()), - ChunkedToXContentHelper.field(TOTAL_TOKENS_FIELD, usage.totalTokens()), - ChunkedToXContentHelper.endObject() - ); - } - return Iterators.concat( ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.field(ID_FIELD, id), - choicesIterator, - ChunkedToXContentHelper.field(MODEL_FIELD, model), - ChunkedToXContentHelper.field(OBJECT_FIELD, object), - usageIterator, + chunk((b, p) -> b.field(ID_FIELD, id)), + choices != null ? 
ChunkedToXContentHelper.array(CHOICES_FIELD, choices.iterator(), params) : Collections.emptyIterator(), + chunk((b, p) -> b.field(MODEL_FIELD, model).field(OBJECT_FIELD, object)), + usage != null + ? chunk( + (b, p) -> b.startObject(USAGE_FIELD) + .field(COMPLETION_TOKENS_FIELD, usage.completionTokens()) + .field(PROMPT_TOKENS_FIELD, usage.promptTokens()) + .field(TOTAL_TOKENS_FIELD, usage.totalTokens()) + .endObject() + ) + : Collections.emptyIterator(), ChunkedToXContentHelper.endObject() ); } - public record Choice(ChatCompletionChunk.Choice.Delta delta, String finishReason, int index) { + public record Choice(ChatCompletionChunk.Choice.Delta delta, String finishReason, int index) implements ChunkedToXContentObject { /* choices: Array<{ @@ -172,12 +161,13 @@ public record Choice(ChatCompletionChunk.Choice.Delta delta, String finishReason index: number; }>; */ + @Override public Iterator toXContentChunked(ToXContent.Params params) { return Iterators.concat( ChunkedToXContentHelper.startObject(), delta.toXContentChunked(params), - ChunkedToXContentHelper.optionalField(FINISH_REASON_FIELD, finishReason), - ChunkedToXContentHelper.field(INDEX_FIELD, index), + optionalField(FINISH_REASON_FIELD, finishReason), + chunk((b, p) -> b.field(INDEX_FIELD, index)), ChunkedToXContentHelper.endObject() ); } @@ -206,9 +196,9 @@ public Delta(String content, String refusal, String role, List toolCal public Iterator toXContentChunked(ToXContent.Params params) { var xContent = Iterators.concat( ChunkedToXContentHelper.startObject(DELTA_FIELD), - ChunkedToXContentHelper.optionalField(CONTENT_FIELD, content), - ChunkedToXContentHelper.optionalField(REFUSAL_FIELD, refusal), - ChunkedToXContentHelper.optionalField(ROLE_FIELD, role) + optionalField(CONTENT_FIELD, content), + optionalField(REFUSAL_FIELD, refusal), + optionalField(ROLE_FIELD, role) ); if (toolCalls != null && toolCalls.isEmpty() == false) { @@ -240,7 +230,7 @@ public List getToolCalls() { return toolCalls; } - public static class ToolCall { + public static class ToolCall implements ChunkedToXContentObject { private final int index; private final String id; public ChatCompletionChunk.Choice.Delta.ToolCall.Function function; @@ -278,26 +268,27 @@ public String getType() { }; type?: 'function'; */ + @Override public Iterator toXContentChunked(ToXContent.Params params) { var content = Iterators.concat( ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.field(INDEX_FIELD, index), - ChunkedToXContentHelper.optionalField(ID_FIELD, id) + chunk((b, p) -> b.field(INDEX_FIELD, index)), + optionalField(ID_FIELD, id) ); if (function != null) { content = Iterators.concat( content, ChunkedToXContentHelper.startObject(FUNCTION_FIELD), - ChunkedToXContentHelper.optionalField(FUNCTION_ARGUMENTS_FIELD, function.getArguments()), - ChunkedToXContentHelper.optionalField(FUNCTION_NAME_FIELD, function.getName()), + optionalField(FUNCTION_ARGUMENTS_FIELD, function.getArguments()), + optionalField(FUNCTION_NAME_FIELD, function.getName()), ChunkedToXContentHelper.endObject() ); } content = Iterators.concat( content, - ChunkedToXContentHelper.field(TYPE_FIELD, type), + ChunkedToXContentHelper.chunk((b, p) -> b.field(TYPE_FIELD, type)), ChunkedToXContentHelper.endObject() ); return content; @@ -325,5 +316,14 @@ public String getName() { } public record Usage(int completionTokens, int promptTokens, int totalTokens) {} + + private static Iterator optionalField(String name, String value) { + if (value == null) { + return Collections.emptyIterator(); + } else { + 
return ChunkedToXContentHelper.chunk((b, p) -> b.field(name, value)); + } + } + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherMetadata.java index 994bb8b75178e..767d58f99aa1c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherMetadata.java @@ -13,7 +13,6 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; @@ -23,6 +22,8 @@ import java.util.Iterator; import java.util.Objects; +import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.chunk; + public class WatcherMetadata extends AbstractNamedDiffable implements Metadata.Custom { public static final String TYPE = "watcher"; @@ -109,7 +110,7 @@ public static Metadata.Custom fromXContent(XContentParser parser) throws IOExcep @Override public Iterator toXContentChunked(ToXContent.Params ignored) { - return ChunkedToXContentHelper.field(Field.MANUALLY_STOPPED.getPreferredName(), manuallyStopped); + return chunk((b, p) -> b.field(Field.MANUALLY_STOPPED.getPreferredName(), manuallyStopped)); } interface Field { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java index ad7a4f169d92a..59ecdde230413 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java @@ -183,7 +183,7 @@ public Iterator toXContentChunked(ToXContent.Params params return b; }), ChunkedToXContentHelper.array("operators", operators.iterator()), - Iterators.single((b, p) -> b.field("sleeps", sleeps)), + ChunkedToXContentHelper.chunk((b, p) -> b.field("sleeps", sleeps)), ChunkedToXContentHelper.endObject() ); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java index 8a163d7336b0b..42a96cc7b7743 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java @@ -146,8 +146,8 @@ public void testSuccessfulPathways() throws Exception { executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2, RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY)) ); - assertThat(executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.RUNNING), equalTo(2)); - assertThat(executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL), equalTo(1)); + assertThat(executionInfo.getClusterStates(EsqlExecutionInfo.Cluster.Status.RUNNING).count(), equalTo(2L)); + assertThat(executionInfo.getClusterStates(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL).count(), equalTo(1L)); EsqlExecutionInfo.Cluster 
clusterA = executionInfo.getCluster(REMOTE_CLUSTER_1); // Should be done and successful diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java index c1e43a74c2273..e8c98322221c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java @@ -29,6 +29,7 @@ import java.io.IOException; import java.util.Collections; +import java.util.EnumMap; import java.util.Iterator; import java.util.List; import java.util.Locale; @@ -39,6 +40,7 @@ import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; import java.util.function.Predicate; +import java.util.stream.Stream; /** * Holds execution metadata about ES|QL queries for cross-cluster searches in order to display @@ -251,14 +253,20 @@ public Iterator toXContentChunked(ToXContent.Params params if (isCrossClusterSearch() == false || clusterInfo.isEmpty()) { return Collections.emptyIterator(); } + Map clusterStatuses = new EnumMap<>(Cluster.Status.class); + for (Cluster info : clusterInfo.values()) { + clusterStatuses.merge(info.getStatus(), 1, Integer::sum); + } return Iterators.concat( ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.field(TOTAL_FIELD.getPreferredName(), clusterInfo.size()), - ChunkedToXContentHelper.field(SUCCESSFUL_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.SUCCESSFUL)), - ChunkedToXContentHelper.field(RUNNING_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.RUNNING)), - ChunkedToXContentHelper.field(SKIPPED_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.SKIPPED)), - ChunkedToXContentHelper.field(PARTIAL_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.PARTIAL)), - ChunkedToXContentHelper.field(FAILED_FIELD.getPreferredName(), getClusterStateCount(Cluster.Status.FAILED)), + ChunkedToXContentHelper.chunk( + (b, p) -> b.field(TOTAL_FIELD.getPreferredName(), clusterInfo.size()) + .field(SUCCESSFUL_FIELD.getPreferredName(), clusterStatuses.getOrDefault(Cluster.Status.SUCCESSFUL, 0)) + .field(RUNNING_FIELD.getPreferredName(), clusterStatuses.getOrDefault(Cluster.Status.RUNNING, 0)) + .field(SKIPPED_FIELD.getPreferredName(), clusterStatuses.getOrDefault(Cluster.Status.SKIPPED, 0)) + .field(PARTIAL_FIELD.getPreferredName(), clusterStatuses.getOrDefault(Cluster.Status.PARTIAL, 0)) + .field(FAILED_FIELD.getPreferredName(), clusterStatuses.getOrDefault(Cluster.Status.FAILED, 0)) + ), // each Cluster object defines its own field object name ChunkedToXContentHelper.object("details", clusterInfo.values().iterator()), ChunkedToXContentHelper.endObject() @@ -266,12 +274,12 @@ public Iterator toXContentChunked(ToXContent.Params params } /** - * @param status the status you want a count of - * @return how many clusters are currently in a specific state + * @param status the status you want to access + * @return a stream of clusters with that status */ - public int getClusterStateCount(Cluster.Status status) { - assert clusterInfo.size() > 0 : "ClusterMap in EsqlExecutionInfo must not be empty"; - return (int) clusterInfo.values().stream().filter(cluster -> cluster.getStatus() == status).count(); + public Stream getClusterStates(Cluster.Status status) { + assert clusterInfo.isEmpty() == false : "ClusterMap in EsqlExecutionInfo must not be empty"; + return 
clusterInfo.values().stream().filter(cluster -> cluster.getStatus() == status); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 26b5329589421..1a82bb9b2829d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -206,11 +206,10 @@ public Iterator toXContentChunked(ToXContent.Params params Iterator tookTime; if (executionInfo != null && executionInfo.overallTook() != null) { - tookTime = ChunkedToXContentHelper.chunk((builder, p) -> { - builder.field("took", executionInfo.overallTook().millis()); - builder.field(EsqlExecutionInfo.IS_PARTIAL_FIELD.getPreferredName(), executionInfo.isPartial()); - return builder; - }); + tookTime = ChunkedToXContentHelper.chunk( + (builder, p) -> builder.field("took", executionInfo.overallTook().millis()) + .field(EsqlExecutionInfo.IS_PARTIAL_FIELD.getPreferredName(), executionInfo.isPartial()) + ); } else { tookTime = Collections.emptyIterator(); } @@ -222,9 +221,9 @@ public Iterator toXContentChunked(ToXContent.Params params ) : ResponseXContentUtils.allColumns(columns, "columns"); Iterator valuesIt = ResponseXContentUtils.columnValues(this.columns, this.pages, columnar, nullColumns); - Iterator profileRender = profile == null - ? Collections.emptyIterator() - : ChunkedToXContentHelper.field("profile", profile, params); + Iterator profileRender = profile != null + ? ChunkedToXContentHelper.field("profile", profile, params) + : Collections.emptyIterator(); Iterator executionInfoRender = executionInfo != null && executionInfo.isCrossClusterSearch() && executionInfo.includeCCSMetadata() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 5a340adca4396..8c95992cf9f5a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -468,7 +468,8 @@ private boolean analyzeCCSIndices( IndexResolution indexResolution = result.indices; EsqlSessionCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); EsqlSessionCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, indexResolution.unavailableClusters()); - if (executionInfo.isCrossClusterSearch() && executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.RUNNING) == 0) { + if (executionInfo.isCrossClusterSearch() + && executionInfo.getClusterStates(EsqlExecutionInfo.Cluster.Status.RUNNING).findAny().isEmpty()) { // for a CCS, if all clusters have been marked as SKIPPED, nothing to search so send a sentinel Exception // to let the LogicalPlanActionListener decide how to proceed logicalPlanListener.onFailure(new NoClustersToSearchException()); @@ -483,7 +484,7 @@ private boolean analyzeCCSIndices( // TODO: add a test for this if (targetClusters.containsAll(newClusters) == false // do not bother with a re-resolution if only remotes were requested and all were offline - && executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.RUNNING) > 0) { + && executionInfo.getClusterStates(EsqlExecutionInfo.Cluster.Status.RUNNING).findAny().isPresent()) { 
enrichPolicyResolver.resolvePolicies( newClusters, unresolvedPolicies, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 5743c7c6ec57f..69e6d97c6daed 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -553,7 +553,7 @@ public static int clusterDetailsSize(int numClusters) { } } */ - return numClusters * 4 + 6; + return numClusters * 4 + 1; } public void testChunkResponseSizeColumnar() { diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java index b0e43c8607078..9355fa7d0ad48 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java @@ -11,7 +11,6 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.ValidationException; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.LazyInitializable; @@ -159,14 +158,8 @@ public void cancel() {} } private ChunkedToXContent completionChunk(String delta) { - return params -> Iterators.concat( - ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.startArray(COMPLETION), - ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.field("delta", delta), - ChunkedToXContentHelper.endObject(), - ChunkedToXContentHelper.endArray(), - ChunkedToXContentHelper.endObject() + return params -> ChunkedToXContentHelper.chunk( + (b, p) -> b.startObject().startArray(COMPLETION).startObject().field("delta", delta).endObject().endArray().endObject() ); } @@ -206,20 +199,20 @@ public void cancel() {} } */ private ChunkedToXContent unifiedCompletionChunk(String delta) { - return params -> Iterators.concat( - ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.field("id", "id"), - ChunkedToXContentHelper.startArray("choices"), - ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.startObject("delta"), - ChunkedToXContentHelper.field("content", delta), - ChunkedToXContentHelper.endObject(), - ChunkedToXContentHelper.field("index", 0), - ChunkedToXContentHelper.endObject(), - ChunkedToXContentHelper.endArray(), - ChunkedToXContentHelper.field("model", "gpt-4o-2024-08-06"), - ChunkedToXContentHelper.field("object", "chat.completion.chunk"), - ChunkedToXContentHelper.endObject() + return params -> ChunkedToXContentHelper.chunk( + (b, p) -> b.startObject() + .field("id", "id") + .startArray("choices") + .startObject() + .startObject("delta") + .field("content", delta) + .endObject() + .field("index", 0) + .endObject() + .endArray() + .field("model", "gpt-4o-2024-08-06") + .field("object", "chat.completion.chunk") + .endObject() ); } diff --git 
a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java index 69912a967fd22..a22e179479dec 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.ClusterSettings; @@ -243,11 +242,7 @@ private static class RandomString implements ChunkedToXContent { @Override public Iterator toXContentChunked(ToXContent.Params params) { var randomString = randomUnicodeOfLengthBetween(2, 20); - return Iterators.concat( - ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.field("delta", randomString), - ChunkedToXContentHelper.endObject() - ); + return ChunkedToXContentHelper.chunk((b, p) -> b.startObject().field("delta", randomString).endObject()); } } @@ -280,7 +275,7 @@ public void writeTo(StreamOutput out) { @Override public Iterator toXContentChunked(ToXContent.Params params) { - return ChunkedToXContentHelper.field("result", randomUnicodeOfLengthBetween(2, 20)); + return ChunkedToXContentHelper.chunk((b, p) -> b.field("result", randomUnicodeOfLengthBetween(2, 20))); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java index 2419e2e1dc0c2..1c6adff5c63c8 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java @@ -176,17 +176,17 @@ public long getTotalSamples() { @UpdateForV9(owner = UpdateForV9.Owner.PROFILING) // change casing from Camel Case to Snake Case (requires updates in Kibana as well) @Override public Iterator toXContentChunked(ToXContent.Params params) { + /* + * The flamegraph response can be quite big. Some of these arrays need to be individual chunks, some can be a single chunk. + * They also need to be in-line so that neither the constants nor the fields get captured in a closure. 
+ */ return Iterators.concat( ChunkedToXContentHelper.startObject(), ChunkedToXContentHelper.array( "Edges", Iterators.flatMap( edges.iterator(), - perNodeEdges -> Iterators.concat( - ChunkedToXContentHelper.startArray(), - Iterators.map(perNodeEdges.entrySet().iterator(), edge -> (b, p) -> b.value(edge.getValue())), - ChunkedToXContentHelper.endArray() - ) + perNodeEdges -> ChunkedToXContentHelper.array(perNodeEdges.values().iterator(), edge -> (b, p) -> b.value(edge)) ) ), ChunkedToXContentHelper.array("FileID", Iterators.map(fileIds.iterator(), e -> (b, p) -> b.value(e))), From b244d4fa2b2f176751f68c07bb2a512f6852c2a9 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Tue, 28 Jan 2025 12:21:48 +0000 Subject: [PATCH 120/383] Mute org.elasticsearch.xpack.esql.parser.StatementParserTests.testNamedFunctionArgumentInMap --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index c42bf0969211a..7ebd754ec41af 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -280,6 +280,9 @@ tests: - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT method: test {p0=data_stream/140_data_stream_aliases/Create data stream alias with filter} issue: https://github.com/elastic/elasticsearch/issues/121014 +- class: org.elasticsearch.xpack.esql.parser.StatementParserTests + method: testNamedFunctionArgumentInMap + issue: https://github.com/elastic/elasticsearch/issues/121020 # Examples: # From 560ecf02e946e717126764a98b135ca9e9efc59c Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Tue, 28 Jan 2025 12:34:41 +0000 Subject: [PATCH 121/383] Mute org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityEsqlIT.testCrossClusterAsyncQueryXXX --- muted-tests.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 7ebd754ec41af..f8ce249686cf6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -283,6 +283,12 @@ tests: - class: org.elasticsearch.xpack.esql.parser.StatementParserTests method: testNamedFunctionArgumentInMap issue: https://github.com/elastic/elasticsearch/issues/121020 +- class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityEsqlIT + method: testCrossClusterAsyncQuery + issue: https://github.com/elastic/elasticsearch/issues/121021 +- class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityEsqlIT + method: testCrossClusterAsyncQueryStop + issue: https://github.com/elastic/elasticsearch/issues/121021 # Examples: # From 492d4054c8825462b0f7f191983dc534e447ce6e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 28 Jan 2025 23:41:40 +1100 Subject: [PATCH 122/383] Mute org.elasticsearch.xpack.security.profile.ProfileIntegTests testSuggestProfilesWithName #121022 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f8ce249686cf6..cab7ed28f4f03 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -289,6 +289,9 @@ tests: - class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityEsqlIT method: testCrossClusterAsyncQueryStop issue: https://github.com/elastic/elasticsearch/issues/121021 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testSuggestProfilesWithName + issue: https://github.com/elastic/elasticsearch/issues/121022 # Examples: # From f38c3e4858f11239208ed201e39c585f2d3a79de Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 28 Jan 2025 13:42:09 +0100 Subject: [PATCH 123/383] [Inference API] Propagate 
usage context to Elastic Inference Service (#120698) --- .../ElasticInferenceServiceActionCreator.java | 13 ++++++- ...ServiceSparseEmbeddingsRequestManager.java | 10 ++++- ...ferenceServiceSparseEmbeddingsRequest.java | 34 +++++++++++++++-- ...eServiceSparseEmbeddingsRequestEntity.java | 15 +++++++- .../elastic/ElasticInferenceService.java | 2 +- .../ElasticInferenceServiceUsageContext.java | 27 +++++++++++++ ...ticInferenceServiceActionCreatorTests.java | 29 ++++++++++++-- ...iceSparseEmbeddingsRequestEntityTests.java | 37 ++++++++++++++++-- ...ceServiceSparseEmbeddingsRequestTests.java | 38 +++++++++++++++---- .../elastic/ElasticInferenceServiceTests.java | 6 +-- 10 files changed, 183 insertions(+), 28 deletions(-) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceUsageContext.java diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java index fa096901ed67a..915d8b3b64bc1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.external.action.elastic; +import org.elasticsearch.inference.InputType; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.SenderExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.ElasticInferenceServiceSparseEmbeddingsRequestManager; @@ -29,15 +30,23 @@ public class ElasticInferenceServiceActionCreator implements ElasticInferenceSer private final TraceContext traceContext; - public ElasticInferenceServiceActionCreator(Sender sender, ServiceComponents serviceComponents, TraceContext traceContext) { + private final InputType inputType; + + public ElasticInferenceServiceActionCreator( + Sender sender, + ServiceComponents serviceComponents, + TraceContext traceContext, + InputType inputType + ) { this.sender = Objects.requireNonNull(sender); this.serviceComponents = Objects.requireNonNull(serviceComponents); this.traceContext = traceContext; + this.inputType = inputType; } @Override public ExecutableAction create(ElasticInferenceServiceSparseEmbeddingsModel model) { - var requestManager = new ElasticInferenceServiceSparseEmbeddingsRequestManager(model, serviceComponents, traceContext); + var requestManager = new ElasticInferenceServiceSparseEmbeddingsRequestManager(model, serviceComponents, traceContext, inputType); var errorMessage = constructFailedToSendRequestMessage( model.uri(), String.format(Locale.ROOT, "%s sparse embeddings", ELASTIC_INFERENCE_SERVICE_IDENTIFIER) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java index bf3409888aaf8..693a7ca36785c 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.elastic.ElasticInferenceServiceResponseHandler; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; @@ -40,6 +41,8 @@ public class ElasticInferenceServiceSparseEmbeddingsRequestManager extends Elast private final TraceContext traceContext; + private final InputType inputType; + private static ResponseHandler createSparseEmbeddingsHandler() { return new ElasticInferenceServiceResponseHandler( String.format(Locale.ROOT, "%s sparse embeddings", ELASTIC_INFERENCE_SERVICE_IDENTIFIER), @@ -50,12 +53,14 @@ private static ResponseHandler createSparseEmbeddingsHandler() { public ElasticInferenceServiceSparseEmbeddingsRequestManager( ElasticInferenceServiceSparseEmbeddingsModel model, ServiceComponents serviceComponents, - TraceContext traceContext + TraceContext traceContext, + InputType inputType ) { super(serviceComponents.threadPool(), model); this.model = model; this.truncator = serviceComponents.truncator(); this.traceContext = traceContext; + this.inputType = inputType; } @Override @@ -72,7 +77,8 @@ public void execute( truncator, truncatedInput, model, - traceContext + traceContext, + inputType ); execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequest.java index d1aaa6d5f984f..6acaf74a33338 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequest.java @@ -12,11 +12,13 @@ import org.apache.http.entity.ByteArrayEntity; import org.apache.http.message.BasicHeader; import org.elasticsearch.common.Strings; +import org.elasticsearch.inference.InputType; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.request.HttpRequest; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSparseEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceUsageContext; import org.elasticsearch.xpack.inference.telemetry.TraceContext; import org.elasticsearch.xpack.inference.telemetry.TraceContextHandler; @@ -31,24 +33,30 @@ public class ElasticInferenceServiceSparseEmbeddingsRequest implements ElasticIn private final Truncator.TruncationResult truncationResult; private final Truncator truncator; private final 
TraceContextHandler traceContextHandler; + private final InputType inputType; public ElasticInferenceServiceSparseEmbeddingsRequest( Truncator truncator, Truncator.TruncationResult truncationResult, ElasticInferenceServiceSparseEmbeddingsModel model, - TraceContext traceContext + TraceContext traceContext, + InputType inputType ) { this.truncator = truncator; this.truncationResult = truncationResult; this.model = Objects.requireNonNull(model); this.uri = model.uri(); this.traceContextHandler = new TraceContextHandler(traceContext); + this.inputType = inputType; } @Override public HttpRequest createHttpRequest() { var httpPost = new HttpPost(uri); - var requestEntity = Strings.toString(new ElasticInferenceServiceSparseEmbeddingsRequestEntity(truncationResult.input())); + var usageContext = inputTypeToUsageContext(inputType); + var requestEntity = Strings.toString( + new ElasticInferenceServiceSparseEmbeddingsRequestEntity(truncationResult.input(), usageContext) + ); ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); httpPost.setEntity(byteEntity); @@ -76,7 +84,13 @@ public URI getURI() { @Override public Request truncate() { var truncatedInput = truncator.truncate(truncationResult.input()); - return new ElasticInferenceServiceSparseEmbeddingsRequest(truncator, truncatedInput, model, traceContextHandler.traceContext()); + return new ElasticInferenceServiceSparseEmbeddingsRequest( + truncator, + truncatedInput, + model, + traceContextHandler.traceContext(), + inputType + ); } @Override @@ -84,4 +98,18 @@ public boolean[] getTruncationInfo() { return truncationResult.truncated().clone(); } + // visible for testing + static ElasticInferenceServiceUsageContext inputTypeToUsageContext(InputType inputType) { + switch (inputType) { + case SEARCH -> { + return ElasticInferenceServiceUsageContext.SEARCH; + } + case INGEST -> { + return ElasticInferenceServiceUsageContext.INGEST; + } + default -> { + return ElasticInferenceServiceUsageContext.UNSPECIFIED; + } + } + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java index 301bbf0146c14..deecd9186aca5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java @@ -7,16 +7,22 @@ package org.elasticsearch.xpack.inference.external.request.elastic; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceUsageContext; import java.io.IOException; import java.util.List; import java.util.Objects; -public record ElasticInferenceServiceSparseEmbeddingsRequestEntity(List inputs) implements ToXContentObject { +public record ElasticInferenceServiceSparseEmbeddingsRequestEntity( + List inputs, + @Nullable ElasticInferenceServiceUsageContext usageContext +) implements ToXContentObject { private static final String INPUT_FIELD = "input"; + private static final String USAGE_CONTEXT = "usage_context"; public 
ElasticInferenceServiceSparseEmbeddingsRequestEntity { Objects.requireNonNull(inputs); @@ -34,8 +40,15 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } builder.endArray(); + + // optional field + if ((usageContext == ElasticInferenceServiceUsageContext.UNSPECIFIED) == false) { + builder.field(USAGE_CONTEXT, usageContext); + } + builder.endObject(); return builder; } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index 29f1e7cf70e77..f96d3cb325b09 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -231,7 +231,7 @@ protected void doInfer( var currentTraceInfo = getCurrentTraceInfo(); ElasticInferenceServiceExecutableActionModel elasticInferenceServiceModel = (ElasticInferenceServiceExecutableActionModel) model; - var actionCreator = new ElasticInferenceServiceActionCreator(getSender(), getServiceComponents(), currentTraceInfo); + var actionCreator = new ElasticInferenceServiceActionCreator(getSender(), getServiceComponents(), currentTraceInfo, inputType); var action = elasticInferenceServiceModel.accept(actionCreator, taskSettings); action.execute(inputs, timeout, listener); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceUsageContext.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceUsageContext.java new file mode 100644 index 0000000000000..7303f0c6e4436 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceUsageContext.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elastic; + +import java.util.Locale; + +/** + * Specifies the usage context for a request to the Elastic Inference Service. + * This helps to determine the type of resources that are allocated in the Elastic Inference Service for the particular request. 
+ */ +public enum ElasticInferenceServiceUsageContext { + + SEARCH, + INGEST, + UNSPECIFIED; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java index 02b09917d0065..b142371ae1b4b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; @@ -90,7 +91,12 @@ public void testExecute_ReturnsSuccessfulResponse_ForElserAction() throws IOExce webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)); - var actionCreator = new ElasticInferenceServiceActionCreator(sender, createWithEmptySettings(threadPool), createTraceContext()); + var actionCreator = new ElasticInferenceServiceActionCreator( + sender, + createWithEmptySettings(threadPool), + createTraceContext(), + InputType.UNSPECIFIED + ); var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); @@ -146,7 +152,12 @@ public void testSend_FailsFromInvalidResponseFormat_ForElserAction() throws IOEx webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)); - var actionCreator = new ElasticInferenceServiceActionCreator(sender, createWithEmptySettings(threadPool), createTraceContext()); + var actionCreator = new ElasticInferenceServiceActionCreator( + sender, + createWithEmptySettings(threadPool), + createTraceContext(), + InputType.UNSPECIFIED + ); var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); @@ -198,7 +209,12 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating() throws IOExc webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)); - var actionCreator = new ElasticInferenceServiceActionCreator(sender, createWithEmptySettings(threadPool), createTraceContext()); + var actionCreator = new ElasticInferenceServiceActionCreator( + sender, + createWithEmptySettings(threadPool), + createTraceContext(), + InputType.UNSPECIFIED + ); var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); @@ -258,7 +274,12 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { // truncated to 1 token = 3 characters var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), 1); - var actionCreator = new ElasticInferenceServiceActionCreator(sender, createWithEmptySettings(threadPool), 
createTraceContext()); + var actionCreator = new ElasticInferenceServiceActionCreator( + sender, + createWithEmptySettings(threadPool), + createTraceContext(), + InputType.UNSPECIFIED + ); var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java index 7b10cf600275c..5920e70cfdd18 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceUsageContext; import java.io.IOException; import java.util.List; @@ -20,8 +21,11 @@ public class ElasticInferenceServiceSparseEmbeddingsRequestEntityTests extends ESTestCase { - public void testToXContent_SingleInput() throws IOException { - var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity(List.of("abc")); + public void testToXContent_SingleInput_UnspecifiedUsageContext() throws IOException { + var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity( + List.of("abc"), + ElasticInferenceServiceUsageContext.UNSPECIFIED + ); String xContentString = xContentEntityToString(entity); assertThat(xContentString, equalToIgnoringWhitespaceInJsonString(""" { @@ -29,8 +33,11 @@ public void testToXContent_SingleInput() throws IOException { }""")); } - public void testToXContent_MultipleInputs() throws IOException { - var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity(List.of("abc", "def")); + public void testToXContent_MultipleInputs_UnspecifiedUsageContext() throws IOException { + var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity( + List.of("abc", "def"), + ElasticInferenceServiceUsageContext.UNSPECIFIED + ); String xContentString = xContentEntityToString(entity); assertThat(xContentString, equalToIgnoringWhitespaceInJsonString(""" { @@ -42,6 +49,28 @@ public void testToXContent_MultipleInputs() throws IOException { """)); } + public void testToXContent_MultipleInputs_SearchUsageContext() throws IOException { + var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity(List.of("abc"), ElasticInferenceServiceUsageContext.SEARCH); + String xContentString = xContentEntityToString(entity); + assertThat(xContentString, equalToIgnoringWhitespaceInJsonString(""" + { + "input": ["abc"], + "usage_context": "search" + } + """)); + } + + public void testToXContent_MultipleInputs_IngestUsageContext() throws IOException { + var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity(List.of("abc"), ElasticInferenceServiceUsageContext.INGEST); + String xContentString = xContentEntityToString(entity); + assertThat(xContentString, equalToIgnoringWhitespaceInJsonString(""" + { + "input": ["abc"], + "usage_context": "ingest" + } + """)); + } + private String 
xContentEntityToString(ElasticInferenceServiceSparseEmbeddingsRequestEntity entity) throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); entity.toXContent(builder, null); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java index 9d3bbe2ed12ae..cb867f15b6d4f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java @@ -9,18 +9,21 @@ import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.inference.InputType; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.common.TruncatorTests; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSparseEmbeddingsModelTests; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceUsageContext; import org.elasticsearch.xpack.inference.telemetry.TraceContext; import java.io.IOException; import java.util.List; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.request.elastic.ElasticInferenceServiceSparseEmbeddingsRequest.inputTypeToUsageContext; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -28,11 +31,11 @@ public class ElasticInferenceServiceSparseEmbeddingsRequestTests extends ESTestCase { - public void testCreateHttpRequest() throws IOException { + public void testCreateHttpRequest_UsageContextSearch() throws IOException { var url = "http://eis-gateway.com"; var input = "input"; - var request = createRequest(url, input); + var request = createRequest(url, input, InputType.SEARCH); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); @@ -40,15 +43,16 @@ public void testCreateHttpRequest() throws IOException { assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap.size(), equalTo(1)); + assertThat(requestMap.size(), equalTo(2)); assertThat(requestMap.get("input"), is(List.of(input))); + assertThat(requestMap.get("usage_context"), equalTo("search")); } public void testTraceContextPropagatedThroughHTTPHeaders() { var url = "http://eis-gateway.com"; var input = "input"; - var request = createRequest(url, input); + var request = createRequest(url, input, InputType.UNSPECIFIED); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); @@ -65,7 +69,7 @@ public void testTruncate_ReducesInputTextSizeByHalf() throws IOException { var url = "http://eis-gateway.com"; var input = "abcd"; - var request = createRequest(url, input); + var request = createRequest(url, 
input, InputType.UNSPECIFIED); var truncatedRequest = request.truncate(); var httpRequest = truncatedRequest.createHttpRequest(); @@ -81,21 +85,39 @@ public void testIsTruncated_ReturnsTrue() { var url = "http://eis-gateway.com"; var input = "abcd"; - var request = createRequest(url, input); + var request = createRequest(url, input, InputType.UNSPECIFIED); assertFalse(request.getTruncationInfo()[0]); var truncatedRequest = request.truncate(); assertTrue(truncatedRequest.getTruncationInfo()[0]); } - public ElasticInferenceServiceSparseEmbeddingsRequest createRequest(String url, String input) { + public void testInputTypeToUsageContext_Search() { + assertThat(inputTypeToUsageContext(InputType.SEARCH), equalTo(ElasticInferenceServiceUsageContext.SEARCH)); + } + + public void testInputTypeToUsageContext_Ingest() { + assertThat(inputTypeToUsageContext(InputType.INGEST), equalTo(ElasticInferenceServiceUsageContext.INGEST)); + } + + public void testInputTypeToUsageContext_Unspecified() { + assertThat(inputTypeToUsageContext(InputType.UNSPECIFIED), equalTo(ElasticInferenceServiceUsageContext.UNSPECIFIED)); + } + + public void testInputTypeToUsageContext_Unknown_DefaultToUnspecified() { + assertThat(inputTypeToUsageContext(InputType.CLASSIFICATION), equalTo(ElasticInferenceServiceUsageContext.UNSPECIFIED)); + assertThat(inputTypeToUsageContext(InputType.CLUSTERING), equalTo(ElasticInferenceServiceUsageContext.UNSPECIFIED)); + } + + public ElasticInferenceServiceSparseEmbeddingsRequest createRequest(String url, String input, InputType inputType) { var embeddingsModel = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(url); return new ElasticInferenceServiceSparseEmbeddingsRequest( TruncatorTests.createTruncator(), new Truncator.TruncationResult(List.of(input), new boolean[] { false }), embeddingsModel, - new TraceContext(randomAlphaOfLength(10), randomAlphaOfLength(10)) + new TraceContext(randomAlphaOfLength(10), randomAlphaOfLength(10)), + inputType ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index 098f69f80a8a2..334119f999e4c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -462,7 +462,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { List.of("input text"), false, new HashMap<>(), - InputType.INGEST, + InputType.SEARCH, InferenceAction.Request.DEFAULT_TIMEOUT, listener ); @@ -483,7 +483,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), Matchers.equalTo(XContentType.JSON.mediaType())); var requestMap = entityAsMap(request.getBody()); - assertThat(requestMap, is(Map.of("input", List.of("input text")))); + assertThat(requestMap, is(Map.of("input", List.of("input text"), "usage_context", "search"))); } } @@ -541,7 +541,7 @@ public void testChunkedInfer_PassesThrough() throws IOException { ); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - assertThat(requestMap, is(Map.of("input", List.of("input text")))); + assertThat(requestMap, is(Map.of("input", List.of("input text"), "usage_context", "ingest"))); } } From 
a87bd7ae2689409ce50ca300a62cc31678eacfcd Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Tue, 28 Jan 2025 14:08:13 +0100 Subject: [PATCH 124/383] ESQL - Allow full text functions disjunctions for non-full text functions (#120291) --- docs/changelog/120291.yaml | 5 + docs/reference/esql/esql-limitations.asciidoc | 18 +++- x-pack/plugin/build.gradle | 2 +- .../LuceneQueryExpressionEvaluator.java | 23 ++++- .../LuceneQueryExpressionEvaluatorTests.java | 4 +- .../src/main/resources/kql-function.csv-spec | 37 +++++++ .../main/resources/match-function.csv-spec | 37 +++++++ .../main/resources/match-operator.csv-spec | 37 +++++++ .../src/main/resources/qstr-function.csv-spec | 37 +++++++ .../xpack/esql/plugin/MatchFunctionIT.java | 29 ++---- .../xpack/esql/plugin/MatchOperatorIT.java | 21 ++-- .../xpack/esql/action/EsqlCapabilities.java | 5 + .../xpack/esql/evaluator/EvalMapper.java | 45 ++++++--- .../evaluator/mapper/EvaluatorMapper.java | 7 ++ .../evaluator/mapper/ExpressionMapper.java | 5 +- .../function/fulltext/FullTextFunction.java | 71 ++++++++----- .../comparison/InsensitiveEqualsMapper.java | 27 +++-- .../esql/planner/LocalExecutionPlanner.java | 9 +- .../xpack/esql/plugin/ComputeService.java | 3 +- .../elasticsearch/xpack/esql/CsvTests.java | 3 +- .../xpack/esql/analysis/VerifierTests.java | 99 ++++++++++--------- .../LocalPhysicalPlanOptimizerTests.java | 59 +++++++++++ .../optimizer/PhysicalPlanOptimizerTests.java | 3 +- .../planner/LocalExecutionPlannerTests.java | 12 ++- .../test/esql/180_match_operator.yml | 27 ++--- 25 files changed, 461 insertions(+), 164 deletions(-) create mode 100644 docs/changelog/120291.yaml diff --git a/docs/changelog/120291.yaml b/docs/changelog/120291.yaml new file mode 100644 index 0000000000000..ca5f58f2042ae --- /dev/null +++ b/docs/changelog/120291.yaml @@ -0,0 +1,5 @@ +pr: 120291 +summary: ESQL - Allow full text functions disjunctions for non-full text functions +area: ES|QL +type: feature +issues: [] diff --git a/docs/reference/esql/esql-limitations.asciidoc b/docs/reference/esql/esql-limitations.asciidoc index adfd38478ab21..523330317b9f5 100644 --- a/docs/reference/esql/esql-limitations.asciidoc +++ b/docs/reference/esql/esql-limitations.asciidoc @@ -112,7 +112,9 @@ it is necessary to use the search function, like <>, in a <> source command, or close enough to it. Otherwise, the query will fail with a validation error. Another limitation is that any <> command containing a full-text search function -cannot also use disjunctions (`OR`) unless all functions used in the OR clauses are full-text functions themselves. 
+cannot use disjunctions (`OR`), unless: + +* All functions used in the OR clauses are full-text functions themselves, or scoring is not used For example, this query is valid: @@ -131,19 +133,27 @@ FROM books | WHERE MATCH(author, "Faulkner") ---- -And this query will fail due to the disjunction: +And this query that uses a disjunction will succeed: [source,esql] ---- FROM books +| WHERE MATCH(author, "Faulkner") OR QSTR("author: Hemingway") +---- + +However using scoring will fail because it uses a non full text function as part of the disjunction: + +[source,esql] +---- +FROM books METADATA _score | WHERE MATCH(author, "Faulkner") OR author LIKE "Hemingway" ---- -However this query will succeed because it uses full text functions on both `OR` clauses: +Scoring will work in the following query, as it uses full text functions on both `OR` clauses: [source,esql] ---- -FROM books +FROM books METADATA _score | WHERE MATCH(author, "Faulkner") OR QSTR("author: Hemingway") ---- diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 7054a71c8c614..850dd4bbf0c59 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -101,7 +101,7 @@ tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("esql/190_lookup_join/alias-repeated-index", "LOOKUP JOIN does not support index aliases for now") task.skipTest("esql/190_lookup_join/alias-pattern-multiple", "LOOKUP JOIN does not support index aliases for now") task.skipTest("esql/190_lookup_join/alias-pattern-single", "LOOKUP JOIN does not support index aliases for now") - + task.skipTest("esql/180_match_operator/match with disjunctions", "Disjunctions in full text functions work now") }) tasks.named('yamlRestCompatTest').configure { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java index d7d9da052a962..0ba1872504c40 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java @@ -25,6 +25,7 @@ import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -44,19 +45,20 @@ public record ShardConfig(Query query, IndexSearcher searcher) {} private final BlockFactory blockFactory; private final ShardConfig[] shards; - private final int docChannel; private ShardState[] perShardState = EMPTY_SHARD_STATES; - public LuceneQueryExpressionEvaluator(BlockFactory blockFactory, ShardConfig[] shards, int docChannel) { + public LuceneQueryExpressionEvaluator(BlockFactory blockFactory, ShardConfig[] shards) { this.blockFactory = blockFactory; this.shards = shards; - this.docChannel = docChannel; } @Override public Block eval(Page page) { - DocVector docs = page.getBlock(docChannel).asVector(); + // Lucene based operators retrieve DocVectors as first block + Block block = page.getBlock(0); + assert block instanceof DocBlock : "LuceneQueryExpressionEvaluator expects DocBlock as input"; + DocVector docs = (DocVector) block.asVector(); try { if (docs.singleSegmentNonDecreasing()) { 
return evalSingleSegmentNonDecreasing(docs).asBlock(); @@ -341,4 +343,17 @@ public void close() { Releasables.closeExpectNoException(builder); } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final ShardConfig[] shardConfigs; + + public Factory(ShardConfig[] shardConfigs) { + this.shardConfigs = shardConfigs; + } + + @Override + public EvalOperator.ExpressionEvaluator get(DriverContext context) { + return new LuceneQueryExpressionEvaluator(context.blockFactory(), shardConfigs); + } + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java index 6c978297b8497..54b33732aa425 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java @@ -183,8 +183,8 @@ private List runQuery(Set values, Query query, boolean shuffleDocs ); LuceneQueryExpressionEvaluator luceneQueryEvaluator = new LuceneQueryExpressionEvaluator( blockFactory, - new LuceneQueryExpressionEvaluator.ShardConfig[] { shard }, - 0 + new LuceneQueryExpressionEvaluator.ShardConfig[] { shard } + ); List operators = new ArrayList<>(); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/kql-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/kql-function.csv-spec index f9dfbc8634c6f..3a8dafe2075f6 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/kql-function.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/kql-function.csv-spec @@ -152,3 +152,40 @@ emp_no:integer | first_name:keyword | last_name:keyword 10053 | Sanjiv | Zschoche 10069 | Margareta | Bierman ; + +testKqlWithNonPushableDisjunctions +required_capability: kql_function +required_capability: full_text_functions_disjunctions_compute_engine + +from books +| where kql("title:lord") or length(title) > 130 +| keep book_no +; +ignoreOrder: true + +book_no:keyword +2675 +2714 +4023 +7140 +8678 +; + +testKqlWithNonPushableDisjunctionsOnComplexExpressions +required_capability: kql_function +required_capability: full_text_functions_disjunctions_compute_engine + +from books +| where (kql("title:lord") and ratings > 4.5) or (kql("author:dostoevsky") and length(title) > 50) +| keep book_no +; +ignoreOrder: true + +book_no:keyword +2675 +2924 +4023 +1937 +7140 +2714 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec index 39af991a9fc41..039174a2f0f2a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec @@ -718,3 +718,40 @@ from books title:text The Hobbit or There and Back Again ; + +testMatchWithNonPushableDisjunctions +required_capability: match_function +required_capability: full_text_functions_disjunctions_compute_engine + +from books +| where match(title, "lord") or length(title) > 130 +| keep book_no +; +ignoreOrder: true + +book_no:keyword +2675 +2714 +4023 +7140 +8678 +; + +testMatchWithNonPushableDisjunctionsOnComplexExpressions +required_capability: match_function +required_capability: full_text_functions_disjunctions_compute_engine + +from 
books +| where (match(title, "lord") and ratings > 4.5) or (match(author, "dostoevsky") and length(title) > 50) +| keep book_no +; +ignoreOrder: true + +book_no:keyword +2675 +2924 +4023 +1937 +7140 +2714 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec index e0559a9bfe011..e6a63d1078d97 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec @@ -684,3 +684,40 @@ from semantic_text host:keyword | semantic_text_field:text "host1" | live long and prosper ; + +testMatchWithNonPushableDisjunctions +required_capability: match_operator_colon +required_capability: full_text_functions_disjunctions_compute_engine + +from books +| where title:"lord" or length(title) > 130 +| keep book_no +; +ignoreOrder: true + +book_no:keyword +2675 +2714 +4023 +7140 +8678 +; + +testMatchWithNonPushableDisjunctionsOnComplexExpressions +required_capability: match_operator_colon +required_capability: full_text_functions_disjunctions_compute_engine + +from books +| where (title:"lord" and ratings > 4.5) or (author:"dostoevsky" and length(title) > 50) +| keep book_no +; +ignoreOrder: true + +book_no:keyword +2675 +2924 +4023 +1937 +7140 +2714 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec index d2812a861da22..61c0f9a49e7a8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec @@ -151,3 +151,40 @@ emp_no:integer | first_name:keyword | last_name:keyword 10053 | Sanjiv | Zschoche 10069 | Margareta | Bierman ; + +testQstrWithNonPushableDisjunctions +required_capability: qstr_function +required_capability: full_text_functions_disjunctions_compute_engine + +from books +| where qstr("title:lord") or length(title) > 130 +| keep book_no +; +ignoreOrder: true + +book_no:keyword +2675 +2714 +4023 +7140 +8678 +; + +testQstrWithNonPushableDisjunctionsOnComplexExpressions +required_capability: qstr_function +required_capability: full_text_functions_disjunctions_compute_engine + +from books +| where (qstr("title:lord") and ratings > 4.5) or (qstr("author:dostoevsky") and length(title) > 50) +| keep book_no +; +ignoreOrder: true + +book_no:keyword +2675 +2924 +4023 +1937 +7140 +2714 +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java index ad90bbf6ae9db..b928b25929401 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java @@ -14,8 +14,6 @@ import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; -import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; -import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.junit.Before; import java.util.List; @@ -31,12 +29,6 @@ public void setupIndex() { createAndPopulateIndex(); } - @Override - protected EsqlQueryResponse 
run(EsqlQueryRequest request) { - assumeTrue("match function capability not available", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); - return super.run(request); - } - public void testSimpleWhereMatch() { var query = """ FROM test @@ -230,20 +222,19 @@ public void testWhereMatchAfterStats() { assertThat(error.getMessage(), containsString("Unknown column [content]")); } - public void testWhereMatchWithFunctions() { + public void testWhereMatchNotPushedDown() { var query = """ FROM test - | WHERE match(content, "fox") OR to_upper(content) == "FOX" + | WHERE match(content, "fox") OR length(content) < 20 + | KEEP id + | SORT id """; - var error = expectThrows(ElasticsearchException.class, () -> run(query)); - assertThat( - error.getMessage(), - containsString( - "Invalid condition [match(content, \"fox\") OR to_upper(content) == \"FOX\"]. " - + "Full text functions can be used in an OR condition," - + " but only if just full text functions are used in the OR condition" - ) - ); + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(1), List.of(2), List.of(6))); + } } public void testWhereMatchWithRow() { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java index 758878b46d51f..bd7246518c958 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java @@ -206,20 +206,19 @@ public void testWhereMatchAfterStats() { assertThat(error.getMessage(), containsString("Unknown column [content]")); } - public void testWhereMatchWithFunctions() { + public void testWhereMatchNotPushedDown() { var query = """ FROM test - | WHERE content:"fox" OR to_upper(content) == "FOX" + | WHERE content:"fox" OR length(content) < 20 + | KEEP id + | SORT id """; - var error = expectThrows(ElasticsearchException.class, () -> run(query)); - assertThat( - error.getMessage(), - containsString( - "Invalid condition [content:\"fox\" OR to_upper(content) == \"FOX\"]. 
" - + "Full text functions can be used in an OR condition, " - + "but only if just full text functions are used in the OR condition" - ) - ); + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(1), List.of(2), List.of(6))); + } } public void testWhereMatchWithRow() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 0179027ea7fd6..cf23e4b528f24 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -761,6 +761,11 @@ public enum Cap { */ LOOKUP_JOIN_NO_ALIASES(JOIN_LOOKUP_V12.isEnabled()), + /** + * Full text functions can be used in disjunctions as they are implemented in compute engine + */ + FULL_TEXT_FUNCTIONS_DISJUNCTIONS_COMPUTE_ENGINE, + /** * Support match options in match function */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java index c8b9d1583a642..de3b070adbb1f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEqualsMapper; +import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders.ShardContext; import org.elasticsearch.xpack.esql.planner.Layout; import java.util.List; @@ -50,24 +51,46 @@ public final class EvalMapper { private EvalMapper() {} - @SuppressWarnings({ "rawtypes", "unchecked" }) public static ExpressionEvaluator.Factory toEvaluator(FoldContext foldCtx, Expression exp, Layout layout) { + return toEvaluator(foldCtx, exp, layout, List.of()); + } + + @SuppressWarnings({ "rawtypes", "unchecked" }) + /** + * Provides an ExpressionEvaluator factory to evaluate an expression. 
+ * + * @param foldCtx the fold context for folding expressions + * @param exp the expression to generate an evaluator for + * @param layout the mapping from attributes to channels + * @param shardContexts the shard contexts, needed to generate queries for expressions that couldn't be pushed down to Lucene + */ + public static ExpressionEvaluator.Factory toEvaluator( + FoldContext foldCtx, + Expression exp, + Layout layout, + List shardContexts + ) { if (exp instanceof EvaluatorMapper m) { return m.toEvaluator(new EvaluatorMapper.ToEvaluator() { @Override public ExpressionEvaluator.Factory apply(Expression expression) { - return toEvaluator(foldCtx, expression, layout); + return toEvaluator(foldCtx, expression, layout, shardContexts); } @Override public FoldContext foldCtx() { return foldCtx; } + + @Override + public List shardContexts() { + return shardContexts; + } }); } for (ExpressionMapper em : MAPPERS) { if (em.typeToken.isInstance(exp)) { - return em.map(foldCtx, exp, layout); + return em.map(foldCtx, exp, layout, shardContexts); } } throw new QlIllegalArgumentException("Unsupported expression [{}]", exp); @@ -75,9 +98,9 @@ public FoldContext foldCtx() { static class BooleanLogic extends ExpressionMapper { @Override - public ExpressionEvaluator.Factory map(FoldContext foldCtx, BinaryLogic bc, Layout layout) { - var leftEval = toEvaluator(foldCtx, bc.left(), layout); - var rightEval = toEvaluator(foldCtx, bc.right(), layout); + public ExpressionEvaluator.Factory map(FoldContext foldCtx, BinaryLogic bc, Layout layout, List shardContexts) { + var leftEval = toEvaluator(foldCtx, bc.left(), layout, shardContexts); + var rightEval = toEvaluator(foldCtx, bc.right(), layout, shardContexts); /** * Evaluator for the three-valued boolean expressions. * We can't generate these with the {@link Evaluator} annotation because that @@ -153,7 +176,7 @@ public void close() { static class Nots extends ExpressionMapper { @Override - public ExpressionEvaluator.Factory map(FoldContext foldCtx, Not not, Layout layout) { + public ExpressionEvaluator.Factory map(FoldContext foldCtx, Not not, Layout layout, List shardContexts) { var expEval = toEvaluator(foldCtx, not.field(), layout); return dvrCtx -> new org.elasticsearch.xpack.esql.evaluator.predicate.operator.logical.NotEvaluator( not.source(), @@ -165,7 +188,7 @@ public ExpressionEvaluator.Factory map(FoldContext foldCtx, Not not, Layout layo static class Attributes extends ExpressionMapper { @Override - public ExpressionEvaluator.Factory map(FoldContext foldCtx, Attribute attr, Layout layout) { + public ExpressionEvaluator.Factory map(FoldContext foldCtx, Attribute attr, Layout layout, List shardContexts) { record Attribute(int channel) implements ExpressionEvaluator { @Override public Block eval(Page page) { @@ -200,7 +223,7 @@ public boolean eagerEvalSafeInLazy() { static class Literals extends ExpressionMapper { @Override - public ExpressionEvaluator.Factory map(FoldContext foldCtx, Literal lit, Layout layout) { + public ExpressionEvaluator.Factory map(FoldContext foldCtx, Literal lit, Layout layout, List shardContexts) { record LiteralsEvaluator(DriverContext context, Literal lit) implements ExpressionEvaluator { @Override public Block eval(Page page) { @@ -257,7 +280,7 @@ private static Block block(Literal lit, BlockFactory blockFactory, int positions static class IsNulls extends ExpressionMapper { @Override - public ExpressionEvaluator.Factory map(FoldContext foldCtx, IsNull isNull, Layout layout) { + public ExpressionEvaluator.Factory 
map(FoldContext foldCtx, IsNull isNull, Layout layout, List shardContexts) { var field = toEvaluator(foldCtx, isNull.field(), layout); return new IsNullEvaluatorFactory(field); } @@ -305,7 +328,7 @@ public String toString() { static class IsNotNulls extends ExpressionMapper { @Override - public ExpressionEvaluator.Factory map(FoldContext foldCtx, IsNotNull isNotNull, Layout layout) { + public ExpressionEvaluator.Factory map(FoldContext foldCtx, IsNotNull isNotNull, Layout layout, List shardContexts) { return new IsNotNullEvaluatorFactory(toEvaluator(foldCtx, isNotNull.field(), layout)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java index 5a8b3d32e7db0..a4a17297abc09 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java @@ -21,8 +21,11 @@ import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; +import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.Layout; +import java.util.List; + import static org.elasticsearch.compute.data.BlockUtils.fromArrayRow; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; @@ -34,6 +37,10 @@ interface ToEvaluator { ExpressionEvaluator.Factory apply(Expression expression); FoldContext foldCtx(); + + default List shardContexts() { + throw new UnsupportedOperationException("Shard contexts should only be needed for evaluation operations"); + } } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/ExpressionMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/ExpressionMapper.java index 5a76080e7995c..06a8a92ecfce8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/ExpressionMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/ExpressionMapper.java @@ -11,8 +11,11 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.util.ReflectionUtils; +import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders.ShardContext; import org.elasticsearch.xpack.esql.planner.Layout; +import java.util.List; + public abstract class ExpressionMapper { public final Class typeToken; @@ -20,5 +23,5 @@ public ExpressionMapper() { typeToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass()); } - public abstract ExpressionEvaluator.Factory map(FoldContext foldCtx, E expression, Layout layout); + public abstract ExpressionEvaluator.Factory map(FoldContext foldCtx, E expression, Layout layout, List shardContexts); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index 687c3b1d23ec5..32a350ac7351e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -8,12 +8,16 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluator; +import org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluator.ShardConfig; +import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.Function; @@ -21,6 +25,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.Holder; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.predicate.logical.BinaryLogic; import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; @@ -31,6 +36,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Limit; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.OrderBy; +import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import org.elasticsearch.xpack.esql.querydsl.query.TranslationAwareExpressionQuery; @@ -50,7 +56,7 @@ * These functions needs to be pushed down to Lucene queries to be executed - there's no Evaluator for them, but depend on * {@link org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer} to rewrite them into Lucene queries. */ -public abstract class FullTextFunction extends Function implements TranslationAware, PostAnalysisPlanVerificationAware { +public abstract class FullTextFunction extends Function implements TranslationAware, PostAnalysisPlanVerificationAware, EvaluatorMapper { private final Expression query; private final QueryBuilder queryBuilder; @@ -141,6 +147,7 @@ public boolean equals(Object obj) { @Override public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + // In isolation, full text functions are pushable to source. 
We check if there are no disjunctions in Or conditions return true; } @@ -200,8 +207,14 @@ private static void checkFullTextQueryFunctions(LogicalPlan plan, Failures failu m -> "[" + m.functionName() + "] " + m.functionType(), failures ); - checkFullTextSearchDisjunctions(condition, ftf -> "[" + ftf.functionName() + "] " + ftf.functionType(), failures); checkFullTextFunctionsParents(condition, failures); + + boolean usesScore = plan.output() + .stream() + .anyMatch(attr -> attr instanceof MetadataAttribute ma && ma.name().equals(MetadataAttribute.SCORE)); + if (usesScore) { + checkFullTextSearchDisjunctions(condition, failures); + } } else { plan.forEachExpression(FullTextFunction.class, ftf -> { failures.add(fail(ftf, "[{}] {} is only supported in WHERE commands", ftf.functionName(), ftf.functionType())); @@ -215,38 +228,41 @@ private static void checkFullTextQueryFunctions(LogicalPlan plan, Failures failu * If not, add a failure to the failures collection. * * @param condition condition to check for disjunctions of full text searches - * @param typeNameProvider provider for the type name to add in the failure message * @param failures failures collection to add to */ - private static void checkFullTextSearchDisjunctions( - Expression condition, - java.util.function.Function typeNameProvider, - Failures failures - ) { + private static void checkFullTextSearchDisjunctions(Expression condition, Failures failures) { Holder isInvalid = new Holder<>(false); condition.forEachDown(Or.class, or -> { if (isInvalid.get()) { // Exit early if we already have a failures return; } - boolean hasFullText = or.anyMatch(FullTextFunction.class::isInstance); - if (hasFullText) { - boolean hasOnlyFullText = onlyFullTextFunctionsInExpression(or); - if (hasOnlyFullText == false) { - isInvalid.set(true); - failures.add( - fail( - or, - "Invalid condition [{}]. Full text functions can be used in an OR condition, " - + "but only if just full text functions are used in the OR condition", - or.sourceText() - ) - ); - } + if (checkDisjunctionPushable(or) == false) { + isInvalid.set(true); + failures.add( + fail( + or, + "Invalid condition when using METADATA _score [{}]. Full text functions can be used in an OR condition, " + + "but only if just full text functions are used in the OR condition", + or.sourceText() + ) + ); } }); } + /** + * Checks if a disjunction is pushable from the point of view of FullTextFunctions. Either it has no FullTextFunctions or + * all it contains are FullTextFunctions. 
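+     * The verifier only applies this check when the query requests {@code METADATA _score}; without scoring,
+     * mixed disjunctions are left in place and evaluated by the compute engine.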
+ * + * @param or disjunction to check + * @return true if the disjunction is pushable, false otherwise + */ + private static boolean checkDisjunctionPushable(Or or) { + boolean hasFullText = or.anyMatch(FullTextFunction.class::isInstance); + return hasFullText == false || onlyFullTextFunctionsInExpression(or); + } + /** * Checks whether an expression contains just full text functions or negations (NOT) and combinations (AND, OR) of full text functions * @@ -342,4 +358,15 @@ private static FullTextFunction forEachFullTextFunctionParent(Expression conditi } return null; } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { + List shardContexts = toEvaluator.shardContexts(); + ShardConfig[] shardConfigs = new ShardConfig[shardContexts.size()]; + int i = 0; + for (EsPhysicalOperationProviders.ShardContext shardContext : shardContexts) { + shardConfigs[i++] = new ShardConfig(shardContext.toQuery(queryBuilder()), shardContext.searcher()); + } + return new LuceneQueryExpressionEvaluator.Factory(shardConfigs); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java index 7ea95c764f36c..70d87b7cc77ff 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java @@ -18,9 +18,11 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.evaluator.mapper.ExpressionMapper; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; +import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders.ShardContext; import org.elasticsearch.xpack.esql.planner.Layout; +import java.util.List; + import static org.elasticsearch.xpack.esql.evaluator.EvalMapper.toEvaluator; public class InsensitiveEqualsMapper extends ExpressionMapper { @@ -29,12 +31,17 @@ public class InsensitiveEqualsMapper extends ExpressionMapper InsensitiveEqualsEvaluator.Factory::new; @Override - public final ExpressionEvaluator.Factory map(FoldContext foldCtx, InsensitiveEquals bc, Layout layout) { + public final ExpressionEvaluator.Factory map( + FoldContext foldCtx, + InsensitiveEquals bc, + Layout layout, + List shardContexts + ) { DataType leftType = bc.left().dataType(); DataType rightType = bc.right().dataType(); - var leftEval = toEvaluator(foldCtx, bc.left(), layout); - var rightEval = toEvaluator(foldCtx, bc.right(), layout); + var leftEval = toEvaluator(foldCtx, bc.left(), layout, shardContexts); + var rightEval = toEvaluator(foldCtx, bc.right(), layout, shardContexts); if (DataType.isString(leftType)) { if (bc.right().foldable() && DataType.isString(rightType)) { BytesRef rightVal = BytesRefs.toBytesRef(bc.right().fold(FoldContext.small() /* TODO remove me */)); @@ -50,16 +57,4 @@ public final ExpressionEvaluator.Factory map(FoldContext foldCtx, InsensitiveEqu } throw new EsqlIllegalArgumentException("resolved type for [" + bc + "] but didn't implement mapping"); } - - public static ExpressionEvaluator.Factory castToEvaluator( - FoldContext foldCtx, - InsensitiveEquals op, - Layout layout, - DataType required, - TriFunction 
factory - ) { - var lhs = Cast.cast(op.source(), op.left().dataType(), required, toEvaluator(foldCtx, op.left(), layout)); - var rhs = Cast.cast(op.source(), op.right().dataType(), required, toEvaluator(foldCtx, op.right(), layout)); - return factory.apply(op.source(), lhs, rhs); - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 5975af29f5d04..2e0f97c29ab13 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -92,6 +92,7 @@ import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; +import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders.ShardContext; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.session.Configuration; @@ -132,6 +133,7 @@ public class LocalExecutionPlanner { private final EnrichLookupService enrichLookupService; private final LookupFromIndexService lookupFromIndexService; private final PhysicalOperationProviders physicalOperationProviders; + private final List shardContexts; public LocalExecutionPlanner( String sessionId, @@ -145,8 +147,10 @@ public LocalExecutionPlanner( Supplier exchangeSinkSupplier, EnrichLookupService enrichLookupService, LookupFromIndexService lookupFromIndexService, - PhysicalOperationProviders physicalOperationProviders + PhysicalOperationProviders physicalOperationProviders, + List shardContexts ) { + this.sessionId = sessionId; this.clusterAlias = clusterAlias; this.parentTask = parentTask; @@ -159,6 +163,7 @@ public LocalExecutionPlanner( this.lookupFromIndexService = lookupFromIndexService; this.physicalOperationProviders = physicalOperationProviders; this.configuration = configuration; + this.shardContexts = shardContexts; } /** @@ -672,7 +677,7 @@ private PhysicalOperation planFilter(FilterExec filter, LocalExecutionPlannerCon PhysicalOperation source = plan(filter.child(), context); // TODO: should this be extracted into a separate eval block? 
return source.with( - new FilterOperatorFactory(EvalMapper.toEvaluator(context.foldCtx(), filter.condition(), source.layout)), + new FilterOperatorFactory(EvalMapper.toEvaluator(context.foldCtx(), filter.condition(), source.layout, shardContexts)), source.layout ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index de6fc082eb243..71c2a65037e9a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -384,7 +384,8 @@ public SourceProvider createSourceProvider() { context.exchangeSinkSupplier(), enrichLookupService, lookupFromIndexService, - new EsPhysicalOperationProviders(context.foldCtx(), contexts, searchService.getIndicesService().getAnalysis()) + new EsPhysicalOperationProviders(context.foldCtx(), contexts, searchService.getIndicesService().getAnalysis()), + contexts ); LOGGER.debug("Received physical plan:\n{}", plan); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 321897c8a062e..bae20bb9b26d3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -630,7 +630,8 @@ void executeSubPlan( () -> exchangeSink.createExchangeSink(() -> {}), Mockito.mock(EnrichLookupService.class), Mockito.mock(LookupFromIndexService.class), - physicalOperationProviders + physicalOperationProviders, + List.of() ); List collectedPages = Collections.synchronizedList(new ArrayList<>()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 86166b0267258..291a10d570093 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1193,21 +1193,6 @@ public void testMatchInsideEval() throws Exception { ); } - public void testMatchFilter() throws Exception { - assertEquals( - "1:19: Invalid condition [first_name:\"Anna\" or starts_with(first_name, \"Anne\")]. " - + "Full text functions can be used in an OR condition, " - + "but only if just full text functions are used in the OR condition", - error("from test | where first_name:\"Anna\" or starts_with(first_name, \"Anne\")") - ); - - assertEquals( - "1:51: Invalid condition [first_name:\"Anna\" OR new_salary > 100]. 
Full text functions can be" - + " used in an OR condition, but only if just full text functions are used in the OR condition", - error("from test | eval new_salary = salary + 10 | where first_name:\"Anna\" OR new_salary > 100") - ); - } - public void testMatchFunctionNotAllowedAfterCommands() throws Exception { assertEquals( "1:24: [MATCH] function cannot be used after LIMIT", @@ -1429,52 +1414,72 @@ public void testMatchOperatorWithDisjunctions() { } private void checkWithDisjunctions(String functionName, String functionInvocation, String functionType) { - String expression = functionInvocation + " or length(first_name) > 12"; - checkdisjunctionError("1:19", expression, functionName, functionType); - expression = "(" + functionInvocation + " or first_name is not null) or (length(first_name) > 12 and match(last_name, \"Smith\"))"; - checkdisjunctionError("1:19", expression, functionName, functionType); - expression = functionInvocation + " or (last_name is not null and first_name is null)"; - checkdisjunctionError("1:19", expression, functionName, functionType); - } - - private void checkdisjunctionError(String position, String expression, String functionName, String functionType) { - assertEquals( - LoggerMessageFormat.format( - null, - "{}: Invalid condition [{}]. Full text functions can be used in an OR condition, " - + "but only if just full text functions are used in the OR condition", - position, - expression - ), - error("from test | where " + expression) + query("from test | where " + functionInvocation + " or length(first_name) > 12"); + query( + "from test | where (" + + functionInvocation + + " or first_name is not null) or (length(first_name) > 12 and match(last_name, \"Smith\"))" ); + query("from test | where " + functionInvocation + " or (last_name is not null and first_name is null)"); } public void testFullTextFunctionsDisjunctions() { - checkWithFullTextFunctionsDisjunctions("MATCH", "match(last_name, \"Smith\")", "function"); - checkWithFullTextFunctionsDisjunctions(":", "last_name : \"Smith\"", "operator"); - checkWithFullTextFunctionsDisjunctions("QSTR", "qstr(\"last_name: Smith\")", "function"); - checkWithFullTextFunctionsDisjunctions("KQL", "kql(\"last_name: Smith\")", "function"); + checkWithFullTextFunctionsDisjunctions("match(last_name, \"Smith\")"); + checkWithFullTextFunctionsDisjunctions("last_name : \"Smith\""); + checkWithFullTextFunctionsDisjunctions("qstr(\"last_name: Smith\")"); + checkWithFullTextFunctionsDisjunctions("kql(\"last_name: Smith\")"); } - private void checkWithFullTextFunctionsDisjunctions(String functionName, String functionInvocation, String functionType) { - - String expression = functionInvocation + " or length(first_name) > 10"; - checkdisjunctionError("1:19", expression, functionName, functionType); + private void checkWithFullTextFunctionsDisjunctions(String functionInvocation) { - expression = "match(last_name, \"Anneke\") or (" + functionInvocation + " and length(first_name) > 10)"; - checkdisjunctionError("1:19", expression, functionName, functionType); + // Disjunctions with non-pushable functions - scoring + checkdisjunctionScoringError("1:35", functionInvocation + " or length(first_name) > 10"); + checkdisjunctionScoringError("1:35", "match(last_name, \"Anneke\") or (" + functionInvocation + " and length(first_name) > 10)"); + checkdisjunctionScoringError( + "1:35", + "(" + functionInvocation + " and length(first_name) > 0) or (match(last_name, \"Anneke\") and length(first_name) > 10)" + ); - expression = "(" - + 
functionInvocation - + " and length(first_name) > 0) or (match(last_name, \"Anneke\") and length(first_name) > 10)"; - checkdisjunctionError("1:19", expression, functionName, functionType); + // Disjunctions with non-pushable functions - no scoring + query("from test | where " + functionInvocation + " or length(first_name) > 10"); + query("from test | where match(last_name, \"Anneke\") or (" + functionInvocation + " and length(first_name) > 10)"); + query( + "from test | where (" + + functionInvocation + + " and length(first_name) > 0) or (match(last_name, \"Anneke\") and length(first_name) > 10)" + ); + // Disjunctions with full text functions - no scoring query("from test | where " + functionInvocation + " or match(first_name, \"Anna\")"); query("from test | where " + functionInvocation + " or not match(first_name, \"Anna\")"); query("from test | where (" + functionInvocation + " or match(first_name, \"Anna\")) and length(first_name) > 10"); query("from test | where (" + functionInvocation + " or match(first_name, \"Anna\")) and match(last_name, \"Smith\")"); query("from test | where " + functionInvocation + " or (match(first_name, \"Anna\") and match(last_name, \"Smith\"))"); + + // Disjunctions with full text functions - scoring + query("from test metadata _score | where " + functionInvocation + " or match(first_name, \"Anna\")"); + query("from test metadata _score | where " + functionInvocation + " or not match(first_name, \"Anna\")"); + query("from test metadata _score | where (" + functionInvocation + " or match(first_name, \"Anna\")) and length(first_name) > 10"); + query( + "from test metadata _score | where (" + functionInvocation + " or match(first_name, \"Anna\")) and match(last_name, \"Smith\")" + ); + query( + "from test metadata _score | where " + functionInvocation + " or (match(first_name, \"Anna\") and match(last_name, \"Smith\"))" + ); + + } + + private void checkdisjunctionScoringError(String position, String expression) { + assertEquals( + LoggerMessageFormat.format( + null, + "{}: Invalid condition when using METADATA _score [{}]. 
Full text functions can be used in an OR condition, " + + "but only if just full text functions are used in the OR condition", + position, + expression + ), + error("from test metadata _score | where " + expression) + ); } public void testQueryStringFunctionWithNonBooleanFunctions() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index dd4a20ea2e974..8bdd7a4e1645f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -47,6 +47,8 @@ import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ExtractAggregateCommonFilter; @@ -1664,6 +1666,63 @@ public void testMatchWithFieldCasting() { assertThat(queryBuilder.value(), is(123456)); } + public void testMatchFunctionWithPushableConjunction() { + String query = """ + from test + | where match(last_name, "Smith") and length(first_name) > 10 + """; + var plan = plannerOptimizer.plan(query); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var filterLimit = as(fieldExtract.child(), LimitExec.class); + var filter = as(filterLimit.child(), FilterExec.class); + assertThat(filter.condition(), instanceOf(GreaterThan.class)); + var fieldFilterExtract = as(filter.child(), FieldExtractExec.class); + var esQuery = as(fieldFilterExtract.child(), EsQueryExec.class); + assertThat(esQuery.query(), instanceOf(MatchQueryBuilder.class)); + } + + public void testMatchFunctionWithNonPushableDisjunction() { + String query = """ + from test + | where match(last_name, "Smith") or length(first_name) > 10 + """; + var plan = plannerOptimizer.plan(query); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var field = as(project.child(), FieldExtractExec.class); + var filterLimit = as(field.child(), LimitExec.class); + var filter = as(filterLimit.child(), FilterExec.class); + Or or = as(filter.condition(), Or.class); + assertThat(or.left(), instanceOf(Match.class)); + assertThat(or.right(), instanceOf(GreaterThan.class)); + var fieldExtract = as(filter.child(), FieldExtractExec.class); + assertThat(fieldExtract.child(), instanceOf(EsQueryExec.class)); + } + + public void testMatchFunctionWithPushableDisjunction() { + String query = """ + from test + | where match(last_name, "Smith") or emp_no > 10"""; + var plan = plannerOptimizer.plan(query); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), 
FieldExtractExec.class); + var esQuery = as(fieldExtract.child(), EsQueryExec.class); + var boolQuery = as(esQuery.query(), BoolQueryBuilder.class); + Source source = new Source(2, 37, "emp_no > 10"); + BoolQueryBuilder expected = new BoolQueryBuilder().should(new MatchQueryBuilder("last_name", "Smith").lenient(true)) + .should(wrapWithSingleQuery(query, QueryBuilders.rangeQuery("emp_no").gt(10), "emp_no", source)); + assertThat(esQuery.query().toString(), equalTo(expected.toString())); + } + private QueryBuilder wrapWithSingleQuery(String query, QueryBuilder inner, String fieldName, Source source) { return FilterTests.singleValueQuery(query, inner, fieldName, source); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 23e0937380f34..78aaf1f354723 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -7600,7 +7600,8 @@ private LocalExecutionPlanner.LocalExecutionPlan physicalOperationsFromPhysicalP () -> exchangeSinkHandler.createExchangeSink(() -> {}), null, null, - new EsPhysicalOperationProviders(FoldContext.small(), List.of(), null) + new EsPhysicalOperationProviders(FoldContext.small(), List.of(), null), + List.of() ); return planner.plan(FoldContext.small(), plan); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index 4ef51d44b9b34..e1e606a6e84b1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -169,6 +169,7 @@ private Matcher maxPageSizeMatcher(boolean estimatedRowSizeIsHuge, int } private LocalExecutionPlanner planner() throws IOException { + List shardContexts = createShardContexts(); return new LocalExecutionPlanner( "test", "", @@ -181,7 +182,8 @@ private LocalExecutionPlanner planner() throws IOException { null, null, null, - esPhysicalOperationProviders() + esPhysicalOperationProviders(shardContexts), + shardContexts ); } @@ -201,7 +203,11 @@ private Configuration config() { ); } - private EsPhysicalOperationProviders esPhysicalOperationProviders() throws IOException { + private EsPhysicalOperationProviders esPhysicalOperationProviders(List shardContexts) { + return new EsPhysicalOperationProviders(FoldContext.small(), shardContexts, null); + } + + private List createShardContexts() throws IOException { int numShards = randomIntBetween(1, 1000); List shardContexts = new ArrayList<>(numShards); var searcher = new ContextIndexSearcher( @@ -221,7 +227,7 @@ private EsPhysicalOperationProviders esPhysicalOperationProviders() throws IOExc ); } releasables.add(searcher); - return new EsPhysicalOperationProviders(FoldContext.small(), shardContexts, null); + return shardContexts; } private IndexReader reader() { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/180_match_operator.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/180_match_operator.yml index 118783b412d48..96a9d1f925d7e 100644 --- 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/180_match_operator.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/180_match_operator.yml @@ -171,30 +171,21 @@ setup: --- "match with disjunctions": + - requires: + capabilities: + - method: POST + path: /_query + parameters: [ method, path, parameters, capabilities ] + capabilities: [ full_text_functions_disjunctions_compute_engine ] + reason: "Full text functions disjunctions support" - do: - catch: bad_request - allowed_warnings_regex: - - "No limit defined, adding default limit of \\[.*\\]" - esql.query: - body: - query: 'FROM test | WHERE content:"fox" OR to_upper(content) == "FOX"' - - - match: { status: 400 } - - match: { error.type: verification_exception } - - match: { error.reason: "/.+Invalid\\ condition\\ \\[content\\:\"fox\"\\ OR\\ to_upper\\(content\\)\\ ==\\ \"FOX\"\\]\\./" } - - - do: - catch: bad_request allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test | WHERE content:"fox" OR to_upper(content) == "FOX"' - - - match: { status: 400 } - - match: { error.type: verification_exception } - - match: { error.reason: "/.+Invalid\\ condition\\ \\[content\\:\"fox\"\\ OR\\ to_upper\\(content\\)\\ ==\\ \"FOX\"\\]\\./" } + query: 'FROM test | WHERE content:"fox" OR length(content) < 20' + - length: { values: 3 } --- "match within eval": From dae61dcb0af72a678e20c3b45e0e56a2679e245e Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 28 Jan 2025 14:15:42 +0100 Subject: [PATCH 125/383] Mark index.mapping.source.mode as serverless public setting (#121011) --- .../src/main/java/org/elasticsearch/index/IndexSettings.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index cd0d16cb3e89f..4895930eaefe4 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -718,7 +718,8 @@ public Iterator> settings() { "index.mapping.source.mode", value -> {}, Setting.Property.Final, - Setting.Property.IndexScope + Setting.Property.IndexScope, + Setting.Property.ServerlessPublic ); public static final Setting RECOVERY_USE_SYNTHETIC_SOURCE_SETTING = Setting.boolSetting( From 3ac0335e26f1a50115a2c0f31e34bc55c5a6a204 Mon Sep 17 00:00:00 2001 From: ARPIT SHARMA <93235104+ARPIT2128@users.noreply.github.com> Date: Tue, 28 Jan 2025 18:52:16 +0530 Subject: [PATCH 126/383] Update README.asciidoc (#96455) --- docs/README.asciidoc | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/docs/README.asciidoc b/docs/README.asciidoc index 9b7e280e532f5..05f449795c7fd 100644 --- a/docs/README.asciidoc +++ b/docs/README.asciidoc @@ -157,16 +157,15 @@ used for its modifiers: * `// TESTRESPONSE[skip:reason]`: Skip the assertions specified by this response. * `// TESTSETUP`: Marks this snippet as the "setup" for all other snippets in - this file. This is a somewhat natural way of structuring documentation. You - say "this is the data we use to explain this feature" then you add the - snippet that you mark `// TESTSETUP` and then every snippet will turn into - a test that runs the setup snippet first. See the "painless" docs for a file - that puts this to good use. 
This is fairly similar to `// TEST[setup:name]` - but rather than the setup defined in `docs/build.gradle` the setup is defined - right in the documentation file. In general, we should prefer `// TESTSETUP` - over `// TEST[setup:name]` because it makes it more clear what steps have to - be taken before the examples will work. Tip: `// TESTSETUP` can only be used - on the first snippet of a document. + this file. In order to enhance clarity and simplify understanding for readers, + a straightforward approach involves marking the first snippet in the documentation file with the + `// TESTSETUP` marker. By doing so, it clearly indicates that this particular snippet serves as the setup + or preparation step for all subsequent snippets in the file. + This helps in explaining the necessary steps that need to be executed before running the examples. + Unlike the alternative convention `// TEST[setup:name]`, which relies on a setup defined in a separate file, + this convention brings the setup directly into the documentation file, making it more self-contained and reducing ambiguity. + By adopting this convention, users can easily identify and follow the correct sequence + of steps to ensure that the examples provided in the documentation work as intended. * `// TEARDOWN`: Ends and cleans up a test series started with `// TESTSETUP` or `// TEST[setup:name]`. You can use `// TEARDOWN` to set up multiple tests in the same file. From 09148da369fd83180710b7499217853026fbca49 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 29 Jan 2025 00:45:45 +1100 Subject: [PATCH 127/383] Mute org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilterIT testBulkOperations {p0=true} #120969 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index cab7ed28f4f03..0684e17e16adb 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -292,6 +292,9 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testSuggestProfilesWithName issue: https://github.com/elastic/elasticsearch/issues/121022 +- class: org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilterIT + method: testBulkOperations {p0=true} + issue: https://github.com/elastic/elasticsearch/issues/120969 # Examples: # From 8c3d9e19985f5d5d47f90d7680e18920b27ff560 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Tue, 28 Jan 2025 14:49:16 +0100 Subject: [PATCH 128/383] ESQL: Ignore multivalued key columns in lookup index on JOIN (#120726) Fixes https://github.com/elastic/elasticsearch/issues/118780 Second part of https://github.com/elastic/elasticsearch/pull/120519 In the first PR, we avoid matching multivalue keys in lookup when they come from the query. Now, we avoid matching multivalues when the lookup index has multivalues in the key column. 
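As a rough illustration of the approach (a minimal sketch based on the diff below, not code taken from it; `keyFieldType`, `keyValue` and `searchExecutionContext` are assumed placeholders), each per-position lookup query is combined with a single-value filter, so a lookup document whose key field holds several values can never produce a join match:

    // Sketch of the guard applied when only single values may match.
    // The term query matches the join key value; SingleValueMatchQuery accepts only documents
    // where the key field has exactly one value, so multivalued lookup keys never match.
    Query termQuery = keyFieldType.termQuery(keyValue, searchExecutionContext);
    Query singleValue = new SingleValueMatchQuery(
        searchExecutionContext.getForField(keyFieldType, MappedFieldType.FielddataOperation.SEARCH),
        Warnings.NOOP_WARNINGS // multivalued lookup keys are skipped silently, without emitting a warning
    );
    Query guarded = new BooleanQuery.Builder()
        .add(termQuery, BooleanClause.Occur.FILTER)
        .add(singleValue, BooleanClause.Occur.FILTER)
        .build();

The use of Warnings.NOOP_WARNINGS matches the comment in the diff: rows whose lookup key is multivalued are simply dropped from the join rather than producing per-row warnings.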
--- .../compute/src/main/java/module-info.java | 1 + .../compute/operator/lookup/QueryList.java | 95 +++++++---- .../querydsl/query/SingleValueMatchQuery.java | 6 +- .../EnrichQuerySourceOperatorTests.java | 156 +++++++++--------- .../src/main/resources/lookup-join.csv-spec | 14 +- .../xpack/esql/action/EsqlCapabilities.java | 5 + .../esql/querydsl/query/SingleValueQuery.java | 1 + .../query/SingleValueMathQueryTests.java | 1 + 8 files changed, 161 insertions(+), 118 deletions(-) rename x-pack/plugin/esql/{src/main/java/org/elasticsearch/xpack/esql => compute/src/main/java/org/elasticsearch/compute}/querydsl/query/SingleValueMatchQuery.java (98%) diff --git a/x-pack/plugin/esql/compute/src/main/java/module-info.java b/x-pack/plugin/esql/compute/src/main/java/module-info.java index 1b3253694b298..c4a042d692ea1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/module-info.java +++ b/x-pack/plugin/esql/compute/src/main/java/module-info.java @@ -35,4 +35,5 @@ exports org.elasticsearch.compute.operator.mvdedupe; exports org.elasticsearch.compute.aggregation.table; exports org.elasticsearch.compute.data.sort; + exports org.elasticsearch.compute.querydsl.query; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/QueryList.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/QueryList.java index 1e0d19fac5b51..5d359e2fb612f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/QueryList.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/QueryList.java @@ -9,6 +9,9 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.geo.GeoEncodingUtils; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.geo.ShapeRelation; @@ -20,6 +23,8 @@ import org.elasticsearch.compute.data.FloatBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.compute.querydsl.query.SingleValueMatchQuery; import org.elasticsearch.core.Nullable; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.Point; @@ -30,6 +35,7 @@ import org.elasticsearch.index.mapper.RangeFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; +import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.function.IntFunction; @@ -38,10 +44,14 @@ * Generates a list of Lucene queries based on the input block. 
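+ * <p>
+ * When {@link #onlySingleValues()} is requested, positions holding more than one input value produce no query
+ * at all, and every generated query is additionally filtered so that lookup documents whose key field is
+ * multivalued do not match either.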
*/ public abstract class QueryList { + protected final SearchExecutionContext searchExecutionContext; + protected final MappedFieldType field; protected final Block block; protected final boolean onlySingleValues; - protected QueryList(Block block, boolean onlySingleValues) { + protected QueryList(MappedFieldType field, SearchExecutionContext searchExecutionContext, Block block, boolean onlySingleValues) { + this.searchExecutionContext = searchExecutionContext; + this.field = field; this.block = block; this.onlySingleValues = onlySingleValues; } @@ -59,11 +69,52 @@ int getPositionCount() { */ public abstract QueryList onlySingleValues(); + final Query getQuery(int position) { + final int valueCount = block.getValueCount(position); + if (onlySingleValues && valueCount != 1) { + return null; + } + final int firstValueIndex = block.getFirstValueIndex(position); + + Query query = doGetQuery(position, firstValueIndex, valueCount); + + if (onlySingleValues) { + query = wrapSingleValueQuery(query); + } + + return query; + } + /** * Returns the query at the given position. */ @Nullable - abstract Query getQuery(int position); + abstract Query doGetQuery(int position, int firstValueIndex, int valueCount); + + private Query wrapSingleValueQuery(Query query) { + SingleValueMatchQuery singleValueQuery = new SingleValueMatchQuery( + searchExecutionContext.getForField(field, MappedFieldType.FielddataOperation.SEARCH), + // Not emitting warnings for multivalued fields not matching + Warnings.NOOP_WARNINGS + ); + + Query rewrite = singleValueQuery; + try { + rewrite = singleValueQuery.rewrite(searchExecutionContext.searcher()); + if (rewrite instanceof MatchAllDocsQuery) { + // nothing to filter + return query; + } + } catch (IOException e) { + // ignore + // TODO: Should we do something with the exception? 
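+            // On failure we keep the unrewritten single-value filter initialised above, so correctness is
+            // preserved; only the MatchAllDocsQuery early return is skipped.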
+ } + + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.add(query, BooleanClause.Occur.FILTER); + builder.add(rewrite, BooleanClause.Occur.FILTER); + return builder.build(); + } /** * Returns a list of term queries for the given field and the input block @@ -146,8 +197,6 @@ public static QueryList geoShapeQueryList(MappedFieldType field, SearchExecution } private static class TermQueryList extends QueryList { - private final MappedFieldType field; - private final SearchExecutionContext searchExecutionContext; private final IntFunction blockValueReader; private TermQueryList( @@ -157,9 +206,7 @@ private TermQueryList( boolean onlySingleValues, IntFunction blockValueReader ) { - super(block, onlySingleValues); - this.field = field; - this.searchExecutionContext = searchExecutionContext; + super(field, searchExecutionContext, block, onlySingleValues); this.blockValueReader = blockValueReader; } @@ -169,19 +216,14 @@ public TermQueryList onlySingleValues() { } @Override - Query getQuery(int position) { - final int count = block.getValueCount(position); - if (onlySingleValues && count != 1) { - return null; - } - final int first = block.getFirstValueIndex(position); - return switch (count) { + Query doGetQuery(int position, int firstValueIndex, int valueCount) { + return switch (valueCount) { case 0 -> null; - case 1 -> field.termQuery(blockValueReader.apply(first), searchExecutionContext); + case 1 -> field.termQuery(blockValueReader.apply(firstValueIndex), searchExecutionContext); default -> { - final List terms = new ArrayList<>(count); - for (int i = 0; i < count; i++) { - final Object value = blockValueReader.apply(first + i); + final List terms = new ArrayList<>(valueCount); + for (int i = 0; i < valueCount; i++) { + final Object value = blockValueReader.apply(firstValueIndex + i); terms.add(value); } yield field.termsQuery(terms, searchExecutionContext); @@ -192,8 +234,6 @@ Query getQuery(int position) { private static class GeoShapeQueryList extends QueryList { private final BytesRef scratch = new BytesRef(); - private final MappedFieldType field; - private final SearchExecutionContext searchExecutionContext; private final IntFunction blockValueReader; private final IntFunction shapeQuery; @@ -203,10 +243,8 @@ private GeoShapeQueryList( Block block, boolean onlySingleValues ) { - super(block, onlySingleValues); + super(field, searchExecutionContext, block, onlySingleValues); - this.field = field; - this.searchExecutionContext = searchExecutionContext; this.blockValueReader = blockToGeometry(block); this.shapeQuery = shapeQuery(); } @@ -217,15 +255,10 @@ public GeoShapeQueryList onlySingleValues() { } @Override - Query getQuery(int position) { - final int count = block.getValueCount(position); - if (onlySingleValues && count != 1) { - return null; - } - final int first = block.getFirstValueIndex(position); - return switch (count) { + Query doGetQuery(int position, int firstValueIndex, int valueCount) { + return switch (valueCount) { case 0 -> null; - case 1 -> shapeQuery.apply(first); + case 1 -> shapeQuery.apply(firstValueIndex); // TODO: support multiple values default -> throw new IllegalArgumentException("can't read multiple Geometry values from a single position"); }; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/querydsl/query/SingleValueMatchQuery.java similarity index 98% rename from 
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java rename to x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/querydsl/query/SingleValueMatchQuery.java index f6668db52b93b..b948d0f409dbb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/querydsl/query/SingleValueMatchQuery.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.querydsl.query; +package org.elasticsearch.compute.querydsl.query; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; @@ -39,7 +39,7 @@ /** * Finds all fields with a single-value. If a field has a multi-value, it emits a {@link Warnings}. */ -final class SingleValueMatchQuery extends Query { +public final class SingleValueMatchQuery extends Query { /** * Choose a big enough value so this approximation never drives the iteration. @@ -52,7 +52,7 @@ final class SingleValueMatchQuery extends Query { private final IndexFieldData fieldData; private final Warnings warnings; - SingleValueMatchQuery(IndexFieldData fieldData, Warnings warnings) { + public SingleValueMatchQuery(IndexFieldData fieldData, Warnings warnings) { this.fieldData = fieldData; this.warnings = warnings; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java index 894843e7e4ec7..454088c1751e8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java @@ -9,7 +9,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.document.StringField; +import org.apache.lucene.document.KeywordField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; @@ -35,9 +35,12 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.fielddata.FieldDataContext; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.junit.After; import org.junit.Before; @@ -54,6 +57,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class EnrichQuerySourceOperatorTests extends ESTestCase { @@ -67,8 +71,7 @@ public void setupBlockFactory() { } @After - public void allBreakersEmpty() throws Exception { - MockBigArrays.ensureAllArraysAreReleased(); + public void allBreakersEmpty() { assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } @@ -76,23 +79,11 @@ public void testQueries() throws Exception { try ( var directoryData = makeDirectoryWith( List.of(List.of("a2"), List.of("a1", "c1", 
"b2"), List.of("a2"), List.of("a3"), List.of("b2", "b1", "a1")) - ) + ); + var inputTerms = makeTermsBlock(List.of(List.of("b2"), List.of("c1", "a2"), List.of("z2"), List.of(), List.of("a3"), List.of())) ) { - final BytesRefBlock inputTerms; - try (BytesRefBlock.Builder termBuilder = blockFactory.newBytesRefBlockBuilder(6)) { - termBuilder.appendBytesRef(new BytesRef("b2")) - .beginPositionEntry() - .appendBytesRef(new BytesRef("c1")) - .appendBytesRef(new BytesRef("a2")) - .endPositionEntry() - .appendBytesRef(new BytesRef("z2")) - .appendNull() - .appendBytesRef(new BytesRef("a3")) - .appendNull(); - inputTerms = termBuilder.build(); - } MappedFieldType uidField = new KeywordFieldMapper.KeywordFieldType("uid"); - QueryList queryList = QueryList.rawTermQueryList(uidField, mock(SearchExecutionContext.class), inputTerms); + QueryList queryList = QueryList.rawTermQueryList(uidField, directoryData.searchExecutionContext, inputTerms); assertThat(queryList.getPositionCount(), equalTo(6)); assertThat(queryList.getQuery(0), equalTo(new TermQuery(new Term("uid", new BytesRef("b2"))))); assertThat(queryList.getQuery(1), equalTo(new TermInSetQuery("uid", List.of(new BytesRef("c1"), new BytesRef("a2"))))); @@ -106,7 +97,7 @@ public void testQueries() throws Exception { // 1 -> [c1, a2] -> [1, 0, 2] // 2 -> [z2] -> [] // 3 -> [] -> [] - // 4 -> [a1] -> [3] + // 4 -> [a3] -> [3] // 5 -> [] -> [] var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich"); EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator( @@ -136,38 +127,34 @@ public void testQueries() throws Exception { assertThat(BlockUtils.toJavaObject(positions, 5), equalTo(4)); page.releaseBlocks(); assertTrue(queryOperator.isFinished()); - IOUtils.close(inputTerms); } } public void testRandomMatchQueries() throws Exception { + // Build lookup index values int numTerms = randomIntBetween(10, 1000); - List> termsList = IntStream.range(0, numTerms).mapToObj(i -> List.of("term-" + i)).toList(); - Map terms = IntStream.range(0, numTerms).boxed().collect(Collectors.toMap(i -> "term-" + i, i -> i)); + List> directoryTermsList = IntStream.range(0, numTerms).mapToObj(i -> List.of("term-" + i)).toList(); + Map directoryTerms = IntStream.range(0, numTerms).boxed().collect(Collectors.toMap(i -> "term-" + i, i -> i)); - try (var directoryData = makeDirectoryWith(termsList)) { - Map> expectedPositions = new HashMap<>(); - int numPositions = randomIntBetween(1, 1000); - final BytesRefBlock inputTerms; - try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(numPositions)) { - for (int i = 0; i < numPositions; i++) { - if (randomBoolean()) { - String term = randomFrom(terms.keySet()); - builder.appendBytesRef(new BytesRef(term)); - Integer position = terms.get(term); - expectedPositions.put(i, Set.of(position)); - } else { - if (randomBoolean()) { - builder.appendNull(); - } else { - String term = "other-" + randomIntBetween(1, 100); - builder.appendBytesRef(new BytesRef(term)); - } - } - } - inputTerms = builder.build(); + // Build input terms + Map> expectedPositions = new HashMap<>(); + int numPositions = randomIntBetween(1, 1000); + List> inputTermsList = IntStream.range(0, numPositions).>mapToObj(i -> { + if (randomBoolean()) { + String term = randomFrom(directoryTerms.keySet()); + Integer position = directoryTerms.get(term); + expectedPositions.put(i, Set.of(position)); + return List.of(term); + } else if (randomBoolean()) { + return List.of(); + } else { + String term = "other-" + 
randomIntBetween(1, 100); + return List.of(term); } - var queryList = QueryList.rawTermQueryList(directoryData.field, mock(SearchExecutionContext.class), inputTerms); + }).toList(); + + try (var directoryData = makeDirectoryWith(directoryTermsList); var inputTerms = makeTermsBlock(inputTermsList)) { + var queryList = QueryList.rawTermQueryList(directoryData.field, directoryData.searchExecutionContext, inputTerms); int maxPageSize = between(1, 256); var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich"); EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator( @@ -193,7 +180,6 @@ public void testRandomMatchQueries() throws Exception { } } assertThat(actualPositions, equalTo(expectedPositions)); - IOUtils.close(inputTerms); } } @@ -201,35 +187,20 @@ public void testQueries_OnlySingleValues() throws Exception { try ( var directoryData = makeDirectoryWith( List.of(List.of("a2"), List.of("a1", "c1", "b2"), List.of("a2"), List.of("a3"), List.of("b2", "b1", "a1")) + ); + var inputTerms = makeTermsBlock( + List.of(List.of("b2"), List.of("c1", "a2"), List.of("z2"), List.of(), List.of("a3"), List.of("a3", "a2", "z2", "xx")) ) ) { - final BytesRefBlock inputTerms; - try (BytesRefBlock.Builder termBuilder = blockFactory.newBytesRefBlockBuilder(6)) { - termBuilder.appendBytesRef(new BytesRef("b2")) - .beginPositionEntry() - .appendBytesRef(new BytesRef("c1")) - .appendBytesRef(new BytesRef("a2")) - .endPositionEntry() - .appendBytesRef(new BytesRef("z2")) - .appendNull() - .appendBytesRef(new BytesRef("a3")) - .beginPositionEntry() - .appendBytesRef(new BytesRef("a3")) - .appendBytesRef(new BytesRef("a2")) - .appendBytesRef(new BytesRef("z2")) - .appendBytesRef(new BytesRef("xx")) - .endPositionEntry(); - inputTerms = termBuilder.build(); - } - QueryList queryList = QueryList.rawTermQueryList(directoryData.field, mock(SearchExecutionContext.class), inputTerms) + QueryList queryList = QueryList.rawTermQueryList(directoryData.field, directoryData.searchExecutionContext, inputTerms) .onlySingleValues(); // pos -> terms -> docs // ----------------------------- - // 0 -> [b2] -> [1, 4] + // 0 -> [b2] -> [] // 1 -> [c1, a2] -> [] // 2 -> [z2] -> [] // 3 -> [] -> [] - // 4 -> [a1] -> [3] + // 4 -> [a3] -> [3] // 5 -> [a3, a2, z2, xx] -> [] var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test lookup"); EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator( @@ -241,19 +212,14 @@ public void testQueries_OnlySingleValues() throws Exception { ); Page page = queryOperator.getOutput(); assertNotNull(page); - assertThat(page.getPositionCount(), equalTo(3)); + assertThat(page.getPositionCount(), equalTo(1)); IntVector docs = getDocVector(page, 0); - assertThat(docs.getInt(0), equalTo(1)); - assertThat(docs.getInt(1), equalTo(4)); - assertThat(docs.getInt(2), equalTo(3)); + assertThat(docs.getInt(0), equalTo(3)); Block positions = page.getBlock(1); - assertThat(BlockUtils.toJavaObject(positions, 0), equalTo(0)); - assertThat(BlockUtils.toJavaObject(positions, 1), equalTo(0)); - assertThat(BlockUtils.toJavaObject(positions, 2), equalTo(4)); + assertThat(BlockUtils.toJavaObject(positions, 0), equalTo(4)); page.releaseBlocks(); assertTrue(queryOperator.isFinished()); - IOUtils.close(inputTerms); } } @@ -262,7 +228,12 @@ private static IntVector getDocVector(Page page, int blockIndex) { return doc.asVector().docs(); } - private record DirectoryData(DirectoryReader reader, MockDirectoryWrapper dir, MappedFieldType 
field) implements AutoCloseable { + private record DirectoryData( + DirectoryReader reader, + MockDirectoryWrapper dir, + SearchExecutionContext searchExecutionContext, + MappedFieldType field + ) implements AutoCloseable { @Override public void close() throws IOException { IOUtils.close(reader, dir); @@ -277,14 +248,45 @@ private static DirectoryData makeDirectoryWith(List> terms) throws for (var termList : terms) { Document doc = new Document(); for (String term : termList) { - doc.add(new StringField("uid", term, Field.Store.NO)); + doc.add(new KeywordField("uid", term, Field.Store.NO)); } writer.addDocument(doc); } writer.forceMerge(1); writer.commit(); - return new DirectoryData(DirectoryReader.open(writer), dir, new KeywordFieldMapper.KeywordFieldType("uid")); + var directoryReader = DirectoryReader.open(writer); + var indexSearcher = newSearcher(directoryReader); + var searchExecutionContext = mock(SearchExecutionContext.class); + var field = new KeywordFieldMapper.KeywordFieldType("uid"); + var fieldDataContext = FieldDataContext.noRuntimeFields("test"); + var indexFieldData = field.fielddataBuilder(fieldDataContext) + .build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()); + + // Required for "onlySingleValues" mode to work + when(searchExecutionContext.searcher()).thenReturn(indexSearcher); + when(searchExecutionContext.getForField(field, MappedFieldType.FielddataOperation.SEARCH)).thenReturn(indexFieldData); + + return new DirectoryData(directoryReader, dir, searchExecutionContext, field); + } + } + + private Block makeTermsBlock(List> terms) { + try (BytesRefBlock.Builder termBuilder = blockFactory.newBytesRefBlockBuilder(6)) { + for (var termList : terms) { + if (termList.isEmpty()) { + termBuilder.appendNull(); + } else if (termList.size() == 1) { + termBuilder.appendBytesRef(new BytesRef(termList.get(0))); + } else { + termBuilder.beginPositionEntry(); + for (String term : termList) { + termBuilder.appendBytesRef(new BytesRef(term)); + } + termBuilder.endPositionEntry(); + } + } + return termBuilder.build(); } } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index 43d397c3d3764..dbeaedd7e0416 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -301,6 +301,7 @@ emp_no:integer | language_code:integer | language_name:keyword mvJoinKeyOnTheLookupIndex required_capability: join_lookup_v12 +required_capability: join_lookup_skip_mv_on_lookup_key FROM employees | WHERE 10003 < emp_no AND emp_no < 10008 @@ -313,9 +314,8 @@ FROM employees emp_no:integer | language_code:integer | language_name:keyword 10004 | 4 | Quenya 10005 | 5 | null -10006 | 6 | Mv-Lang -10007 | 7 | Mv-Lang -10007 | 7 | Mv-Lang2 +10006 | 6 | null +10007 | 7 | null ; mvJoinKeyOnFrom @@ -354,6 +354,7 @@ language_code:integer | language_name:keyword | country:text mvJoinKeyFromRowExpanded required_capability: join_lookup_v12 +required_capability: join_lookup_skip_mv_on_lookup_key ROW language_code = [4, 5, 6, 7, 8] | MV_EXPAND language_code @@ -365,10 +366,9 @@ ROW language_code = [4, 5, 6, 7, 8] language_code:integer | language_name:keyword | country:text 4 | Quenya | null 5 | null | Atlantis -6 | Mv-Lang | Mv-Land -7 | Mv-Lang | Mv-Land -7 | Mv-Lang2 | Mv-Land2 -8 | Mv-Lang2 | Mv-Land2 +6 | null | null +7 | null | null +8 | null | null ; 
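+// Note: with join_lookup_skip_mv_on_lookup_key, a join key that is multivalued on the
+// lookup index no longer fans out into one row per value; the looked-up columns are
+// returned as null instead, as the expected results above show.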
############################################### diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index cf23e4b528f24..e8c5edc1c8b58 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -706,6 +706,11 @@ public enum Cap { */ JOIN_LOOKUP_SKIP_MV(JOIN_LOOKUP_V12.isEnabled()), + /** + * LOOKUP JOIN without MV matching on lookup index key (https://github.com/elastic/elasticsearch/issues/118780) + */ + JOIN_LOOKUP_SKIP_MV_ON_LOOKUP_KEY(JOIN_LOOKUP_V12.isEnabled()), + /** * Fix for https://github.com/elastic/elasticsearch/issues/117054 */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java index bc11d246904d5..a0a9d36c11000 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.compute.querydsl.query.SingleValueMatchQuery; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMathQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMathQueryTests.java index 7e75a1adc8318..3b5b2d8f85452 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMathQueryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMathQueryTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.compute.querydsl.query.SingleValueMatchQuery; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperServiceTestCase; From 38ea49a1b9d6d52202d173b2a8d7a582a630728d Mon Sep 17 00:00:00 2001 From: Amine GANI Date: Tue, 28 Jan 2025 15:39:25 +0100 Subject: [PATCH 129/383] Fix incorrect use of "updateable" flag in synonyms documentation (#120866) Co-authored-by: Amine GANI Co-authored-by: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> --- .../search-with-synonyms.asciidoc | 24 +++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/docs/reference/search/search-your-data/search-with-synonyms.asciidoc b/docs/reference/search/search-your-data/search-with-synonyms.asciidoc index 61d3a1d8f925b..5a1897febb1fa 100644 --- a/docs/reference/search/search-your-data/search-with-synonyms.asciidoc +++ b/docs/reference/search/search-your-data/search-with-synonyms.asciidoc @@ -145,15 +145,35 @@ The following example adds `my_analyzer` as a search analyzer to the `title` fie 
[source,JSON] ---- +{ "mappings": { "properties": { "title": { "type": "text", - "search_analyzer": "my_analyzer", - "updateable": true + "search_analyzer": "my_analyzer" + } + } + }, + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "whitespace", + "filter": [ + "synonyms_filter" + ] + } + }, + "filter": { + "synonyms_filter": { + "type": "synonym", + "synonyms_path": "analysis/synonym-set.txt", + "updateable": true + } } } } +} ---- From 2b16515166f32e4b15f6e2dcdd8f5b343b58a74d Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Tue, 28 Jan 2025 15:41:44 +0100 Subject: [PATCH 130/383] Update Gradle wrapper to 8.12.1 (#120960) --- build-tools-internal/gradle/wrapper/gradle-wrapper.properties | 4 ++-- build-tools-internal/src/main/resources/minimumGradleVersion | 2 +- gradle/wrapper/gradle-wrapper.properties | 4 ++-- plugins/examples/gradle/wrapper/gradle-wrapper.properties | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties index e712035eabc7b..b8cea9f02a5bf 100644 --- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties +++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=7ebdac923867a3cec0098302416d1e3c6c0c729fc4e2e05c10637a8af33a76c5 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-all.zip +distributionSha256Sum=296742a352f0b20ec14b143fb684965ad66086c7810b7b255dee216670716175 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.12.1-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion index 9c57ca327c7b7..4e28b0862495c 100644 --- a/build-tools-internal/src/main/resources/minimumGradleVersion +++ b/build-tools-internal/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -8.12 \ No newline at end of file +8.12.1 \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index e712035eabc7b..b8cea9f02a5bf 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=7ebdac923867a3cec0098302416d1e3c6c0c729fc4e2e05c10637a8af33a76c5 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-all.zip +distributionSha256Sum=296742a352f0b20ec14b143fb684965ad66086c7810b7b255dee216670716175 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.12.1-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties index e712035eabc7b..b8cea9f02a5bf 100644 --- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties +++ b/plugins/examples/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=7ebdac923867a3cec0098302416d1e3c6c0c729fc4e2e05c10637a8af33a76c5 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-all.zip 
+distributionSha256Sum=296742a352f0b20ec14b143fb684965ad66086c7810b7b255dee216670716175 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.12.1-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME From c8e8ae6e4b455ca041a77474eaa63c0138962071 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 28 Jan 2025 15:54:47 +0100 Subject: [PATCH 131/383] Revert "ESQL: Implement a MetricsAware interface (#120527)" (#121036) This reverts commit a4482d4c4c2d24418553510afa6a11a5f316983a. It turns out that `PlanTelemetry` can add quite a bit of memory usage, at least on "rude" queries. In `HeapAttackIT.testHugeManyConcat`, this was using 30MB. I'd like to revert this to see if we can - either reduce its memory footprint or - track its memory somehow. --- docs/changelog/120527.yaml | 6 -- .../xpack/esql/EsqlTestUtils.java | 2 +- .../xpack/esql/action/TelemetryIT.java | 35 ++++------ .../xpack/esql/analysis/Analyzer.java | 8 +-- .../xpack/esql/analysis/Verifier.java | 4 +- .../esql/capabilities/TelemetryAware.java | 23 ------- .../xpack/esql/execution/PlanExecutor.java | 20 +++--- .../function/EsqlFunctionRegistry.java | 15 ---- .../xpack/esql/parser/AstBuilder.java | 4 +- .../xpack/esql/parser/EsqlParser.java | 13 +--- .../xpack/esql/parser/ExpressionBuilder.java | 39 ++++------- .../xpack/esql/parser/LogicalPlanBuilder.java | 16 ++--- .../xpack/esql/plan/logical/Aggregate.java | 5 +- .../xpack/esql/plan/logical/Dissect.java | 8 ++- .../xpack/esql/plan/logical/Drop.java | 7 +- .../xpack/esql/plan/logical/Enrich.java | 7 +- .../xpack/esql/plan/logical/EsRelation.java | 5 ++ .../xpack/esql/plan/logical/Eval.java | 8 ++- .../xpack/esql/plan/logical/Explain.java | 8 ++- .../xpack/esql/plan/logical/Filter.java | 5 +- .../xpack/esql/plan/logical/Grok.java | 8 ++- .../xpack/esql/plan/logical/InlineStats.java | 8 ++- .../xpack/esql/plan/logical/Keep.java | 8 ++- .../xpack/esql/plan/logical/Limit.java | 8 ++- .../xpack/esql/plan/logical/LogicalPlan.java | 2 + .../xpack/esql/plan/logical/Lookup.java | 8 ++- .../xpack/esql/plan/logical/MvExpand.java | 5 +- .../xpack/esql/plan/logical/OrderBy.java | 5 +- .../xpack/esql/plan/logical/Project.java | 8 +++ .../xpack/esql/plan/logical/Rename.java | 8 ++- .../xpack/esql/plan/logical/Row.java | 8 ++- .../xpack/esql/plan/logical/TopN.java | 7 ++ .../esql/plan/logical/UnresolvedRelation.java | 19 +----- .../xpack/esql/plan/logical/join/Join.java | 5 ++ .../esql/plan/logical/join/LookupJoin.java | 8 +-- .../esql/plan/logical/join/StubRelation.java | 5 ++ .../plan/logical/local/LocalRelation.java | 8 +++ .../esql/plan/logical/show/ShowInfo.java | 5 +- .../xpack/esql/session/EsqlSession.java | 11 +-- .../{telemetry => stats}/FeatureMetric.java | 2 +- .../esql/{telemetry => stats}/Metrics.java | 2 +- .../xpack/esql/stats/PlanningMetrics.java | 41 +++++++++++ .../PlanningMetricsManager.java} | 12 ++-- .../{telemetry => stats}/QueryMetric.java | 2 +- .../xpack/esql/telemetry/PlanTelemetry.java | 68 ------------------- .../elasticsearch/xpack/esql/CsvTests.java | 4 +- .../function/CheckLicenseTests.java | 2 +- .../LocalLogicalPlanOptimizerTests.java | 5 ++ .../LocalPhysicalPlanOptimizerTests.java | 2 +- .../esql/planner/QueryTranslatorTests.java | 2 +- .../PlanExecutorMetricsTests.java | 2 +- .../VerifierMetricsTests.java | 36 +++++----- 52 files changed, 265 insertions(+), 297 deletions(-) delete mode 100644 docs/changelog/120527.yaml delete mode 100644 
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{telemetry => stats}/FeatureMetric.java (98%) rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{telemetry => stats}/Metrics.java (99%) create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{telemetry/PlanTelemetryManager.java => stats/PlanningMetricsManager.java} (89%) rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{telemetry => stats}/QueryMetric.java (93%) delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java rename x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/{telemetry => stats}/PlanExecutorMetricsTests.java (99%) rename x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/{telemetry => stats}/VerifierMetricsTests.java (93%) diff --git a/docs/changelog/120527.yaml b/docs/changelog/120527.yaml deleted file mode 100644 index a8e8088ea2aba..0000000000000 --- a/docs/changelog/120527.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 120527 -summary: Implement a `MetricsAware` interface -area: ES|QL -type: enhancement -issues: - - 115992 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 01195e0040a75..f3b2ea0d864ff 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -73,8 +73,8 @@ import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; +import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchStats; -import org.elasticsearch.xpack.esql.telemetry.Metrics; import org.elasticsearch.xpack.versionfield.Version; import org.junit.Assert; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java index a27b64044ca9c..25603acece3cb 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java @@ -20,7 +20,7 @@ import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; -import org.elasticsearch.xpack.esql.telemetry.PlanTelemetryManager; +import org.elasticsearch.xpack.esql.stats.PlanningMetricsManager; import org.junit.Before; import java.util.Collection; @@ -113,17 +113,6 @@ public static Iterable parameters() { Map.ofEntries(Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)), true ) }, - new Object[] { - new Test( - // Using the `::` cast operator and a function alias - """ - FROM idx - | EVAL ip = host::ip::string, y = to_str(host) - """, - Map.ofEntries(Map.entry("FROM", 1), Map.entry("EVAL", 1)), - Map.ofEntries(Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)), - true - ) }, new Object[] { new Test( "METRICS idx 
| LIMIT 10", @@ -134,7 +123,9 @@ public static Iterable parameters() { new Object[] { new Test( "METRICS idx max(id) BY host | LIMIT 10", - Build.current().isSnapshot() ? Map.ofEntries(Map.entry("METRICS", 1), Map.entry("LIMIT", 1)) : Collections.emptyMap(), + Build.current().isSnapshot() + ? Map.ofEntries(Map.entry("METRICS", 1), Map.entry("LIMIT", 1), Map.entry("FROM TS", 1)) + : Collections.emptyMap(), Build.current().isSnapshot() ? Map.ofEntries(Map.entry("MAX", 1)) : Collections.emptyMap(), Build.current().isSnapshot() ) } @@ -147,7 +138,7 @@ public static Iterable parameters() { // | EVAL ip = to_ip(host), x = to_string(host), y = to_string(host) // | INLINESTATS max(id) // """, - // Build.current().isSnapshot() ? Map.of("FROM", 1, "EVAL", 1, "INLINESTATS", 1) : Collections.emptyMap(), + // Build.current().isSnapshot() ? Map.of("FROM", 1, "EVAL", 1, "INLINESTATS", 1, "STATS", 1) : Collections.emptyMap(), // Build.current().isSnapshot() // ? Map.ofEntries(Map.entry("MAX", 1), Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)) // : Collections.emptyMap(), @@ -195,19 +186,19 @@ private static void testQuery( client(dataNode.getName()).execute(EsqlQueryAction.INSTANCE, request, ActionListener.running(() -> { try { // test total commands used - final List commandMeasurementsAll = measurements(plugin, PlanTelemetryManager.FEATURE_METRICS_ALL); + final List commandMeasurementsAll = measurements(plugin, PlanningMetricsManager.FEATURE_METRICS_ALL); assertAllUsages(expectedCommands, commandMeasurementsAll, iteration, success); // test num of queries using a command - final List commandMeasurements = measurements(plugin, PlanTelemetryManager.FEATURE_METRICS); + final List commandMeasurements = measurements(plugin, PlanningMetricsManager.FEATURE_METRICS); assertUsageInQuery(expectedCommands, commandMeasurements, iteration, success); // test total functions used - final List functionMeasurementsAll = measurements(plugin, PlanTelemetryManager.FUNCTION_METRICS_ALL); + final List functionMeasurementsAll = measurements(plugin, PlanningMetricsManager.FUNCTION_METRICS_ALL); assertAllUsages(expectedFunctions, functionMeasurementsAll, iteration, success); // test number of queries using a function - final List functionMeasurements = measurements(plugin, PlanTelemetryManager.FUNCTION_METRICS); + final List functionMeasurements = measurements(plugin, PlanningMetricsManager.FUNCTION_METRICS); assertUsageInQuery(expectedFunctions, functionMeasurements, iteration, success); } finally { latch.countDown(); @@ -225,8 +216,8 @@ private static void assertAllUsages(Map expected, List found = featureNames(metrics); assertThat(found, is(expected.keySet())); for (Measurement metric : metrics) { - assertThat(metric.attributes().get(PlanTelemetryManager.SUCCESS), is(success)); - String featureName = (String) metric.attributes().get(PlanTelemetryManager.FEATURE_NAME); + assertThat(metric.attributes().get(PlanningMetricsManager.SUCCESS), is(success)); + String featureName = (String) metric.attributes().get(PlanningMetricsManager.FEATURE_NAME); assertThat(metric.getLong(), is(iteration * expected.get(featureName))); } } @@ -236,7 +227,7 @@ private static void assertUsageInQuery(Map expected, List measurements(TestTelemetryPlugin plugin, String private static Set featureNames(List functionMeasurements) { return functionMeasurements.stream() - .map(x -> x.attributes().get(PlanTelemetryManager.FEATURE_NAME)) + .map(x -> x.attributes().get(PlanningMetricsManager.FEATURE_NAME)) .map(String.class::cast) 
.collect(Collectors.toSet()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 812080085b5a7..4f5ff35b84054 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -92,7 +92,7 @@ import org.elasticsearch.xpack.esql.rule.Rule; import org.elasticsearch.xpack.esql.rule.RuleExecutor; import org.elasticsearch.xpack.esql.session.Configuration; -import org.elasticsearch.xpack.esql.telemetry.FeatureMetric; +import org.elasticsearch.xpack.esql.stats.FeatureMetric; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.time.Duration; @@ -133,7 +133,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.core.type.DataType.isTemporalAmount; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.maybeParseTemporalAmount; /** @@ -220,7 +220,7 @@ private LogicalPlan resolveIndex(UnresolvedRelation plan, IndexResolution indexR plan.metadataFields(), plan.indexMode(), indexResolutionMessage, - plan.telemetryLabel() + plan.commandName() ); } IndexPattern table = plan.indexPattern(); @@ -233,7 +233,7 @@ private LogicalPlan resolveIndex(UnresolvedRelation plan, IndexResolution indexR plan.metadataFields(), plan.indexMode(), "invalid [" + table + "] resolution to [" + indexResolution + "]", - plan.telemetryLabel() + plan.commandName() ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index c2663650685eb..b59a112b1adb6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -32,8 +32,8 @@ import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.Project; -import org.elasticsearch.xpack.esql.telemetry.FeatureMetric; -import org.elasticsearch.xpack.esql.telemetry.Metrics; +import org.elasticsearch.xpack.esql.stats.FeatureMetric; +import org.elasticsearch.xpack.esql.stats.Metrics; import java.util.ArrayList; import java.util.BitSet; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java deleted file mode 100644 index 9116c18b7a9bc..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.capabilities; - -import java.util.Locale; - -/** - * Interface for plan nodes that need to be accounted in the statistics - */ -public interface TelemetryAware { - - /** - * @return the label reported in the telemetry data. Only needs to be overwritten if the label doesn't match the class name. - */ - default String telemetryLabel() { - return getClass().getSimpleName().toUpperCase(Locale.ROOT); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index 81f63fd9d37a6..94913581f696d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -26,10 +26,10 @@ import org.elasticsearch.xpack.esql.session.IndexResolver; import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; import org.elasticsearch.xpack.esql.session.Result; -import org.elasticsearch.xpack.esql.telemetry.Metrics; -import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; -import org.elasticsearch.xpack.esql.telemetry.PlanTelemetryManager; -import org.elasticsearch.xpack.esql.telemetry.QueryMetric; +import org.elasticsearch.xpack.esql.stats.Metrics; +import org.elasticsearch.xpack.esql.stats.PlanningMetrics; +import org.elasticsearch.xpack.esql.stats.PlanningMetricsManager; +import org.elasticsearch.xpack.esql.stats.QueryMetric; import static org.elasticsearch.action.ActionListener.wrap; @@ -41,7 +41,7 @@ public class PlanExecutor { private final Mapper mapper; private final Metrics metrics; private final Verifier verifier; - private final PlanTelemetryManager planTelemetryManager; + private final PlanningMetricsManager planningMetricsManager; public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry, XPackLicenseState licenseState) { this.indexResolver = indexResolver; @@ -50,7 +50,7 @@ public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry, XP this.mapper = new Mapper(); this.metrics = new Metrics(functionRegistry); this.verifier = new Verifier(metrics, licenseState); - this.planTelemetryManager = new PlanTelemetryManager(meterRegistry); + this.planningMetricsManager = new PlanningMetricsManager(meterRegistry); } public void esql( @@ -65,7 +65,7 @@ public void esql( QueryBuilderResolver queryBuilderResolver, ActionListener listener ) { - final PlanTelemetry planTelemetry = new PlanTelemetry(functionRegistry); + final PlanningMetrics planningMetrics = new PlanningMetrics(); final var session = new EsqlSession( sessionId, cfg, @@ -76,7 +76,7 @@ public void esql( new LogicalPlanOptimizer(new LogicalOptimizerContext(cfg, foldContext)), mapper, verifier, - planTelemetry, + planningMetrics, indicesExpressionGrouper, queryBuilderResolver ); @@ -84,12 +84,12 @@ public void esql( metrics.total(clientId); ActionListener executeListener = wrap(x -> { - planTelemetryManager.publish(planTelemetry, true); + planningMetricsManager.publish(planningMetrics, true); listener.onResponse(x); }, ex -> { // TODO when we decide if we will differentiate Kibana from REST, this String value will likely come from the request metrics.failed(clientId); - planTelemetryManager.publish(planTelemetry, false); + planningMetricsManager.publish(planningMetrics, false); listener.onFailure(ex); }); // Wrap it in a listener so that if we have any exceptions during execution, the 
listener picks it up diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index a614a473ebe41..d1622daaa5e33 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -224,7 +224,6 @@ public class EsqlFunctionRegistry { // it has with the alias name associated to the FunctionDefinition instance private final Map defs = new LinkedHashMap<>(); private final Map aliases = new HashMap<>(); - private final Map, String> names = new HashMap<>(); private SnapshotFunctionRegistry snapshotRegistry = null; @@ -259,12 +258,6 @@ public boolean functionExists(String functionName) { return defs.containsKey(functionName); } - public String functionName(Class clazz) { - String name = names.get(clazz); - Check.notNull(name, "Cannot find function by class {}", clazz); - return name; - } - public Collection listFunctions() { // It is worth double checking if we need this copy. These are immutable anyway. return defs.values(); @@ -765,14 +758,6 @@ void register(FunctionDefinition... functions) { } aliases.put(alias, f.name()); } - Check.isTrue( - names.containsKey(f.clazz()) == false, - "function type [{}} is registered twice with names [{}] and [{}]", - f.clazz(), - names.get(f.clazz()), - f.name() - ); - names.put(f.clazz(), f.name()); } // sort the temporary map by key name and add it to the global map of functions defs.putAll( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java index ec23783fe1a2c..3b39e6a9d1fdb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.esql.parser; public class AstBuilder extends LogicalPlanBuilder { - public AstBuilder(ParsingContext context) { - super(context); + public AstBuilder(QueryParams params) { + super(params); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java index 5912f1fe58bcd..9538e3ba495db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java @@ -18,9 +18,7 @@ import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.xpack.esql.core.util.StringUtils; -import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import java.util.BitSet; import java.util.function.BiFunction; @@ -54,27 +52,20 @@ public void setEsqlConfig(EsqlConfig config) { this.config = config; } - // testing utility public LogicalPlan createStatement(String query) { return createStatement(query, new QueryParams()); } - // testing utility public LogicalPlan createStatement(String query, QueryParams params) { - return createStatement(query, params, new PlanTelemetry(new 
EsqlFunctionRegistry())); - } - - public LogicalPlan createStatement(String query, QueryParams params, PlanTelemetry metrics) { if (log.isDebugEnabled()) { log.debug("Parsing as statement: {}", query); } - return invokeParser(query, params, metrics, EsqlBaseParser::singleStatement, AstBuilder::plan); + return invokeParser(query, params, EsqlBaseParser::singleStatement, AstBuilder::plan); } private T invokeParser( String query, QueryParams params, - PlanTelemetry metrics, Function parseFunction, BiFunction result ) { @@ -108,7 +99,7 @@ private T invokeParser( log.trace("Parse tree: {}", tree.toStringTree()); } - return result.apply(new AstBuilder(new ExpressionBuilder.ParsingContext(params, metrics)), tree); + return result.apply(new AstBuilder(params), tree); } catch (StackOverflowError e) { throw new ParsingException("ESQL statement is too large, causing stack overflow when generating the parsing tree: [{}]", query); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 78c3044257f9f..114fcda1e634a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -62,7 +62,6 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; -import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.math.BigInteger; @@ -116,12 +115,10 @@ public abstract class ExpressionBuilder extends IdentifierBuilder { */ public static final int MAX_EXPRESSION_DEPTH = 400; - protected final ParsingContext context; + protected final QueryParams params; - public record ParsingContext(QueryParams params, PlanTelemetry telemetry) {} - - ExpressionBuilder(ParsingContext context) { - this.context = context; + ExpressionBuilder(QueryParams params) { + this.params = params; } protected Expression expression(ParseTree ctx) { @@ -624,9 +621,7 @@ public Expression visitFunctionExpression(EsqlBaseParser.FunctionExpressionConte @Override public String visitFunctionName(EsqlBaseParser.FunctionNameContext ctx) { - var name = visitIdentifierOrParameter(ctx.identifierOrParameter()); - context.telemetry().function(name); - return name; + return visitIdentifierOrParameter(ctx.identifierOrParameter()); } @Override @@ -688,9 +683,7 @@ private Expression castToType(Source source, ParseTree parseTree, EsqlBaseParser throw new ParsingException(source, "Unsupported conversion to type [{}]", dataType); } Expression expr = expression(parseTree); - var convertFunction = converterToFactory.apply(source, expr); - context.telemetry().function(convertFunction.getClass()); - return convertFunction; + return converterToFactory.apply(source, expr); } @Override @@ -922,10 +915,10 @@ QueryParam paramByToken(TerminalNode node) { return null; } Token token = node.getSymbol(); - if (context.params().contains(token) == false) { + if (params.contains(token) == false) { throw new ParsingException(source(node), "Unexpected parameter"); } - return context.params().get(token); + return params.get(token); } QueryParam paramByNameOrPosition(TerminalNode node) { @@ -936,28 
+929,26 @@ QueryParam paramByNameOrPosition(TerminalNode node) { String nameOrPosition = token.getText().substring(1); if (isInteger(nameOrPosition)) { int index = Integer.parseInt(nameOrPosition); - if (context.params().get(index) == null) { + if (params.get(index) == null) { String message = ""; - int np = context.params().size(); + int np = params.size(); if (np > 0) { message = ", did you mean " + (np == 1 ? "position 1?" : "any position between 1 and " + np + "?"); } - context.params() - .addParsingError(new ParsingException(source(node), "No parameter is defined for position " + index + message)); + params.addParsingError(new ParsingException(source(node), "No parameter is defined for position " + index + message)); } - return context.params().get(index); + return params.get(index); } else { - if (context.params().contains(nameOrPosition) == false) { + if (params.contains(nameOrPosition) == false) { String message = ""; - List potentialMatches = StringUtils.findSimilar(nameOrPosition, context.params().namedParams().keySet()); + List potentialMatches = StringUtils.findSimilar(nameOrPosition, params.namedParams().keySet()); if (potentialMatches.size() > 0) { message = ", did you mean " + (potentialMatches.size() == 1 ? "[" + potentialMatches.get(0) + "]?" : "any of " + potentialMatches + "?"); } - context.params() - .addParsingError(new ParsingException(source(node), "Unknown query parameter [" + nameOrPosition + "]" + message)); + params.addParsingError(new ParsingException(source(node), "Unknown query parameter [" + nameOrPosition + "]" + message)); } - return context.params().get(nameOrPosition); + return params.get(nameOrPosition); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 82f3e18912325..7ddd3dafd2784 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -18,7 +18,6 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.esql.VerificationException; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ -95,18 +94,15 @@ interface PlanFactory extends Function {} */ public static final int MAX_QUERY_DEPTH = 500; - public LogicalPlanBuilder(ParsingContext context) { - super(context); + public LogicalPlanBuilder(QueryParams params) { + super(params); } private int queryDepth = 0; protected LogicalPlan plan(ParseTree ctx) { LogicalPlan p = ParserUtils.typedParsing(this, ctx, LogicalPlan.class); - if (p instanceof TelemetryAware ma) { - this.context.telemetry().command(ma); - } - var errors = this.context.params().parsingErrors(); + var errors = this.params.parsingErrors(); if (errors.hasNext() == false) { return p; } else { @@ -486,7 +482,8 @@ public LogicalPlan visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) false, List.of(new MetadataAttribute(source, MetadataAttribute.TSID_FIELD, DataType.KEYWORD, false)), IndexMode.TIME_SERIES, - null + null, + "FROM TS" ); return new Aggregate(source, relation, Aggregate.AggregateType.METRICS, stats.groupings, stats.aggregates); } @@ -546,7 +543,8 @@ public 
PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { false, emptyList(), IndexMode.LOOKUP, - null + null, + "???" ); var condition = ctx.joinCondition(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java index 5c40bfce32064..0111d23fac281 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -40,7 +39,7 @@ import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; import static org.elasticsearch.xpack.esql.plan.logical.Filter.checkFilterConditionDataType; -public class Aggregate extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware { +public class Aggregate extends UnaryPlan implements PostAnalysisVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "Aggregate", @@ -143,7 +142,7 @@ public List aggregates() { } @Override - public String telemetryLabel() { + public String commandName() { return switch (aggregateType) { case STANDARD -> "STATS"; case METRICS -> "METRICS"; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java index 9200850b2f9db..a83e102e51005 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.dissect.DissectParser; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; @@ -26,7 +25,7 @@ import java.util.List; import java.util.Objects; -public class Dissect extends RegexExtract implements TelemetryAware { +public class Dissect extends RegexExtract { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Dissect", Dissect::new); private final Parser parser; @@ -124,6 +123,11 @@ public boolean equals(Object o) { return Objects.equals(parser, dissect.parser); } + @Override + public String commandName() { + return "DISSECT"; + } + @Override public int hashCode() { return Objects.hash(super.hashCode(), parser); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java index 483c3508013ab..add5a2d576c00 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -17,7 +16,7 @@ import java.util.List; import java.util.Objects; -public class Drop extends UnaryPlan implements TelemetryAware { +public class Drop extends UnaryPlan { private final List removals; public Drop(Source source, LogicalPlan child, List removals) { @@ -39,6 +38,10 @@ public List removals() { return removals; } + public String commandName() { + return "DROP"; + } + @Override public boolean expressionsResolved() { return Resolvables.resolved(removals); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java index 4e9fc87318029..9b81060349815 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java @@ -18,7 +18,6 @@ import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -49,7 +48,7 @@ import static org.elasticsearch.xpack.esql.core.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; -public class Enrich extends UnaryPlan implements GeneratingPlan, PostAnalysisPlanVerificationAware, TelemetryAware { +public class Enrich extends UnaryPlan implements GeneratingPlan, PostAnalysisPlanVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "Enrich", @@ -203,6 +202,10 @@ protected AttributeSet computeReferences() { return matchField.references(); } + public String commandName() { + return "ENRICH"; + } + @Override public boolean expressionsResolved() { return policyName.resolved() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java index 448085df1e831..90b3aa8625087 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java @@ -172,6 +172,11 @@ public Set concreteIndices() { return indexNameWithModes.keySet(); } + @Override + public String commandName() { + return "FROM"; + } + @Override public boolean expressionsResolved() { // For unresolved expressions to exist in EsRelation is fine, as long as they are not used in later operations diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java index 7c437dac03409..cbd79011032df 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -38,7 +37,7 @@ import static org.elasticsearch.xpack.esql.core.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; -public class Eval extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware, TelemetryAware { +public class Eval extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Eval", Eval::new); private final List fields; @@ -132,6 +131,11 @@ private List renameAliases(List originalAttributes, List n return newFieldsWithUpdatedRefs; } + @Override + public String commandName() { + return "EVAL"; + } + @Override public boolean expressionsResolved() { return Resolvables.resolved(fields); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java index bd49ed04881cc..38e7c19522df6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -18,7 +17,7 @@ import java.util.List; import java.util.Objects; -public class Explain extends LeafPlan implements TelemetryAware { +public class Explain extends LeafPlan { public enum Type { PARSED, @@ -70,6 +69,11 @@ public List output() { ); } + @Override + public String commandName() { + return "EXPLAIN"; + } + @Override public boolean expressionsResolved() { return true; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java index 6931c320007fe..0fae5e5831fc7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import 
org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -29,7 +28,7 @@ * {@code SELECT x FROM y WHERE z ..} the "WHERE" clause is a Filter. A * {@code Filter} has a "condition" Expression that does the filtering. */ -public class Filter extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware { +public class Filter extends UnaryPlan implements PostAnalysisVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Filter", Filter::new); private final Expression condition; @@ -70,7 +69,7 @@ public Expression condition() { } @Override - public String telemetryLabel() { + public String commandName() { return "WHERE"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java index 1fab2cbecd034..fcfd1ac0f04da 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java @@ -15,7 +15,6 @@ import org.elasticsearch.grok.GrokCaptureType; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; @@ -32,7 +31,7 @@ import java.util.Objects; import java.util.stream.Collectors; -public class Grok extends RegexExtract implements TelemetryAware { +public class Grok extends RegexExtract { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Grok", Grok::readFrom); public record Parser(String pattern, org.elasticsearch.grok.Grok grok) { @@ -149,6 +148,11 @@ public boolean equals(Object o) { return Objects.equals(parser, grok.parser); } + @Override + public String commandName() { + return "GROK"; + } + @Override public int hashCode() { return Objects.hash(super.hashCode(), parser); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java index 527ba28d377f1..4211f8a0d45b6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; @@ -37,7 +36,7 @@ * underlying aggregate. *
*/ -public class InlineStats extends UnaryPlan implements NamedWriteable, SurrogateLogicalPlan, TelemetryAware { +public class InlineStats extends UnaryPlan implements NamedWriteable, SurrogateLogicalPlan { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "InlineStats", @@ -81,6 +80,11 @@ public Aggregate aggregate() { return aggregate; } + @Override + public String commandName() { + return "INLINESTATS"; + } + @Override public boolean expressionsResolved() { return aggregate.expressionsResolved(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java index 67108afb94668..4c03d68e6e6f7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.plan.logical; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -15,7 +14,7 @@ import java.util.List; import java.util.Objects; -public class Keep extends Project implements TelemetryAware { +public class Keep extends Project { public Keep(Source source, LogicalPlan child, List projections) { super(source, child, projections); @@ -45,4 +44,9 @@ public int hashCode() { public boolean equals(Object obj) { return super.equals(obj); } + + @Override + public String commandName() { + return "KEEP"; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java index 1bb89acf1942d..ea64b7687f4c0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -18,7 +17,7 @@ import java.io.IOException; import java.util.Objects; -public class Limit extends UnaryPlan implements TelemetryAware { +public class Limit extends UnaryPlan { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Limit", Limit::new); private final Expression limit; @@ -58,6 +57,11 @@ public Expression limit() { return limit; } + @Override + public String commandName() { + return "LIMIT"; + } + @Override public boolean expressionsResolved() { return limit.resolved(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java index ac4baea8bc853..e845c25bd3b32 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java @@ -75,6 +75,8 @@ public boolean resolved() { return lazyResolved; } + public abstract String commandName(); + public abstract boolean expressionsResolved(); @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java index 1c05ceb124529..6e7f421003292 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -32,7 +31,7 @@ * Looks up values from the associated {@code tables}. * The class is supposed to be substituted by a {@link Join}. */ -public class Lookup extends UnaryPlan implements SurrogateLogicalPlan, TelemetryAware { +public class Lookup extends UnaryPlan implements SurrogateLogicalPlan { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Lookup", Lookup::new); private final Expression tableName; @@ -118,6 +117,11 @@ public JoinConfig joinConfig() { return new JoinConfig(JoinTypes.LEFT, matchFields, leftFields, rightFields); } + @Override + public String commandName() { + return "LOOKUP"; + } + @Override public boolean expressionsResolved() { return tableName.resolved() && Resolvables.resolved(matchFields) && localRelation != null; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java index f5a3c8230b124..949e4906e5033 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -23,7 +22,7 @@ import java.util.List; import java.util.Objects; -public class MvExpand extends UnaryPlan implements TelemetryAware { +public class MvExpand extends UnaryPlan { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "MvExpand", MvExpand::new); private final NamedExpression target; @@ -96,7 +95,7 @@ protected AttributeSet computeReferences() { return target.references(); } - public String telemetryLabel() { + public String commandName() { return "MV_EXPAND"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java index 
051e2c7769bde..d927d78701c65 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -25,7 +24,7 @@ import static org.elasticsearch.xpack.esql.common.Failure.fail; -public class OrderBy extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware { +public class OrderBy extends UnaryPlan implements PostAnalysisVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "OrderBy", OrderBy::new); private final List order; @@ -70,7 +69,7 @@ public List order() { } @Override - public String telemetryLabel() { + public String commandName() { return "SORT"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java index e12a8cb557fde..841e7fbe81896 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java @@ -78,6 +78,14 @@ public boolean resolved() { return super.resolved() && Expressions.anyMatch(projections, Functions::isAggregate) == false; } + @Override + public String commandName() { + // this could represent multiple commands (KEEP, DROP, RENAME) + // and should not be present in a pre-analyzed plan. + // maybe it should throw exception? 
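+ // note: the Keep subclass overrides commandName() and returns "KEEP"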
+ return ""; + } + @Override public boolean expressionsResolved() { return Resolvables.resolved(projections); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java index 7887d8ed66b99..773d3fd015e5f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.analysis.Analyzer.ResolveRefs; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expressions; @@ -21,7 +20,7 @@ import java.util.List; import java.util.Objects; -public class Rename extends UnaryPlan implements TelemetryAware { +public class Rename extends UnaryPlan { private final List renamings; @@ -52,6 +51,11 @@ public List output() { return Expressions.asAttributes(projectionsAfterResolution); } + @Override + public String commandName() { + return "RENAME"; + } + @Override public boolean expressionsResolved() { for (var alias : renamings) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java index 005ca45d19131..65d1adf5e2799 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -24,7 +23,7 @@ import static org.elasticsearch.xpack.esql.common.Failure.fail; -public class Row extends LeafPlan implements PostAnalysisVerificationAware, TelemetryAware { +public class Row extends LeafPlan implements PostAnalysisVerificationAware { private final List fields; @@ -52,6 +51,11 @@ public List output() { return Expressions.asAttributes(fields); } + @Override + public String commandName() { + return "ROW"; + } + @Override public boolean expressionsResolved() { return Resolvables.resolved(fields); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java index a9a5dbddc544f..d6e0e4334bd47 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java @@ -55,6 +55,13 @@ public String getWriteableName() { return ENTRY.name; } + @Override + public String commandName() { + // this is the result of optimizations, it will never appear in a pre-analyzed plan + // maybe we should throw exception? 
+ return ""; + } + @Override public boolean expressionsResolved() { return limit.resolved() && Resolvables.resolved(order); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java index 5d22a86b2cdf7..0a20e1dd9080d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java @@ -8,13 +8,11 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.IndexMode; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.plan.IndexPattern; -import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import java.util.Collections; import java.util.List; @@ -22,7 +20,7 @@ import static java.util.Collections.singletonList; -public class UnresolvedRelation extends LeafPlan implements Unresolvable, TelemetryAware { +public class UnresolvedRelation extends LeafPlan implements Unresolvable { private final IndexPattern indexPattern; private final boolean frozen; @@ -58,17 +56,6 @@ public UnresolvedRelation( this.commandName = commandName; } - public UnresolvedRelation( - Source source, - IndexPattern table, - boolean frozen, - List metadataFields, - IndexMode indexMode, - String unresolvedMessage - ) { - this(source, table, frozen, metadataFields, indexMode, unresolvedMessage, null); - } - @Override public void writeTo(StreamOutput out) { throw new UnsupportedOperationException("not serialized"); @@ -99,7 +86,7 @@ public boolean resolved() { /** * - * This is used by {@link PlanTelemetry} to collect query statistics + * This is used by {@link org.elasticsearch.xpack.esql.stats.PlanningMetrics} to collect query statistics * It can return *
*
  • "FROM" if this a |FROM idx command
@@ -108,7 +95,7 @@ public boolean resolved() { *
*/ @Override - public String telemetryLabel() { + public String commandName() { return commandName; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java index 997bff70663bd..a541142f952e0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java @@ -189,6 +189,11 @@ public Join replaceChildren(LogicalPlan left, LogicalPlan right) { return new Join(source(), left, right, config); } + @Override + public String commandName() { + return "JOIN"; + } + @Override public int hashCode() { return Objects.hash(config, left(), right()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java index 5f1f569e3671b..c29cf0ec7f414 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java @@ -9,7 +9,6 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -28,7 +27,7 @@ /** * Lookup join - specialized LEFT (OUTER) JOIN between the main left side and a lookup index (index_mode = lookup) on the right. 
*/ -public class LookupJoin extends Join implements SurrogateLogicalPlan, PostAnalysisVerificationAware, TelemetryAware { +public class LookupJoin extends Join implements SurrogateLogicalPlan, PostAnalysisVerificationAware { public LookupJoin(Source source, LogicalPlan left, LogicalPlan right, List joinFields) { this(source, left, right, new UsingJoinType(LEFT, joinFields), emptyList(), emptyList(), emptyList()); @@ -78,11 +77,6 @@ protected NodeInfo info() { ); } - @Override - public String telemetryLabel() { - return "LOOKUP JOIN"; - } - @Override public void postAnalysisVerification(Failures failures) { super.postAnalysisVerification(failures); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java index 33e1f385f9eec..4f04024d61d46 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java @@ -67,6 +67,11 @@ protected NodeInfo info() { return NodeInfo.create(this, StubRelation::new, output); } + @Override + public String commandName() { + return ""; + } + @Override public int hashCode() { return Objects.hash(StubRelation.class, output); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java index d6106bae6b6b8..07432481d2341 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java @@ -63,6 +63,14 @@ public LocalSupplier supplier() { return supplier; } + @Override + public String commandName() { + // this colud be an empty source, a lookup table or something else + // but it should not be present in a pre-analyzed plan + // maybe we sholud throw exception? 
+ return ""; + } + @Override public boolean expressionsResolved() { return true; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java index 99c917ba803a9..fa432537d27e3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java @@ -10,7 +10,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Build; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -23,7 +22,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; -public class ShowInfo extends LeafPlan implements TelemetryAware { +public class ShowInfo extends LeafPlan { private final List attributes; @@ -60,7 +59,7 @@ public List> values() { } @Override - public String telemetryLabel() { + public String commandName() { return "SHOW"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 8c95992cf9f5a..0505955e450d7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -73,7 +73,7 @@ import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.mapper.Mapper; -import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; +import org.elasticsearch.xpack.esql.stats.PlanningMetrics; import java.util.ArrayList; import java.util.Arrays; @@ -112,7 +112,7 @@ public interface PlanRunner { private final Mapper mapper; private final PhysicalPlanOptimizer physicalPlanOptimizer; - private final PlanTelemetry planTelemetry; + private final PlanningMetrics planningMetrics; private final IndicesExpressionGrouper indicesExpressionGrouper; private final QueryBuilderResolver queryBuilderResolver; @@ -126,7 +126,7 @@ public EsqlSession( LogicalPlanOptimizer logicalPlanOptimizer, Mapper mapper, Verifier verifier, - PlanTelemetry planTelemetry, + PlanningMetrics planningMetrics, IndicesExpressionGrouper indicesExpressionGrouper, QueryBuilderResolver queryBuilderResolver ) { @@ -140,7 +140,7 @@ public EsqlSession( this.mapper = mapper; this.logicalPlanOptimizer = logicalPlanOptimizer; this.physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(configuration)); - this.planTelemetry = planTelemetry; + this.planningMetrics = planningMetrics; this.indicesExpressionGrouper = indicesExpressionGrouper; this.queryBuilderResolver = queryBuilderResolver; } @@ -280,7 +280,7 @@ private LocalRelation resultToPlan(LogicalPlan plan, Result result) { } private LogicalPlan parse(String query, QueryParams params) { - var parsed = new EsqlParser().createStatement(query, params, planTelemetry); + var parsed = new EsqlParser().createStatement(query, params); LOGGER.debug("Parsed logical plan:\n{}", parsed); return parsed; } @@ -297,6 +297,7 @@ public void analyzedPlan( } Function 
analyzeAction = (l) -> { + planningMetrics.gatherPreAnalysisMetrics(parsed); Analyzer analyzer = new Analyzer( new AnalyzerContext(configuration, functionRegistry, l.indices, l.lookupIndices, l.enrichResolution), verifier diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/FeatureMetric.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java similarity index 98% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/FeatureMetric.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java index 3a36f5b0d7c04..4cae2a9c247f3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/FeatureMetric.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.telemetry; +package org.elasticsearch.xpack.esql.stats; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/Metrics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java similarity index 99% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/Metrics.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java index b8962b47809a0..092fecb3142db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/Metrics.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.telemetry; +package org.elasticsearch.xpack.esql.stats; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.util.Maps; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java new file mode 100644 index 0000000000000..7b452e50fd525 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.stats; + +import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; + +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +/** + * This class is responsible for collecting metrics related to ES|QL planning. + */ +public class PlanningMetrics { + private Map commands = new HashMap<>(); + private Map functions = new HashMap<>(); + + public void gatherPreAnalysisMetrics(LogicalPlan plan) { + plan.forEachDown(p -> add(commands, p.commandName())); + plan.forEachExpressionDown(UnresolvedFunction.class, p -> add(functions, p.name().toUpperCase(Locale.ROOT))); + } + + private void add(Map map, String key) { + Integer cmd = map.get(key); + map.put(key, cmd == null ? 
1 : cmd + 1); + } + + public Map commands() { + return commands; + } + + public Map functions() { + return functions; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetryManager.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetricsManager.java similarity index 89% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetryManager.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetricsManager.java index 2cd536daf389c..a2d00a1f530e9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetryManager.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetricsManager.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.telemetry; +package org.elasticsearch.xpack.esql.stats; import org.elasticsearch.telemetry.metric.LongCounter; import org.elasticsearch.telemetry.metric.MeterRegistry; @@ -17,7 +17,7 @@ * * @see METERING */ -public class PlanTelemetryManager { +public class PlanningMetricsManager { // APM counters private final LongCounter featuresCounter; @@ -59,7 +59,7 @@ public class PlanTelemetryManager { */ public static final String SUCCESS = "success"; - public PlanTelemetryManager(MeterRegistry meterRegistry) { + public PlanningMetricsManager(MeterRegistry meterRegistry) { featuresCounter = meterRegistry.registerLongCounter( FEATURE_METRICS, "ESQL features, total number of queries that use them", @@ -77,9 +77,9 @@ public PlanTelemetryManager(MeterRegistry meterRegistry) { /** * Publishes the collected metrics to the meter registry */ - public void publish(PlanTelemetry metrics, boolean success) { - metrics.commands().forEach((key, value) -> incCommand(key, value, success)); - metrics.functions().forEach((key, value) -> incFunction(key, value, success)); + public void publish(PlanningMetrics metrics, boolean success) { + metrics.commands().entrySet().forEach(x -> incCommand(x.getKey(), x.getValue(), success)); + metrics.functions().entrySet().forEach(x -> incFunction(x.getKey(), x.getValue(), success)); } private void incCommand(String name, int count, boolean success) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/QueryMetric.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/QueryMetric.java similarity index 93% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/QueryMetric.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/QueryMetric.java index 567b4b0a84937..e862006d058ac 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/QueryMetric.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/QueryMetric.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.telemetry; +package org.elasticsearch.xpack.esql.stats; import java.util.Locale; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java deleted file mode 100644 index 6fe1314524f10..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.telemetry; - -import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; -import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.Locale; -import java.util.Map; -import java.util.Set; - -import static org.elasticsearch.common.Strings.format; - -/** - * This class is responsible for collecting metrics related to ES|QL planning. - */ -public class PlanTelemetry { - private final EsqlFunctionRegistry functionRegistry; - private final Set telemetryAwares = new HashSet<>(); - private final Map commands = new HashMap<>(); - private final Map functions = new HashMap<>(); - - public PlanTelemetry(EsqlFunctionRegistry functionRegistry) { - this.functionRegistry = functionRegistry; - } - - private void add(Map map, String key) { - map.compute(key.toUpperCase(Locale.ROOT), (k, count) -> count == null ? 1 : count + 1); - } - - public void command(TelemetryAware command) { - if (telemetryAwares.add(command)) { - if (command.telemetryLabel() == null) { - throw new QlIllegalArgumentException(format("TelemetryAware [{}] has no metric name", command)); - } - add(commands, command.telemetryLabel()); - } - } - - public void function(String name) { - var functionName = functionRegistry.resolveAlias(name); - if (functionRegistry.functionExists(functionName)) { - // The metrics have been collected initially with their uppercase spelling - add(functions, functionName); - } - } - - public void function(Class clazz) { - add(functions, functionRegistry.functionName(clazz)); - } - - public Map commands() { - return commands; - } - - public Map functions() { - return functions; - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index bae20bb9b26d3..350befc219f6e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -90,7 +90,7 @@ import org.elasticsearch.xpack.esql.session.EsqlSession.PlanRunner; import org.elasticsearch.xpack.esql.session.Result; import org.elasticsearch.xpack.esql.stats.DisabledSearchStats; -import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; +import org.elasticsearch.xpack.esql.stats.PlanningMetrics; import org.junit.After; import org.junit.Before; import org.mockito.Mockito; @@ -514,7 +514,7 @@ private ActualResults executePlan(BigArrays bigArrays) throws Exception { new LogicalPlanOptimizer(new LogicalOptimizerContext(configuration, foldCtx)), mapper, TEST_VERIFIER, - new PlanTelemetry(functionRegistry), + new PlanningMetrics(), null, EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java index cf2de30e44456..e507640c7b23c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Limit; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.telemetry.Metrics; +import org.elasticsearch.xpack.esql.stats.Metrics; import java.util.List; import java.util.Objects; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index d99118df7e684..310d680cfbf41 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -248,6 +248,11 @@ public UnaryPlan replaceChild(LogicalPlan newChild) { return new MockFieldAttributeCommand(source(), newChild, field); } + @Override + public String commandName() { + return "MOCK"; + } + @Override public boolean expressionsResolved() { return true; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 8bdd7a4e1645f..aae2d012fc3a6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -74,9 +74,9 @@ import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.rule.Rule; import org.elasticsearch.xpack.esql.session.Configuration; +import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchContextStats; import org.elasticsearch.xpack.esql.stats.SearchStats; -import org.elasticsearch.xpack.esql.telemetry.Metrics; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import org.elasticsearch.xpack.kql.query.KqlQueryBuilder; import org.junit.Before; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java index f9732272dbd74..57210fda07f2b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.xpack.esql.optimizer.TestPlannerOptimizer; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; -import org.elasticsearch.xpack.esql.telemetry.Metrics; +import org.elasticsearch.xpack.esql.stats.Metrics; import org.hamcrest.Matcher; import org.junit.BeforeClass; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java similarity index 99% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java rename to 
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java index 4c2913031271f..a3c5cd9168b4f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.telemetry; +package org.elasticsearch.xpack.esql.stats; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java similarity index 93% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java index de377fe78588c..eda906b147956 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.telemetry; +package org.elasticsearch.xpack.esql.stats; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; @@ -22,23 +22,23 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyzer; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.DISSECT; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.DROP; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.ENRICH; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.EVAL; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.FROM; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.GROK; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.KEEP; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.LIMIT; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.MV_EXPAND; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.RENAME; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.ROW; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.SHOW; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.SORT; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.STATS; -import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.WHERE; -import static org.elasticsearch.xpack.esql.telemetry.Metrics.FPREFIX; -import static org.elasticsearch.xpack.esql.telemetry.Metrics.FUNC_PREFIX; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.DISSECT; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.DROP; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.ENRICH; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.EVAL; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.FROM; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.GROK; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.KEEP; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; +import static 
org.elasticsearch.xpack.esql.stats.FeatureMetric.MV_EXPAND; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.RENAME; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.ROW; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.SHOW; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.SORT; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.STATS; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.WHERE; +import static org.elasticsearch.xpack.esql.stats.Metrics.FPREFIX; +import static org.elasticsearch.xpack.esql.stats.Metrics.FUNC_PREFIX; public class VerifierMetricsTests extends ESTestCase { From 8e2044de150ca6b57dfe643e82fe94b76ca846ae Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Tue, 28 Jan 2025 16:56:47 +0200 Subject: [PATCH 132/383] Normalize negative scores for text_similarity_reranker retriever (#120930) --- docs/changelog/120930.yaml | 6 +++++ docs/reference/search/retriever.asciidoc | 17 +++++++++++++ .../retriever/rankdoc/RankDocsQuery.java | 11 +++++++- .../query/RankDocsQueryBuilderTests.java | 12 +++++++++ .../rank/rerank/AbstractRerankerIT.java | 16 +++++++++--- .../mock/AbstractTestInferenceService.java | 7 ++++++ .../mock/TestRerankingServiceExtension.java | 6 ++++- ...ankFeaturePhaseRankCoordinatorContext.java | 25 ++++++++++++++----- .../TextSimilarityRankRetrieverBuilder.java | 1 + .../TextSimilarityRankMultiNodeTests.java | 5 ++++ .../TextSimilarityRankTests.java | 17 +++++++------ .../xpack/inference/InferenceRestIT.java | 1 + .../70_text_similarity_rank_retriever.yml | 6 ----- 13 files changed, 105 insertions(+), 25 deletions(-) create mode 100644 docs/changelog/120930.yaml diff --git a/docs/changelog/120930.yaml b/docs/changelog/120930.yaml new file mode 100644 index 0000000000000..376edb7632a0b --- /dev/null +++ b/docs/changelog/120930.yaml @@ -0,0 +1,6 @@ +pr: 120930 +summary: Normalize negative scores for `text_similarity_reranker` retriever +area: Ranking +type: bug +issues: + - 120201 diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index 21892b4efe5a8..4cccf4d204d99 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -523,6 +523,23 @@ You have the following options: ** Then set up an <> with the `rerank` task type. ** Refer to the <> on this page for a step-by-step guide. +[IMPORTANT] +==== +Scores from the re-ranking process are normalized using the following formula before returned to the user, +to avoid having negative scores. +[source,text] +---- +score = max(score, 0) + min(exp(score), 1) +---- +Using the above, any initially negative scores are projected to (0, 1) and positive scores to [1, infinity). 
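+As a quick illustration, writing normalize(score) as shorthand for the formula above (the concrete inputs below are example values only, not taken from the change itself):
+[source, text]
+----
+normalize(-0.7) = max(-0.7, 0) + min(exp(-0.7), 1) = 0 + exp(-0.7) ≈ 0.497
+normalize(0.0)  = max(0.0, 0) + min(exp(0.0), 1) = 0 + 1 = 1
+normalize(2.0)  = max(2.0, 0) + min(exp(2.0), 1) = 2.0 + 1 = 3.0
+----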
To revert back if needed, one can use: +[source, text] +---- +score = score - 1, if score >= 1 +score = ln(score), if score < 1 +---- +==== + ===== Parameters `retriever`:: diff --git a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java index ebbdf58cc8c4f..5920567646030 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java @@ -57,6 +57,11 @@ public static class TopQuery extends Query { this.queryNames = queryNames; this.segmentStarts = segmentStarts; this.contextIdentity = contextIdentity; + for (RankDoc doc : docs) { + if (false == doc.score >= 0) { + throw new IllegalArgumentException("RankDoc scores must be positive values. Missing a normalization step?"); + } + } } @Override @@ -160,7 +165,11 @@ public float getMaxScore(int docId) { @Override public float score() { - return docs[upTo].score; + // We could still end up with a valid 0 score for a RankDoc + // so here we want to differentiate between this and all the tailQuery matches + // that would also produce a 0 score due to filtering, by setting the score to `Float.MIN_VALUE` instead for + // RankDoc matches. + return Math.max(docs[upTo].score, Float.MIN_VALUE); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/query/RankDocsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/RankDocsQueryBuilderTests.java index ba39702d3d162..9f1d2fbfdefff 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RankDocsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RankDocsQueryBuilderTests.java @@ -251,4 +251,16 @@ public void testUnknownField() throws IOException { public void testValidOutput() throws IOException { // no-op since RankDocsQueryBuilder is an internal only API } + + public void shouldThrowForNegativeScores() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + iw.addDocument(new Document()); + try (IndexReader reader = iw.getReader()) { + SearchExecutionContext context = createSearchExecutionContext(newSearcher(reader)); + RankDocsQueryBuilder queryBuilder = new RankDocsQueryBuilder(new RankDoc[] { new RankDoc(0, -1.0f, 0) }, null, false); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> queryBuilder.doToQuery(context)); + assertEquals("RankDoc scores must be positive values.
Missing a normalization step?", ex.getMessage()); + } + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/rank/rerank/AbstractRerankerIT.java b/test/framework/src/main/java/org/elasticsearch/search/rank/rerank/AbstractRerankerIT.java index 06763c27a3536..ad4e5842629e7 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/rank/rerank/AbstractRerankerIT.java +++ b/test/framework/src/main/java/org/elasticsearch/search/rank/rerank/AbstractRerankerIT.java @@ -56,6 +56,10 @@ public enum ThrowingRankBuilderType { protected abstract Collection> pluginsNeeded(); + protected boolean shouldCheckScores() { + return true; + } + @Override protected Collection> nodePlugins() { return pluginsNeeded(); @@ -95,9 +99,11 @@ public void testRerankerNoExceptions() throws Exception { int rank = 1; for (SearchHit searchHit : response.getHits().getHits()) { assertThat(searchHit, hasId(String.valueOf(5 - (rank - 1)))); - assertEquals(0.5f - ((rank - 1) * 0.1f), searchHit.getScore(), 1e-5f); assertThat(searchHit, hasRank(rank)); assertNotNull(searchHit.getFields().get(searchField)); + if (shouldCheckScores()) { + assertEquals(0.5f - ((rank - 1) * 0.1f), searchHit.getScore(), 1e-5f); + } rank++; } } @@ -140,9 +146,11 @@ public void testRerankerPagination() throws Exception { int rank = 3; for (SearchHit searchHit : response.getHits().getHits()) { assertThat(searchHit, hasId(String.valueOf(5 - (rank - 1)))); - assertEquals(0.5f - ((rank - 1) * 0.1f), searchHit.getScore(), 1e-5f); assertThat(searchHit, hasRank(rank)); assertNotNull(searchHit.getFields().get(searchField)); + if (shouldCheckScores()) { + assertEquals(0.5f - ((rank - 1) * 0.1f), searchHit.getScore(), 1e-5f); + } rank++; } } @@ -222,9 +230,11 @@ public void testNotAllShardsArePresentInFetchPhase() throws Exception { int rank = 1; for (SearchHit searchHit : response.getHits().getHits()) { assertThat(searchHit, hasId(String.valueOf(5 - (rank - 1)))); - assertEquals(0.5f - ((rank - 1) * 0.1f), searchHit.getScore(), 1e-5f); assertThat(searchHit, hasRank(rank)); assertNotNull(searchHit.getFields().get(searchField)); + if (shouldCheckScores()) { + assertEquals(0.5f - ((rank - 1) * 0.1f), searchHit.getScore(), 1e-5f); + } rank++; } } diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java index 3be85ee857bbb..3c29cef47d628 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java @@ -26,9 +26,16 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; +import java.util.Random; public abstract class AbstractTestInferenceService implements InferenceService { + protected static final Random random = new Random( + System.getProperty("tests.seed") == null + ? 
System.currentTimeMillis() + : Long.parseUnsignedLong(System.getProperty("tests.seed").split(":")[0], 16) + ); + protected static int stringWeight(String input, int position) { int hashCode = input.hashCode(); if (hashCode < 0) { diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java index e79c8b9bad522..765c69e28a9ad 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java @@ -42,6 +42,7 @@ import java.util.Map; public class TestRerankingServiceExtension implements InferenceServiceExtension { + @Override public List getInferenceServiceFactories() { return List.of(TestInferenceService::new); @@ -149,9 +150,12 @@ public void chunkedInfer( private RankedDocsResults makeResults(List input) { List results = new ArrayList<>(); int totalResults = input.size(); + float minScore = random.nextFloat(-1f, 1f); float resultDiff = 0.2f; for (int i = 0; i < input.size(); i++) { - results.add(new RankedDocsResults.RankedDoc(totalResults - 1 - i, resultDiff * (totalResults - i), input.get(i))); + results.add( + new RankedDocsResults.RankedDoc(totalResults - 1 - i, minScore + resultDiff * (totalResults - i), input.get(i)) + ); } return new RankedDocsResults(results); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContext.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContext.java index 0ff48bfd493ba..63274e5104207 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContext.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContext.java @@ -20,8 +20,8 @@ import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankTaskSettings; import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankTaskSettings; +import java.util.ArrayList; import java.util.Arrays; -import java.util.Comparator; import java.util.List; import java.util.Map; @@ -130,10 +130,15 @@ protected void computeScores(RankFeatureDoc[] featureDocs, ActionListener minScore == null || doc.score >= minScore) - .sorted(Comparator.comparing((RankFeatureDoc doc) -> doc.score).reversed()) - .toArray(RankFeatureDoc[]::new); + List docs = new ArrayList<>(); + for (RankFeatureDoc doc : originalDocs) { + if (minScore == null || doc.score >= minScore) { + doc.score = normalizeScore(doc.score); + docs.add(doc); + } + } + docs.sort(RankFeatureDoc::compareTo); + return docs.toArray(new RankFeatureDoc[0]); } protected InferenceAction.Request generateRequest(List docFeatures) { @@ -154,7 +159,15 @@ private float[] extractScoresFromRankedDocs(List ra for (RankedDocsResults.RankedDoc rankedDoc : rankedDocs) { scores[rankedDoc.index()] = rankedDoc.relevanceScore(); } - return scores; } + + private static float normalizeScore(float score) { + // As some models might produce 
negative scores, we want to ensure that all scores will be positive + // so we will make use of the following normalization formula: + // score = max(score, 0) + min(exp(score), 1) + // this will ensure that all positive scores lie in the [1, inf) range, + // while negative values (and 0) will be shifted to (0, 1] + return Math.max(score, 0) + Math.min((float) Math.exp(score), 1); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java index 10a1bc324fd2b..165c42fdb7d1f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -142,6 +142,7 @@ protected RankDoc[] combineInnerRetrieverResults(List rankResults, b TextSimilarityRankDoc[] textSimilarityRankDocs = new TextSimilarityRankDoc[scoreDocs.length]; for (int i = 0; i < scoreDocs.length; i++) { ScoreDoc scoreDoc = scoreDocs[i]; + assert scoreDoc.score >= 0; if (explain) { textSimilarityRankDocs[i] = new TextSimilarityRankDoc( scoreDoc.doc, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java index daed03c198e0d..27a8f0e962761 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java @@ -50,4 +50,9 @@ public void testQueryPhaseShardThrowingAllShardsFail() throws Exception { public void testQueryPhaseCoordinatorThrowingAllShardsFail() throws Exception { // no-op } + + @Override + protected boolean shouldCheckScores() { + return false; + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java index f81f2965c392e..0969a902870b6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java @@ -131,11 +131,12 @@ public void testRerank() { // Verify order, rank and score of results SearchHit[] hits = response.getHits().getHits(); assertEquals(5, hits.length); - assertHitHasRankScoreAndText(hits[0], 1, 4.0f, "4"); - assertHitHasRankScoreAndText(hits[1], 2, 3.0f, "3"); - assertHitHasRankScoreAndText(hits[2], 3, 2.0f, "2"); - assertHitHasRankScoreAndText(hits[3], 4, 1.0f, "1"); - assertHitHasRankScoreAndText(hits[4], 5, 0.0f, "0"); + // we add + 1 to all expected scores due to the default normalization being applied which shifts positive scores to by 1 + assertHitHasRankScoreAndText(hits[0], 1, 4.0f + 1f, "4"); + assertHitHasRankScoreAndText(hits[1], 2, 3.0f + 1f, "3"); + assertHitHasRankScoreAndText(hits[2], 3, 2.0f + 1f, "2"); + assertHitHasRankScoreAndText(hits[3], 
4, 1.0f + 1f, "1"); + assertHitHasRankScoreAndText(hits[4], 5, 0.0f + 1f, "0"); } ); } @@ -150,9 +151,9 @@ public void testRerankWithMinScore() { // Verify order, rank and score of results SearchHit[] hits = response.getHits().getHits(); assertEquals(3, hits.length); - assertHitHasRankScoreAndText(hits[0], 1, 4.0f, "4"); - assertHitHasRankScoreAndText(hits[1], 2, 3.0f, "3"); - assertHitHasRankScoreAndText(hits[2], 3, 2.0f, "2"); + assertHitHasRankScoreAndText(hits[0], 1, 4.0f + 1f, "4"); + assertHitHasRankScoreAndText(hits[1], 2, 3.0f + 1f, "3"); + assertHitHasRankScoreAndText(hits[2], 3, 2.0f + 1f, "2"); } ); } diff --git a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java index 8d8ad94d608d7..da01459b057b6 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java +++ b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java @@ -20,6 +20,7 @@ public class InferenceRestIT extends ESClientYamlSuiteTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .systemProperty("tests.seed", System.getProperty("tests.seed")) .setting("xpack.security.enabled", "false") .setting("xpack.security.http.ssl.enabled", "false") .setting("xpack.license.self_generated.type", "trial") diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml index 88569daaa6070..9a6ecffe29d4d 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml @@ -89,10 +89,7 @@ setup: - length: { hits.hits: 2 } - match: { hits.hits.0._id: "doc_2" } - - close_to: { hits.hits.0._score: { value: 0.4, error: 0.001 } } - - match: { hits.hits.1._id: "doc_1" } - - close_to: { hits.hits.1._score: { value: 0.2, error: 0.001 } } --- "Simple text similarity rank retriever and filtering": @@ -123,8 +120,6 @@ setup: - length: { hits.hits: 1 } - match: { hits.hits.0._id: "doc_1" } - - close_to: { hits.hits.0._score: { value: 0.2, error: 0.001 } } - --- "Text similarity reranking fails if the inference ID does not exist": @@ -211,7 +206,6 @@ setup: - contains: { hits.hits: { _id: "doc_2" } } - contains: { hits.hits: { _id: "doc_1" } } - - close_to: { hits.hits.0._explanation.value: { value: 0.4, error: 0.000001 } } - match: {hits.hits.0._explanation.description: "/text_similarity_reranker.match.using.inference.endpoint:.\\[my-rerank-model\\].on.document.field:.\\[text\\].*/" } - match: {hits.hits.0._explanation.details.0.description: "/weight.*science.*/" } From 1fa1ba79a2f7618249a27cbbe15c8b0d83781ce3 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Tue, 28 Jan 2025 09:57:01 -0500 Subject: [PATCH 133/383] [ML] Add default Elastic Inference Service chat completion endpoint (#120847) * Starting new auth class implementation * Fixing some tests * Working tests * Refactoring * Addressing feedback and pull main --- .../inference/MinimalServiceSettings.java | 5 + .../inference/BaseMockEISAuthServerTest.java | 66 ++++++ 
.../inference/InferenceBaseRestTest.java | 21 +- ...etModelsWithElasticInferenceServiceIT.java | 41 ++++ .../inference/InferenceGetServicesIT.java | 54 +---- ...icInferenceServiceAuthorizationServer.java | 10 +- .../TransportPutInferenceModelAction.java | 11 + .../inference/registry/ModelRegistry.java | 10 + .../elastic/ElasticInferenceService.java | 136 ++++++++++-- .../ElasticInferenceServiceAuthorization.java | 95 ++++++-- ...lasticInferenceServiceCompletionModel.java | 2 +- .../registry/ModelRegistryTests.java | 14 ++ .../elastic/ElasticInferenceServiceTests.java | 174 ++++++++++++++- ...renceServiceAuthorizationHandlerTests.java | 21 +- ...ticInferenceServiceAuthorizationTests.java | 209 ++++++++++++++++-- 15 files changed, 732 insertions(+), 137 deletions(-) create mode 100644 x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java create mode 100644 x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java diff --git a/server/src/main/java/org/elasticsearch/inference/MinimalServiceSettings.java b/server/src/main/java/org/elasticsearch/inference/MinimalServiceSettings.java index be380d74093af..4c81296725809 100644 --- a/server/src/main/java/org/elasticsearch/inference/MinimalServiceSettings.java +++ b/server/src/main/java/org/elasticsearch/inference/MinimalServiceSettings.java @@ -21,6 +21,7 @@ import java.util.Objects; import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; +import static org.elasticsearch.inference.TaskType.CHAT_COMPLETION; import static org.elasticsearch.inference.TaskType.COMPLETION; import static org.elasticsearch.inference.TaskType.RERANK; import static org.elasticsearch.inference.TaskType.SPARSE_EMBEDDING; @@ -97,6 +98,10 @@ public static MinimalServiceSettings completion() { return new MinimalServiceSettings(COMPLETION, null, null, null); } + public static MinimalServiceSettings chatCompletion() { + return new MinimalServiceSettings(CHAT_COMPLETION, null, null, null); + } + public MinimalServiceSettings(Model model) { this( model.getTaskType(), diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java new file mode 100644 index 0000000000000..230b7ff576296 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ *
+ * this file has been contributed to by a Generative AI
+ */
+
+package org.elasticsearch.xpack.inference;
+
+import org.elasticsearch.common.settings.SecureString;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
+import org.elasticsearch.test.cluster.FeatureFlag;
+import org.elasticsearch.test.cluster.local.distribution.DistributionType;
+import org.elasticsearch.test.rest.ESRestTestCase;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.rules.RuleChain;
+import org.junit.rules.TestRule;
+
+public class BaseMockEISAuthServerTest extends ESRestTestCase {
+
+ // The reason we're retrying is there's a race condition between the node retrieving the
+ // authorization response and running the test. Retrieving the authorization should be very fast since
+ // we're hosting a local mock server but it's possible it could respond slower. So in the event of a test failure
+ // we'll automatically retry after waiting a second.
+ @Rule
+ public RetryRule retry = new RetryRule(3, TimeValue.timeValueSeconds(1));
+
+ private static final MockElasticInferenceServiceAuthorizationServer mockEISServer = MockElasticInferenceServiceAuthorizationServer
+ .enabledWithRainbowSprinklesAndElser();
+
+ private static final ElasticsearchCluster cluster = ElasticsearchCluster.local()
+ .distribution(DistributionType.DEFAULT)
+ .setting("xpack.license.self_generated.type", "trial")
+ .setting("xpack.security.enabled", "true")
+ // Adding both settings unless one feature flag is disabled in a particular environment
+ .setting("xpack.inference.elastic.url", mockEISServer::getUrl)
+ // TODO remove this once we've removed DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG and EIS_GATEWAY_URL
+ .setting("xpack.inference.eis.gateway.url", mockEISServer::getUrl)
+ // This plugin is located in the inference/qa/test-service-plugin package, look for TestInferenceServicePlugin
+ .plugin("inference-service-test")
+ .user("x_pack_rest_user", "x-pack-test-password")
+ .feature(FeatureFlag.INFERENCE_UNIFIED_API_ENABLED)
+ .build();
+
+ // The reason we're doing this is to make sure the mock server is initialized first so we can get the address before communicating
+ // it to the cluster as a setting.
+ @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(mockEISServer).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } +} diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 49b2f5b041b9e..5174b5bbb8cb4 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -171,20 +171,20 @@ static String mockDenseServiceModelConfig() { """; } - protected void deleteModel(String modelId) throws IOException { + static void deleteModel(String modelId) throws IOException { var request = new Request("DELETE", "_inference/" + modelId); var response = client().performRequest(request); assertStatusOkOrCreated(response); } - protected Response deleteModel(String modelId, String queryParams) throws IOException { + static Response deleteModel(String modelId, String queryParams) throws IOException { var request = new Request("DELETE", "_inference/" + modelId + "?" + queryParams); var response = client().performRequest(request); assertStatusOkOrCreated(response); return response; } - protected void deleteModel(String modelId, TaskType taskType) throws IOException { + static void deleteModel(String modelId, TaskType taskType) throws IOException { var request = new Request("DELETE", Strings.format("_inference/%s/%s", taskType, modelId)); var response = client().performRequest(request); assertStatusOkOrCreated(response); @@ -229,12 +229,12 @@ protected void putSemanticText(String endpointId, String searchEndpointId, Strin assertStatusOkOrCreated(response); } - protected Map putModel(String modelId, String modelConfig, TaskType taskType) throws IOException { + static Map putModel(String modelId, String modelConfig, TaskType taskType) throws IOException { String endpoint = Strings.format("_inference/%s/%s?error_trace", taskType, modelId); return putRequest(endpoint, modelConfig); } - protected Map updateEndpoint(String inferenceID, String modelConfig, TaskType taskType) throws IOException { + static Map updateEndpoint(String inferenceID, String modelConfig, TaskType taskType) throws IOException { String endpoint = Strings.format("_inference/%s/%s/_update", taskType, inferenceID); return putRequest(endpoint, modelConfig); } @@ -265,12 +265,12 @@ protected void deletePipeline(String pipelineId) throws IOException { /** * Task type should be in modelConfig */ - protected Map putModel(String modelId, String modelConfig) throws IOException { + static Map putModel(String modelId, String modelConfig) throws IOException { String endpoint = Strings.format("_inference/%s", modelId); return putRequest(endpoint, modelConfig); } - Map putRequest(String endpoint, String body) throws IOException { + static Map putRequest(String endpoint, String body) throws IOException { var request = new Request("PUT", endpoint); 
request.setJsonEntity(body); var response = client().performRequest(request); @@ -318,18 +318,17 @@ protected Map getModel(String modelId) throws IOException { } @SuppressWarnings("unchecked") - protected List> getModels(String modelId, TaskType taskType) throws IOException { + static List> getModels(String modelId, TaskType taskType) throws IOException { var endpoint = Strings.format("_inference/%s/%s", taskType, modelId); return (List>) getInternalAsMap(endpoint).get("endpoints"); } @SuppressWarnings("unchecked") - protected List> getAllModels() throws IOException { - var endpoint = Strings.format("_inference/_all"); + static List> getAllModels() throws IOException { return (List>) getInternalAsMap("_inference/_all").get("endpoints"); } - private Map getInternalAsMap(String endpoint) throws IOException { + private static Map getInternalAsMap(String endpoint) throws IOException { var request = new Request("GET", endpoint); var response = client().performRequest(request); assertStatusOkOrCreated(response); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java new file mode 100644 index 0000000000000..76483a5f62fec --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ * + * this file has been contributed to by a Generative AI + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature; + +import java.io.IOException; + +import static org.elasticsearch.xpack.inference.InferenceBaseRestTest.getAllModels; +import static org.elasticsearch.xpack.inference.InferenceBaseRestTest.getModels; +import static org.hamcrest.Matchers.hasSize; + +public class InferenceGetModelsWithElasticInferenceServiceIT extends BaseMockEISAuthServerTest { + + public void testGetDefaultEndpoints() throws IOException { + var allModels = getAllModels(); + var chatCompletionModels = getModels("_all", TaskType.CHAT_COMPLETION); + + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { + assertThat(allModels, hasSize(4)); + assertThat(chatCompletionModels, hasSize(1)); + + for (var model : chatCompletionModels) { + assertEquals("chat_completion", model.get("task_type")); + } + } else { + assertThat(allModels, hasSize(3)); + assertThat(chatCompletionModels, hasSize(0)); + } + + } +} diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java index b448acd5f4a74..856fdeb6287e9 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java @@ -11,20 +11,8 @@ import org.elasticsearch.client.Request; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.FeatureFlag; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.rules.RuleChain; -import org.junit.rules.TestRule; import java.io.IOException; import java.util.ArrayList; @@ -35,47 +23,7 @@ import static org.elasticsearch.xpack.inference.InferenceBaseRestTest.assertStatusOkOrCreated; import static org.hamcrest.Matchers.equalTo; -public class InferenceGetServicesIT extends ESRestTestCase { - - // The reason we're retrying is there's a race condition between the node retrieving the - // authorization response and running the test. Retrieving the authorization should be very fast since - // we're hosting a local mock server but it's possible it could respond slower. So in the even of a test failure - // we'll automatically retry after waiting a second. 
- @Rule - public RetryRule retry = new RetryRule(3, TimeValue.timeValueSeconds(1)); - - private static final MockElasticInferenceServiceAuthorizationServer mockEISServer = MockElasticInferenceServiceAuthorizationServer - .enabledWithSparseEmbeddingsAndChatCompletion(); - - private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .setting("xpack.license.self_generated.type", "trial") - .setting("xpack.security.enabled", "true") - // Adding both settings unless one feature flag is disabled in a particular environment - .setting("xpack.inference.elastic.url", mockEISServer::getUrl) - // TODO remove this once we've removed DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG and EIS_GATEWAY_URL - .setting("xpack.inference.eis.gateway.url", mockEISServer::getUrl) - // This plugin is located in the inference/qa/test-service-plugin package, look for TestInferenceServicePlugin - .plugin("inference-service-test") - .user("x_pack_rest_user", "x-pack-test-password") - .feature(FeatureFlag.INFERENCE_UNIFIED_API_ENABLED) - .build(); - - // The reason we're doing this is to make sure the mock server is initialized first so we can get the address before communicating - // it to the cluster as a setting. - @ClassRule - public static TestRule ruleChain = RuleChain.outerRule(mockEISServer).around(cluster); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - @Override - protected Settings restClientSettings() { - String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); - } +public class InferenceGetServicesIT extends BaseMockEISAuthServerTest { @SuppressWarnings("unchecked") public void testGetServicesWithoutTaskType() throws IOException { diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockElasticInferenceServiceAuthorizationServer.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockElasticInferenceServiceAuthorizationServer.java index 8960a7e1b0258..3ea011c1317cc 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockElasticInferenceServiceAuthorizationServer.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockElasticInferenceServiceAuthorizationServer.java @@ -23,15 +23,19 @@ public class MockElasticInferenceServiceAuthorizationServer implements TestRule private static final Logger logger = LogManager.getLogger(MockElasticInferenceServiceAuthorizationServer.class); private final MockWebServer webServer = new MockWebServer(); - public static MockElasticInferenceServiceAuthorizationServer enabledWithSparseEmbeddingsAndChatCompletion() { + public static MockElasticInferenceServiceAuthorizationServer enabledWithRainbowSprinklesAndElser() { var server = new MockElasticInferenceServiceAuthorizationServer(); String responseJson = """ { "models": [ { - "model_name": "model-a", - "task_types": ["embed/text/sparse", "chat"] + "model_name": "rainbow-sprinkles", + "task_types": ["chat"] + }, + { + "model_name": "elser-v2", + "task_types": ["embed/text/sparse"] } ] } diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index 6168edeca4820..73af12dacfadf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -108,6 +108,17 @@ protected void masterOperation( return; } + if (modelRegistry.containsDefaultConfigId(request.getInferenceEntityId())) { + listener.onFailure( + new ElasticsearchStatusException( + "[{}] is a reserved inference ID. Cannot create a new inference endpoint with a reserved ID.", + RestStatus.BAD_REQUEST, + request.getInferenceEntityId() + ) + ); + return; + } + var requestAsMap = requestToMap(request); var resolvedTaskType = ServiceUtils.resolveTaskType(request.getTaskType(), (String) requestAsMap.remove(TaskType.NAME)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 1369ebf7dd87b..a9642a685aec9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -114,6 +114,16 @@ public ModelRegistry(Client client) { defaultConfigIds = new HashMap<>(); } + /** + * Returns true if the provided inference entity id is the same as one of the default + * endpoints ids. + * @param inferenceEntityId the id to search for + * @return true if we find a match and false if not + */ + public boolean containsDefaultConfigId(String inferenceEntityId) { + return defaultConfigIds.containsKey(inferenceEntityId); + } + /** * Set the default inference ids provided by the services * @param defaultConfigId The default diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index f96d3cb325b09..8b8723b54d683 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.services.elastic; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; @@ -16,9 +18,12 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInference; +import org.elasticsearch.inference.EmptySecretSettings; +import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import 
org.elasticsearch.inference.ModelSecrets; @@ -47,11 +52,14 @@ import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorization; import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticInferenceServiceAuthorizationHandler; import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionModel; +import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionServiceSettings; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.elasticsearch.xpack.inference.telemetry.TraceContext; +import java.util.ArrayList; import java.util.EnumSet; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -77,8 +85,11 @@ public class ElasticInferenceService extends SenderService { public static final String NAME = "elastic"; public static final String ELASTIC_INFERENCE_SERVICE_IDENTIFIER = "Elastic Inference Service"; + private static final Logger logger = LogManager.getLogger(ElasticInferenceService.class); private static final EnumSet IMPLEMENTED_TASK_TYPES = EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION); private static final String SERVICE_NAME = "Elastic"; + static final String DEFAULT_CHAT_COMPLETION_MODEL_ID_V1 = "rainbow-sprinkles"; + static final String DEFAULT_CHAT_COMPLETION_ENDPOINT_ID_V1 = Strings.format(".%s-elastic", DEFAULT_CHAT_COMPLETION_MODEL_ID_V1); /** * The task types that the {@link InferenceAction.Request} can accept. @@ -87,10 +98,13 @@ public class ElasticInferenceService extends SenderService { private final ElasticInferenceServiceComponents elasticInferenceServiceComponents; private Configuration configuration; - private final AtomicReference> enabledTaskTypesRef = new AtomicReference<>(EnumSet.noneOf(TaskType.class)); + private final AtomicReference authRef = new AtomicReference<>(AuthorizedContent.empty()); private final ModelRegistry modelRegistry; private final ElasticInferenceServiceAuthorizationHandler authorizationHandler; private final CountDownLatch authorizationCompletedLatch = new CountDownLatch(1); + // model ids to model information, used for the default config methods to return the list of models and default + // configs + private final Map defaultModelsConfigs; public ElasticInferenceService( HttpRequestSender.Factory factory, @@ -104,15 +118,48 @@ public ElasticInferenceService( this.modelRegistry = Objects.requireNonNull(modelRegistry); this.authorizationHandler = Objects.requireNonNull(authorizationHandler); - configuration = new Configuration(enabledTaskTypesRef.get()); + configuration = new Configuration(authRef.get().taskTypesAndModels.getAuthorizedTaskTypes()); + defaultModelsConfigs = initDefaultEndpoints(elasticInferenceServiceComponents); getAuthorization(); } + private static Map initDefaultEndpoints( + ElasticInferenceServiceComponents elasticInferenceServiceComponents + ) { + return Map.of( + DEFAULT_CHAT_COMPLETION_MODEL_ID_V1, + new DefaultModelConfig( + new ElasticInferenceServiceCompletionModel( + DEFAULT_CHAT_COMPLETION_ENDPOINT_ID_V1, + TaskType.CHAT_COMPLETION, + NAME, + new ElasticInferenceServiceCompletionServiceSettings(DEFAULT_CHAT_COMPLETION_MODEL_ID_V1, null), + EmptyTaskSettings.INSTANCE, + EmptySecretSettings.INSTANCE, + elasticInferenceServiceComponents + ), + MinimalServiceSettings.chatCompletion() + ) + ); + } + + private record DefaultModelConfig(Model model, 
MinimalServiceSettings settings) {} + + private record AuthorizedContent( + ElasticInferenceServiceAuthorization taskTypesAndModels, + List configIds, + List defaultModelConfigs + ) { + static AuthorizedContent empty() { + return new AuthorizedContent(ElasticInferenceServiceAuthorization.newDisabledService(), List.of(), List.of()); + } + } + private void getAuthorization() { try { ActionListener listener = ActionListener.wrap(result -> { - setEnabledTaskTypes(result); + setAuthorizedContent(result); authorizationCompletedLatch.countDown(); }, e -> { // we don't need to do anything if there was a failure, everything is disabled by default @@ -126,17 +173,63 @@ private void getAuthorization() { } } - private synchronized void setEnabledTaskTypes(ElasticInferenceServiceAuthorization auth) { - enabledTaskTypesRef.set(filterTaskTypesByAuthorization(auth)); - configuration = new Configuration(enabledTaskTypesRef.get()); + private synchronized void setAuthorizedContent(ElasticInferenceServiceAuthorization auth) { + var authorizedTaskTypesAndModels = auth.newLimitedToTaskTypes(EnumSet.copyOf(IMPLEMENTED_TASK_TYPES)); + + // recalculate which default config ids and models are authorized now + var authorizedDefaultConfigIds = getAuthorizedDefaultConfigIds(auth); + var authorizedDefaultModelObjects = getAuthorizedDefaultModelsObjects(auth); + authRef.set(new AuthorizedContent(authorizedTaskTypesAndModels, authorizedDefaultConfigIds, authorizedDefaultModelObjects)); + + configuration = new Configuration(authRef.get().taskTypesAndModels.getAuthorizedTaskTypes()); defaultConfigIds().forEach(modelRegistry::addDefaultIds); } - private static EnumSet filterTaskTypesByAuthorization(ElasticInferenceServiceAuthorization auth) { - var implementedTaskTypes = EnumSet.copyOf(IMPLEMENTED_TASK_TYPES); - implementedTaskTypes.retainAll(auth.enabledTaskTypes()); - return implementedTaskTypes; + private List getAuthorizedDefaultConfigIds(ElasticInferenceServiceAuthorization auth) { + var authorizedDefaultModelIds = getAuthorizedDefaultModelIds(auth); + + var authorizedConfigIds = new ArrayList(); + for (var id : authorizedDefaultModelIds) { + var modelConfig = defaultModelsConfigs.get(id); + if (modelConfig != null) { + if (auth.getAuthorizedTaskTypes().contains(modelConfig.model.getTaskType()) == false) { + logger.warn( + Strings.format( + "The authorization response included the default model: %s, " + + "but did not authorize the assumed task type of the model: %s. 
Enabling model.", + id, + modelConfig.model.getTaskType() + ) + ); + } + authorizedConfigIds.add(new DefaultConfigId(modelConfig.model.getInferenceEntityId(), modelConfig.settings(), this)); + } + } + + return authorizedConfigIds; + } + + private Set getAuthorizedDefaultModelIds(ElasticInferenceServiceAuthorization auth) { + var authorizedModels = auth.getAuthorizedModelIds(); + var authorizedDefaultModelIds = new HashSet<>(defaultModelsConfigs.keySet()); + authorizedDefaultModelIds.retainAll(authorizedModels); + + return authorizedDefaultModelIds; + } + + private List getAuthorizedDefaultModelsObjects(ElasticInferenceServiceAuthorization auth) { + var authorizedDefaultModelIds = getAuthorizedDefaultModelIds(auth); + + var authorizedModels = new ArrayList(); + for (var id : authorizedDefaultModelIds) { + var modelConfig = defaultModelsConfigs.get(id); + if (modelConfig != null) { + authorizedModels.add(modelConfig); + } + } + + return authorizedModels; } // Default for testing @@ -152,20 +245,25 @@ void waitForAuthorizationToComplete(TimeValue waitTime) { @Override public synchronized Set supportedStreamingTasks() { - var enabledStreamingTaskTypes = EnumSet.of(TaskType.CHAT_COMPLETION); - enabledStreamingTaskTypes.retainAll(enabledTaskTypesRef.get()); + var authorizedStreamingTaskTypes = EnumSet.of(TaskType.CHAT_COMPLETION); + authorizedStreamingTaskTypes.retainAll(authRef.get().taskTypesAndModels.getAuthorizedTaskTypes()); - if (enabledStreamingTaskTypes.isEmpty() == false) { - enabledStreamingTaskTypes.add(TaskType.ANY); + if (authorizedStreamingTaskTypes.isEmpty() == false) { + authorizedStreamingTaskTypes.add(TaskType.ANY); } - return enabledStreamingTaskTypes; + return authorizedStreamingTaskTypes; } @Override public synchronized List defaultConfigIds() { - // TODO once we have the enabledTaskTypes figure out which default endpoints we should expose - return List.of(); + return authRef.get().configIds; + } + + @Override + public synchronized void defaultConfigs(ActionListener> defaultsListener) { + var models = authRef.get().defaultModelConfigs.stream().map(config -> config.model).toList(); + defaultsListener.onResponse(models); } @Override @@ -298,12 +396,12 @@ public synchronized InferenceServiceConfiguration getConfiguration() { @Override public synchronized EnumSet supportedTaskTypes() { - return enabledTaskTypesRef.get(); + return authRef.get().taskTypesAndModels.getAuthorizedTaskTypes(); } @Override public synchronized boolean hideFromConfigurationApi() { - return enabledTaskTypesRef.get().isEmpty(); + return authRef.get().taskTypesAndModels.isAuthorized() == false; } private static ElasticInferenceServiceModel createModel( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorization.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorization.java index eac64021ac85a..76721bb6dcd7b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorization.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorization.java @@ -12,16 +12,20 @@ import java.util.EnumSet; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; /** - * Provides 
a structure for governing which models (if any) a cluster has access to according to the upstream Elastic Inference Service. - * @param enabledModels a mapping of model ids to a set of {@link TaskType} to indicate which models are available and for which task types + * This is a helper class for managing the response from {@link ElasticInferenceServiceAuthorizationHandler}. */ -public record ElasticInferenceServiceAuthorization(Map> enabledModels) { +public class ElasticInferenceServiceAuthorization { + + private final Map> taskTypeToModels; + private final EnumSet authorizedTaskTypes; + private final Set authorizedModelIds; /** * Converts an authorization response from Elastic Inference Service into the {@link ElasticInferenceServiceAuthorization} format. @@ -30,45 +34,98 @@ public record ElasticInferenceServiceAuthorization(Map * @return a new {@link ElasticInferenceServiceAuthorization} */ public static ElasticInferenceServiceAuthorization of(ElasticInferenceServiceAuthorizationResponseEntity responseEntity) { - var enabledModels = new HashMap>(); + var taskTypeToModelsMap = new HashMap>(); + var enabledTaskTypesSet = EnumSet.noneOf(TaskType.class); + var enabledModelsSet = new HashSet(); for (var model : responseEntity.getAuthorizedModels()) { // if there are no task types we'll ignore the model because it's likely we didn't understand // the task type and don't support it anyway if (model.taskTypes().isEmpty() == false) { - enabledModels.put(model.modelName(), model.taskTypes()); + for (var taskType : model.taskTypes()) { + taskTypeToModelsMap.merge(taskType, Set.of(model.modelName()), (existingModelIds, newModelIds) -> { + var combinedNames = new HashSet<>(existingModelIds); + combinedNames.addAll(newModelIds); + return combinedNames; + }); + enabledTaskTypesSet.add(taskType); + } + enabledModelsSet.add(model.modelName()); } } - return new ElasticInferenceServiceAuthorization(enabledModels); + return new ElasticInferenceServiceAuthorization(taskTypeToModelsMap, enabledModelsSet, enabledTaskTypesSet); } /** * Returns an object indicating that the cluster has no access to Elastic Inference Service. */ public static ElasticInferenceServiceAuthorization newDisabledService() { - return new ElasticInferenceServiceAuthorization(); + return new ElasticInferenceServiceAuthorization(Map.of(), Set.of(), EnumSet.noneOf(TaskType.class)); + } + + private ElasticInferenceServiceAuthorization( + Map> taskTypeToModels, + Set authorizedModelIds, + EnumSet authorizedTaskTypes + ) { + this.taskTypeToModels = Objects.requireNonNull(taskTypeToModels); + this.authorizedModelIds = Objects.requireNonNull(authorizedModelIds); + this.authorizedTaskTypes = Objects.requireNonNull(authorizedTaskTypes); + } + + /** + * Returns true if at least one task type and model is authorized. + * @return true if this cluster is authorized for at least one model and task type. 
+ */ + public boolean isAuthorized() { + return authorizedModelIds.isEmpty() == false && taskTypeToModels.isEmpty() == false && authorizedTaskTypes.isEmpty() == false; } - public ElasticInferenceServiceAuthorization { - Objects.requireNonNull(enabledModels); + public Set getAuthorizedModelIds() { + return Set.copyOf(authorizedModelIds); + } - for (var taskTypes : enabledModels.values()) { - if (taskTypes.isEmpty()) { - throw new IllegalArgumentException("Authorization task types must not be empty"); + public EnumSet getAuthorizedTaskTypes() { + return EnumSet.copyOf(authorizedTaskTypes); + } + + /** + * Returns a new {@link ElasticInferenceServiceAuthorization} object retaining only the specified task types + * and applicable models that leverage those task types. Any task types not specified in the passed in set will be + * excluded from the returned object. This is essentially an intersection. + * @param taskTypes the task types to retain in the newly created object + * @return a new object containing models and task types limited to the specified set. + */ + public ElasticInferenceServiceAuthorization newLimitedToTaskTypes(EnumSet taskTypes) { + var newTaskTypeToModels = new HashMap>(); + var taskTypesThatHaveModels = EnumSet.noneOf(TaskType.class); + + for (var taskType : taskTypes) { + var models = taskTypeToModels.get(taskType); + if (models != null) { + newTaskTypeToModels.put(taskType, models); + // we only want task types that correspond to actual models to ensure we're only enabling valid task types + taskTypesThatHaveModels.add(taskType); } } - } - private ElasticInferenceServiceAuthorization() { - this(Map.of()); + Set newEnabledModels = newTaskTypeToModels.values().stream().flatMap(Set::stream).collect(Collectors.toSet()); + + return new ElasticInferenceServiceAuthorization(newTaskTypeToModels, newEnabledModels, taskTypesThatHaveModels); } - public boolean isEnabled() { - return enabledModels.isEmpty() == false; + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + ElasticInferenceServiceAuthorization that = (ElasticInferenceServiceAuthorization) o; + return Objects.equals(taskTypeToModels, that.taskTypeToModels) + && Objects.equals(authorizedTaskTypes, that.authorizedTaskTypes) + && Objects.equals(authorizedModelIds, that.authorizedModelIds); } - public EnumSet enabledTaskTypes() { - return enabledModels.values().stream().flatMap(Set::stream).collect(Collectors.toCollection(() -> EnumSet.noneOf(TaskType.class))); + @Override + public int hashCode() { + return Objects.hash(taskTypeToModels, authorizedTaskTypes, authorizedModelIds); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionModel.java index b26f80efb1930..5125ade21339d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionModel.java @@ -74,7 +74,7 @@ public ElasticInferenceServiceCompletionModel( } - ElasticInferenceServiceCompletionModel( + public ElasticInferenceServiceCompletionModel( String inferenceEntityId, TaskType taskType, String service, diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index 1e47a9b8d5ab6..162bcc8f09713 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -310,6 +310,20 @@ public void testIdMatchedDefault() { assertFalse(matched.isPresent()); } + public void testContainsDefaultConfigId() { + var client = mockClient(); + var registry = new ModelRegistry(client); + + registry.addDefaultIds( + new InferenceService.DefaultConfigId("foo", MinimalServiceSettings.sparseEmbedding(), mock(InferenceService.class)) + ); + registry.addDefaultIds( + new InferenceService.DefaultConfigId("bar", MinimalServiceSettings.sparseEmbedding(), mock(InferenceService.class)) + ); + assertTrue(registry.containsDefaultConfigId("foo")); + assertFalse(registry.containsDefaultConfigId("baz")); + } + public void testTaskTypeMatchedDefaults() { var defaultConfigIds = new ArrayList(); defaultConfigIds.add( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index 334119f999e4c..5a3a9a29d7564 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -20,9 +20,11 @@ import org.elasticsearch.inference.ChunkedInference; import org.elasticsearch.inference.EmptySecretSettings; import org.elasticsearch.inference.EmptyTaskSettings; +import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; @@ -38,6 +40,7 @@ import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.response.elastic.ElasticInferenceServiceAuthorizationResponseEntity; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; @@ -554,7 +557,16 @@ public void testHideFromConfigurationApi_ReturnsTrue_WithNoAvailableModels() thr public void testHideFromConfigurationApi_ReturnsTrue_WithModelTaskTypesThatAreNotImplemented() throws Exception { try ( var service = createServiceWithMockSender( - new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.of(TaskType.TEXT_EMBEDDING))) + ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-1", + 
EnumSet.of(TaskType.TEXT_EMBEDDING) + ) + ) + ) + ) ) ) { assertTrue(service.hideFromConfigurationApi()); @@ -564,7 +576,16 @@ public void testHideFromConfigurationApi_ReturnsTrue_WithModelTaskTypesThatAreNo public void testHideFromConfigurationApi_ReturnsFalse_WithAvailableModels() throws Exception { try ( var service = createServiceWithMockSender( - new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.of(TaskType.CHAT_COMPLETION))) + ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-1", + EnumSet.of(TaskType.CHAT_COMPLETION) + ) + ) + ) + ) ) ) { assertFalse(service.hideFromConfigurationApi()); @@ -574,7 +595,16 @@ public void testHideFromConfigurationApi_ReturnsFalse_WithAvailableModels() thro public void testGetConfiguration() throws Exception { try ( var service = createServiceWithMockSender( - new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION))) + ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-1", + EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION) + ) + ) + ) + ) ) ) { String content = XContentHelper.stripWhitespace(""" @@ -685,7 +715,16 @@ public void testGetConfiguration_WithoutSupportedTaskTypes_WhenModelsReturnTaskO try ( var service = createServiceWithMockSender( // this service doesn't yet support text embedding so we should still have no task types - new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.of(TaskType.TEXT_EMBEDDING))) + ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-1", + EnumSet.of(TaskType.TEXT_EMBEDDING) + ) + ) + ) + ) ) ) { String content = XContentHelper.stripWhitespace(""" @@ -758,6 +797,60 @@ public void testSupportedStreamingTasks_ReturnsChatCompletion_WhenAuthRespondsWi service.waitForAuthorizationToComplete(TIMEOUT); assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.ANY))); assertTrue(service.defaultConfigIds().isEmpty()); + + PlainActionFuture> listener = new PlainActionFuture<>(); + service.defaultConfigs(listener); + assertTrue(listener.actionGet(TIMEOUT).isEmpty()); + } + } + + public void testSupportedTaskTypes_Returns_TheAuthorizedTaskTypes_IgnoresUnimplementedTaskTypes() throws Exception { + String responseJson = """ + { + "models": [ + { + "model_name": "model-a", + "task_types": ["embed/text/sparse"] + }, + { + "model_name": "model-b", + "task_types": ["embed"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = createServiceWithAuthHandler(senderFactory, getUrl(webServer))) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.SPARSE_EMBEDDING))); + } + } + + public void testSupportedTaskTypes_Returns_TheAuthorizedTaskTypes() throws Exception { + String responseJson = """ + { + "models": [ + { + "model_name": "model-a", + "task_types": ["embed/text/sparse"] + }, + { + "model_name": "model-b", + "task_types": ["chat"] + } + ] + } 
+ """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = createServiceWithAuthHandler(senderFactory, getUrl(webServer))) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION))); } } @@ -780,6 +873,79 @@ public void testSupportedStreamingTasks_ReturnsEmpty_WhenAuthRespondsWithoutChat service.waitForAuthorizationToComplete(TIMEOUT); assertThat(service.supportedStreamingTasks(), is(EnumSet.noneOf(TaskType.class))); assertTrue(service.defaultConfigIds().isEmpty()); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.SPARSE_EMBEDDING))); + + PlainActionFuture> listener = new PlainActionFuture<>(); + service.defaultConfigs(listener); + assertTrue(listener.actionGet(TIMEOUT).isEmpty()); + } + } + + public void testDefaultConfigs_Returns_DefaultChatCompletion_V1_WhenTaskTypeIsIncorrect() throws Exception { + String responseJson = """ + { + "models": [ + { + "model_name": "rainbow-sprinkles", + "task_types": ["embed/text/sparse"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = createServiceWithAuthHandler(senderFactory, getUrl(webServer))) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.noneOf(TaskType.class))); + assertThat( + service.defaultConfigIds(), + is( + List.of( + new InferenceService.DefaultConfigId(".rainbow-sprinkles-elastic", MinimalServiceSettings.chatCompletion(), service) + ) + ) + ); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.SPARSE_EMBEDDING))); + + PlainActionFuture> listener = new PlainActionFuture<>(); + service.defaultConfigs(listener); + assertThat(listener.actionGet(TIMEOUT).get(0).getConfigurations().getInferenceEntityId(), is(".rainbow-sprinkles-elastic")); + } + } + + public void testDefaultConfigs_Returns_DefaultChatCompletion_V1_WhenTaskTypeIsCorrect() throws Exception { + String responseJson = """ + { + "models": [ + { + "model_name": "rainbow-sprinkles", + "task_types": ["chat"] + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = createServiceWithAuthHandler(senderFactory, getUrl(webServer))) { + service.waitForAuthorizationToComplete(TIMEOUT); + assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.ANY))); + assertThat( + service.defaultConfigIds(), + is( + List.of( + new InferenceService.DefaultConfigId(".rainbow-sprinkles-elastic", MinimalServiceSettings.chatCompletion(), service) + ) + ) + ); + assertThat(service.supportedTaskTypes(), is(EnumSet.of(TaskType.CHAT_COMPLETION))); + + PlainActionFuture> listener = new PlainActionFuture<>(); + service.defaultConfigs(listener); + assertThat(listener.actionGet(TIMEOUT).get(0).getConfigurations().getInferenceEntityId(), is(".rainbow-sprinkles-elastic")); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandlerTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandlerTests.java index 43cac4c54aa3c..a819bf1b4a513 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandlerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationHandlerTests.java @@ -31,6 +31,7 @@ import java.io.IOException; import java.util.EnumSet; import java.util.List; +import java.util.Set; import java.util.concurrent.TimeUnit; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; @@ -77,8 +78,9 @@ public void testDoesNotAttempt_ToRetrieveAuthorization_IfBaseUrlIsNull() throws authHandler.getAuthorization(listener, sender); var authResponse = listener.actionGet(TIMEOUT); - assertTrue(authResponse.enabledTaskTypes().isEmpty()); - assertFalse(authResponse.isEnabled()); + assertTrue(authResponse.getAuthorizedTaskTypes().isEmpty()); + assertTrue(authResponse.getAuthorizedModelIds().isEmpty()); + assertFalse(authResponse.isAuthorized()); var loggerArgsCaptor = ArgumentCaptor.forClass(String.class); verify(logger).warn(loggerArgsCaptor.capture()); @@ -97,8 +99,9 @@ public void testDoesNotAttempt_ToRetrieveAuthorization_IfBaseUrlIsEmpty() throws authHandler.getAuthorization(listener, sender); var authResponse = listener.actionGet(TIMEOUT); - assertTrue(authResponse.enabledTaskTypes().isEmpty()); - assertFalse(authResponse.isEnabled()); + assertTrue(authResponse.getAuthorizedTaskTypes().isEmpty()); + assertTrue(authResponse.getAuthorizedModelIds().isEmpty()); + assertFalse(authResponse.isAuthorized()); var loggerArgsCaptor = ArgumentCaptor.forClass(String.class); verify(logger).warn(loggerArgsCaptor.capture()); @@ -131,8 +134,9 @@ public void testGetAuthorization_FailsWhenAnInvalidFieldIsFound() throws IOExcep authHandler.getAuthorization(listener, sender); var authResponse = listener.actionGet(TIMEOUT); - assertTrue(authResponse.enabledTaskTypes().isEmpty()); - assertFalse(authResponse.isEnabled()); + assertTrue(authResponse.getAuthorizedTaskTypes().isEmpty()); + assertTrue(authResponse.getAuthorizedModelIds().isEmpty()); + assertFalse(authResponse.isAuthorized()); var loggerArgsCaptor = ArgumentCaptor.forClass(String.class); verify(logger).warn(loggerArgsCaptor.capture()); @@ -181,8 +185,9 @@ public void testGetAuthorization_ReturnsAValidResponse() throws IOException { authHandler.getAuthorization(listener, sender); var authResponse = listener.actionGet(TIMEOUT); - assertThat(authResponse.enabledTaskTypes(), is(EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION))); - assertTrue(authResponse.isEnabled()); + assertThat(authResponse.getAuthorizedTaskTypes(), is(EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.CHAT_COMPLETION))); + assertThat(authResponse.getAuthorizedModelIds(), is(Set.of("model-a"))); + assertTrue(authResponse.isAuthorized()); var loggerArgsCaptor = ArgumentCaptor.forClass(String.class); verify(logger, times(1)).debug(loggerArgsCaptor.capture()); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationTests.java index 20b52cb7bb314..559de47232a7b 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/authorization/ElasticInferenceServiceAuthorizationTests.java @@ -13,17 +13,23 @@ import java.util.EnumSet; import java.util.List; -import java.util.Map; +import java.util.Set; import static org.hamcrest.Matchers.is; public class ElasticInferenceServiceAuthorizationTests extends ESTestCase { public static ElasticInferenceServiceAuthorization createEnabledAuth() { - return new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.of(TaskType.TEXT_EMBEDDING))); + return ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel("model-1", EnumSet.of(TaskType.TEXT_EMBEDDING)) + ) + ) + ); } - public void testIsEnabled_ReturnsFalse_WithEmptyMap() { - assertFalse(ElasticInferenceServiceAuthorization.newDisabledService().isEnabled()); + public void testIsAuthorized_ReturnsFalse_WithEmptyMap() { + assertFalse(ElasticInferenceServiceAuthorization.newDisabledService().isAuthorized()); } public void testExcludes_ModelsWithoutTaskTypes() { @@ -31,31 +37,196 @@ public void testExcludes_ModelsWithoutTaskTypes() { List.of(new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel("model-1", EnumSet.noneOf(TaskType.class))) ); var auth = ElasticInferenceServiceAuthorization.of(response); - assertTrue(auth.enabledTaskTypes().isEmpty()); - assertFalse(auth.isEnabled()); + assertTrue(auth.getAuthorizedTaskTypes().isEmpty()); + assertFalse(auth.isAuthorized()); } - public void testConstructor_WithModelWithoutTaskTypes_ThrowsException() { - expectThrows( - IllegalArgumentException.class, - () -> new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.noneOf(TaskType.class))) + public void testEnabledTaskTypes_MergesFromSeparateModels() { + var auth = ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel("model-1", EnumSet.of(TaskType.TEXT_EMBEDDING)), + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel("model-2", EnumSet.of(TaskType.SPARSE_EMBEDDING)) + ) + ) ); + assertThat(auth.getAuthorizedTaskTypes(), is(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING))); + assertThat(auth.getAuthorizedModelIds(), is(Set.of("model-1", "model-2"))); } - public void testEnabledTaskTypes_MergesFromSeparateModels() { + public void testEnabledTaskTypes_FromSingleEntry() { + var auth = ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-1", + EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING) + ) + ) + ) + ); + + assertThat(auth.getAuthorizedTaskTypes(), is(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING))); + assertThat(auth.getAuthorizedModelIds(), is(Set.of("model-1"))); + } + + public void testNewLimitToTaskTypes_SingleModel() { + var auth = ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-1", + EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING) + ), + 
new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel("model-2", EnumSet.of(TaskType.CHAT_COMPLETION)) + ) + ) + ); + assertThat( - new ElasticInferenceServiceAuthorization( - Map.of("model-1", EnumSet.of(TaskType.TEXT_EMBEDDING), "model-2", EnumSet.of(TaskType.SPARSE_EMBEDDING)) - ).enabledTaskTypes(), - is(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING)) + auth.newLimitedToTaskTypes(EnumSet.of(TaskType.TEXT_EMBEDDING)), + is( + ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-1", + EnumSet.of(TaskType.TEXT_EMBEDDING) + ) + ) + ) + ) + ) ); } - public void testEnabledTaskTypes_FromSingleEntry() { + public void testNewLimitToTaskTypes_MultipleModels_OnlyTextEmbedding() { + var auth = ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-1", + EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING) + ), + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel("model-2", EnumSet.of(TaskType.TEXT_EMBEDDING)) + ) + ) + ); + assertThat( - new ElasticInferenceServiceAuthorization(Map.of("model-1", EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING))) - .enabledTaskTypes(), - is(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING)) + auth.newLimitedToTaskTypes(EnumSet.of(TaskType.TEXT_EMBEDDING)), + is( + ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-1", + EnumSet.of(TaskType.TEXT_EMBEDDING) + ), + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-2", + EnumSet.of(TaskType.TEXT_EMBEDDING) + ) + ) + ) + ) + ) ); } + + public void testNewLimitToTaskTypes_MultipleModels_MultipleTaskTypes() { + var auth = ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-text-sparse", + EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING) + ), + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-sparse", + EnumSet.of(TaskType.SPARSE_EMBEDDING) + ), + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-chat-completion", + EnumSet.of(TaskType.CHAT_COMPLETION) + ) + ) + ) + ); + + var a = auth.newLimitedToTaskTypes(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.CHAT_COMPLETION)); + assertThat( + a, + is( + ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-text-sparse", + EnumSet.of(TaskType.TEXT_EMBEDDING) + ), + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-chat-completion", + EnumSet.of(TaskType.CHAT_COMPLETION) + ) + ) + ) + ) + ) + ); + } + + public void testNewLimitToTaskTypes_DuplicateModelNames() { + var auth = ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-1", + EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING) + ), + new 
ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-1", + EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.TEXT_EMBEDDING, TaskType.RERANK) + ) + ) + ) + ); + + var a = auth.newLimitedToTaskTypes(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING, TaskType.RERANK)); + assertThat( + a, + is( + ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-1", + EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING, TaskType.RERANK) + ) + ) + ) + ) + ) + ); + } + + public void testNewLimitToTaskTypes_ReturnsDisabled_WhenNoOverlapForTaskTypes() { + var auth = ElasticInferenceServiceAuthorization.of( + new ElasticInferenceServiceAuthorizationResponseEntity( + List.of( + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-1", + EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING) + ), + new ElasticInferenceServiceAuthorizationResponseEntity.AuthorizedModel( + "model-2", + EnumSet.of(TaskType.SPARSE_EMBEDDING, TaskType.TEXT_EMBEDDING) + ) + ) + ) + ); + + var a = auth.newLimitedToTaskTypes(EnumSet.of(TaskType.CHAT_COMPLETION, TaskType.RERANK)); + assertThat(a, is(ElasticInferenceServiceAuthorization.newDisabledService())); + } } From 06664626895d1b8defbaa68b4480a40afacfced7 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Tue, 28 Jan 2025 16:15:44 +0100 Subject: [PATCH 134/383] Use the system index descriptor in the snapshot blob cache cleanup task (#120937) Clean up of the `.snapshot-blob-cache*` system index is done only on the node that hosts the primary of the shard 0 of that index. When the index is migrated as part of an upgrade test e.g. v7 -> v8, the index is reindexed to a new index `.snapshot-blob-cache-reindexed-for-9`. The code scheduling this clean up task is not able to locate the shard and would never trigger a clean up after the upgrade. This change uses the system index descriptor to find the matching shard and would work for future versions too. 
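In essence, the maintenance task is now scheduled on whichever node holds the active primary of shard 0 of a system index matching the descriptor's pattern, instead of looking the index up by its exact configured name. A condensed sketch of the new check follows (the method name here is illustrative; the full version is in the BlobStoreCacheMaintenanceService diff below):

    // Let the system index descriptor decide whether an index (the original or a
    // migrated "-reindexed-for-N" copy) belongs to the snapshot blob cache, then
    // check that its shard 0 primary is active on the local node.
    private boolean localNodeHoldsActivePrimaryOfShardZero(ClusterState state) {
        for (IndexMetadata indexMetadata : state.metadata()) {
            if (indexMetadata.isSystem() == false
                || systemIndexDescriptor.matchesIndexPattern(indexMetadata.getIndex().getName()) == false) {
                continue;
            }
            IndexRoutingTable indexRoutingTable = state.routingTable().index(indexMetadata.getIndex());
            if (indexRoutingTable == null || indexRoutingTable.shard(0) == null) {
                continue;
            }
            var primary = indexRoutingTable.shard(0).primaryShard();
            if (primary != null && primary.active() && Objects.equals(state.nodes().getLocalNodeId(), primary.currentNodeId())) {
                return true;
            }
        }
        return false;
    }
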
Closes https://github.com/elastic/elasticsearch/issues/120518 --- docs/changelog/120937.yaml | 6 +++ ...tsBlobStoreCacheMaintenanceIntegTests.java | 46 ++++++++++++++++++- .../SearchableSnapshots.java | 9 +++- .../BlobStoreCacheMaintenanceService.java | 32 +++++++------ 4 files changed, 77 insertions(+), 16 deletions(-) create mode 100644 docs/changelog/120937.yaml diff --git a/docs/changelog/120937.yaml b/docs/changelog/120937.yaml new file mode 100644 index 0000000000000..6a25690136987 --- /dev/null +++ b/docs/changelog/120937.yaml @@ -0,0 +1,6 @@ +pr: 120937 +summary: Use the system index descriptor in the snapshot blob cache cleanup task +area: Snapshot/Restore +type: bug +issues: + - 120518 diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java index 7eaf5d8f060c6..d8b0d5138a93d 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java @@ -23,8 +23,11 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.reindex.ReindexAction; +import org.elasticsearch.index.reindex.ReindexRequest; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.LuceneFilesExtensions; +import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.repositories.IndexId; @@ -63,9 +66,11 @@ import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_INDEX_NAME_SETTING; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_SNAPSHOT_ID_SETTING; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_SNAPSHOT_NAME_SETTING; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; public class SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests extends BaseFrozenSearchableSnapshotsIntegTestCase { @@ -194,7 +199,6 @@ public void testCleanUpAfterIndicesAreDeleted() throws Exception { } }); } - logger.info("--> deleting indices, maintenance service should clean up snapshot blob cache index"); assertAcked(indicesAdmin().prepareDelete("mounted-*")); assertBusy(() -> { @@ -311,6 +315,46 @@ public void testPeriodicMaintenance() throws Exception { } } + public void testCleanUpMigratedSystemIndexAfterIndicesAreDeleted() throws Exception { + final String repositoryName = "repository"; + createRepository(repositoryName, FsRepository.TYPE); + + final Map> mountedIndices = mountRandomIndicesWithCache(repositoryName, 3, 10); + ensureYellow(SNAPSHOT_BLOB_CACHE_INDEX); + refreshSystemIndex(true); + + final long numberOfEntriesInCache = numberOfEntriesInCache(); + logger.info("--> found [{}] entries in 
snapshot blob cache", numberOfEntriesInCache); + assertThat(numberOfEntriesInCache, equalTo(mountedIndices.values().stream().mapToLong(Tuple::v2).sum())); + + migrateTheSystemIndex(); + + logger.info("--> deleting indices, maintenance service should clean up snapshot blob cache index"); + assertAcked(indicesAdmin().prepareDelete("mounted-*")); + assertBusy(() -> { + refreshSystemIndex(true); + assertHitCount(systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX).setSize(0), 0L); + }); + } + + /** + * Mimics migration of the {@link SearchableSnapshots#SNAPSHOT_BLOB_CACHE_INDEX} as done in + * {@link org.elasticsearch.upgrades.SystemIndexMigrator}, where the index is re-indexed, and replaced by an alias. + */ + private void migrateTheSystemIndex() { + final var migratedSnapshotBlobCache = SNAPSHOT_BLOB_CACHE_INDEX + SystemIndices.UPGRADED_INDEX_SUFFIX; + logger.info("--> migrating {} system index to {}", SNAPSHOT_BLOB_CACHE_INDEX, migratedSnapshotBlobCache); + var reindexRequest = new ReindexRequest().setSourceIndices(SNAPSHOT_BLOB_CACHE_INDEX) + .setDestIndex(migratedSnapshotBlobCache) + .setRefresh(true); + var resp = safeGet(client().execute(ReindexAction.INSTANCE, reindexRequest)); + assertThat(resp.getBulkFailures(), is(empty())); + indicesAdmin().prepareAliases() + .removeIndex(SNAPSHOT_BLOB_CACHE_INDEX) + .addAlias(migratedSnapshotBlobCache, SNAPSHOT_BLOB_CACHE_INDEX) + .get(); + } + /** * @return a {@link Client} that can be used to query the blob store cache system index */ diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java index 778fd3045f7cc..f6a35fb98203d 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java @@ -337,7 +337,14 @@ public Collection createComponents(PluginServices services) { final BlobStoreCacheService blobStoreCacheService = new BlobStoreCacheService(client, SNAPSHOT_BLOB_CACHE_INDEX); this.blobStoreCacheService.set(blobStoreCacheService); clusterService.addListener( - new BlobStoreCacheMaintenanceService(settings, clusterService, threadPool, client, SNAPSHOT_BLOB_CACHE_INDEX) + new BlobStoreCacheMaintenanceService( + settings, + clusterService, + threadPool, + client, + services.systemIndices(), + SNAPSHOT_BLOB_CACHE_INDEX + ) ); components.add(blobStoreCacheService); } else { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java index 21e67212f1f51..44a5bc88abc3b 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java @@ -36,7 +36,6 @@ import org.elasticsearch.cluster.metadata.RepositoriesMetadata; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.routing.IndexRoutingTable; -import org.elasticsearch.cluster.routing.ShardRouting; import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; @@ -47,7 +46,6 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.ThrottledTaskRunner; import org.elasticsearch.core.AbstractRefCounted; -import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; @@ -57,6 +55,8 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -145,6 +145,7 @@ public class BlobStoreCacheMaintenanceService implements ClusterStateListener { private final Client clientWithOrigin; private final String systemIndexName; private final ThreadPool threadPool; + private final SystemIndexDescriptor systemIndexDescriptor; private volatile Scheduler.Cancellable periodicTask; private volatile TimeValue periodicTaskInterval; @@ -158,10 +159,12 @@ public BlobStoreCacheMaintenanceService( ClusterService clusterService, ThreadPool threadPool, Client client, + SystemIndices systemIndices, String systemIndexName ) { this.clientWithOrigin = new OriginSettingClient(Objects.requireNonNull(client), SEARCHABLE_SNAPSHOTS_ORIGIN); this.systemIndexName = Objects.requireNonNull(systemIndexName); + this.systemIndexDescriptor = Objects.requireNonNull(systemIndices.findMatchingDescriptor(systemIndexName)); this.clusterService = Objects.requireNonNull(clusterService); this.threadPool = Objects.requireNonNull(threadPool); this.periodicTaskInterval = SNAPSHOT_SNAPSHOT_CLEANUP_INTERVAL_SETTING.get(settings); @@ -181,10 +184,7 @@ public void clusterChanged(ClusterChangedEvent event) { if (state.getBlocks().hasGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) { return; // state not fully recovered } - final ShardRouting primary = systemIndexPrimaryShard(state); - if (primary == null - || primary.active() == false - || Objects.equals(state.nodes().getLocalNodeId(), primary.currentNodeId()) == false) { + if (systemIndexPrimaryShardActiveAndAssignedToLocalNode(state) == false) { // system index primary shard does not exist or is not assigned to this data node stopPeriodicTask(); return; @@ -242,16 +242,20 @@ private synchronized void stopPeriodicTask() { } } - @Nullable - private ShardRouting systemIndexPrimaryShard(final ClusterState state) { - final IndexMetadata indexMetadata = state.metadata().index(systemIndexName); - if (indexMetadata != null) { - final IndexRoutingTable indexRoutingTable = state.routingTable().index(indexMetadata.getIndex()); - if (indexRoutingTable != null) { - return indexRoutingTable.shard(0).primaryShard(); + private boolean systemIndexPrimaryShardActiveAndAssignedToLocalNode(final ClusterState state) { + for (IndexMetadata indexMetadata : state.metadata()) { + if (indexMetadata.isSystem() && systemIndexDescriptor.matchesIndexPattern(indexMetadata.getIndex().getName())) { + final IndexRoutingTable indexRoutingTable = state.routingTable().index(indexMetadata.getIndex()); + if (indexRoutingTable == null || indexRoutingTable.shard(0) == null) { + continue; + } + 
final var primary = indexRoutingTable.shard(0).primaryShard(); + if (primary != null && primary.active() && Objects.equals(state.nodes().getLocalNodeId(), primary.currentNodeId())) { + return true; + } } } - return null; + return false; } private static boolean hasSearchableSnapshotWith(final ClusterState state, final String snapshotId, final String indexId) { From 343ec0bcc30708bfd69dcbbb4c789bd6caa01279 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Tue, 28 Jan 2025 07:18:22 -0800 Subject: [PATCH 135/383] Restrict apm agent entitlements to the apm package in an unnamed module (#120546) This change closes a hole where we assumed any check against an unnamed-module from any classloader was for one of our apm agent. This was not the case and made it so scripts could in theory have the same entitlements as apm agent. Instead we now check to see if a class is part of the apm package in an unnamed module to ensure it's actually for the apm agent. Relates to ES-10192 --- docs/changelog/120546.yaml | 5 + .../EntitlementInitialization.java | 3 +- .../runtime/policy/PolicyManager.java | 11 ++- .../runtime/policy/PolicyManagerTests.java | 93 +++++++++++++++++-- .../runtime/policy/agent/TestAgent.java | 15 +++ .../policy/agent/inner/TestInnerAgent.java | 15 +++ 6 files changed, 132 insertions(+), 10 deletions(-) create mode 100644 docs/changelog/120546.yaml create mode 100644 libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/TestAgent.java create mode 100644 libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/inner/TestInnerAgent.java diff --git a/docs/changelog/120546.yaml b/docs/changelog/120546.yaml new file mode 100644 index 0000000000000..ec89cb1830311 --- /dev/null +++ b/docs/changelog/120546.yaml @@ -0,0 +1,5 @@ +pr: 120546 +summary: Restrict agent entitlements to the system classloader unnamed module +area: Infra/Plugins +type: bug +issues: [] diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index a8938c16955a7..4bc7c54e4cfda 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -46,6 +46,7 @@ */ public class EntitlementInitialization { + private static final String AGENTS_PACKAGE_NAME = "co.elastic.apm.agent"; private static final Module ENTITLEMENTS_MODULE = PolicyManager.class.getModule(); private static ElasticsearchEntitlementChecker manager; @@ -107,7 +108,7 @@ private static PolicyManager createPolicyManager() { // this should be removed once https://github.com/elastic/elasticsearch/issues/109335 is completed List agentEntitlements = List.of(new CreateClassLoaderEntitlement()); var resolver = EntitlementBootstrap.bootstrapArgs().pluginResolver(); - return new PolicyManager(serverPolicy, agentEntitlements, pluginPolicies, resolver, ENTITLEMENTS_MODULE); + return new PolicyManager(serverPolicy, agentEntitlements, pluginPolicies, resolver, AGENTS_PACKAGE_NAME, ENTITLEMENTS_MODULE); } private static ElasticsearchEntitlementChecker initChecker() { diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index 
f6448bc455d14..dcdc7d1a47f9f 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -84,6 +84,11 @@ private static Set findSystemModules() { .collect(Collectors.toUnmodifiableSet()); } + /** + * The package name containing agent classes. + */ + private final String agentsPackageName; + /** * Frames originating from this module are ignored in the permission logic. */ @@ -94,6 +99,7 @@ public PolicyManager( List agentEntitlements, Map pluginPolicies, Function, String> pluginResolver, + String agentsPackageName, Module entitlementsModule ) { this.serverEntitlements = buildScopeEntitlementsMap(requireNonNull(serverPolicy)); @@ -102,6 +108,7 @@ public PolicyManager( .stream() .collect(toUnmodifiableMap(Map.Entry::getKey, e -> buildScopeEntitlementsMap(e.getValue()))); this.pluginResolver = pluginResolver; + this.agentsPackageName = agentsPackageName; this.entitlementsModule = entitlementsModule; } @@ -318,8 +325,8 @@ private ModuleEntitlements computeEntitlements(Class requestingClass) { } } - if (requestingModule.isNamed() == false) { - // agents are the only thing running non-modular + if (requestingModule.isNamed() == false && requestingClass.getPackageName().startsWith(agentsPackageName)) { + // agents are the only thing running non-modular in the system classloader return ModuleEntitlements.from(agentEntitlements); } diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index c3acefbbb323b..f50cd217696de 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -10,6 +10,8 @@ package org.elasticsearch.entitlement.runtime.policy; import org.elasticsearch.entitlement.runtime.policy.PolicyManager.ModuleEntitlements; +import org.elasticsearch.entitlement.runtime.policy.agent.TestAgent; +import org.elasticsearch.entitlement.runtime.policy.agent.inner.TestInnerAgent; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.compiler.InMemoryJavaCompiler; import org.elasticsearch.test.jar.JarUtils; @@ -18,6 +20,8 @@ import java.io.IOException; import java.lang.module.Configuration; import java.lang.module.ModuleFinder; +import java.net.URL; +import java.net.URLClassLoader; import java.nio.file.Path; import java.util.Arrays; import java.util.List; @@ -36,6 +40,12 @@ @ESTestCase.WithoutSecurityManager public class PolicyManagerTests extends ESTestCase { + + /** + * A test agent package name for use in tests. + */ + private static final String TEST_AGENTS_PACKAGE_NAME = "org.elasticsearch.entitlement.runtime.policy.agent"; + /** * A module you can use for test cases that don't actually care about the * entitlement module. 
@@ -59,6 +69,7 @@ public void testGetEntitlementsThrowsOnMissingPluginUnnamedModule() { List.of(), Map.of("plugin1", createPluginPolicy("plugin.module")), c -> "plugin1", + TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE ); @@ -72,7 +83,14 @@ public void testGetEntitlementsThrowsOnMissingPluginUnnamedModule() { } public void testGetEntitlementsThrowsOnMissingPolicyForPlugin() { - var policyManager = new PolicyManager(createEmptyTestServerPolicy(), List.of(), Map.of(), c -> "plugin1", NO_ENTITLEMENTS_MODULE); + var policyManager = new PolicyManager( + createEmptyTestServerPolicy(), + List.of(), + Map.of(), + c -> "plugin1", + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ); // Any class from the current module (unnamed) will do var callerClass = this.getClass(); @@ -84,7 +102,14 @@ public void testGetEntitlementsThrowsOnMissingPolicyForPlugin() { } public void testGetEntitlementsFailureIsCached() { - var policyManager = new PolicyManager(createEmptyTestServerPolicy(), List.of(), Map.of(), c -> "plugin1", NO_ENTITLEMENTS_MODULE); + var policyManager = new PolicyManager( + createEmptyTestServerPolicy(), + List.of(), + Map.of(), + c -> "plugin1", + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ); // Any class from the current module (unnamed) will do var callerClass = this.getClass(); @@ -106,6 +131,7 @@ public void testGetEntitlementsReturnsEntitlementsForPluginUnnamedModule() { List.of(), Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))), c -> "plugin2", + TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE ); @@ -117,7 +143,14 @@ public void testGetEntitlementsReturnsEntitlementsForPluginUnnamedModule() { } public void testGetEntitlementsThrowsOnMissingPolicyForServer() throws ClassNotFoundException { - var policyManager = new PolicyManager(createTestServerPolicy("example"), List.of(), Map.of(), c -> null, NO_ENTITLEMENTS_MODULE); + var policyManager = new PolicyManager( + createTestServerPolicy("example"), + List.of(), + Map.of(), + c -> null, + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ); // Tests do not run modular, so we cannot use a server class. // But we know that in production code the server module and its classes are in the boot layer. 
@@ -137,6 +170,7 @@ public void testGetEntitlementsReturnsEntitlementsForServerModule() throws Class List.of(), Map.of(), c -> null, + TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE ); @@ -161,6 +195,7 @@ public void testGetEntitlementsReturnsEntitlementsForPluginModule() throws IOExc List.of(), Map.of("mock-plugin", createPluginPolicy("org.example.plugin")), c -> "mock-plugin", + TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE ); @@ -181,6 +216,7 @@ public void testGetEntitlementsResultIsCached() { List.of(), Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))), c -> "plugin2", + TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE ); @@ -200,16 +236,17 @@ public void testGetEntitlementsResultIsCached() { public void testRequestingClassFastPath() throws IOException, ClassNotFoundException { var callerClass = makeClassInItsOwnModule(); - assertEquals(callerClass, policyManagerWithEntitlementsModule(NO_ENTITLEMENTS_MODULE).requestingClass(callerClass)); + assertEquals(callerClass, policyManager(TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE).requestingClass(callerClass)); } public void testRequestingModuleWithStackWalk() throws IOException, ClassNotFoundException { + var agentsClass = new TestAgent(); var entitlementsClass = makeClassInItsOwnModule(); // A class in the entitlements library itself var requestingClass = makeClassInItsOwnModule(); // This guy is always the right answer var instrumentedClass = makeClassInItsOwnModule(); // The class that called the check method var ignorableClass = makeClassInItsOwnModule(); - var policyManager = policyManagerWithEntitlementsModule(entitlementsClass.getModule()); + var policyManager = policyManager(TEST_AGENTS_PACKAGE_NAME, entitlementsClass.getModule()); assertEquals( "Skip entitlement library and the instrumented method", @@ -229,6 +266,31 @@ public void testRequestingModuleWithStackWalk() throws IOException, ClassNotFoun ); } + public void testAgentsEntitlements() throws IOException, ClassNotFoundException { + Path home = createTempDir(); + Path unnamedJar = createMockPluginJarForUnnamedModule(home); + var notAgentClass = makeClassInItsOwnModule(); + var policyManager = new PolicyManager( + createEmptyTestServerPolicy(), + List.of(new CreateClassLoaderEntitlement()), + Map.of(), + c -> "test", + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ); + ModuleEntitlements agentsEntitlements = policyManager.getEntitlements(TestAgent.class); + assertThat(agentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); + agentsEntitlements = policyManager.getEntitlements(TestInnerAgent.class); + assertThat(agentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); + ModuleEntitlements notAgentsEntitlements = policyManager.getEntitlements(notAgentClass); + assertThat(notAgentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(false)); + try (URLClassLoader classLoader = new URLClassLoader(new URL[] { unnamedJar.toUri().toURL() }, getClass().getClassLoader())) { + var unnamedNotAgentClass = classLoader.loadClass("q.B"); + notAgentsEntitlements = policyManager.getEntitlements(unnamedNotAgentClass); + assertThat(notAgentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(false)); + } + } + private static Class makeClassInItsOwnModule() throws IOException, ClassNotFoundException { final Path home = createTempDir(); Path jar = createMockPluginJar(home); @@ -236,8 +298,15 @@ private static Class makeClassInItsOwnModule() throws IOException, ClassNotFo return 
layer.findLoader("org.example.plugin").loadClass("q.B"); } - private static PolicyManager policyManagerWithEntitlementsModule(Module entitlementsModule) { - return new PolicyManager(createEmptyTestServerPolicy(), List.of(), Map.of(), c -> "test", entitlementsModule); + private static Class makeClassInItsOwnUnnamedModule() throws IOException, ClassNotFoundException { + final Path home = createTempDir(); + Path jar = createMockPluginJar(home); + var layer = createLayerForJar(jar, "org.example.plugin"); + return layer.findLoader("org.example.plugin").loadClass("q.B"); + } + + private static PolicyManager policyManager(String agentsPackageName, Module entitlementsModule) { + return new PolicyManager(createEmptyTestServerPolicy(), List.of(), Map.of(), c -> "test", agentsPackageName, entitlementsModule); } private static Policy createEmptyTestServerPolicy() { @@ -262,6 +331,16 @@ private static Policy createPluginPolicy(String... pluginModules) { ); } + private static Path createMockPluginJarForUnnamedModule(Path home) throws IOException { + Path jar = home.resolve("unnamed-mock-plugin.jar"); + + Map sources = Map.ofEntries(entry("q.B", "package q; public class B { }")); + + var classToBytes = InMemoryJavaCompiler.compile(sources); + JarUtils.createJarWithEntries(jar, Map.ofEntries(entry("q/B.class", classToBytes.get("q.B")))); + return jar; + } + private static Path createMockPluginJar(Path home) throws IOException { Path jar = home.resolve("mock-plugin.jar"); diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/TestAgent.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/TestAgent.java new file mode 100644 index 0000000000000..2935fdd6faf31 --- /dev/null +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/TestAgent.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy.agent; + +/** + * Dummy class for testing agent entitlements. + */ +public class TestAgent {} diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/inner/TestInnerAgent.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/inner/TestInnerAgent.java new file mode 100644 index 0000000000000..d4a9531bafbb2 --- /dev/null +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/agent/inner/TestInnerAgent.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy.agent.inner; + +/** + * Dummy class for testing agent entitlements. 
+ */ +public class TestInnerAgent {} From 3939198477af67ff8ef50892b7b877a55d9cce47 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 28 Jan 2025 16:19:14 +0100 Subject: [PATCH 136/383] Update match-phrase-query.asciidoc (#118828) (#121033) (cherry picked from commit 8e9cccba6a9aa2eef2e109d250409bcb679881bc) Co-authored-by: Damien RENIER <153135842+damien-renier-elastic@users.noreply.github.com> --- .../query-dsl/match-phrase-query.asciidoc | 38 ++++++++++++++++++- 1 file changed, 36 insertions(+), 2 deletions(-) diff --git a/docs/reference/query-dsl/match-phrase-query.asciidoc b/docs/reference/query-dsl/match-phrase-query.asciidoc index f6b0fa19001f6..88046bc009e7d 100644 --- a/docs/reference/query-dsl/match-phrase-query.asciidoc +++ b/docs/reference/query-dsl/match-phrase-query.asciidoc @@ -19,9 +19,45 @@ GET /_search } -------------------------------------------------- +[[match-phrase-field-params]] +==== Parameters for `` +`query`:: ++ +-- +(Required) Text, number, boolean value or date you wish to find in the provided +``. +-- + +`analyzer`:: +(Optional, string) <> used to convert the text in the `query` +value into tokens. Defaults to the <> mapped for the ``. If no analyzer is mapped, the index's +default analyzer is used. + +`slop`:: +(Optional, integer) Maximum number of positions allowed between matching tokens. +Defaults to `0`. Transposed terms have a slop of `2`. + +`zero_terms_query`:: ++ +-- +(Optional, string) Indicates whether no documents are returned if the `analyzer` +removes all tokens, such as when using a `stop` filter. Valid values are: + + `none` (Default):: +No documents are returned if the `analyzer` removes all tokens. + + `all`:: +Returns all documents, similar to a <> +query. +-- + A phrase query matches terms up to a configurable `slop` (which defaults to 0) in any order. Transposed terms have a slop of 2. +[[query-dsl-match-query-phrase-analyzer]] +===== Analyzer in the match phrase query + The `analyzer` can be set to control which analyzer will perform the analysis process on the text. It defaults to the field explicit mapping definition, or the default search analyzer, for example: @@ -40,5 +76,3 @@ GET /_search } } -------------------------------------------------- - -This query also accepts `zero_terms_query`, as explained in <>. From 953f1749a4f106acac634772ca1bf3303924ce00 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Tue, 28 Jan 2025 16:29:50 +0100 Subject: [PATCH 137/383] [Docs] Update Query Roles API documentation (#120740) The query role API now returns built-in roles as well. This PR notes this and adds an example on how the built-in roles can be filtered out. 
--- docs/build.gradle | 1 - .../rest-api/security/query-role.asciidoc | 19 +++++++++++++++++-- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/docs/build.gradle b/docs/build.gradle index 3a1070b3fc356..cdb879485ae3c 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -120,7 +120,6 @@ testClusters.matching { it.name == "yamlRestTest"}.configureEach { // TODO: remove this once cname is prepended to transport.publish_address by default in 8.0 systemProperty 'es.transport.cname_in_publish_address', 'true' - systemProperty 'es.queryable_built_in_roles_enabled', 'false' requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") requiresFeature 'es.failure_store_feature_flag_enabled', Version.fromString("8.12.0") diff --git a/docs/reference/rest-api/security/query-role.asciidoc b/docs/reference/rest-api/security/query-role.asciidoc index acdfbb45b84f6..907e77866cd02 100644 --- a/docs/reference/rest-api/security/query-role.asciidoc +++ b/docs/reference/rest-api/security/query-role.asciidoc @@ -31,9 +31,13 @@ Retrieves roles with <> in a <>. -The query roles API does not retrieve roles that are defined in roles files, nor <> ones. +The query roles API does not retrieve roles that are defined in `roles.yml` files. You can optionally filter the results with a query. Also, the results can be paginated and sorted. +NOTE: This API automatically returns <> roles as well. +The built-in roles can be filtered out by using the `metadata._reserved` field in the query. +See <> below. + [[security-api-query-role-request-body]] ==== {api-request-body-title} @@ -127,12 +131,21 @@ It contains the array of values that have been used for sorting. [[security-api-query-role-example]] ==== {api-examples-title} -The following request lists all roles, sorted by the role name: +The following request lists all roles (except built-in ones), sorted by the role name: [source,console] ---- POST /_security/_query/role { + "query": { + "bool": { + "must_not": { + "term": { + "metadata._reserved": true + } + } + } + }, "sort": ["name"] } ---- @@ -222,6 +235,7 @@ retrieved for one or more roles: ] } ---- +// TESTRESPONSE[s/"total": 2/"total" : $body.total/] // TEST[continued] <1> The list of roles that were retrieved for this request @@ -287,3 +301,4 @@ POST /_security/_query/role ] } ---- +// TESTRESPONSE[s/"total": 2/"total" : $body.total/] From 0569fc7f2439213ab36107a4559d78ce1aef3234 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Tue, 28 Jan 2025 16:40:01 +0100 Subject: [PATCH 138/383] Forbid the removal of the write block if the index is read-only (#120648) Ensure that a `write` block cannot be removed on a read-only compatible index in version N-2, while allowing to change a `read_only` block into a `write` block if needed as well as closing/reopening such indices. Requires #120647 to be merged on `8.x`. 
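From an operator's point of view the new validation behaves as sketched below (a condensed example assembled from the test helpers added in this patch, not the production validation code; `index` stands for a read-only compatible N-2 index):

    // On an open index carrying a write block, dropping the block is rejected.
    var ex = expectThrows(
        ResponseException.class,
        () -> updateIndexSettings(index, Settings.builder().putNull(IndexMetadata.APIBlock.WRITE.settingName()))
    );
    assertThat(ex.getMessage(), containsString("Can't remove the write block on read-only compatible index"));

    // A read_only block can still be swapped for a write block, and the index can be
    // closed and reopened as usual.
    updateIndexSettings(
        index,
        Settings.builder()
            .putNull(IndexMetadata.APIBlock.READ_ONLY.settingName())
            .put(IndexMetadata.APIBlock.WRITE.settingName(), true)
    );
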
Relates ES-10320 --- .../AbstractIndexCompatibilityTestCase.java | 57 +++++- ...sterRestartLuceneIndexCompatibilityIT.java | 166 +++++++++------- ...gradeLuceneIndexCompatibilityTestCase.java | 187 +++++++++++++----- .../indices/settings/UpdateSettingsIT.java | 33 ++++ .../cluster/block/ClusterBlocks.java | 8 + .../MetadataUpdateSettingsService.java | 71 ++++++- .../test/rest/ESRestTestCase.java | 2 +- 7 files changed, 393 insertions(+), 131 deletions(-) diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/AbstractIndexCompatibilityTestCase.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/AbstractIndexCompatibilityTestCase.java index ac4e1d9175885..9bb5b7e944389 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/AbstractIndexCompatibilityTestCase.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/AbstractIndexCompatibilityTestCase.java @@ -12,8 +12,12 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.InputStreamEntity; import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.MetadataIndexStateService; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MapperService; @@ -24,6 +28,7 @@ import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentType; +import org.hamcrest.Matcher; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; @@ -32,16 +37,18 @@ import org.junit.rules.TestRule; import java.io.IOException; +import java.util.Comparator; import java.util.HashMap; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.stream.IntStream; -import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING; import static org.elasticsearch.test.cluster.util.Version.CURRENT; import static org.elasticsearch.test.cluster.util.Version.fromString; import static org.elasticsearch.test.rest.ObjectPath.createFromResponse; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; @@ -272,9 +279,51 @@ protected void addIndexBlock(String indexName, IndexMetadata.APIBlock apiBlock) assertAcknowledged(client().performRequest(request)); } - protected void assertThatIndexBlock(String indexName, IndexMetadata.APIBlock apiBlock) throws Exception { + private static ClusterBlock toIndexBlock(String blockId) { + int block = Integer.parseInt(blockId); + for (var indexBlock : List.of( + IndexMetadata.INDEX_READ_ONLY_BLOCK, + IndexMetadata.INDEX_READ_BLOCK, + IndexMetadata.INDEX_WRITE_BLOCK, + IndexMetadata.INDEX_METADATA_BLOCK, + IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK, + IndexMetadata.INDEX_REFRESH_BLOCK, + MetadataIndexStateService.INDEX_CLOSED_BLOCK + )) { + if (block == indexBlock.id()) { + return indexBlock; + } + } + throw new AssertionError("No index block found with id [" + blockId + ']'); + } + + 
@SuppressWarnings("unchecked") + protected static List indexBlocks(String indexName) throws Exception { + var responseBody = createFromResponse(client().performRequest(new Request("GET", "_cluster/state/blocks/" + indexName))); + var blocks = (Map) responseBody.evaluate("blocks.indices." + indexName); + if (blocks == null || blocks.isEmpty()) { + return List.of(); + } + return blocks.keySet() + .stream() + .map(AbstractIndexCompatibilityTestCase::toIndexBlock) + .sorted(Comparator.comparing(ClusterBlock::id)) + .toList(); + } + + @SuppressWarnings("unchecked") + protected static void assertIndexSetting(String indexName, Setting setting, Matcher matcher) throws Exception { var indexSettings = getIndexSettingsAsMap(indexName); - assertThat(indexSettings.get(VERIFIED_READ_ONLY_SETTING.getKey()), equalTo(Boolean.TRUE.toString())); - assertThat(indexSettings.get(apiBlock.settingName()), equalTo(Boolean.TRUE.toString())); + assertThat(Boolean.parseBoolean((String) indexSettings.get(setting.getKey())), matcher); + } + + protected static ResponseException expectUpdateIndexSettingsThrows(String indexName, Settings.Builder settings) { + var exception = expectThrows(ResponseException.class, () -> updateIndexSettings(indexName, settings)); + assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(400)); + return exception; + } + + protected static Matcher containsStringCannotRemoveBlockOnReadOnlyIndex(String indexName) { + return allOf(containsString("Can't remove the write block on read-only compatible index"), containsString(indexName)); } } diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java index d7829d8225034..a442bc2f53736 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java @@ -9,13 +9,23 @@ package org.elasticsearch.lucene; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.test.cluster.util.Version; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_READ_ONLY_BLOCK; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_WRITE_BLOCK; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.INDEX_CLOSED_BLOCK; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_BEFORE_CLOSE_SETTING; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class FullClusterRestartLuceneIndexCompatibilityIT extends FullClusterRestartIndexCompatibilityTestCase { @@ -28,14 +38,13 @@ public FullClusterRestartLuceneIndexCompatibilityIT(Version version) { } /** - * Creates an index on N-2, upgrades to N -1 and marks as read-only, then upgrades to N. 
+ * Creates an index on N-2, upgrades to N-1 and marks as read-only, then upgrades to N. */ public void testIndexUpgrade() throws Exception { final String index = suffix("index"); final int numDocs = 2431; if (isFullyUpgradedTo(VERSION_MINUS_2)) { - logger.debug("--> creating index [{}]", index); createIndex( client(), index, @@ -45,29 +54,85 @@ public void testIndexUpgrade() throws Exception { .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) .build() ); - - logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); indexDocs(index, numDocs); return; } - if (isFullyUpgradedTo(VERSION_MINUS_1)) { - ensureGreen(index); + assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + ensureGreen(index); - assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + if (isIndexClosed(index) == false) { assertDocCount(client(), index, numDocs); + } - addIndexBlock(index, IndexMetadata.APIBlock.WRITE); + if (isFullyUpgradedTo(VERSION_MINUS_1)) { + final boolean maybeClose = randomBoolean(); + if (maybeClose) { + logger.debug("--> closing index [{}] before upgrade", index); + closeIndex(index); + } + + final var block = randomFrom(IndexMetadata.APIBlock.WRITE, IndexMetadata.APIBlock.READ_ONLY); + addIndexBlock(index, block); + + assertThat(indexBlocks(index), maybeClose ? contains(INDEX_CLOSED_BLOCK, block.getBlock()) : contains(block.getBlock())); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(maybeClose)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); return; } if (isFullyUpgradedTo(VERSION_CURRENT)) { - ensureGreen(index); + final var isClosed = isIndexClosed(index); + logger.debug("--> upgraded index [{}] is in [{}] state", index, isClosed ? "closed" : "open"); + assertThat( + indexBlocks(index), + isClosed + ? 
either(contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK)).or(contains(INDEX_CLOSED_BLOCK, INDEX_READ_ONLY_BLOCK)) + : either(contains(INDEX_WRITE_BLOCK)).or(contains(INDEX_READ_ONLY_BLOCK)) + ); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(isClosed)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + + if (isClosed == false) { + logger.debug("--> write/read_only API blocks cannot be removed on an opened index"); + var ex = expectUpdateIndexSettingsThrows( + index, + Settings.builder() + .putNull(IndexMetadata.APIBlock.WRITE.settingName()) + .putNull(IndexMetadata.APIBlock.READ_ONLY.settingName()) + ); + assertThat(ex.getMessage(), containsStringCannotRemoveBlockOnReadOnlyIndex(index)); + + } else if (randomBoolean()) { + logger.debug("--> write/read_only API blocks can be removed on a closed index: INDEX_CLOSED_BLOCK already blocks writes"); + updateIndexSettings( + index, + Settings.builder() + .putNull(IndexMetadata.APIBlock.WRITE.settingName()) + .putNull(IndexMetadata.APIBlock.READ_ONLY.settingName()) + ); + logger.debug("--> but attempts to re-opening [{}] should fail due to the missing block", index); + var ex = expectThrows(ResponseException.class, () -> openIndex(index)); + assertThat(ex.getMessage(), containsString("must be marked as read-only")); + + // TODO this could be randomized once we support recovering verified-before-close closed indices with no write/ro block + addIndexBlock(index, IndexMetadata.APIBlock.WRITE); + } - assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); - assertDocCount(client(), index, numDocs); + var block = indexBlocks(index).stream().filter(c -> c.equals(INDEX_WRITE_BLOCK) || c.equals(INDEX_READ_ONLY_BLOCK)).findFirst(); + if (block.isPresent() && block.get().equals(INDEX_READ_ONLY_BLOCK)) { + logger.debug("--> read_only API block can be replaced by a write block (required for the remaining tests)"); + updateIndexSettings( + index, + Settings.builder() + .putNull(IndexMetadata.APIBlock.READ_ONLY.settingName()) + .put(IndexMetadata.APIBlock.WRITE.settingName(), true) + ); + } - assertThatIndexBlock(index, IndexMetadata.APIBlock.WRITE); + assertThat(indexBlocks(index), isClosed ? 
contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK) : contains(INDEX_WRITE_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(isClosed)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); var numberOfReplicas = getNumberOfReplicas(index); if (0 < numberOfReplicas) { @@ -82,66 +147,29 @@ public void testIndexUpgrade() throws Exception { updateIndexSettings(index, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)); ensureGreen(index); - logger.debug("--> closing restored index [{}]", index); - closeIndex(index); - ensureGreen(index); + if (isClosed) { + logger.debug("--> re-opening index [{}]", index); + openIndex(index); + ensureGreen(index); - logger.debug("--> adding replica to test peer-recovery for closed shards"); - updateIndexSettings(index, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 2)); - ensureGreen(index); + assertDocCount(client(), index, numDocs); + } else { + logger.debug("--> closing index [{}]", index); + closeIndex(index); + ensureGreen(index); + } - logger.debug("--> re-opening restored index [{}]", index); - openIndex(index); + logger.debug("--> adding more replicas to test peer-recovery"); + updateIndexSettings(index, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 2)); ensureGreen(index); - assertDocCount(client(), index, numDocs); - - logger.debug("--> deleting index [{}]", index); - deleteIndex(index); - } - } - - /** - * Similar to {@link #testIndexUpgrade()} but with a read_only block. - */ - public void testIndexUpgradeReadOnlyBlock() throws Exception { - final String index = suffix("index"); - final int numDocs = 2531; - - if (isFullyUpgradedTo(VERSION_MINUS_2)) { - logger.debug("--> creating index [{}]", index); - createIndex( - client(), - index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2)) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + assertThat( + indexBlocks(index), + isIndexClosed(index) ? 
contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK) : contains(INDEX_WRITE_BLOCK) ); - logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); - indexDocs(index, numDocs); - return; - } - - if (isFullyUpgradedTo(VERSION_MINUS_1)) { - ensureGreen(index); - - assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); - assertDocCount(client(), index, numDocs); - - addIndexBlock(index, IndexMetadata.APIBlock.READ_ONLY); - return; - } - - if (isFullyUpgradedTo(VERSION_CURRENT)) { - ensureGreen(index); - - assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); - assertDocCount(client(), index, numDocs); - - assertThatIndexBlock(index, IndexMetadata.APIBlock.READ_ONLY); + deleteIndex(index); } } @@ -196,7 +224,8 @@ public void testRestoreIndex() throws Exception { restoreIndex(repository, snapshot, index, restoredIndex); ensureGreen(restoredIndex); - assertThatIndexBlock(restoredIndex, IndexMetadata.APIBlock.WRITE); + assertIndexSetting(restoredIndex, VERIFIED_READ_ONLY_SETTING, is(true)); + assertThat(indexBlocks(restoredIndex), contains(INDEX_WRITE_BLOCK)); assertThat(indexVersion(restoredIndex), equalTo(VERSION_MINUS_2)); assertDocCount(client(), restoredIndex, numDocs); @@ -277,7 +306,8 @@ public void testRestoreIndexOverClosedIndex() throws Exception { if (isFullyUpgradedTo(VERSION_CURRENT)) { assertThat(isIndexClosed(index), equalTo(true)); - assertThatIndexBlock(index, IndexMetadata.APIBlock.WRITE); + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); logger.debug("--> restoring index [{}] over existing closed index", index); restoreIndex(repository, snapshot, index, index); diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java index b145b1e08c71d..98054cb4b4f39 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java @@ -19,7 +19,19 @@ import java.util.List; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_READ_ONLY_BLOCK; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_WRITE_BLOCK; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.INDEX_CLOSED_BLOCK; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_BEFORE_CLOSE_SETTING; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; public class RollingUpgradeLuceneIndexCompatibilityTestCase extends RollingUpgradeIndexCompatibilityTestCase { @@ -39,7 +51,6 @@ public void testIndexUpgrade() throws Exception { final int numDocs = 2543; if (isFullyUpgradedTo(VERSION_MINUS_2)) { - logger.debug("--> creating index [{}]", index); createIndex( client(), index, @@ -49,26 +60,108 @@ public void testIndexUpgrade() 
throws Exception { .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) .build() ); - - logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); indexDocs(index, numDocs); return; } + assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); ensureGreen(index); - if (isFullyUpgradedTo(VERSION_MINUS_1)) { - assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + if (isIndexClosed(index) == false) { assertDocCount(client(), index, numDocs); + } - addIndexBlock(index, IndexMetadata.APIBlock.WRITE); + if (isFullyUpgradedTo(VERSION_MINUS_1)) { + final var maybeClose = randomBoolean(); + if (maybeClose) { + logger.debug("--> closing index [{}] before upgrade", index); + closeIndex(index); + } + + final var randomBlocks = randomFrom( + List.of(IndexMetadata.APIBlock.WRITE, IndexMetadata.APIBlock.READ_ONLY), + List.of(IndexMetadata.APIBlock.READ_ONLY), + List.of(IndexMetadata.APIBlock.WRITE) + ); + for (var randomBlock : randomBlocks) { + addIndexBlock(index, randomBlock); + assertThat(indexBlocks(index), hasItem(randomBlock.getBlock())); + } + + assertThat(indexBlocks(index), maybeClose ? hasItem(INDEX_CLOSED_BLOCK) : not(hasItem(INDEX_CLOSED_BLOCK))); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(maybeClose)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); return; } if (nodesVersions().values().stream().anyMatch(v -> v.onOrAfter(VERSION_CURRENT))) { - assertThatIndexBlock(index, IndexMetadata.APIBlock.WRITE); + final var isClosed = isIndexClosed(index); + logger.debug("--> upgraded index [{}] is now in [{}] state", index, isClosed ? "closed" : "open"); + assertThat( + indexBlocks(index), + allOf( + either(hasItem(INDEX_READ_ONLY_BLOCK)).or(hasItem(INDEX_WRITE_BLOCK)), + isClosed ? hasItem(INDEX_CLOSED_BLOCK) : not(hasItem(INDEX_CLOSED_BLOCK)) + ) + ); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(isClosed)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + + var blocks = indexBlocks(index).stream().filter(c -> c.equals(INDEX_WRITE_BLOCK) || c.equals(INDEX_READ_ONLY_BLOCK)).toList(); + if (blocks.size() == 2) { + switch (randomInt(2)) { + case 0: + updateIndexSettings( + index, + Settings.builder() + .putNull(IndexMetadata.APIBlock.WRITE.settingName()) + .put(IndexMetadata.APIBlock.READ_ONLY.settingName(), true) + ); + assertThat( + indexBlocks(index), + isClosed ? contains(INDEX_CLOSED_BLOCK, INDEX_READ_ONLY_BLOCK) : contains(INDEX_READ_ONLY_BLOCK) + ); + break; + case 1: + updateIndexSettings( + index, + Settings.builder() + .putNull(IndexMetadata.APIBlock.READ_ONLY.settingName()) + .put(IndexMetadata.APIBlock.WRITE.settingName(), true) + ); + assertThat( + indexBlocks(index), + isClosed ? contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK) : contains(INDEX_WRITE_BLOCK) + ); + break; + case 2: + updateIndexSettings(index, Settings.builder().put(IndexMetadata.APIBlock.READ_ONLY.settingName(), false)); + assertThat( + indexBlocks(index), + isClosed ? 
contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK) : contains(INDEX_WRITE_BLOCK) + ); + break; + default: + throw new AssertionError(); + } + } - if (isIndexClosed(index)) { + blocks = indexBlocks(index).stream().filter(c -> c.equals(INDEX_WRITE_BLOCK) || c.equals(INDEX_READ_ONLY_BLOCK)).toList(); + if (blocks.contains(INDEX_READ_ONLY_BLOCK)) { + logger.debug("--> read_only API block can be replaced by a write block (required for the remaining tests)"); + updateIndexSettings( + index, + Settings.builder() + .putNull(IndexMetadata.APIBlock.READ_ONLY.settingName()) + .put(IndexMetadata.APIBlock.WRITE.settingName(), true) + ); + } + + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(isClosed)); + assertThat(indexBlocks(index), isClosed ? contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK) : contains(INDEX_WRITE_BLOCK)); + + if (isClosed) { logger.debug("--> re-opening index [{}] after upgrade", index); openIndex(index); ensureGreen(index); @@ -88,48 +181,6 @@ public void testIndexUpgrade() throws Exception { } } - /** - * Similar to {@link #testIndexUpgrade()} but with a read_only block. - */ - public void testIndexUpgradeReadOnlyBlock() throws Exception { - final String index = suffix("index-"); - final int numDocs = 2573; - - if (isFullyUpgradedTo(VERSION_MINUS_2)) { - logger.debug("--> creating index [{}]", index); - createIndex( - client(), - index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() - ); - - logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); - indexDocs(index, numDocs); - return; - } - - ensureGreen(index); - - if (isFullyUpgradedTo(VERSION_MINUS_1)) { - assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); - assertDocCount(client(), index, numDocs); - - addIndexBlock(index, IndexMetadata.APIBlock.READ_ONLY); - return; - } - - if (nodesVersions().values().stream().anyMatch(v -> v.onOrAfter(VERSION_CURRENT))) { - assertThatIndexBlock(index, IndexMetadata.APIBlock.READ_ONLY); - - assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); - assertDocCount(client(), index, numDocs); - } - } - /** * Creates an index on N-2, marks as read-only on N-1 and creates a snapshot, then restores the snapshot during rolling upgrades to N. 
*/ @@ -174,16 +225,24 @@ public void testRestoreIndex() throws Exception { deleteIndex(index); return; } + if (nodesVersions().values().stream().anyMatch(v -> v.onOrAfter(VERSION_CURRENT))) { var restoredIndex = suffix("index-restored-rolling"); boolean success = false; try { - logger.debug("--> restoring index [{}] as [{}]", index, restoredIndex); restoreIndex(repository, snapshot, index, restoredIndex); ensureGreen(restoredIndex); - assertThatIndexBlock(restoredIndex, IndexMetadata.APIBlock.WRITE); + assertThat(indexBlocks(restoredIndex), contains(INDEX_WRITE_BLOCK)); + assertIndexSetting(restoredIndex, VERIFIED_READ_ONLY_SETTING, is(true)); + + var ex = expectUpdateIndexSettingsThrows( + restoredIndex, + Settings.builder().putNull(IndexMetadata.APIBlock.WRITE.settingName()) + ); + assertThat(ex.getMessage(), containsStringCannotRemoveBlockOnReadOnlyIndex(restoredIndex)); + assertThat(indexVersion(restoredIndex), equalTo(VERSION_MINUS_2)); assertDocCount(client(), restoredIndex, numDocs); @@ -194,6 +253,15 @@ public void testRestoreIndex() throws Exception { closeIndex(restoredIndex); ensureGreen(restoredIndex); + logger.debug("--> write API block can be removed on a closed index: INDEX_CLOSED_BLOCK already blocks writes"); + updateIndexSettings(restoredIndex, Settings.builder().putNull(IndexMetadata.APIBlock.WRITE.settingName())); + + logger.debug("--> but attempts to re-opening [{}] should fail due to the missing block", restoredIndex); + ex = expectThrows(ResponseException.class, () -> openIndex(restoredIndex)); + assertThat(ex.getMessage(), containsString("must be marked as read-only")); + + addIndexBlock(restoredIndex, IndexMetadata.APIBlock.WRITE); + logger.debug("--> re-opening restored index [{}]", restoredIndex); openIndex(restoredIndex); ensureGreen(restoredIndex); @@ -214,5 +282,20 @@ public void testRestoreIndex() throws Exception { } } } + + if (isFullyUpgradedTo(VERSION_CURRENT)) { + var exception = expectThrows( + ResponseException.class, + () -> restoreIndex( + repository, + snapshot, + index, + suffix("unrestorable"), + Settings.builder().put(IndexMetadata.APIBlock.WRITE.settingName(), false).build() + ) + ); + assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(500)); + assertThat(exception.getMessage(), containsString("must be marked as read-only using the setting")); + } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index 9a7a77bf77a87..dc4dfc88b2c12 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; @@ -23,6 +24,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.VersionConflictEngineException; 
import org.elasticsearch.indices.IndicesService; @@ -720,4 +722,35 @@ private void assertEqualsAndStringsInterned(List queryFieldsSetting, Set } } + public void testMultipleSettingsUpdateWithMetadataWriteBlock() { + final var indexName = randomIdentifier(); + createIndex(indexName, Settings.builder().put(IndexMetadata.APIBlock.READ_ONLY.settingName(), true).build()); + + // Metadata writes are blocked by the READ_ONLY block + expectThrows( + ClusterBlockException.class, + () -> updateIndexSettings(Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "12s"), indexName) + ); + + var randomSetting = randomFrom(IndexMetadata.APIBlock.READ_ONLY, IndexMetadata.APIBlock.READ_ONLY_ALLOW_DELETE).settingName(); + updateIndexSettings( + Settings.builder() + .put(randomSetting, true) // still has the metadata write block... + .put(IndexMetadata.APIBlock.WRITE.settingName(), true) + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "12s"), // should not be allowed + indexName + ); + + assertThat( + indicesAdmin().prepareGetSettings(indexName) + .get() + .getIndexToSettings() + .get(indexName) + .get(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey()), + equalTo("12s") + ); + + // Updating the setting alone should always work + updateIndexSettings(Settings.builder().put(IndexMetadata.APIBlock.READ_ONLY.settingName(), false)); + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java index aa3a6a201eac4..659e78f99c21a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java +++ b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java @@ -146,6 +146,10 @@ public boolean hasIndexBlock(String index, ClusterBlock block) { return indicesBlocks.containsKey(index) && indicesBlocks.get(index).contains(block); } + public boolean hasIndexBlockLevel(String index, ClusterBlockLevel level) { + return blocksForIndex(level, index).isEmpty() == false; + } + public boolean hasIndexBlockWithId(String index, int blockId) { final Set clusterBlocks = indicesBlocks.get(index); if (clusterBlocks != null) { @@ -398,6 +402,10 @@ public boolean hasIndexBlock(String index, ClusterBlock block) { return indices.getOrDefault(index, Set.of()).contains(block); } + public boolean hasIndexBlockLevel(String index, ClusterBlockLevel level) { + return indices.getOrDefault(index, Set.of()).stream().anyMatch(clusterBlock -> clusterBlock.contains(level)); + } + public Builder removeIndexBlock(String index, ClusterBlock block) { if (indices.containsKey(index) == false) { return this; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java index e984768277d27..c11fa06d83c4d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -51,7 +52,9 @@ import java.util.Objects; import java.util.Set; import java.util.function.BiFunction; +import 
java.util.function.Function; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING; import static org.elasticsearch.index.IndexSettings.same; /** @@ -181,11 +184,14 @@ ClusterState execute(ClusterState currentState) { RoutingTable.Builder routingTableBuilder = null; Metadata.Builder metadataBuilder = Metadata.builder(currentState.metadata()); + final var minSupportedIndexVersion = currentState.nodes().getMinSupportedIndexVersion(); // allow to change any settings to a closed index, and only allow dynamic settings to be changed // on an open index Set openIndices = new HashSet<>(); Set closedIndices = new HashSet<>(); + Set readOnlyIndices = null; + final String[] actualIndices = new String[request.indices().length]; for (int i = 0; i < request.indices().length; i++) { Index index = request.indices()[i]; @@ -197,6 +203,12 @@ ClusterState execute(ClusterState currentState) { } else { closedIndices.add(index); } + if (metadata.getCompatibilityVersion().before(minSupportedIndexVersion)) { + if (readOnlyIndices == null) { + readOnlyIndices = new HashSet<>(); + } + readOnlyIndices.add(index); + } } if (skippedSettings.isEmpty() == false && openIndices.isEmpty() == false) { @@ -327,10 +339,21 @@ ClusterState execute(ClusterState currentState) { } } + final Function verifiedReadOnly = indexName -> VERIFIED_READ_ONLY_SETTING.get( + currentState.metadata().index(indexName).getSettings() + ); final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); boolean changedBlocks = false; for (IndexMetadata.APIBlock block : IndexMetadata.APIBlock.values()) { - changedBlocks |= maybeUpdateClusterBlock(actualIndices, blocks, block.block, block.setting, openSettings, metadataBuilder); + changedBlocks |= maybeUpdateClusterBlock( + actualIndices, + blocks, + block.block, + block.setting, + openSettings, + metadataBuilder, + verifiedReadOnly + ); } changed |= changedBlocks; @@ -359,6 +382,7 @@ ClusterState execute(ClusterState currentState) { // This step is mandatory since we allow to update non-dynamic settings on closed indices. indicesService.verifyIndexMetadata(updatedMetadata, updatedMetadata); } + verifyReadOnlyIndices(readOnlyIndices, updatedState.blocks()); } catch (IOException ex) { throw ExceptionsHelper.convertToElastic(ex); } @@ -417,6 +441,24 @@ public static void updateIndexSettings( } } + /** + * Verifies that read-only compatible indices always have a write block. 
+ * + * @param readOnlyIndices the read-only compatible indices + * @param blocks the updated cluster state blocks + */ + private static void verifyReadOnlyIndices(@Nullable Set readOnlyIndices, ClusterBlocks blocks) { + if (readOnlyIndices != null) { + for (Index readOnlyIndex : readOnlyIndices) { + if (blocks.hasIndexBlockLevel(readOnlyIndex.getName(), ClusterBlockLevel.WRITE) == false) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "Can't remove the write block on read-only compatible index %s", readOnlyIndex) + ); + } + } + } + } + /** * Updates the cluster block only iff the setting exists in the given settings */ @@ -426,7 +468,8 @@ private static boolean maybeUpdateClusterBlock( ClusterBlock block, Setting setting, Settings openSettings, - Metadata.Builder metadataBuilder + Metadata.Builder metadataBuilder, + Function verifiedReadOnlyBeforeBlockChanges ) { boolean changed = false; if (setting.exists(openSettings)) { @@ -436,16 +479,32 @@ private static boolean maybeUpdateClusterBlock( if (blocks.hasIndexBlock(index, block) == false) { blocks.addIndexBlock(index, block); changed = true; + if (block.contains(ClusterBlockLevel.WRITE)) { + var isVerifiedReadOnly = verifiedReadOnlyBeforeBlockChanges.apply(index); + if (isVerifiedReadOnly) { + var indexMetadata = metadataBuilder.get(index); + metadataBuilder.put( + IndexMetadata.builder(indexMetadata) + .settings( + Settings.builder() + .put(indexMetadata.getSettings()) + .put(VERIFIED_READ_ONLY_SETTING.getKey(), true) + ) + ); + } + } } } else { if (blocks.hasIndexBlock(index, block)) { blocks.removeIndexBlock(index, block); changed = true; if (block.contains(ClusterBlockLevel.WRITE)) { - IndexMetadata indexMetadata = metadataBuilder.get(index); - Settings.Builder indexSettings = Settings.builder().put(indexMetadata.getSettings()); - indexSettings.remove(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey()); - metadataBuilder.put(IndexMetadata.builder(indexMetadata).settings(indexSettings)); + if (blocks.hasIndexBlockLevel(index, ClusterBlockLevel.WRITE) == false) { + var indexMetadata = metadataBuilder.get(index); + var indexSettings = Settings.builder().put(indexMetadata.getSettings()); + indexSettings.remove(VERIFIED_READ_ONLY_SETTING.getKey()); + metadataBuilder.put(IndexMetadata.builder(indexMetadata).settings(indexSettings)); + } } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 2647e21d34bc5..bedddd4f381f5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1929,7 +1929,7 @@ protected static Map getIndexSettings(String index, boolean incl } @SuppressWarnings("unchecked") - protected Map getIndexSettingsAsMap(String index) throws IOException { + protected static Map getIndexSettingsAsMap(String index) throws IOException { Map indexSettings = getIndexSettings(index); return (Map) ((Map) indexSettings.get(index)).get("settings"); } From 7400a149951e43525d6dc92e56cac7418b375bbe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Tue, 28 Jan 2025 17:01:18 +0100 Subject: [PATCH 139/383] [DOCS] Documents that deployment_id can be used as inference_id in certain cases. 
(#121055) --- docs/reference/query-dsl/sparse-vector-query.asciidoc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/reference/query-dsl/sparse-vector-query.asciidoc b/docs/reference/query-dsl/sparse-vector-query.asciidoc index d46d649079d70..2df9bfd04e77f 100644 --- a/docs/reference/query-dsl/sparse-vector-query.asciidoc +++ b/docs/reference/query-dsl/sparse-vector-query.asciidoc @@ -62,11 +62,14 @@ GET _search (Required, string) The name of the field that contains the token-weight pairs to be searched against. `inference_id`:: -(Optional, string) The <> to use to convert the query text into token-weight pairs. +(Optional, string) +The <> to use to convert the query text into token-weight pairs. It must be the same inference ID that was used to create the tokens from the input text. Only one of `inference_id` and `query_vector` is allowed. If `inference_id` is specified, `query` must also be specified. If all queried fields are of type <>, the inference ID associated with the `semantic_text` field will be inferred. +You can reference a `deployment_id` of a {ml} trained model deployment as an `inference_id`. +For example, if you download and deploy the ELSER model in the {ml-cap} trained models UI in {kib}, you can use the `deployment_id` of that deployment as the `inference_id`. `query`:: (Optional, string) The query text you want to use for search. From 08255da9ac784559bd29d1f3dd632113b1851e1e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Tue, 28 Jan 2025 17:10:08 +0100 Subject: [PATCH 140/383] [DOCS] Fixes max_chunk_size parameter name. (#121052) --- docs/reference/inference/elastic-infer-service.asciidoc | 2 +- docs/reference/inference/inference-shared.asciidoc | 2 +- .../reference/inference/service-alibabacloud-ai-search.asciidoc | 2 +- docs/reference/inference/service-amazon-bedrock.asciidoc | 2 +- docs/reference/inference/service-anthropic.asciidoc | 2 +- docs/reference/inference/service-azure-ai-studio.asciidoc | 2 +- docs/reference/inference/service-azure-openai.asciidoc | 2 +- docs/reference/inference/service-cohere.asciidoc | 2 +- docs/reference/inference/service-elasticsearch.asciidoc | 2 +- docs/reference/inference/service-elser.asciidoc | 2 +- docs/reference/inference/service-google-ai-studio.asciidoc | 2 +- docs/reference/inference/service-google-vertex-ai.asciidoc | 2 +- docs/reference/inference/service-hugging-face.asciidoc | 2 +- docs/reference/inference/service-jinaai.asciidoc | 2 +- docs/reference/inference/service-mistral.asciidoc | 2 +- docs/reference/inference/service-openai.asciidoc | 2 +- 16 files changed, 16 insertions(+), 16 deletions(-) diff --git a/docs/reference/inference/elastic-infer-service.asciidoc b/docs/reference/inference/elastic-infer-service.asciidoc index f78bfa967cceb..24ae7e20deec6 100644 --- a/docs/reference/inference/elastic-infer-service.asciidoc +++ b/docs/reference/inference/elastic-infer-service.asciidoc @@ -49,7 +49,7 @@ include::inference-shared.asciidoc[tag=chat-completion-docs] ==== {api-request-body-title} -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/inference-shared.asciidoc b/docs/reference/inference/inference-shared.asciidoc index b133c54082810..9f595bc7b0491 100644 --- a/docs/reference/inference/inference-shared.asciidoc +++ b/docs/reference/inference/inference-shared.asciidoc @@ -48,7 +48,7 @@ tag::chunking-settings-overlap[] Only for 
`word` chunking strategy. Specifies the number of overlapping words for chunks. Defaults to `100`. -This value cannot be higher than the half of `max_chunking_size`. +This value cannot be higher than the half of `max_chunk_size`. end::chunking-settings-overlap[] tag::chunking-settings-sentence-overlap[] diff --git a/docs/reference/inference/service-alibabacloud-ai-search.asciidoc b/docs/reference/inference/service-alibabacloud-ai-search.asciidoc index eea0e094dce5a..129a51bed415e 100644 --- a/docs/reference/inference/service-alibabacloud-ai-search.asciidoc +++ b/docs/reference/inference/service-alibabacloud-ai-search.asciidoc @@ -44,7 +44,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-amazon-bedrock.asciidoc b/docs/reference/inference/service-amazon-bedrock.asciidoc index d4ae3895b7c76..e8c3f0dc4c811 100644 --- a/docs/reference/inference/service-amazon-bedrock.asciidoc +++ b/docs/reference/inference/service-amazon-bedrock.asciidoc @@ -42,7 +42,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-anthropic.asciidoc b/docs/reference/inference/service-anthropic.asciidoc index 08d8ca43daea8..9eaf407044d7a 100644 --- a/docs/reference/inference/service-anthropic.asciidoc +++ b/docs/reference/inference/service-anthropic.asciidoc @@ -42,7 +42,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-azure-ai-studio.asciidoc b/docs/reference/inference/service-azure-ai-studio.asciidoc index b179a87de1594..fbc70ae01f69f 100644 --- a/docs/reference/inference/service-azure-ai-studio.asciidoc +++ b/docs/reference/inference/service-azure-ai-studio.asciidoc @@ -43,7 +43,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-azure-openai.asciidoc b/docs/reference/inference/service-azure-openai.asciidoc index eded44b7ab0b0..8aedac80fbdca 100644 --- a/docs/reference/inference/service-azure-openai.asciidoc +++ b/docs/reference/inference/service-azure-openai.asciidoc @@ -43,7 +43,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-cohere.asciidoc b/docs/reference/inference/service-cohere.asciidoc index e95f0810fd29d..289f03787580f 100644 --- a/docs/reference/inference/service-cohere.asciidoc +++ b/docs/reference/inference/service-cohere.asciidoc @@ -44,7 +44,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) 
include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-elasticsearch.asciidoc b/docs/reference/inference/service-elasticsearch.asciidoc index 745b14904dd6d..2db26e4f6c405 100644 --- a/docs/reference/inference/service-elasticsearch.asciidoc +++ b/docs/reference/inference/service-elasticsearch.asciidoc @@ -49,7 +49,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-elser.asciidoc b/docs/reference/inference/service-elser.asciidoc index 6a509ec850903..417a9181d658b 100644 --- a/docs/reference/inference/service-elser.asciidoc +++ b/docs/reference/inference/service-elser.asciidoc @@ -55,7 +55,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-google-ai-studio.asciidoc b/docs/reference/inference/service-google-ai-studio.asciidoc index a6f7d914decfa..8ed49ad0ba7bd 100644 --- a/docs/reference/inference/service-google-ai-studio.asciidoc +++ b/docs/reference/inference/service-google-ai-studio.asciidoc @@ -43,7 +43,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-google-vertex-ai.asciidoc b/docs/reference/inference/service-google-vertex-ai.asciidoc index f9499de7e5602..7e5611d16fb07 100644 --- a/docs/reference/inference/service-google-vertex-ai.asciidoc +++ b/docs/reference/inference/service-google-vertex-ai.asciidoc @@ -43,7 +43,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-hugging-face.asciidoc b/docs/reference/inference/service-hugging-face.asciidoc index 40fb2002975dd..7f9db4e21f02a 100644 --- a/docs/reference/inference/service-hugging-face.asciidoc +++ b/docs/reference/inference/service-hugging-face.asciidoc @@ -42,7 +42,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-jinaai.asciidoc b/docs/reference/inference/service-jinaai.asciidoc index 1470c58315430..e000b3caf52a4 100644 --- a/docs/reference/inference/service-jinaai.asciidoc +++ b/docs/reference/inference/service-jinaai.asciidoc @@ -37,7 +37,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-mistral.asciidoc b/docs/reference/inference/service-mistral.asciidoc index 20e1133e8a83c..afabb2199984f 100644 --- a/docs/reference/inference/service-mistral.asciidoc 
+++ b/docs/reference/inference/service-mistral.asciidoc @@ -42,7 +42,7 @@ Available task types: (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] diff --git a/docs/reference/inference/service-openai.asciidoc b/docs/reference/inference/service-openai.asciidoc index 8d7c6c937333d..511632736a35b 100644 --- a/docs/reference/inference/service-openai.asciidoc +++ b/docs/reference/inference/service-openai.asciidoc @@ -51,7 +51,7 @@ include::inference-shared.asciidoc[tag=chat-completion-docs] (Optional, object) include::inference-shared.asciidoc[tag=chunking-settings] -`max_chunking_size`::: +`max_chunk_size`::: (Optional, integer) include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size] From 6718774eeef2760d611d980dc6801a617f257001 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Tue, 28 Jan 2025 11:17:36 -0500 Subject: [PATCH 141/383] [Deprecation] Add transform_ids to outdated index (#120821) When a transform is writing to an outdated destination index, the transform's id will show up in the index's deprecation warning under `index_settings._meta.transform_ids`. --- docs/changelog/120821.yaml | 5 + .../deprecation/IndexDeprecationChecker.java | 61 ++++++---- .../TransformDeprecationChecker.java | 50 +++----- .../TransportDeprecationInfoAction.java | 110 +++++++++++++----- .../IndexDeprecationCheckerTests.java | 102 +++++++++++++--- 5 files changed, 222 insertions(+), 106 deletions(-) create mode 100644 docs/changelog/120821.yaml diff --git a/docs/changelog/120821.yaml b/docs/changelog/120821.yaml new file mode 100644 index 0000000000000..403c186d9f102 --- /dev/null +++ b/docs/changelog/120821.yaml @@ -0,0 +1,5 @@ +pr: 120821 +summary: "[Deprecation] Add `transform_ids` to outdated index" +area: Transform +type: enhancement +issues: [] diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java index e941ebfc05e49..46e634e6d3899 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -39,21 +38,13 @@ public class IndexDeprecationChecker implements ResourceDeprecationChecker { public static final String NAME = "index_settings"; - private static final List> INDEX_SETTINGS_CHECKS = List.of( - IndexDeprecationChecker::oldIndicesCheck, - IndexDeprecationChecker::ignoredOldIndicesCheck, - IndexDeprecationChecker::translogRetentionSettingCheck, - IndexDeprecationChecker::checkIndexDataPath, - IndexDeprecationChecker::storeTypeSettingCheck, - IndexDeprecationChecker::frozenIndexSettingCheck, - IndexDeprecationChecker::deprecatedCamelCasePattern, - IndexDeprecationChecker::legacyRoutingSettingCheck - ); private final IndexNameExpressionResolver indexNameExpressionResolver; + private final Map> indexToTransformIds; - public IndexDeprecationChecker(IndexNameExpressionResolver indexNameExpressionResolver) { + public IndexDeprecationChecker(IndexNameExpressionResolver 
indexNameExpressionResolver, Map> indexToTransformIds) { this.indexNameExpressionResolver = indexNameExpressionResolver; + this.indexToTransformIds = indexToTransformIds; } @Override @@ -62,7 +53,7 @@ public Map> check(ClusterState clusterState, Depr String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(clusterState, request); for (String concreteIndex : concreteIndexNames) { IndexMetadata indexMetadata = clusterState.getMetadata().index(concreteIndex); - List singleIndexIssues = filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetadata, clusterState)); + List singleIndexIssues = filterChecks(indexSettingsChecks(), c -> c.apply(indexMetadata, clusterState)); if (singleIndexIssues.isEmpty() == false) { indexSettingsIssues.put(concreteIndex, singleIndexIssues); } @@ -73,12 +64,25 @@ public Map> check(ClusterState clusterState, Depr return indexSettingsIssues; } + private List> indexSettingsChecks() { + return List.of( + this::oldIndicesCheck, + this::ignoredOldIndicesCheck, + IndexDeprecationChecker::translogRetentionSettingCheck, + IndexDeprecationChecker::checkIndexDataPath, + IndexDeprecationChecker::storeTypeSettingCheck, + IndexDeprecationChecker::frozenIndexSettingCheck, + IndexDeprecationChecker::deprecatedCamelCasePattern, + IndexDeprecationChecker::legacyRoutingSettingCheck + ); + } + @Override public String getName() { return NAME; } - static DeprecationIssue oldIndicesCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private DeprecationIssue oldIndicesCheck(IndexMetadata indexMetadata, ClusterState clusterState) { // TODO: this check needs to be revised. It's trivially true right now. IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion(); // We intentionally exclude indices that are in data streams because they will be picked up by DataStreamDeprecationChecks @@ -89,13 +93,22 @@ static DeprecationIssue oldIndicesCheck(IndexMetadata indexMetadata, ClusterStat "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", "This index has version: " + currentCompatibilityVersion.toReleaseVersion(), false, - Collections.singletonMap("reindex_required", true) + meta(indexMetadata) ); } return null; } - static DeprecationIssue ignoredOldIndicesCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private Map meta(IndexMetadata indexMetadata) { + var transforms = indexToTransformIds.getOrDefault(indexMetadata.getIndex().getName(), List.of()); + if (transforms.isEmpty()) { + return Map.of("reindex_required", true); + } else { + return Map.of("reindex_required", true, "transform_ids", transforms); + } + } + + private DeprecationIssue ignoredOldIndicesCheck(IndexMetadata indexMetadata, ClusterState clusterState) { IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion(); // We intentionally exclude indices that are in data streams because they will be picked up by DataStreamDeprecationChecks if (DeprecatedIndexPredicate.reindexRequired(indexMetadata, true) && isNotDataStreamIndex(indexMetadata, clusterState)) { @@ -107,7 +120,7 @@ static DeprecationIssue ignoredOldIndicesCheck(IndexMetadata indexMetadata, Clus + currentCompatibilityVersion.toReleaseVersion() + " and will be supported as read-only in 9.0", false, - Collections.singletonMap("reindex_required", true) + meta(indexMetadata) ); } return null; @@ -117,7 +130,7 @@ private static boolean isNotDataStreamIndex(IndexMetadata indexMetadata, Cluster return 
clusterState.metadata().findDataStreams(indexMetadata.getIndex().getName()).isEmpty(); } - static DeprecationIssue translogRetentionSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private static DeprecationIssue translogRetentionSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { final boolean softDeletesEnabled = IndexSettings.INDEX_SOFT_DELETES_SETTING.get(indexMetadata.getSettings()); if (softDeletesEnabled) { if (IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.exists(indexMetadata.getSettings()) @@ -144,7 +157,7 @@ static DeprecationIssue translogRetentionSettingCheck(IndexMetadata indexMetadat return null; } - static DeprecationIssue checkIndexDataPath(IndexMetadata indexMetadata, ClusterState clusterState) { + private static DeprecationIssue checkIndexDataPath(IndexMetadata indexMetadata, ClusterState clusterState) { if (IndexMetadata.INDEX_DATA_PATH_SETTING.exists(indexMetadata.getSettings())) { final String message = String.format( Locale.ROOT, @@ -159,7 +172,7 @@ static DeprecationIssue checkIndexDataPath(IndexMetadata indexMetadata, ClusterS return null; } - static DeprecationIssue storeTypeSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private static DeprecationIssue storeTypeSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { final String storeType = IndexModule.INDEX_STORE_TYPE_SETTING.get(indexMetadata.getSettings()); if (IndexModule.Type.SIMPLEFS.match(storeType)) { return new DeprecationIssue( @@ -176,7 +189,7 @@ static DeprecationIssue storeTypeSettingCheck(IndexMetadata indexMetadata, Clust return null; } - static DeprecationIssue frozenIndexSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private static DeprecationIssue frozenIndexSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { Boolean isIndexFrozen = FrozenEngine.INDEX_FROZEN.get(indexMetadata.getSettings()); if (Boolean.TRUE.equals(isIndexFrozen)) { String indexName = indexMetadata.getIndex().getName(); @@ -194,7 +207,7 @@ static DeprecationIssue frozenIndexSettingCheck(IndexMetadata indexMetadata, Clu return null; } - static DeprecationIssue legacyRoutingSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { + private static DeprecationIssue legacyRoutingSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { List deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(indexMetadata.getSettings()); if (deprecatedSettings.isEmpty()) { return null; @@ -228,7 +241,7 @@ private static void fieldLevelMappingIssue(IndexMetadata indexMetadata, BiConsum * @return a list of issues found in fields */ @SuppressWarnings("unchecked") - static List findInPropertiesRecursively( + private static List findInPropertiesRecursively( String type, Map parentMap, Function, Boolean> predicate, @@ -282,7 +295,7 @@ static List findInPropertiesRecursively( return issues; } - static DeprecationIssue deprecatedCamelCasePattern(IndexMetadata indexMetadata, ClusterState clusterState) { + private static DeprecationIssue deprecatedCamelCasePattern(IndexMetadata indexMetadata, ClusterState clusterState) { List fields = new ArrayList<>(); fieldLevelMappingIssue( indexMetadata, diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java index 0b5eb7ada7655..57c4fae960854 100644 --- 
a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java @@ -8,19 +8,21 @@ package org.elasticsearch.xpack.deprecation; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; -import org.elasticsearch.xpack.core.transform.action.GetTransformAction; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import java.util.ArrayList; import java.util.List; -public class TransformDeprecationChecker implements DeprecationChecker { +class TransformDeprecationChecker implements DeprecationChecker { public static final String TRANSFORM_DEPRECATION_KEY = "transform_settings"; + private final List transformConfigs; + + TransformDeprecationChecker(List transformConfigs) { + this.transformConfigs = transformConfigs; + } @Override public boolean enabled(Settings settings) { @@ -30,43 +32,17 @@ public boolean enabled(Settings settings) { @Override public void check(Components components, ActionListener deprecationIssueListener) { - - PageParams startPage = new PageParams(0, PageParams.DEFAULT_SIZE); - List issues = new ArrayList<>(); - recursiveGetTransformsAndCollectDeprecations( - components, - issues, - startPage, - deprecationIssueListener.delegateFailureAndWrap((l, allIssues) -> l.onResponse(new CheckResult(getName(), allIssues))) - ); + ActionListener.completeWith(deprecationIssueListener, () -> { + List allIssues = new ArrayList<>(); + for (var config : transformConfigs) { + allIssues.addAll(config.checkForDeprecations(components.xContentRegistry())); + } + return new CheckResult(getName(), allIssues); + }); } @Override public String getName() { return TRANSFORM_DEPRECATION_KEY; } - - private static void recursiveGetTransformsAndCollectDeprecations( - Components components, - List issues, - PageParams page, - ActionListener> listener - ) { - final GetTransformAction.Request request = new GetTransformAction.Request(Metadata.ALL); - request.setPageParams(page); - request.setAllowNoResources(true); - - components.client() - .execute(GetTransformAction.INSTANCE, request, listener.delegateFailureAndWrap((delegate, getTransformResponse) -> { - for (TransformConfig config : getTransformResponse.getTransformConfigurations()) { - issues.addAll(config.checkForDeprecations(components.xContentRegistry())); - } - if (getTransformResponse.getTransformConfigurationCount() >= (page.getFrom() + page.getSize())) { - PageParams nextPage = new PageParams(page.getFrom() + page.getSize(), PageParams.DEFAULT_SIZE); - recursiveGetTransformsAndCollectDeprecations(components, issues, nextPage, delegate); - } else { - delegate.onResponse(issues); - } - })); - } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java index 5ff1acf2c0e24..886eddf82149e 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java @@ -20,6 +20,7 @@ import 
org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.injection.guice.Inject; @@ -28,19 +29,23 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction; +import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.elasticsearch.xpack.deprecation.DeprecationChecks.CLUSTER_SETTINGS_CHECKS; public class TransportDeprecationInfoAction extends TransportMasterNodeReadAction< DeprecationInfoAction.Request, DeprecationInfoAction.Response> { - private static final List PLUGIN_CHECKERS = List.of(new MlDeprecationChecker(), new TransformDeprecationChecker()); + private static final DeprecationChecker ML_CHECKER = new MlDeprecationChecker(); private static final Logger logger = LogManager.getLogger(TransportDeprecationInfoAction.class); private final NodeClient client; @@ -48,7 +53,6 @@ public class TransportDeprecationInfoAction extends TransportMasterNodeReadActio private final Settings settings; private final NamedXContentRegistry xContentRegistry; private volatile List skipTheseDeprecations; - private final List resourceDeprecationCheckers; @Inject public TransportDeprecationInfoAction( @@ -75,12 +79,6 @@ public TransportDeprecationInfoAction( this.indexNameExpressionResolver = indexNameExpressionResolver; this.settings = settings; this.xContentRegistry = xContentRegistry; - this.resourceDeprecationCheckers = List.of( - new IndexDeprecationChecker(indexNameExpressionResolver), - new DataStreamDeprecationChecker(indexNameExpressionResolver), - new TemplateDeprecationChecker(), - new IlmPolicyDeprecationChecker() - ); skipTheseDeprecations = DeprecationChecks.SKIP_DEPRECATIONS_SETTING.get(settings); // Safe to register this here because it happens synchronously before the cluster service is started: clusterService.getClusterSettings() @@ -110,7 +108,7 @@ protected final void masterOperation( ClientHelper.DEPRECATION_ORIGIN, NodesDeprecationCheckAction.INSTANCE, nodeDepReq, - listener.delegateFailureAndWrap((delegate, response) -> { + listener.delegateFailureAndWrap((l, response) -> { if (response.hasFailures()) { List failedNodeIds = response.failures() .stream() @@ -121,31 +119,37 @@ protected final void masterOperation( logger.debug("node {} failed to run deprecation checks: {}", failure.nodeId(), failure); } } - - DeprecationChecker.Components components = new DeprecationChecker.Components( - xContentRegistry, - settings, - new OriginSettingClient(client, ClientHelper.DEPRECATION_ORIGIN) - ); - pluginSettingIssues( - PLUGIN_CHECKERS, - components, - new ThreadedActionListener<>( - client.threadPool().generic(), - delegate.map( - deprecationIssues -> DeprecationInfoAction.Response.from( - state, - indexNameExpressionResolver, - request, - response, - CLUSTER_SETTINGS_CHECKS, - deprecationIssues, - 
skipTheseDeprecations, - resourceDeprecationCheckers + transformConfigs(l.delegateFailureAndWrap((ll, transformConfigs) -> { + DeprecationChecker.Components components = new DeprecationChecker.Components( + xContentRegistry, + settings, + new OriginSettingClient(client, ClientHelper.DEPRECATION_ORIGIN) + ); + pluginSettingIssues( + List.of(ML_CHECKER, new TransformDeprecationChecker(transformConfigs)), + components, + new ThreadedActionListener<>( + client.threadPool().generic(), + ll.map( + deprecationIssues -> DeprecationInfoAction.Response.from( + state, + indexNameExpressionResolver, + request, + response, + CLUSTER_SETTINGS_CHECKS, + deprecationIssues, + skipTheseDeprecations, + List.of( + new IndexDeprecationChecker(indexNameExpressionResolver, indexToTransformIds(transformConfigs)), + new DataStreamDeprecationChecker(indexNameExpressionResolver), + new TemplateDeprecationChecker(), + new IlmPolicyDeprecationChecker() + ) + ) ) ) - ) - ); + ); + })); }) ); } @@ -176,4 +180,46 @@ static void pluginSettingIssues( } } + private void transformConfigs(ActionListener> transformConfigsListener) { + transformConfigs(new PageParams(0, PageParams.DEFAULT_SIZE), transformConfigsListener.map(Stream::toList)); + } + + private void transformConfigs(PageParams currentPage, ActionListener> currentPageListener) { + var request = new GetTransformAction.Request(Metadata.ALL); + request.setPageParams(currentPage); + request.setAllowNoResources(true); + + client.execute( + GetTransformAction.INSTANCE, + request, + new ThreadedActionListener<>( + threadPool.generic(), + currentPageListener.delegateFailureAndWrap((delegate, getTransformConfigResponse) -> { + var currentPageOfConfigs = getTransformConfigResponse.getTransformConfigurations().stream(); + var currentPageSize = currentPage.getFrom() + currentPage.getSize(); + var totalTransformConfigCount = getTransformConfigResponse.getTransformConfigurationCount(); + if (totalTransformConfigCount >= currentPageSize) { + var nextPage = new PageParams(currentPageSize, PageParams.DEFAULT_SIZE); + transformConfigs( + nextPage, + delegate.map(nextPageOfConfigs -> Stream.concat(currentPageOfConfigs, nextPageOfConfigs)) + ); + } else { + delegate.onResponse(currentPageOfConfigs); + } + }) + ) + ); + } + + private Map> indexToTransformIds(List transformConfigs) { + return transformConfigs.stream() + .collect( + Collectors.groupingBy( + config -> config.getDestination().getIndex(), + Collectors.mapping(TransformConfig::getId, Collectors.toList()) + ) + ); + } + } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java index dae7970d4a2e7..e49a6046c5c64 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.cluster.metadata.DataStreamMetadata; import org.elasticsearch.cluster.metadata.DataStreamOptions; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.MetadataIndexStateService; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -38,12 +39,14 @@ public class IndexDeprecationCheckerTests 
extends ESTestCase { - private final IndexDeprecationChecker checker = new IndexDeprecationChecker(TestIndexNameExpressionResolver.newInstance()); + private static final IndexVersion OLD_VERSION = IndexVersion.fromId(7170099); + + private final IndexNameExpressionResolver indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance(); + private final IndexDeprecationChecker checker = new IndexDeprecationChecker(indexNameExpressionResolver, Map.of()); public void testOldIndicesCheck() { - IndexVersion createdWith = IndexVersion.fromId(7170099); IndexMetadata indexMetadata = IndexMetadata.builder("test") - .settings(settings(createdWith)) + .settings(settings(OLD_VERSION)) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -54,7 +57,7 @@ public void testOldIndicesCheck() { DeprecationIssue.Level.CRITICAL, "Old index with a compatibility version < 9.0", "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", - "This index has version: " + createdWith.toReleaseVersion(), + "This index has version: " + OLD_VERSION.toReleaseVersion(), false, singletonMap("reindex_required", true) ); @@ -66,10 +69,86 @@ public void testOldIndicesCheck() { assertEquals(singletonList(expected), issues); } + public void testOldTransformIndicesCheck() { + var checker = new IndexDeprecationChecker(indexNameExpressionResolver, Map.of("test", List.of("test-transform"))); + var indexMetadata = indexMetadata("test", OLD_VERSION); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + var expected = new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + "Old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", + "This index has version: " + OLD_VERSION.toReleaseVersion(), + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform")) + ); + var issuesByIndex = checker.check(clusterState, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)); + assertEquals(singletonList(expected), issuesByIndex.get("test")); + } + + public void testOldIndicesCheckWithMultipleTransforms() { + var checker = new IndexDeprecationChecker( + indexNameExpressionResolver, + Map.of("test", List.of("test-transform1", "test-transform2")) + ); + var indexMetadata = indexMetadata("test", OLD_VERSION); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + var expected = new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + "Old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", + "This index has version: " + OLD_VERSION.toReleaseVersion(), + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform1", "test-transform2")) + ); + var issuesByIndex = checker.check(clusterState, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)); + assertEquals(singletonList(expected), issuesByIndex.get("test")); + } + + public void testMultipleOldIndicesCheckWithTransforms() { + var checker = new IndexDeprecationChecker( + indexNameExpressionResolver, + Map.of("test1", List.of("test-transform1"), "test2", List.of("test-transform2")) + ); + var indexMetadata1 = indexMetadata("test1", OLD_VERSION); + var indexMetadata2 = indexMetadata("test2", OLD_VERSION); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + 
.metadata(Metadata.builder().put(indexMetadata1, true).put(indexMetadata2, true)) + .build(); + var expected = Map.of( + "test1", + List.of( + new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + "Old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", + "This index has version: " + OLD_VERSION.toReleaseVersion(), + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform1")) + ) + ), + "test2", + List.of( + new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + "Old index with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", + "This index has version: " + OLD_VERSION.toReleaseVersion(), + false, + Map.of("reindex_required", true, "transform_ids", List.of("test-transform2")) + ) + ) + ); + var issuesByIndex = checker.check(clusterState, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS)); + assertEquals(expected, issuesByIndex); + } + + private IndexMetadata indexMetadata(String indexName, IndexVersion indexVersion) { + return IndexMetadata.builder(indexName).settings(settings(indexVersion)).numberOfShards(1).numberOfReplicas(0).build(); + } + public void testOldIndicesCheckDataStreamIndex() { - IndexVersion createdWith = IndexVersion.fromId(7170099); IndexMetadata indexMetadata = IndexMetadata.builder(".ds-test") - .settings(settings(createdWith).put("index.hidden", true)) + .settings(settings(OLD_VERSION).put("index.hidden", true)) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -112,8 +191,7 @@ public void testOldIndicesCheckDataStreamIndex() { } public void testOldIndicesCheckSnapshotIgnored() { - IndexVersion createdWith = IndexVersion.fromId(7170099); - Settings.Builder settings = settings(createdWith); + Settings.Builder settings = settings(OLD_VERSION); settings.put(INDEX_STORE_TYPE_SETTING.getKey(), SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE); IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) @@ -128,8 +206,7 @@ public void testOldIndicesCheckSnapshotIgnored() { } public void testOldIndicesCheckClosedIgnored() { - IndexVersion createdWith = IndexVersion.fromId(7170099); - Settings.Builder settings = settings(createdWith); + Settings.Builder settings = settings(OLD_VERSION); IndexMetadata indexMetadata = IndexMetadata.builder("test") .settings(settings) .numberOfShards(1) @@ -147,8 +224,7 @@ public void testOldIndicesCheckClosedIgnored() { } public void testOldIndicesIgnoredWarningCheck() { - IndexVersion createdWith = IndexVersion.fromId(7170099); - Settings.Builder settings = settings(createdWith).put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true); + Settings.Builder settings = settings(OLD_VERSION).put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true); IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder().put(indexMetadata, true)) @@ -157,7 +233,7 @@ public void testOldIndicesIgnoredWarningCheck() { DeprecationIssue.Level.WARNING, "Old index with a compatibility version < 9.0 Has Been Ignored", "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", - "This 
read-only index has version: " + createdWith.toReleaseVersion() + " and will be supported as read-only in 9.0", + "This read-only index has version: " + OLD_VERSION.toReleaseVersion() + " and will be supported as read-only in 9.0", false, singletonMap("reindex_required", true) ); From b2cc9d9b8f00ee621f93ddca07ea9c671aab1578 Mon Sep 17 00:00:00 2001 From: Stanislav Malyshev Date: Tue, 28 Jan 2025 09:26:51 -0700 Subject: [PATCH 142/383] Disable the test in release for now (#121051) --- muted-tests.yml | 3 --- .../xpack/esql/action/CrossClusterAsyncQueryIT.java | 2 ++ 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 0684e17e16adb..381adda7da8ed 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -224,9 +224,6 @@ tests: - class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncEnrichStopIT method: testEnrichAfterStop issue: https://github.com/elastic/elasticsearch/issues/120757 -- class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryIT - method: testStopQuery - issue: https://github.com/elastic/elasticsearch/issues/120767 - class: org.elasticsearch.search.fieldcaps.FieldCapabilitiesIT issue: https://github.com/elastic/elasticsearch/issues/120772 - class: org.elasticsearch.xpack.test.rest.XPackRestIT diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java index 42a96cc7b7743..6be1518f65e63 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionFuture; @@ -265,6 +266,7 @@ public void testAsyncQueriesWithLimit0() throws IOException { } public void testStopQuery() throws Exception { + assumeTrue("Pragme does not work in release builds", Build.current().isSnapshot()); Map testClusterInfo = setupClusters(3); int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards"); From b925b0cbcca07b36e9b5f4f924e676cff75a40bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Tue, 28 Jan 2025 17:50:37 +0100 Subject: [PATCH 143/383] [DOCS] Adds anomaly detection info to migration guide (#121015) Co-authored-by: Valeriy Khakhutskyy <1292899+valeriy42@users.noreply.github.com> --- docs/reference/migration/migrate_9_0.asciidoc | 318 ++++++++++++++++++ 1 file changed, 318 insertions(+) diff --git a/docs/reference/migration/migrate_9_0.asciidoc b/docs/reference/migration/migrate_9_0.asciidoc index 8f0b16e31b56e..71516fdd540d6 100644 --- a/docs/reference/migration/migrate_9_0.asciidoc +++ b/docs/reference/migration/migrate_9_0.asciidoc @@ -73,6 +73,7 @@ Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). For det The change is small and should generally provide better analysis results. Existing indices for full-text use cases should be reindexed though. 
==== + [discrete] [[breaking_90_cluster_and_node_setting_changes]] ==== Cluster and node setting changes @@ -318,3 +319,320 @@ The `elser` service of the inference API will be removed in an upcoming release. In the current version there is no impact. In a future version, users of the `elser` service will no longer be able to use it, and will be required to use the `elasticsearch` service to access elser through the inference API. ==== +[discrete] +[[breaking_90_anomaly_detection_results]] +=== Anomaly detection results migration + +The {anomaly-detect} result indices `.ml-anomalies-*` created in {es} 7.x must be either reindexed, marked read-only, or deleted before upgrading to 9.x. + +**Reindexing**: While anomaly detection results are being reindexed, jobs continue to run and process new data. +However, you cannot completely delete an {anomaly-job} that stores results in this index until the reindexing is complete. + +**Marking indices as read-only**: This is useful for large indexes that contain the results of only one or a few {anomaly-jobs}. +If you delete these jobs later, you will not be able to create a new job with the same name. + +**Deleting**: Delete jobs that are no longer needed in the {ml-app} in {kib}. +The result index is deleted when all jobs that store results in it have been deleted. + +[[which_indices_require_attention]] +.Which indices require attention? +[%collapsible] +==== + +To identify indices that require action, use the <>: + +[source,console] +------------------------------------------------------------ +GET /.ml-anomalies-*/_migration/deprecations +------------------------------------------------------------ +// TEST[skip:TBD] + +The response contains the list of critical deprecation warnings in the `index_settings` section: + +[source,console-result] +------------------------------------------------------------ +"index_settings": { + ".ml-anomalies-shared": [ + { + "level": "critical", + "message": "Index created before 8.0", + "url": "https://ela.st/es-deprecation-8-reindex", + "details": "This index was created with version 7.8.23 and is not compatible with 9.0. Reindex or remove the index before upgrading.", + "resolve_during_rolling_upgrade": false + } + ] + } +------------------------------------------------------------ +// NOTCONSOLE + + +==== + +[[reindex_anomaly_result_index]] +.Reindexing anomaly result indices +[%collapsible] +==== +For an index with less than 10GB that contains results from multiple jobs that are still required, we recommend reindexing into a new format using UI. +You can use the <> to obtain the size of an index: + +[source,console] +------------------------------------------------------------ +GET _cat/indices/.ml-anomalies-custom-example?v&h=index,store.size +------------------------------------------------------------ +// TEST[skip:TBD] + +The reindexing can be initiated in the Kibana Upgrade Assistant. + +If an index size is greater than 10 GB it is recommended to use the Reindex API. +Reindexing consists of the following steps: + +. Set the original index to read-only. ++ +-- +[source,console] +------------------------------------------------------------ +PUT .ml-anomalies-custom-example/_block/read_only +------------------------------------------------------------ +// TEST[skip:TBD] +-- + +. Create a new index from the legacy index. 
++ +-- +[source,console] +------------------------------------------------------------ +POST _create_from/.ml-anomalies-custom-example/.reindexed-v9-ml-anomalies-custom-example +------------------------------------------------------------ +// TEST[skip:TBD] +-- + +. Reindex documents. +To accelerate the reindexing process, it is recommended that the number of replicas be set to `0` before the reindexing and then set back to the original number once it is completed. +.. Get the number of replicas. ++ +-- +[source,console] +------------------------------------------------------------ +GET /.reindexed-v9-ml-anomalies-custom-example/_settings +------------------------------------------------------------ +// TEST[skip:TBD] +Note the number of replicas in the response. For example: +[source,console-result] +------------------------------------------------------------ +{ + ".reindexed-v9-ml-anomalies-custom-example": { + "settings": { + "index": { + "number_of_replicas": "1", + "number_of_shards": "1" + } + } + } +} +------------------------------------------------------------ +// NOTCONSOLE +-- +.. Set the number of replicas to `0`. ++ +-- +[source,console] +------------------------------------------------------------ +PUT /.reindexed-v9-ml-anomalies-custom-example/_settings +{ + "index": { + "number_of_replicas": 0 + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +-- +.. Start the reindexing process in asynchronous mode. ++ +-- +[source,console] +------------------------------------------------------------ +POST _reindex?wait_for_completion=false +{ + "source": { + "index": ".ml-anomalies-custom-example" + }, + "dest": { + "index": ".reindexed-v9-ml-anomalies-custom-example" + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +The response will contain a task_id. You can check when the task is completed using the following command: +[source,console] +------------------------------------------------------------ +GET _tasks/ +------------------------------------------------------------ +// TEST[skip:TBD] +-- +.. Set the number of replicas to the original number when the reindexing is finished. ++ +-- +[source,console] +------------------------------------------------------------ +PUT /.reindexed-v9-ml-anomalies-custom-example/_settings +{ + "index": { + "number_of_replicas": "" + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +-- + +. Get the aliases the original index is pointing to. ++ +-- +[source,console] +------------------------------------------------------------ +GET .ml-anomalies-custom-example/_alias +------------------------------------------------------------ +// TEST[skip:TBD] + +The response may contain multiple aliases if the results of multiple jobs are stored in the same index. + +[source,console-result] +------------------------------------------------------------ +{ + ".ml-anomalies-custom-example": { + "aliases": { + ".ml-anomalies-example1": { + "filter": { + "term": { + "job_id": { + "value": "example1" + } + } + }, + "is_hidden": true + }, + ".ml-anomalies-example2": { + "filter": { + "term": { + "job_id": { + "value": "example2" + } + } + }, + "is_hidden": true + } + } + } +} +------------------------------------------------------------ +// NOTCONSOLE +-- + +. Now you can reassign the aliases to the new index and delete the original index in one step. 
+Note that when adding the new index to the aliases, you must use the same filter and is_hidden parameters as for the original index. ++ +-- +[source,console] +------------------------------------------------------------ +POST _aliases +{ + "actions": [ + { + "add": { + "index": ".reindexed-v9-ml-anomalies-custom-example", + "alias": ".ml-anomalies-example1", + "filter": { + "term": { + "job_id": { + "value": "example1" + } + } + }, + "is_hidden": true + } + }, + { + "add": { + "index": ".reindexed-v9-ml-anomalies-custom-example", + "alias": ".ml-anomalies-example2", + "filter": { + "term": { + "job_id": { + "value": "example2" + } + } + }, + "is_hidden": true + } + }, + { + "remove": { + "index": ".ml-anomalies-custom-example", + "aliases": ".ml-anomalies-*" + } + }, + { + "remove_index": { + "index": ".ml-anomalies-custom-example" + } + }, + { + "add": { + "index": ".reindexed-v9-ml-anomalies-custom-example", + "alias": ".ml-anomalies-custom-example", + "is_hidden": true + } + } + ] +} +------------------------------------------------------------ +// TEST[skip:TBD] +-- +==== + +[[mark_anomaly_result_index_read_only]] +.Marking anomaly result indices as read-only +[%collapsible] +==== +Legacy indexes created in {es} 7.x can be made read-only and supported in {es} 9.x. +Making an index with a large amount of historical results read-only allows for a quick migration to the next major release, since you don't have to wait for the data to be reindexed into the new format. +However, it has the limitation that even after deleting an {anomaly-job}, the historical results associated with this job are not completely deleted. +Therefore, the system will prevent you from creating a new job with the same name. + +To set the index as read-only, add the `write` block to the index: + +[source,console] +------------------------------------------------------------ +PUT .ml-anomalies-custom-example/_block/write +------------------------------------------------------------ +// TEST[skip:TBD] + +Indices created in {es} 7.x that have a `write` block will not raise a critical deprecation warning. +==== + +[[delete_anomaly_result_index]] +.Deleting anomaly result indices +[%collapsible] +==== +If an index contains results of the jobs that are no longer required. +To list all jobs that stored results in an index, use the terms aggregation: + +[source,console] +------------------------------------------------------------ +GET .ml-anomalies-custom-example/_search +{ + "size": 0, + "aggs": { + "job_ids": { + "terms": { + "field": "job_id", + "size": 100 + } + } + } +} +------------------------------------------------------------ +// TEST[skip:TBD] + +The jobs can be deleted in the UI. +After the last job is deleted, the index will be deleted as well. +==== From 1a05f41a7103a0f203261381f04ead29ce9a4cd7 Mon Sep 17 00:00:00 2001 From: George Wallace Date: Tue, 28 Jan 2025 09:52:52 -0700 Subject: [PATCH 144/383] Adjusted alias doc for clarity (#120437) (#121064) Co-authored-by: Kofi B Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> --- docs/reference/alias.asciidoc | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/docs/reference/alias.asciidoc b/docs/reference/alias.asciidoc index f676644c4ec48..3f8553c3b96d9 100644 --- a/docs/reference/alias.asciidoc +++ b/docs/reference/alias.asciidoc @@ -2,12 +2,14 @@ [[aliases]] = Aliases -An alias is a secondary name for a group of data streams or indices. Most {es} +An alias points to one or more indices or data streams. 
Most {es} APIs accept an alias in place of a data stream or index name. -You can change the data streams or indices of an alias at any time. If you use -aliases in your application's {es} requests, you can reindex data with no -downtime or changes to your app's code. +Aliases enable you to: + +* Query multiple indices/data streams together with a single name +* Change which indices/data streams your application uses in real time +* <> data without downtime [discrete] [[alias-types]] From e48a2051e81109368170d31b742dadc14ee2642a Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 28 Jan 2025 18:07:47 +0100 Subject: [PATCH 145/383] ESQL: Make all LOOKUP JOIN caps depend on the V12 one (#120894) --- .../org/elasticsearch/xpack/esql/action/EsqlCapabilities.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index e8c5edc1c8b58..548fb30a51355 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -699,7 +699,7 @@ public enum Cap { /** * LOOKUP JOIN with TEXT fields on the right (right side of the join) (#119473) */ - LOOKUP_JOIN_TEXT(Build.current().isSnapshot()), + LOOKUP_JOIN_TEXT(JOIN_LOOKUP_V12.isEnabled()), /** * LOOKUP JOIN without MV matching (https://github.com/elastic/elasticsearch/issues/118780) From 375814d007058f9e57563a22642dc97c2065e28e Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Tue, 28 Jan 2025 19:33:12 +0200 Subject: [PATCH 146/383] Adding linear retriever to support weighted sums of sub-retrievers (#120222) --- docs/changelog/120222.yaml | 5 + docs/reference/rest-api/common-parms.asciidoc | 47 +- docs/reference/search/retriever.asciidoc | 29 +- docs/reference/search/rrf.asciidoc | 12 +- .../retrievers-examples.asciidoc | 260 +++- .../retrievers-overview.asciidoc | 3 + .../org/elasticsearch/TransportVersions.java | 1 + .../index/query/RankDocsQueryBuilder.java | 4 +- .../elasticsearch/plugins/SearchPlugin.java | 3 +- .../retriever/CompoundRetrieverBuilder.java | 14 +- .../retriever/RankDocsRetrieverBuilder.java | 8 +- .../retriever/RescorerRetrieverBuilder.java | 1 + .../rules/80_query_rules_retriever.yml | 8 +- .../xpack/rank/linear/LinearRetrieverIT.java | 838 +++++++++++++ .../rank-rrf/src/main/java/module-info.java | 6 +- .../RRFFeatures.java => RankRRFFeatures.java} | 14 +- .../rank/linear/IdentityScoreNormalizer.java | 27 + .../xpack/rank/linear/LinearRankDoc.java | 143 +++ .../rank/linear/LinearRetrieverBuilder.java | 208 ++++ .../rank/linear/LinearRetrieverComponent.java | 85 ++ .../rank/linear/MinMaxScoreNormalizer.java | 65 + .../xpack/rank/linear/ScoreNormalizer.java | 31 + .../xpack/rank/rrf/RRFRankPlugin.java | 16 +- .../xpack/rank/rrf/RRFRetrieverBuilder.java | 1 + ...lasticsearch.features.FeatureSpecification | 2 +- .../xpack/rank/linear/LinearRankDocTests.java | 97 ++ .../LinearRetrieverBuilderParsingTests.java | 101 ++ .../rrf/LinearRankClientYamlTestSuiteIT.java | 45 + .../test/license/100_license.yml | 40 + .../test/linear/10_linear_retriever.yml | 1065 +++++++++++++++++ 30 files changed, 3139 insertions(+), 40 deletions(-) create mode 100644 docs/changelog/120222.yaml create mode 100644 x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverIT.java 
rename x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/{rrf/RRFFeatures.java => RankRRFFeatures.java} (65%) create mode 100644 x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/IdentityScoreNormalizer.java create mode 100644 x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRankDoc.java create mode 100644 x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilder.java create mode 100644 x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverComponent.java create mode 100644 x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/MinMaxScoreNormalizer.java create mode 100644 x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/ScoreNormalizer.java create mode 100644 x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRankDocTests.java create mode 100644 x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilderParsingTests.java create mode 100644 x-pack/plugin/rank-rrf/src/yamlRestTest/java/org/elasticsearch/xpack/rank/rrf/LinearRankClientYamlTestSuiteIT.java create mode 100644 x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/linear/10_linear_retriever.yml diff --git a/docs/changelog/120222.yaml b/docs/changelog/120222.yaml new file mode 100644 index 0000000000000..c9ded878ac031 --- /dev/null +++ b/docs/changelog/120222.yaml @@ -0,0 +1,5 @@ +pr: 120222 +summary: Adding linear retriever to support weighted sums of sub-retrievers +area: "Search" +type: enhancement +issues: [] diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 5db1ae10ae902..37c5528812900 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -1338,7 +1338,7 @@ that lower ranked documents have more influence. This value must be greater than equal to `1`. Defaults to `60`. end::rrf-rank-constant[] -tag::rrf-rank-window-size[] +tag::compound-retriever-rank-window-size[] `rank_window_size`:: (Optional, integer) + @@ -1347,15 +1347,54 @@ query. A higher value will improve result relevance at the cost of performance. ranked result set is pruned down to the search request's <>. `rank_window_size` must be greater than or equal to `size` and greater than or equal to `1`. Defaults to the `size` parameter. -end::rrf-rank-window-size[] +end::compound-retriever-rank-window-size[] -tag::rrf-filter[] +tag::compound-retriever-filter[] `filter`:: (Optional, <>) + Applies the specified <> to all of the specified sub-retrievers, according to each retriever's specifications. -end::rrf-filter[] +end::compound-retriever-filter[] + +tag::linear-retriever-components[] +`retrievers`:: +(Required, array of objects) ++ +A list of the sub-retrievers' configuration, that we will take into account and whose result sets +we will merge through a weighted sum. Each configuration can have a different weight and normalization depending +on the specified retriever. + +Each entry specifies the following parameters: + +* `retriever`:: +(Required, a <> object) ++ +Specifies the retriever for which we will compute the top documents for. The retriever will produce `rank_window_size` +results, which will later be merged based on the specified `weight` and `normalizer`. + +* `weight`:: +(Optional, float) ++ +The weight that each score of this retriever's top docs will be multiplied with. 
Must be greater or equal to 0. Defaults to 1.0. + +* `normalizer`:: +(Optional, String) ++ +Specifies how we will normalize the retriever's scores, before applying the specified `weight`. +Available values are: `minmax`, and `none`. Defaults to `none`. + +** `none` +** `minmax` : +A `MinMaxScoreNormalizer` that normalizes scores based on the following formula ++ +``` +score = (score - min) / (max - min) +``` + +See also <> using a linear retriever on how to +independently configure and apply normalizers to retrievers. +end::linear-retriever-components[] tag::knn-rescore-vector[] diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index 4cccf4d204d99..fe959c4e8cbee 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -28,6 +28,9 @@ A <> that replaces the functionality of a traditi `knn`:: A <> that replaces the functionality of a <>. +`linear`:: +A <> that linearly combines the scores of other retrievers for the top documents. + `rescorer`:: A <> that replaces the functionality of the <>. @@ -45,6 +48,8 @@ A <> that applies contextual <> to pin o A standard retriever returns top documents from a traditional <>. +[discrete] +[[standard-retriever-parameters]] ===== Parameters: `query`:: @@ -195,6 +200,8 @@ Documents matching these conditions will have increased relevancy scores. A kNN retriever returns top documents from a <>. +[discrete] +[[knn-retriever-parameters]] ===== Parameters `field`:: @@ -265,21 +272,37 @@ GET /restaurants/_search This value must be fewer than or equal to `num_candidates`. <5> The size of the initial candidate set from which the final `k` nearest neighbors are selected. +[[linear-retriever]] +==== Linear Retriever +A retriever that normalizes and linearly combines the scores of other retrievers. + +[discrete] +[[linear-retriever-parameters]] +===== Parameters + +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=linear-retriever-components] + +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=compound-retriever-rank-window-size] + +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=compound-retriever-filter] + [[rrf-retriever]] ==== RRF Retriever An <> retriever returns top documents based on the RRF formula, equally weighting two or more child retrievers. Reciprocal rank fusion (RRF) is a method for combining multiple result sets with different relevance indicators into a single result set. 
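The linear retriever parameters above describe a weighted sum over (optionally `minmax`-normalized) scores, while RRF combines result sets by rank alone. As a rough, illustrative sketch of the difference (plain Java, with made-up scores and weights; this is not the plugin's `MinMaxScoreNormalizer` or the RRF implementation):

[source,java]
----
// Illustrative sketch only: simplified versions of the two combination
// strategies described above, not the Elasticsearch implementations.
import java.util.Arrays;

public class CombinationSketch {

    // `minmax` normalizer: score = (score - min) / (max - min)
    static double[] minMaxNormalize(double[] scores) {
        double min = Arrays.stream(scores).min().orElse(0.0);
        double max = Arrays.stream(scores).max().orElse(0.0);
        double[] out = new double[scores.length];
        for (int i = 0; i < scores.length; i++) {
            out[i] = (max == min) ? 1.0 : (scores[i] - min) / (max - min);
        }
        return out;
    }

    public static void main(String[] args) {
        // Hypothetical top-3 scores from a BM25 retriever and a kNN retriever;
        // assume the same document is ranked first in both result sets.
        double[] bm25 = { 12.4, 7.1, 3.3 };
        double[] knn = { 0.93, 0.88, 0.41 };

        // Linear retriever: weighted sum of normalized scores (weights 2.0 and 1.5).
        double linear = 2.0 * minMaxNormalize(bm25)[0] + 1.5 * minMaxNormalize(knn)[0];

        // RRF: purely rank based, with the default rank_constant of 60 and
        // the document sitting at rank 1 in both result sets.
        double rrf = 1.0 / (60 + 1) + 1.0 / (60 + 1);

        System.out.printf("linear score: %.3f, rrf score: %.4f%n", linear, rrf);
    }
}
----

The weighted sum is sensitive to score magnitudes, which is why a normalizer such as `minmax` matters, whereas RRF only considers each document's rank in every result set.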
+[discrete] +[[rrf-retriever-parameters]] ===== Parameters include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-retrievers] include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-rank-constant] -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-rank-window-size] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=compound-retriever-rank-window-size] -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-filter] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=compound-retriever-filter] [discrete] [[rrf-retriever-example-hybrid]] @@ -540,6 +563,8 @@ score = ln(score), if score < 0 ---- ==== +[discrete] +[[text-similarity-reranker-retriever-parameters]] ===== Parameters `retriever`:: diff --git a/docs/reference/search/rrf.asciidoc b/docs/reference/search/rrf.asciidoc index 842bd7049e3bf..59976cec9c0aa 100644 --- a/docs/reference/search/rrf.asciidoc +++ b/docs/reference/search/rrf.asciidoc @@ -45,7 +45,7 @@ include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-retrievers] include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-rank-constant] -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-rank-window-size] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=compound-retriever-rank-window-size] An example request using RRF: @@ -791,11 +791,11 @@ A more specific example of highlighting in RRF can also be found in the <> functionality, allowing you to retrieve -related nested or parent/child documents alongside your main search results. Inner hits can be -specified as part of any nested sub-retriever and will be propagated to the top-level parent -retriever. Note that the inner hit computation will take place only at end of `rrf` retriever's -evaluation on the top matching documents, and not as part of the query execution of the nested +The `rrf` retriever supports <> functionality, allowing you to retrieve +related nested or parent/child documents alongside your main search results. Inner hits can be +specified as part of any nested sub-retriever and will be propagated to the top-level parent +retriever. Note that the inner hit computation will take place only at end of `rrf` retriever's +evaluation on the top matching documents, and not as part of the query execution of the nested sub-retrievers. 
[IMPORTANT] diff --git a/docs/reference/search/search-your-data/retrievers-examples.asciidoc b/docs/reference/search/search-your-data/retrievers-examples.asciidoc index c0be7432aa179..bc5f891a759b6 100644 --- a/docs/reference/search/search-your-data/retrievers-examples.asciidoc +++ b/docs/reference/search/search-your-data/retrievers-examples.asciidoc @@ -36,6 +36,9 @@ PUT retrievers_example }, "topic": { "type": "keyword" + }, + "timestamp": { + "type": "date" } } } @@ -46,7 +49,8 @@ POST /retrievers_example/_doc/1 "vector": [0.23, 0.67, 0.89], "text": "Large language models are revolutionizing information retrieval by boosting search precision, deepening contextual understanding, and reshaping user experiences in data-rich environments.", "year": 2024, - "topic": ["llm", "ai", "information_retrieval"] + "topic": ["llm", "ai", "information_retrieval"], + "timestamp": "2021-01-01T12:10:30" } POST /retrievers_example/_doc/2 @@ -54,7 +58,8 @@ POST /retrievers_example/_doc/2 "vector": [0.12, 0.56, 0.78], "text": "Artificial intelligence is transforming medicine, from advancing diagnostics and tailoring treatment plans to empowering predictive patient care for improved health outcomes.", "year": 2023, - "topic": ["ai", "medicine"] + "topic": ["ai", "medicine"], + "timestamp": "2022-01-01T12:10:30" } POST /retrievers_example/_doc/3 @@ -62,7 +67,8 @@ POST /retrievers_example/_doc/3 "vector": [0.45, 0.32, 0.91], "text": "AI is redefining security by enabling advanced threat detection, proactive risk analysis, and dynamic defenses against increasingly sophisticated cyber threats.", "year": 2024, - "topic": ["ai", "security"] + "topic": ["ai", "security"], + "timestamp": "2023-01-01T12:10:30" } POST /retrievers_example/_doc/4 @@ -70,7 +76,8 @@ POST /retrievers_example/_doc/4 "vector": [0.34, 0.21, 0.98], "text": "Elastic introduces Elastic AI Assistant, the open, generative AI sidekick powered by ESRE to democratize cybersecurity and enable users of every skill level.", "year": 2023, - "topic": ["ai", "elastic", "assistant"] + "topic": ["ai", "elastic", "assistant"], + "timestamp": "2024-01-01T12:10:30" } POST /retrievers_example/_doc/5 @@ -78,7 +85,8 @@ POST /retrievers_example/_doc/5 "vector": [0.11, 0.65, 0.47], "text": "Learn how to spin up a deployment of our hosted Elasticsearch Service and use Elastic Observability to gain deeper insight into the behavior of your applications and systems.", "year": 2024, - "topic": ["documentation", "observability", "elastic"] + "topic": ["documentation", "observability", "elastic"], + "timestamp": "2025-01-01T12:10:30" } POST /retrievers_example/_refresh @@ -185,6 +193,248 @@ This returns the following response based on the final rrf score for each result // TESTRESPONSE[s/"took": 42/"took": $body.took/] ============== +[discrete] +[[retrievers-examples-linear-retriever]] +==== Example: Hybrid search with linear retriever + +A different, and more intuitive, way to provide hybrid search, is to linearly combine the top documents of different +retrievers using a weighted sum of the original scores. Since, as above, the scores could lie in different ranges, +we can also specify a `normalizer` that would ensure that all scores for the top ranked documents of a retriever +lie in a specific range. + +To implement this, we define a `linear` retriever, and along with a set of retrievers that will generate the heterogeneous +results sets that we will combine. We will solve a problem similar to the above, by merging the results of a `standard` and a `knn` +retriever. 
As the `standard` retriever's scores are based on BM25 and are not strictly bounded, we will also define a +`minmax` normalizer to ensure that the scores lie in the [0, 1] range. We will apply the same normalizer to `knn` as well +to ensure that we capture the importance of each document within the result set. + +So, let's now specify the `linear` retriever whose final score is computed as follows: + +[source, text] +---- +score = weight(standard) * score(standard) + weight(knn) * score(knn) +score = 2 * score(standard) + 1.5 * score(knn) +---- +// NOTCONSOLE + +[source,console] +---- +GET /retrievers_example/_search +{ + "retriever": { + "linear": { + "retrievers": [ + { + "retriever": { + "standard": { + "query": { + "query_string": { + "query": "(information retrieval) OR (artificial intelligence)", + "default_field": "text" + } + } + } + }, + "weight": 2, + "normalizer": "minmax" + }, + { + "retriever": { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + }, + "weight": 1.5, + "normalizer": "minmax" + } + ], + "rank_window_size": 10 + } + }, + "_source": false +} +---- +// TEST[continued] + +This returns the following response based on the normalized weighted score for each result. + +.Example response +[%collapsible] +============== +[source,console-result] +---- +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": -1, + "hits": [ + { + "_index": "retrievers_example", + "_id": "2", + "_score": -1 + }, + { + "_index": "retrievers_example", + "_id": "1", + "_score": -2 + }, + { + "_index": "retrievers_example", + "_id": "3", + "_score": -3 + } + ] + } +} +---- +// TESTRESPONSE[s/"took": 42/"took": $body.took/] +// TESTRESPONSE[s/"max_score": -1/"max_score": $body.hits.max_score/] +// TESTRESPONSE[s/"_score": -1/"_score": $body.hits.hits.0._score/] +// TESTRESPONSE[s/"_score": -2/"_score": $body.hits.hits.1._score/] +// TESTRESPONSE[s/"_score": -3/"_score": $body.hits.hits.2._score/] +============== + +By normalizing scores and leveraging `function_score` queries, we can also implement more complex ranking strategies, +such as sorting results based on their timestamps, assign the timestamp as a score, and then normalizing this score to +[0, 1]. 
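Before looking at the full request, here is a small illustrative sketch of the timestamp-to-score step (plain Java; the timestamps are borrowed from the example documents above with a UTC offset assumed, the `2.0` weight matches the request below, and the kNN contribution is left out):

[source,java]
----
// Illustrative sketch only: turns epoch-millisecond timestamps into a
// [0, 1] recency score via min-max normalization, then applies the weight.
import java.time.Instant;
import java.util.List;

public class TimestampScoreSketch {
    public static void main(String[] args) {
        List<Long> millis = List.of(
            Instant.parse("2021-01-01T12:10:30Z").toEpochMilli(),
            Instant.parse("2022-01-01T12:10:30Z").toEpochMilli(),
            Instant.parse("2023-01-01T12:10:30Z").toEpochMilli(),
            Instant.parse("2024-01-01T12:10:30Z").toEpochMilli()
        );
        long min = millis.stream().mapToLong(Long::longValue).min().orElseThrow();
        long max = millis.stream().mapToLong(Long::longValue).max().orElseThrow();
        for (long m : millis) {
            // `minmax` normalization of the raw script score (epoch millis),
            // followed by the retriever weight; the kNN side is omitted here.
            double normalized = (double) (m - min) / (max - min);
            System.out.printf("%d -> normalized %.3f -> weighted %.3f%n", m, normalized, 2.0 * normalized);
        }
    }
}
----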
+Then, we can easily combine the above with a `knn` retriever as follows: + +[source,console] +---- +GET /retrievers_example/_search +{ + "retriever": { + "linear": { + "retrievers": [ + { + "retriever": { + "standard": { + "query": { + "function_score": { + "query": { + "term": { + "topic": "ai" + } + }, + "functions": [ + { + "script_score": { + "script": { + "source": "doc['timestamp'].value.millis" + } + } + } + ], + "boost_mode": "replace" + } + }, + "sort": { + "timestamp": { + "order": "asc" + } + } + } + }, + "weight": 2, + "normalizer": "minmax" + }, + { + "retriever": { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + }, + "weight": 1.5 + } + ], + "rank_window_size": 10 + } + }, + "_source": false +} +---- +// TEST[continued] + +Which would return the following results: + +.Example response +[%collapsible] +============== +[source,console-result] +---- +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 4, + "relation": "eq" + }, + "max_score": -1, + "hits": [ + { + "_index": "retrievers_example", + "_id": "3", + "_score": -1 + }, + { + "_index": "retrievers_example", + "_id": "2", + "_score": -2 + }, + { + "_index": "retrievers_example", + "_id": "4", + "_score": -3 + }, + { + "_index": "retrievers_example", + "_id": "1", + "_score": -4 + } + ] + } +} +---- +// TESTRESPONSE[s/"took": 42/"took": $body.took/] +// TESTRESPONSE[s/"max_score": -1/"max_score": $body.hits.max_score/] +// TESTRESPONSE[s/"_score": -1/"_score": $body.hits.hits.0._score/] +// TESTRESPONSE[s/"_score": -2/"_score": $body.hits.hits.1._score/] +// TESTRESPONSE[s/"_score": -3/"_score": $body.hits.hits.2._score/] +// TESTRESPONSE[s/"_score": -4/"_score": $body.hits.hits.3._score/] +============== + [discrete] [[retrievers-examples-collapsing-retriever-results]] ==== Example: Grouping results by year with `collapse` diff --git a/docs/reference/search/search-your-data/retrievers-overview.asciidoc b/docs/reference/search/search-your-data/retrievers-overview.asciidoc index 1771b5bb0d849..1a94ae18a5c20 100644 --- a/docs/reference/search/search-your-data/retrievers-overview.asciidoc +++ b/docs/reference/search/search-your-data/retrievers-overview.asciidoc @@ -23,6 +23,9 @@ This ensures backward compatibility as existing `_search` requests remain suppor That way you can transition to the new abstraction at your own pace without mixing syntaxes. * <>. Returns top documents from a <>, in the context of a retriever framework. +* <>. +Combines the top results from multiple sub-retrievers using a weighted sum of their scores. Allows to specify different +weights for each retriever, as well as independently normalize the scores from each result set. * <>. Combines and ranks multiple first-stage retrievers using the reciprocal rank fusion (RRF) algorithm. Allows you to combine multiple result sets with different relevance indicators into a single result set. 
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 05c2071ad8d5f..14078fad9e20d 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -168,6 +168,7 @@ static TransportVersion def(int id) { public static final TransportVersion ILM_ADD_SEARCHABLE_SNAPSHOT_ADD_REPLICATE_FOR = def(8_834_00_0); public static final TransportVersion INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR = def(8_835_00_0); public static final TransportVersion RESOURCE_DEPRECATION_CHECKS = def(8_836_00_0); + public static final TransportVersion LINEAR_RETRIEVER_SUPPORT = def(8_837_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/index/query/RankDocsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RankDocsQueryBuilder.java index 889fa40b79aa1..524310c547597 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RankDocsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RankDocsQueryBuilder.java @@ -70,7 +70,9 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws changed |= newQueryBuilders[i] != queryBuilders[i]; } if (changed) { - return new RankDocsQueryBuilder(rankDocs, newQueryBuilders, onlyRankDocs); + RankDocsQueryBuilder clone = new RankDocsQueryBuilder(rankDocs, newQueryBuilders, onlyRankDocs); + clone.queryName(queryName()); + return clone; } } return super.doRewrite(queryRewriteContext); diff --git a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java index f5670ebd8a543..bb9f7ad4b7bf1 100644 --- a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java @@ -290,8 +290,7 @@ public RetrieverSpec(ParseField name, RetrieverParser parser) { /** * Specification of custom {@link RetrieverBuilder}. * - * @param name the name by which this retriever might be parsed or deserialized. Make sure that the retriever builder returns - * this name for {@link NamedWriteable#getWriteableName()}. + * @param name the name by which this retriever might be parsed or deserialized. 
* @param parser the parser the reads the retriever builder from xcontent */ public RetrieverSpec(String name, RetrieverParser parser) { diff --git a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java index 8403031bc65f5..0bb5fd849bbcf 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java @@ -192,8 +192,13 @@ public void onFailure(Exception e) { } }); }); - - return new RankDocsRetrieverBuilder(rankWindowSize, newRetrievers.stream().map(s -> s.retriever).toList(), results::get); + RankDocsRetrieverBuilder rankDocsRetrieverBuilder = new RankDocsRetrieverBuilder( + rankWindowSize, + newRetrievers.stream().map(s -> s.retriever).toList(), + results::get + ); + rankDocsRetrieverBuilder.retrieverName(retrieverName()); + return rankDocsRetrieverBuilder; } @Override @@ -219,7 +224,8 @@ public ActionRequestValidationException validate( boolean allowPartialSearchResults ) { validationException = super.validate(source, validationException, isScroll, allowPartialSearchResults); - if (source.size() > rankWindowSize) { + final int size = source.size(); + if (size > rankWindowSize) { validationException = addValidationError( String.format( Locale.ROOT, @@ -227,7 +233,7 @@ public ActionRequestValidationException validate( getName(), getRankWindowSizeField().getPreferredName(), rankWindowSize, - source.size() + size ), validationException ); diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilder.java index 4d3f3fefd4462..a77f5327fbc26 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/RankDocsRetrieverBuilder.java @@ -90,11 +90,13 @@ public QueryBuilder topDocsQuery() { @Override public QueryBuilder explainQuery() { - return new RankDocsQueryBuilder( + var explainQuery = new RankDocsQueryBuilder( rankDocs.get(), sources.stream().map(RetrieverBuilder::explainQuery).toArray(QueryBuilder[]::new), true ); + explainQuery.queryName(retrieverName()); + return explainQuery; } @Override @@ -123,8 +125,12 @@ public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder } else { rankQuery = new RankDocsQueryBuilder(rankDocResults, null, false); } + rankQuery.queryName(retrieverName()); // ignore prefilters of this level, they were already propagated to children searchSourceBuilder.query(rankQuery); + if (searchSourceBuilder.size() < 0) { + searchSourceBuilder.size(rankWindowSize); + } if (sourceHasMinScore()) { searchSourceBuilder.minScore(this.minScore() == null ? 
Float.MIN_VALUE : this.minScore()); } diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java index 4531beef7125d..83a331fd81b54 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java @@ -144,6 +144,7 @@ public void doToXContent(XContentBuilder builder, Params params) throws IOExcept protected RescorerRetrieverBuilder clone(List newChildRetrievers, List newPreFilterQueryBuilders) { var newInstance = new RescorerRetrieverBuilder(newChildRetrievers.get(0), rescorers); newInstance.preFilterQueryBuilders = newPreFilterQueryBuilders; + newInstance.retrieverName = retrieverName; return newInstance; } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/80_query_rules_retriever.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/80_query_rules_retriever.yml index 089a078c62207..4ce0c55511cbd 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/80_query_rules_retriever.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/80_query_rules_retriever.yml @@ -288,10 +288,9 @@ setup: rank_window_size: 1 - match: { hits.total.value: 3 } + - length: { hits.hits: 1 } - match: { hits.hits.0._id: foo } - match: { hits.hits.0._score: 1.7014124E38 } - - match: { hits.hits.1._score: 0 } - - match: { hits.hits.2._score: 0 } - do: headers: @@ -315,12 +314,10 @@ setup: rank_window_size: 2 - match: { hits.total.value: 3 } + - length: { hits.hits: 2 } - match: { hits.hits.0._id: foo } - match: { hits.hits.0._score: 1.7014124E38 } - match: { hits.hits.1._id: foo2 } - - match: { hits.hits.1._score: 1.7014122E38 } - - match: { hits.hits.2._id: bar_no_rule } - - match: { hits.hits.2._score: 0 } - do: headers: @@ -344,6 +341,7 @@ setup: rank_window_size: 10 - match: { hits.total.value: 3 } + - length: { hits.hits: 3 } - match: { hits.hits.0._id: foo } - match: { hits.hits.0._score: 1.7014124E38 } - match: { hits.hits.1._id: foo2 } diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverIT.java new file mode 100644 index 0000000000000..f98231a647470 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverIT.java @@ -0,0 +1,838 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.rank.linear; + +import org.apache.lucene.search.TotalHits; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.InnerHitBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.collapse.CollapseBuilder; +import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; +import org.elasticsearch.search.retriever.KnnRetrieverBuilder; +import org.elasticsearch.search.retriever.StandardRetrieverBuilder; +import org.elasticsearch.search.retriever.TestRetrieverBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; +import org.elasticsearch.search.vectors.QueryVectorBuilder; +import org.elasticsearch.search.vectors.TestQueryVectorBuilderPlugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.rank.rrf.RRFRankPlugin; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +@ESIntegTestCase.ClusterScope(minNumDataNodes = 2) +public class LinearRetrieverIT extends ESIntegTestCase { + + protected static String INDEX = "test_index"; + protected static final String DOC_FIELD = "doc"; + protected static final String TEXT_FIELD = "text"; + protected static final String VECTOR_FIELD = "vector"; + protected static final String TOPIC_FIELD = "topic"; + + @Override + protected Collection> nodePlugins() { + return List.of(RRFRankPlugin.class); + } + + @Before + public void setup() throws Exception { + setupIndex(); + } + + protected void setupIndex() { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 1, + "element_type": "float", + "similarity": "l2_norm", + "index": true, + "index_options": { + "type": "flat" + } + }, + "text": { + "type": "text" + }, + "doc": { + "type": "keyword" + }, + "topic": { + "type": "keyword" + }, + "views": { + "type": "nested", + "properties": { + "last30d": { + "type": "integer" + }, + "all": { + "type": "integer" + } + } + } + } + } + """; + createIndex(INDEX, 
Settings.builder().put(SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)).build()); + admin().indices().preparePutMapping(INDEX).setSource(mapping, XContentType.JSON).get(); + indexDoc(INDEX, "doc_1", DOC_FIELD, "doc_1", TOPIC_FIELD, "technology", TEXT_FIELD, "term"); + indexDoc( + INDEX, + "doc_2", + DOC_FIELD, + "doc_2", + TOPIC_FIELD, + "astronomy", + TEXT_FIELD, + "search term term", + VECTOR_FIELD, + new float[] { 2.0f } + ); + indexDoc(INDEX, "doc_3", DOC_FIELD, "doc_3", TOPIC_FIELD, "technology", VECTOR_FIELD, new float[] { 3.0f }); + indexDoc(INDEX, "doc_4", DOC_FIELD, "doc_4", TOPIC_FIELD, "technology", TEXT_FIELD, "term term term term"); + indexDoc(INDEX, "doc_5", DOC_FIELD, "doc_5", TOPIC_FIELD, "science", TEXT_FIELD, "irrelevant stuff"); + indexDoc( + INDEX, + "doc_6", + DOC_FIELD, + "doc_6", + TEXT_FIELD, + "search term term term term term term", + VECTOR_FIELD, + new float[] { 6.0f } + ); + indexDoc( + INDEX, + "doc_7", + DOC_FIELD, + "doc_7", + TOPIC_FIELD, + "biology", + TEXT_FIELD, + "term term term term term term term", + VECTOR_FIELD, + new float[] { 7.0f } + ); + refresh(INDEX); + } + + public void testLinearRetrieverWithAggs() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this one retrieves docs 1, 2, 4, 6, and 7 + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(8L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(7L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_7")).boost(6L)) + ); + // this one retrieves docs 2 and 6 due to prefilter + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + // this one retrieves docs 2, 3, 6, and 7 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null, null); + + // all requests would have an equal weight and use the identity normalizer + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null), + new CompoundRetrieverBuilder.RetrieverSource(knnRetrieverBuilder, null) + ), + rankWindowSize + ) + ); + source.size(1); + source.aggregation(AggregationBuilders.terms("topic_agg").field(TOPIC_FIELD)); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + ElasticsearchAssertions.assertResponse(req, resp -> { + assertNull(resp.pointInTimeId()); + assertNotNull(resp.getHits().getTotalHits()); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getHits().length, equalTo(1)); + 
assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); + + assertNotNull(resp.getAggregations()); + assertNotNull(resp.getAggregations().get("topic_agg")); + Terms terms = resp.getAggregations().get("topic_agg"); + + assertThat(terms.getBucketByKey("technology").getDocCount(), equalTo(3L)); + assertThat(terms.getBucketByKey("astronomy").getDocCount(), equalTo(1L)); + assertThat(terms.getBucketByKey("biology").getDocCount(), equalTo(1L)); + }); + } + + public void testLinearWithCollapse() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this one retrieves docs 1, 2, 4, 6, and 7 + // with scores 10, 9, 8, 7, 6 + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(8L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(7L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_7")).boost(6L)) + ); + // this one retrieves docs 2 and 6 due to prefilter + // with scores 20, 5 + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + // this one retrieves docs 2, 3, 6, and 7 + // with scores 1, 0.5, 0.05882353, 0.03846154 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null, null); + // final ranking with no-normalizer would be: doc 2, 6, 1, 4, 7, 3 + // doc 1: 10 + // doc 2: 9 + 20 + 1 = 30 + // doc 3: 0.5 + // doc 4: 8 + // doc 6: 7 + 5 + 0.05882353 = 12.05882353 + // doc 7: 6 + 0.03846154 = 6.03846154 + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null), + new CompoundRetrieverBuilder.RetrieverSource(knnRetrieverBuilder, null) + ), + rankWindowSize + ) + ); + source.collapse( + new CollapseBuilder(TOPIC_FIELD).setInnerHits( + new InnerHitBuilder("a").addSort(new FieldSortBuilder(DOC_FIELD).order(SortOrder.DESC)).setSize(10) + ) + ); + source.fetchField(TOPIC_FIELD); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + ElasticsearchAssertions.assertResponse(req, resp -> { + assertNull(resp.pointInTimeId()); + assertNotNull(resp.getHits().getTotalHits()); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getHits().length, equalTo(4)); + assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); + assertThat(resp.getHits().getAt(0).getScore(), equalTo(30f)); + assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_6")); + assertThat((double) resp.getHits().getAt(1).getScore(), closeTo(12.0588f, 0.0001f)); + 
assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_1")); + assertThat(resp.getHits().getAt(2).getScore(), equalTo(10f)); + assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(0).getId(), equalTo("doc_4")); + assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(1).getId(), equalTo("doc_3")); + assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(2).getId(), equalTo("doc_1")); + assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_7")); + assertThat((double) resp.getHits().getAt(3).getScore(), closeTo(6.0384f, 0.0001f)); + }); + } + + public void testLinearRetrieverWithCollapseAndAggs() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this one retrieves docs 1, 2, 4, 6, and 7 + // with scores 10, 9, 8, 7, 6 + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(8L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(7L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_7")).boost(6L)) + ); + // this one retrieves docs 2 and 6 due to prefilter + // with scores 20, 5 + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + // this one retrieves docs 2, 3, 6, and 7 + // with scores 1, 0.5, 0.05882353, 0.03846154 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null, null); + // final ranking with no-normalizer would be: doc 2, 6, 1, 4, 7, 3 + // doc 1: 10 + // doc 2: 9 + 20 + 1 = 30 + // doc 3: 0.5 + // doc 4: 8 + // doc 6: 7 + 5 + 0.05882353 = 12.05882353 + // doc 7: 6 + 0.03846154 = 6.03846154 + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null), + new CompoundRetrieverBuilder.RetrieverSource(knnRetrieverBuilder, null) + ), + rankWindowSize + ) + ); + source.collapse( + new CollapseBuilder(TOPIC_FIELD).setInnerHits( + new InnerHitBuilder("a").addSort(new FieldSortBuilder(DOC_FIELD).order(SortOrder.DESC)).setSize(10) + ) + ); + source.fetchField(TOPIC_FIELD); + source.aggregation(AggregationBuilders.terms("topic_agg").field(TOPIC_FIELD)); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + ElasticsearchAssertions.assertResponse(req, resp -> { + assertNull(resp.pointInTimeId()); + assertNotNull(resp.getHits().getTotalHits()); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getHits().length, equalTo(4)); + assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); + 
assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_6")); + assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_1")); + assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(0).getId(), equalTo("doc_4")); + assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(1).getId(), equalTo("doc_3")); + assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(2).getId(), equalTo("doc_1")); + assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_7")); + + assertNotNull(resp.getAggregations()); + assertNotNull(resp.getAggregations().get("topic_agg")); + Terms terms = resp.getAggregations().get("topic_agg"); + + assertThat(terms.getBucketByKey("technology").getDocCount(), equalTo(3L)); + assertThat(terms.getBucketByKey("astronomy").getDocCount(), equalTo(1L)); + assertThat(terms.getBucketByKey("biology").getDocCount(), equalTo(1L)); + }); + } + + public void testMultipleLinearRetrievers() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this one retrieves docs 1, 2, 4, 6, and 7 + // with scores 10, 9, 8, 7, 6 + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(8L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(7L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_7")).boost(6L)) + ); + // this one retrieves docs 2 and 6 due to prefilter + // with scores 20, 5 + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource( + // this one returns docs doc 2, 1, 6, 4, 7 + // with scores 38, 20, 19, 16, 12 + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null) + ), + rankWindowSize, + new float[] { 2.0f, 1.0f }, + new ScoreNormalizer[] { IdentityScoreNormalizer.INSTANCE, IdentityScoreNormalizer.INSTANCE } + ), + null + ), + // this one bring just doc 7 which should be ranked first eventually with a score of 100 + new CompoundRetrieverBuilder.RetrieverSource( + new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 7.0f }, null, 1, 100, null, null), + null + ) + ), + rankWindowSize, + new float[] { 1.0f, 100.0f }, + new ScoreNormalizer[] { IdentityScoreNormalizer.INSTANCE, IdentityScoreNormalizer.INSTANCE } + ) + ); + + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + ElasticsearchAssertions.assertResponse(req, resp -> { + assertNull(resp.pointInTimeId()); + assertNotNull(resp.getHits().getTotalHits()); + assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); + assertThat(resp.getHits().getTotalHits().relation(), 
equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_7")); + assertThat(resp.getHits().getAt(0).getScore(), equalTo(112f)); + assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_2")); + assertThat(resp.getHits().getAt(1).getScore(), equalTo(38f)); + assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_1")); + assertThat(resp.getHits().getAt(2).getScore(), equalTo(20f)); + assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_6")); + assertThat(resp.getHits().getAt(3).getScore(), equalTo(19f)); + assertThat(resp.getHits().getAt(4).getId(), equalTo("doc_4")); + assertThat(resp.getHits().getAt(4).getScore(), equalTo(16f)); + }); + } + + public void testLinearExplainWithNamedRetrievers() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this one retrieves docs 1, 2, 4, 6, and 7 + // with scores 10, 9, 8, 7, 6 + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(8L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(7L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_7")).boost(6L)) + ); + standard0.retrieverName("my_custom_retriever"); + // this one retrieves docs 2 and 6 due to prefilter + // with scores 20, 5 + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + // this one retrieves docs 2, 3, 6, and 7 + // with scores 1, 0.5, 0.05882353, 0.03846154 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null, null); + // final ranking with no-normalizer would be: doc 2, 6, 1, 4, 7, 3 + // doc 1: 10 + // doc 2: 9 + 20 + 1 = 30 + // doc 3: 0.5 + // doc 4: 8 + // doc 6: 7 + 5 + 0.05882353 = 12.05882353 + // doc 7: 6 + 0.03846154 = 6.03846154 + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null), + new CompoundRetrieverBuilder.RetrieverSource(knnRetrieverBuilder, null) + ), + rankWindowSize + ) + ); + source.explain(true); + source.size(1); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + ElasticsearchAssertions.assertResponse(req, resp -> { + assertNull(resp.pointInTimeId()); + assertNotNull(resp.getHits().getTotalHits()); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getHits().length, equalTo(1)); + assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); + assertThat(resp.getHits().getAt(0).getExplanation().isMatch(), equalTo(true)); + 
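+ // the top-level explanation for doc_2 should expose the weighted linear combination detail:
+ // 1.0 * 9.0 (from [my_custom_retriever]) + 1.0 * 20.0 + 1.0 * 1.0 = 30.0, each sub-score reported with normalizer [none]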
assertThat(resp.getHits().getAt(0).getExplanation().getDescription(), containsString("sum of:")); + assertThat(resp.getHits().getAt(0).getExplanation().getDetails().length, equalTo(2)); + var rrfDetails = resp.getHits().getAt(0).getExplanation().getDetails()[0]; + assertThat(rrfDetails.getDetails().length, equalTo(3)); + assertThat( + rrfDetails.getDescription(), + equalTo( + "weighted linear combination score: [30.0] computed for normalized scores [9.0, 20.0, 1.0] " + + "and weights [1.0, 1.0, 1.0] as sum of (weight[i] * score[i]) for each query." + ) + ); + + assertThat( + rrfDetails.getDetails()[0].getDescription(), + containsString( + "weighted score: [9.0] in query at index [0] [my_custom_retriever] computed as [1.0 * 9.0] " + + "using score normalizer [none] for original matching query with score" + ) + ); + assertThat( + rrfDetails.getDetails()[1].getDescription(), + containsString( + "weighted score: [20.0] in query at index [1] computed as [1.0 * 20.0] using score normalizer [none] " + + "for original matching query with score:" + ) + ); + assertThat( + rrfDetails.getDetails()[2].getDescription(), + containsString( + "weighted score: [1.0] in query at index [2] computed as [1.0 * 1.0] using score normalizer [none] " + + "for original matching query with score" + ) + ); + }); + } + + public void testLinearExplainWithAnotherNestedLinear() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this one retrieves docs 1, 2, 4, 6, and 7 + // with scores 10, 9, 8, 7, 6 + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(8L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(7L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_7")).boost(6L)) + ); + standard0.retrieverName("my_custom_retriever"); + // this one retrieves docs 2 and 6 due to prefilter + // with scores 20, 5 + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + // this one retrieves docs 2, 3, 6, and 7 + // with scores 1, 0.5, 0.05882353, 0.03846154 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null, null); + // final ranking with no-normalizer would be: doc 2, 6, 1, 4, 7, 3 + // doc 1: 10 + // doc 2: 9 + 20 + 1 = 30 + // doc 3: 0.5 + // doc 4: 8 + // doc 6: 7 + 5 + 0.05882353 = 12.05882353 + // doc 7: 6 + 0.03846154 = 6.03846154 + LinearRetrieverBuilder nestedLinear = new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null), + new CompoundRetrieverBuilder.RetrieverSource(knnRetrieverBuilder, null) + ), + rankWindowSize + ); + 
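+ // expected arithmetic for the top hit doc_6: the nested linear retriever scores it at 7 + 5 + 0.05882353 = 12.05882353,
+ // and the top-level retriever (built below) adds the boost-20 standard retriever with weights [1, 5]:
+ // 1 * 12.05882353 + 5 * 20 = 112.05882353, matching the [112.05882] asserted in the explanation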
nestedLinear.retrieverName("nested_linear"); + // this one retrieves doc 6 with a score of 20, so its weighted contribution at the top level is 5 * 20 = 100 + StandardRetrieverBuilder standard2 = new StandardRetrieverBuilder( + QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(20L) + ); + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(nestedLinear, null), + new CompoundRetrieverBuilder.RetrieverSource(standard2, null) + ), + rankWindowSize, + new float[] { 1, 5f }, + new ScoreNormalizer[] { IdentityScoreNormalizer.INSTANCE, IdentityScoreNormalizer.INSTANCE } + ) + ); + source.explain(true); + source.size(1); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + ElasticsearchAssertions.assertResponse(req, resp -> { + assertNull(resp.pointInTimeId()); + assertNotNull(resp.getHits().getTotalHits()); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getHits().length, equalTo(1)); + assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); + assertThat(resp.getHits().getAt(0).getExplanation().isMatch(), equalTo(true)); + assertThat(resp.getHits().getAt(0).getExplanation().getDescription(), containsString("sum of:")); + assertThat(resp.getHits().getAt(0).getExplanation().getDetails().length, equalTo(2)); + var linearTopLevel = resp.getHits().getAt(0).getExplanation().getDetails()[0]; + assertThat(linearTopLevel.getDetails().length, equalTo(2)); + assertThat( + linearTopLevel.getDescription(), + containsString( + "weighted linear combination score: [112.05882] computed for normalized scores [12.058824, 20.0] " + + "and weights [1.0, 5.0] as sum of (weight[i] * score[i]) for each query." 
+ ) + ); + assertThat(linearTopLevel.getDetails()[0].getDescription(), containsString("weighted score: [12.058824]")); + assertThat(linearTopLevel.getDetails()[0].getDescription(), containsString("nested_linear")); + assertThat(linearTopLevel.getDetails()[1].getDescription(), containsString("weighted score: [100.0]")); + + var linearNested = linearTopLevel.getDetails()[0]; + assertThat(linearNested.getDetails()[0].getDetails().length, equalTo(3)); + assertThat(linearNested.getDetails()[0].getDetails()[0].getDescription(), containsString("weighted score: [7.0]")); + assertThat(linearNested.getDetails()[0].getDetails()[1].getDescription(), containsString("weighted score: [5.0]")); + assertThat(linearNested.getDetails()[0].getDetails()[2].getDescription(), containsString("weighted score: [0.05882353]")); + + var standard0Details = linearTopLevel.getDetails()[1]; + assertThat(standard0Details.getDetails()[0].getDescription(), containsString("ConstantScore")); + }); + } + + public void testLinearInnerRetrieverAll4xxSearchErrors() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this will throw a 4xx error during evaluation + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.constantScoreQuery(QueryBuilders.rangeQuery(VECTOR_FIELD).gte(10)) + ); + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null) + ), + rankWindowSize + ) + ); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + Exception ex = expectThrows(ElasticsearchStatusException.class, req::get); + assertThat(ex, instanceOf(ElasticsearchStatusException.class)); + assertThat( + ex.getMessage(), + containsString( + "[linear] search failed - retrievers '[standard]' returned errors. All failures are attached as suppressed exceptions." 
+ ) + ); + assertThat(ExceptionsHelper.status(ex), equalTo(RestStatus.BAD_REQUEST)); + assertThat(ex.getSuppressed().length, equalTo(1)); + assertThat(ex.getSuppressed()[0].getCause().getCause(), instanceOf(IllegalArgumentException.class)); + } + + public void testLinearInnerRetrieverMultipleErrorsOne5xx() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this will throw a 4xx error during evaluation + StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( + QueryBuilders.constantScoreQuery(QueryBuilders.rangeQuery(VECTOR_FIELD).gte(10)) + ); + // this will throw a 5xx error + TestRetrieverBuilder testRetrieverBuilder = new TestRetrieverBuilder("val") { + @Override + public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) { + searchSourceBuilder.aggregation(AggregationBuilders.avg("some_invalid_param")); + } + }; + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standard0, null), + new CompoundRetrieverBuilder.RetrieverSource(testRetrieverBuilder, null) + ), + rankWindowSize + ) + ); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + Exception ex = expectThrows(ElasticsearchStatusException.class, req::get); + assertThat(ex, instanceOf(ElasticsearchStatusException.class)); + assertThat( + ex.getMessage(), + containsString( + "[linear] search failed - retrievers '[standard, test]' returned errors. " + + "All failures are attached as suppressed exceptions." + ) + ); + assertThat(ExceptionsHelper.status(ex), equalTo(RestStatus.INTERNAL_SERVER_ERROR)); + assertThat(ex.getSuppressed().length, equalTo(2)); + assertThat(ex.getSuppressed()[0].getCause().getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(ex.getSuppressed()[1].getCause().getCause(), instanceOf(IllegalStateException.class)); + } + + public void testLinearInnerRetrieverErrorWhenExtractingToSource() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + TestRetrieverBuilder failingRetriever = new TestRetrieverBuilder("some value") { + @Override + public QueryBuilder topDocsQuery() { + return QueryBuilders.matchAllQuery(); + } + + @Override + public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) { + throw new UnsupportedOperationException("simulated failure"); + } + }; + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(failingRetriever, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null) + ), + rankWindowSize + ) + ); + source.size(1); + expectThrows(UnsupportedOperationException.class, () -> client().prepareSearch(INDEX).setSource(source).get()); + } + + public void testLinearInnerRetrieverErrorOnTopDocs() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + TestRetrieverBuilder failingRetriever = new 
TestRetrieverBuilder("some value") { + @Override + public QueryBuilder topDocsQuery() { + throw new UnsupportedOperationException("simulated failure"); + } + + @Override + public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) { + searchSourceBuilder.query(QueryBuilders.matchAllQuery()); + } + }; + StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( + QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2")).boost(20L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_3")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(5L)) + ); + standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(failingRetriever, null), + new CompoundRetrieverBuilder.RetrieverSource(standard1, null) + ), + rankWindowSize + ) + ); + source.size(1); + source.aggregation(AggregationBuilders.terms("topic_agg").field(TOPIC_FIELD)); + expectThrows(UnsupportedOperationException.class, () -> client().prepareSearch(INDEX).setSource(source).get()); + } + + public void testLinearFiltersPropagatedToKnnQueryVectorBuilder() { + final int rankWindowSize = 100; + SearchSourceBuilder source = new SearchSourceBuilder(); + // this will retriever all but 7 only due to top-level filter + StandardRetrieverBuilder standardRetriever = new StandardRetrieverBuilder(QueryBuilders.matchAllQuery()); + // this will too retrieve just doc 7 + KnnRetrieverBuilder knnRetriever = new KnnRetrieverBuilder( + "vector", + null, + new TestQueryVectorBuilderPlugin.TestQueryVectorBuilder(new float[] { 3 }), + 10, + 10, + null, + null + ); + source.retriever( + new LinearRetrieverBuilder( + Arrays.asList( + new CompoundRetrieverBuilder.RetrieverSource(standardRetriever, null), + new CompoundRetrieverBuilder.RetrieverSource(knnRetriever, null) + ), + rankWindowSize + ) + ); + source.retriever().getPreFilterQueryBuilders().add(QueryBuilders.boolQuery().must(QueryBuilders.termQuery(DOC_FIELD, "doc_7"))); + source.size(10); + SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); + ElasticsearchAssertions.assertResponse(req, resp -> { + assertNull(resp.pointInTimeId()); + assertNotNull(resp.getHits().getTotalHits()); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getHits()[0].getId(), equalTo("doc_7")); + }); + } + + public void testRewriteOnce() { + final float[] vector = new float[] { 1 }; + AtomicInteger numAsyncCalls = new AtomicInteger(); + QueryVectorBuilder vectorBuilder = new QueryVectorBuilder() { + @Override + public void buildVector(Client client, ActionListener listener) { + numAsyncCalls.incrementAndGet(); + listener.onResponse(vector); + } + + @Override + public String getWriteableName() { + throw new IllegalStateException("Should not be called"); + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + throw new IllegalStateException("Should not be called"); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new IllegalStateException("Should not be called"); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + throw new IllegalStateException("Should not be called"); + } + }; + var knn = new 
KnnRetrieverBuilder("vector", null, vectorBuilder, 10, 10, null, null); + var standard = new StandardRetrieverBuilder(new KnnVectorQueryBuilder("vector", vectorBuilder, 10, 10, null)); + var rrf = new LinearRetrieverBuilder( + List.of(new CompoundRetrieverBuilder.RetrieverSource(knn, null), new CompoundRetrieverBuilder.RetrieverSource(standard, null)), + 10 + ); + assertResponse( + client().prepareSearch(INDEX).setSource(new SearchSourceBuilder().retriever(rrf)), + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(4L)) + ); + assertThat(numAsyncCalls.get(), equalTo(2)); + + // check that we use the rewritten vector to build the explain query + assertResponse( + client().prepareSearch(INDEX).setSource(new SearchSourceBuilder().retriever(rrf).explain(true)), + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(4L)) + ); + assertThat(numAsyncCalls.get(), equalTo(4)); + } +} diff --git a/x-pack/plugin/rank-rrf/src/main/java/module-info.java b/x-pack/plugin/rank-rrf/src/main/java/module-info.java index 4fd2a7e4d54f3..fbe467fdf3eae 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/module-info.java +++ b/x-pack/plugin/rank-rrf/src/main/java/module-info.java @@ -5,7 +5,7 @@ * 2.0. */ -import org.elasticsearch.xpack.rank.rrf.RRFFeatures; +import org.elasticsearch.xpack.rank.RankRRFFeatures; module org.elasticsearch.rank.rrf { requires org.apache.lucene.core; @@ -14,7 +14,9 @@ requires org.elasticsearch.server; requires org.elasticsearch.xcore; + exports org.elasticsearch.xpack.rank; exports org.elasticsearch.xpack.rank.rrf; + exports org.elasticsearch.xpack.rank.linear; - provides org.elasticsearch.features.FeatureSpecification with RRFFeatures; + provides org.elasticsearch.features.FeatureSpecification with RankRRFFeatures; } diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/RankRRFFeatures.java similarity index 65% rename from x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java rename to x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/RankRRFFeatures.java index 494eaa508c14a..5966e17f20429 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFFeatures.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/RankRRFFeatures.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.rank.rrf; +package org.elasticsearch.xpack.rank; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; @@ -14,10 +14,14 @@ import static org.elasticsearch.search.retriever.CompoundRetrieverBuilder.INNER_RETRIEVERS_FILTER_SUPPORT; -/** - * A set of features specifically for the rrf plugin. 
- */ -public class RRFFeatures implements FeatureSpecification { +public class RankRRFFeatures implements FeatureSpecification { + + public static final NodeFeature LINEAR_RETRIEVER_SUPPORTED = new NodeFeature("linear_retriever_supported"); + + @Override + public Set getFeatures() { + return Set.of(LINEAR_RETRIEVER_SUPPORTED); + } @Override public Set getTestFeatures() { diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/IdentityScoreNormalizer.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/IdentityScoreNormalizer.java new file mode 100644 index 0000000000000..15af17a1db4ef --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/IdentityScoreNormalizer.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.rank.linear; + +import org.apache.lucene.search.ScoreDoc; + +public class IdentityScoreNormalizer extends ScoreNormalizer { + + public static final IdentityScoreNormalizer INSTANCE = new IdentityScoreNormalizer(); + + public static final String NAME = "none"; + + @Override + public String getName() { + return NAME; + } + + @Override + public ScoreDoc[] normalizeScores(ScoreDoc[] docs) { + return docs; + } +} diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRankDoc.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRankDoc.java new file mode 100644 index 0000000000000..bb1c420bbd06c --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRankDoc.java @@ -0,0 +1,143 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.rank.linear; + +import org.apache.lucene.search.Explanation; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.rank.RankDoc; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +import static org.elasticsearch.xpack.rank.linear.LinearRetrieverBuilder.DEFAULT_SCORE; +import static org.elasticsearch.xpack.rank.linear.LinearRetrieverComponent.DEFAULT_NORMALIZER; +import static org.elasticsearch.xpack.rank.linear.LinearRetrieverComponent.DEFAULT_WEIGHT; + +public class LinearRankDoc extends RankDoc { + + public static final String NAME = "linear_rank_doc"; + + final float[] weights; + final String[] normalizers; + public float[] normalizedScores; + + public LinearRankDoc(int doc, float score, int shardIndex) { + super(doc, score, shardIndex); + this.weights = null; + this.normalizers = null; + } + + public LinearRankDoc(int doc, float score, int shardIndex, float[] weights, String[] normalizers) { + super(doc, score, shardIndex); + this.weights = weights; + this.normalizers = normalizers; + } + + public LinearRankDoc(StreamInput in) throws IOException { + super(in); + weights = in.readOptionalFloatArray(); + normalizedScores = in.readOptionalFloatArray(); + normalizers = in.readOptionalStringArray(); + } + + @Override + public Explanation explain(Explanation[] sources, String[] queryNames) { + assert normalizedScores != null && weights != null && normalizers != null; + assert normalizedScores.length == sources.length; + + Explanation[] details = new Explanation[sources.length]; + for (int i = 0; i < sources.length; i++) { + final String queryAlias = queryNames[i] == null ? "" : " [" + queryNames[i] + "]"; + final String queryIdentifier = "at index [" + i + "]" + queryAlias; + final float weight = weights == null ? DEFAULT_WEIGHT : weights[i]; + final float normalizedScore = normalizedScores == null ? DEFAULT_SCORE : normalizedScores[i]; + final String normalizer = normalizers == null ? DEFAULT_NORMALIZER.getName() : normalizers[i]; + if (normalizedScore > 0) { + details[i] = Explanation.match( + weight * normalizedScore, + "weighted score: [" + + weight * normalizedScore + + "] in query " + + queryIdentifier + + " computed as [" + + weight + + " * " + + normalizedScore + + "]" + + " using score normalizer [" + + normalizer + + "]" + + " for original matching query with score:", + sources[i] + ); + } else { + final String description = "weighted score: [0], result not found in query " + queryIdentifier; + details[i] = Explanation.noMatch(description); + } + } + return Explanation.match( + score, + "weighted linear combination score: [" + + score + + "] computed for normalized scores " + + Arrays.toString(normalizedScores) + + (weights == null ? 
"" : " and weights " + Arrays.toString(weights)) + + " as sum of (weight[i] * score[i]) for each query.", + details + ); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeOptionalFloatArray(weights); + out.writeOptionalFloatArray(normalizedScores); + out.writeOptionalStringArray(normalizers); + } + + @Override + protected void doToXContent(XContentBuilder builder, Params params) throws IOException { + if (weights != null) { + builder.field("weights", weights); + } + if (normalizedScores != null) { + builder.field("normalizedScores", normalizedScores); + } + if (normalizers != null) { + builder.field("normalizers", normalizers); + } + } + + @Override + public boolean doEquals(RankDoc rd) { + LinearRankDoc lrd = (LinearRankDoc) rd; + return Arrays.equals(weights, lrd.weights) + && Arrays.equals(normalizedScores, lrd.normalizedScores) + && Arrays.equals(normalizers, lrd.normalizers); + } + + @Override + public int doHashCode() { + int result = Objects.hash(Arrays.hashCode(weights), Arrays.hashCode(normalizedScores), Arrays.hashCode(normalizers)); + return 31 * result; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.LINEAR_RETRIEVER_SUPPORT; + } +} diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilder.java new file mode 100644 index 0000000000000..66bbbf95bc9d6 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilder.java @@ -0,0 +1,208 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.rank.linear; + +import org.apache.lucene.search.ScoreDoc; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.util.Maps; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.rank.RankBuilder; +import org.elasticsearch.search.rank.RankDoc; +import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; +import org.elasticsearch.search.retriever.RetrieverBuilder; +import org.elasticsearch.search.retriever.RetrieverParserContext; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.rank.rrf.RRFRankPlugin; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.rank.RankRRFFeatures.LINEAR_RETRIEVER_SUPPORTED; +import static org.elasticsearch.xpack.rank.linear.LinearRetrieverComponent.DEFAULT_WEIGHT; + +/** + * The {@code LinearRetrieverBuilder} supports the combination of different retrievers through a weighted linear combination. + * For example, assume that we have retrievers r1 and r2, the final score of the {@code LinearRetrieverBuilder} is defined as + * {@code score(r)=w1*score(r1) + w2*score(r2)}. + * Each sub-retriever score can be normalized before being considered for the weighted linear sum, by setting the appropriate + * normalizer parameter. + * + */ +public final class LinearRetrieverBuilder extends CompoundRetrieverBuilder { + + public static final String NAME = "linear"; + + public static final ParseField RETRIEVERS_FIELD = new ParseField("retrievers"); + + public static final float DEFAULT_SCORE = 0f; + + private final float[] weights; + private final ScoreNormalizer[] normalizers; + + @SuppressWarnings("unchecked") + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + false, + args -> { + List retrieverComponents = (List) args[0]; + int rankWindowSize = args[1] == null ? 
RankBuilder.DEFAULT_RANK_WINDOW_SIZE : (int) args[1]; + List innerRetrievers = new ArrayList<>(); + float[] weights = new float[retrieverComponents.size()]; + ScoreNormalizer[] normalizers = new ScoreNormalizer[retrieverComponents.size()]; + int index = 0; + for (LinearRetrieverComponent component : retrieverComponents) { + innerRetrievers.add(new RetrieverSource(component.retriever, null)); + weights[index] = component.weight; + normalizers[index] = component.normalizer; + index++; + } + return new LinearRetrieverBuilder(innerRetrievers, rankWindowSize, weights, normalizers); + } + ); + + static { + PARSER.declareObjectArray(constructorArg(), LinearRetrieverComponent::fromXContent, RETRIEVERS_FIELD); + PARSER.declareInt(optionalConstructorArg(), RANK_WINDOW_SIZE_FIELD); + RetrieverBuilder.declareBaseParserFields(NAME, PARSER); + } + + private static float[] getDefaultWeight(int size) { + float[] weights = new float[size]; + Arrays.fill(weights, DEFAULT_WEIGHT); + return weights; + } + + private static ScoreNormalizer[] getDefaultNormalizers(int size) { + ScoreNormalizer[] normalizers = new ScoreNormalizer[size]; + Arrays.fill(normalizers, IdentityScoreNormalizer.INSTANCE); + return normalizers; + } + + public static LinearRetrieverBuilder fromXContent(XContentParser parser, RetrieverParserContext context) throws IOException { + if (context.clusterSupportsFeature(LINEAR_RETRIEVER_SUPPORTED) == false) { + throw new ParsingException(parser.getTokenLocation(), "unknown retriever [" + NAME + "]"); + } + if (RRFRankPlugin.LINEAR_RETRIEVER_FEATURE.check(XPackPlugin.getSharedLicenseState()) == false) { + throw LicenseUtils.newComplianceException("linear retriever"); + } + return PARSER.apply(parser, context); + } + + LinearRetrieverBuilder(List innerRetrievers, int rankWindowSize) { + this(innerRetrievers, rankWindowSize, getDefaultWeight(innerRetrievers.size()), getDefaultNormalizers(innerRetrievers.size())); + } + + public LinearRetrieverBuilder( + List innerRetrievers, + int rankWindowSize, + float[] weights, + ScoreNormalizer[] normalizers + ) { + super(innerRetrievers, rankWindowSize); + if (weights.length != innerRetrievers.size()) { + throw new IllegalArgumentException("The number of weights must match the number of inner retrievers"); + } + if (normalizers.length != innerRetrievers.size()) { + throw new IllegalArgumentException("The number of normalizers must match the number of inner retrievers"); + } + this.weights = weights; + this.normalizers = normalizers; + } + + @Override + protected LinearRetrieverBuilder clone(List newChildRetrievers, List newPreFilterQueryBuilders) { + LinearRetrieverBuilder clone = new LinearRetrieverBuilder(newChildRetrievers, rankWindowSize, weights, normalizers); + clone.preFilterQueryBuilders = newPreFilterQueryBuilders; + clone.retrieverName = retrieverName; + return clone; + } + + @Override + protected SearchSourceBuilder finalizeSourceBuilder(SearchSourceBuilder sourceBuilder) { + sourceBuilder.trackScores(true); + return sourceBuilder; + } + + @Override + protected RankDoc[] combineInnerRetrieverResults(List rankResults, boolean isExplain) { + Map docsToRankResults = Maps.newMapWithExpectedSize(rankWindowSize); + final String[] normalizerNames = Arrays.stream(normalizers).map(ScoreNormalizer::getName).toArray(String[]::new); + for (int result = 0; result < rankResults.size(); result++) { + final ScoreNormalizer normalizer = normalizers[result] == null ? 
IdentityScoreNormalizer.INSTANCE : normalizers[result]; + ScoreDoc[] originalScoreDocs = rankResults.get(result); + ScoreDoc[] normalizedScoreDocs = normalizer.normalizeScores(originalScoreDocs); + for (int scoreDocIndex = 0; scoreDocIndex < normalizedScoreDocs.length; scoreDocIndex++) { + LinearRankDoc rankDoc = docsToRankResults.computeIfAbsent( + new RankDoc.RankKey(originalScoreDocs[scoreDocIndex].doc, originalScoreDocs[scoreDocIndex].shardIndex), + key -> { + if (isExplain) { + LinearRankDoc doc = new LinearRankDoc(key.doc(), 0f, key.shardIndex(), weights, normalizerNames); + doc.normalizedScores = new float[rankResults.size()]; + return doc; + } else { + return new LinearRankDoc(key.doc(), 0f, key.shardIndex()); + } + } + ); + if (isExplain) { + rankDoc.normalizedScores[result] = normalizedScoreDocs[scoreDocIndex].score; + } + // if we do not have scores associated with this result set, just ignore its contribution to the final + // score computation by setting its score to 0. + final float docScore = false == Float.isNaN(normalizedScoreDocs[scoreDocIndex].score) + ? normalizedScoreDocs[scoreDocIndex].score + : DEFAULT_SCORE; + final float weight = Float.isNaN(weights[result]) ? DEFAULT_WEIGHT : weights[result]; + rankDoc.score += weight * docScore; + } + } + // sort the results based on the final score, tiebreaker based on smaller doc id + LinearRankDoc[] sortedResults = docsToRankResults.values().toArray(LinearRankDoc[]::new); + Arrays.sort(sortedResults); + // trim the results if needed, otherwise each shard will always return `rank_window_size` results. + LinearRankDoc[] topResults = new LinearRankDoc[Math.min(rankWindowSize, sortedResults.length)]; + for (int rank = 0; rank < topResults.length; ++rank) { + topResults[rank] = sortedResults[rank]; + topResults[rank].rank = rank + 1; + } + return topResults; + } + + @Override + public String getName() { + return NAME; + } + + public void doToXContent(XContentBuilder builder, Params params) throws IOException { + int index = 0; + if (innerRetrievers.isEmpty() == false) { + builder.startArray(RETRIEVERS_FIELD.getPreferredName()); + for (var entry : innerRetrievers) { + builder.startObject(); + builder.field(LinearRetrieverComponent.RETRIEVER_FIELD.getPreferredName(), entry.retriever()); + builder.field(LinearRetrieverComponent.WEIGHT_FIELD.getPreferredName(), weights[index]); + builder.field(LinearRetrieverComponent.NORMALIZER_FIELD.getPreferredName(), normalizers[index].getName()); + builder.endObject(); + index++; + } + builder.endArray(); + } + builder.field(RANK_WINDOW_SIZE_FIELD.getPreferredName(), rankWindowSize); + } +} diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverComponent.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverComponent.java new file mode 100644 index 0000000000000..bb0d79d3fe488 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverComponent.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.rank.linear; + +import org.elasticsearch.search.retriever.RetrieverBuilder; +import org.elasticsearch.search.retriever.RetrieverParserContext; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class LinearRetrieverComponent implements ToXContentObject { + + public static final ParseField RETRIEVER_FIELD = new ParseField("retriever"); + public static final ParseField WEIGHT_FIELD = new ParseField("weight"); + public static final ParseField NORMALIZER_FIELD = new ParseField("normalizer"); + + static final float DEFAULT_WEIGHT = 1f; + static final ScoreNormalizer DEFAULT_NORMALIZER = IdentityScoreNormalizer.INSTANCE; + + RetrieverBuilder retriever; + float weight; + ScoreNormalizer normalizer; + + public LinearRetrieverComponent(RetrieverBuilder retrieverBuilder, Float weight, ScoreNormalizer normalizer) { + assert retrieverBuilder != null; + this.retriever = retrieverBuilder; + this.weight = weight == null ? DEFAULT_WEIGHT : weight; + this.normalizer = normalizer == null ? DEFAULT_NORMALIZER : normalizer; + if (this.weight < 0) { + throw new IllegalArgumentException("[weight] must be non-negative"); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(RETRIEVER_FIELD.getPreferredName(), retriever); + builder.field(WEIGHT_FIELD.getPreferredName(), weight); + builder.field(NORMALIZER_FIELD.getPreferredName(), normalizer.getName()); + return builder; + } + + @SuppressWarnings("unchecked") + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "retriever-component", + false, + args -> { + RetrieverBuilder retrieverBuilder = (RetrieverBuilder) args[0]; + Float weight = (Float) args[1]; + ScoreNormalizer normalizer = (ScoreNormalizer) args[2]; + return new LinearRetrieverComponent(retrieverBuilder, weight, normalizer); + } + ); + + static { + PARSER.declareNamedObject(constructorArg(), (p, c, n) -> { + RetrieverBuilder innerRetriever = p.namedObject(RetrieverBuilder.class, n, c); + c.trackRetrieverUsage(innerRetriever.getName()); + return innerRetriever; + }, RETRIEVER_FIELD); + PARSER.declareFloat(optionalConstructorArg(), WEIGHT_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> ScoreNormalizer.valueOf(p.text()), + NORMALIZER_FIELD, + ObjectParser.ValueType.STRING + ); + } + + public static LinearRetrieverComponent fromXContent(XContentParser parser, RetrieverParserContext context) throws IOException { + return PARSER.apply(parser, context); + } +} diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/MinMaxScoreNormalizer.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/MinMaxScoreNormalizer.java new file mode 100644 index 0000000000000..56b42b48a5d47 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/MinMaxScoreNormalizer.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.rank.linear; + +import org.apache.lucene.search.ScoreDoc; + +public class MinMaxScoreNormalizer extends ScoreNormalizer { + + public static final MinMaxScoreNormalizer INSTANCE = new MinMaxScoreNormalizer(); + + public static final String NAME = "minmax"; + + private static final float EPSILON = 1e-6f; + + public MinMaxScoreNormalizer() {} + + @Override + public String getName() { + return NAME; + } + + @Override + public ScoreDoc[] normalizeScores(ScoreDoc[] docs) { + if (docs.length == 0) { + return docs; + } + // create a new array to avoid changing ScoreDocs in place + ScoreDoc[] scoreDocs = new ScoreDoc[docs.length]; + float min = Float.MAX_VALUE; + float max = Float.MIN_VALUE; + boolean atLeastOneValidScore = false; + for (ScoreDoc rd : docs) { + if (false == atLeastOneValidScore && false == Float.isNaN(rd.score)) { + atLeastOneValidScore = true; + } + if (rd.score > max) { + max = rd.score; + } + if (rd.score < min) { + min = rd.score; + } + } + if (false == atLeastOneValidScore) { + // we do not have any scores to normalize, so we just return the original array + return docs; + } + + boolean minEqualsMax = Math.abs(min - max) < EPSILON; + for (int i = 0; i < docs.length; i++) { + float score; + if (minEqualsMax) { + score = min; + } else { + score = (docs[i].score - min) / (max - min); + } + scoreDocs[i] = new ScoreDoc(docs[i].doc, score, docs[i].shardIndex); + } + return scoreDocs; + } +} diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/ScoreNormalizer.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/ScoreNormalizer.java new file mode 100644 index 0000000000000..48334b9adf957 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/linear/ScoreNormalizer.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.rank.linear; + +import org.apache.lucene.search.ScoreDoc; + +/** + * Base class for the score normalizers that can be applied to each sub-retriever's scores before the weighted linear combination. 
+ */ +public abstract class ScoreNormalizer { + + public static ScoreNormalizer valueOf(String normalizer) { + if (MinMaxScoreNormalizer.NAME.equalsIgnoreCase(normalizer)) { + return MinMaxScoreNormalizer.INSTANCE; + } else if (IdentityScoreNormalizer.NAME.equalsIgnoreCase(normalizer)) { + return IdentityScoreNormalizer.INSTANCE; + + } else { + throw new IllegalArgumentException("Unknown normalizer [" + normalizer + "]"); + } + } + + public abstract String getName(); + + public abstract ScoreDoc[] normalizeScores(ScoreDoc[] docs); +} diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java index 9404d863f1d28..251015b21ff50 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java @@ -17,6 +17,8 @@ import org.elasticsearch.search.rank.RankShardResult; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xpack.rank.linear.LinearRankDoc; +import org.elasticsearch.xpack.rank.linear.LinearRetrieverBuilder; import java.util.List; @@ -28,6 +30,12 @@ public class RRFRankPlugin extends Plugin implements SearchPlugin { License.OperationMode.ENTERPRISE ); + public static final LicensedFeature.Momentary LINEAR_RETRIEVER_FEATURE = LicensedFeature.momentary( + null, + "linear-retriever", + License.OperationMode.ENTERPRISE + ); + public static final String NAME = "rrf"; @Override @@ -35,7 +43,8 @@ public List getNamedWriteables() { return List.of( new NamedWriteableRegistry.Entry(RankBuilder.class, NAME, RRFRankBuilder::new), new NamedWriteableRegistry.Entry(RankShardResult.class, NAME, RRFRankShardResult::new), - new NamedWriteableRegistry.Entry(RankDoc.class, RRFRankDoc.NAME, RRFRankDoc::new) + new NamedWriteableRegistry.Entry(RankDoc.class, RRFRankDoc.NAME, RRFRankDoc::new), + new NamedWriteableRegistry.Entry(RankDoc.class, LinearRankDoc.NAME, LinearRankDoc::new) ); } @@ -46,6 +55,9 @@ public List getNamedXContent() { @Override public List> getRetrievers() { - return List.of(new RetrieverSpec<>(new ParseField(NAME), RRFRetrieverBuilder::fromXContent)); + return List.of( + new RetrieverSpec<>(new ParseField(NAME), RRFRetrieverBuilder::fromXContent), + new RetrieverSpec<>(new ParseField(LinearRetrieverBuilder.NAME), LinearRetrieverBuilder::fromXContent) + ); } } diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java index 93445a9ce5ac9..a32f7ba1f923d 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java @@ -101,6 +101,7 @@ public String getName() { protected RRFRetrieverBuilder clone(List newRetrievers, List newPreFilterQueryBuilders) { RRFRetrieverBuilder clone = new RRFRetrieverBuilder(newRetrievers, this.rankWindowSize, this.rankConstant); clone.preFilterQueryBuilders = newPreFilterQueryBuilders; + clone.retrieverName = retrieverName; return clone; } diff --git a/x-pack/plugin/rank-rrf/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification 
b/x-pack/plugin/rank-rrf/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification index 605e999b66c66..528b7e35bee65 100644 --- a/x-pack/plugin/rank-rrf/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ b/x-pack/plugin/rank-rrf/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -5,4 +5,4 @@ # 2.0. # -org.elasticsearch.xpack.rank.rrf.RRFFeatures +org.elasticsearch.xpack.rank.RankRRFFeatures diff --git a/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRankDocTests.java b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRankDocTests.java new file mode 100644 index 0000000000000..051aa6bddb4d7 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRankDocTests.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.rank.linear; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.search.rank.AbstractRankDocWireSerializingTestCase; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.rank.rrf.RRFRankPlugin; + +import java.io.IOException; +import java.util.List; + +public class LinearRankDocTests extends AbstractRankDocWireSerializingTestCase { + + protected LinearRankDoc createTestRankDoc() { + int queries = randomIntBetween(2, 20); + float[] weights = new float[queries]; + String[] normalizers = new String[queries]; + float[] normalizedScores = new float[queries]; + for (int i = 0; i < queries; i++) { + weights[i] = randomFloat(); + normalizers[i] = randomAlphaOfLengthBetween(1, 10); + normalizedScores[i] = randomFloat(); + } + LinearRankDoc rankDoc = new LinearRankDoc(randomNonNegativeInt(), randomFloat(), randomIntBetween(0, 1), weights, normalizers); + rankDoc.rank = randomNonNegativeInt(); + rankDoc.normalizedScores = normalizedScores; + return rankDoc; + } + + @Override + protected List getAdditionalNamedWriteables() { + try (RRFRankPlugin rrfRankPlugin = new RRFRankPlugin()) { + return rrfRankPlugin.getNamedWriteables(); + } catch (IOException ex) { + throw new AssertionError("Failed to create RRFRankPlugin", ex); + } + } + + @Override + protected Writeable.Reader instanceReader() { + return LinearRankDoc::new; + } + + @Override + protected LinearRankDoc mutateInstance(LinearRankDoc instance) throws IOException { + LinearRankDoc mutated = new LinearRankDoc( + instance.doc, + instance.score, + instance.shardIndex, + instance.weights, + instance.normalizers + ); + mutated.normalizedScores = instance.normalizedScores; + mutated.rank = instance.rank; + if (frequently()) { + mutated.doc = randomValueOtherThan(instance.doc, ESTestCase::randomNonNegativeInt); + } + if (frequently()) { + mutated.score = randomValueOtherThan(instance.score, ESTestCase::randomFloat); + } + if (frequently()) { + mutated.shardIndex = randomValueOtherThan(instance.shardIndex, ESTestCase::randomNonNegativeInt); + } + if (frequently()) { + mutated.rank = randomValueOtherThan(instance.rank, ESTestCase::randomNonNegativeInt); + } + if (frequently()) { + for (int i = 0; i < mutated.normalizedScores.length; i++) { + if (frequently()) { + 
mutated.normalizedScores[i] = randomFloat(); + } + } + } + if (frequently()) { + for (int i = 0; i < mutated.weights.length; i++) { + if (frequently()) { + mutated.weights[i] = randomFloat(); + } + } + } + if (frequently()) { + for (int i = 0; i < mutated.normalizers.length; i++) { + if (frequently()) { + mutated.normalizers[i] = randomValueOtherThan(instance.normalizers[i], () -> randomAlphaOfLengthBetween(1, 10)); + } + } + } + return mutated; + } +} diff --git a/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilderParsingTests.java b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilderParsingTests.java new file mode 100644 index 0000000000000..5cc66c6f50d3c --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/linear/LinearRetrieverBuilderParsingTests.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.rank.linear; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; +import org.elasticsearch.search.retriever.RetrieverBuilder; +import org.elasticsearch.search.retriever.RetrieverParserContext; +import org.elasticsearch.search.retriever.TestRetrieverBuilder; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.usage.SearchUsage; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParser; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static java.util.Collections.emptyList; + +public class LinearRetrieverBuilderParsingTests extends AbstractXContentTestCase { + private static List xContentRegistryEntries; + + @BeforeClass + public static void init() { + xContentRegistryEntries = new SearchModule(Settings.EMPTY, emptyList()).getNamedXContents(); + } + + @AfterClass + public static void afterClass() throws Exception { + xContentRegistryEntries = null; + } + + @Override + protected LinearRetrieverBuilder createTestInstance() { + int rankWindowSize = randomInt(100); + int num = randomIntBetween(1, 3); + List innerRetrievers = new ArrayList<>(); + float[] weights = new float[num]; + ScoreNormalizer[] normalizers = new ScoreNormalizer[num]; + for (int i = 0; i < num; i++) { + innerRetrievers.add( + new CompoundRetrieverBuilder.RetrieverSource(TestRetrieverBuilder.createRandomTestRetrieverBuilder(), null) + ); + weights[i] = randomFloat(); + normalizers[i] = randomScoreNormalizer(); + } + return new LinearRetrieverBuilder(innerRetrievers, rankWindowSize, weights, normalizers); + } + + @Override + protected LinearRetrieverBuilder doParseInstance(XContentParser parser) throws IOException { + return (LinearRetrieverBuilder) RetrieverBuilder.parseTopLevelRetrieverBuilder( + parser, + new RetrieverParserContext(new SearchUsage(), n -> true) + ); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + List entries = new ArrayList<>(xContentRegistryEntries); + entries.add( + new 
NamedXContentRegistry.Entry( + RetrieverBuilder.class, + TestRetrieverBuilder.TEST_SPEC.getName(), + (p, c) -> TestRetrieverBuilder.TEST_SPEC.getParser().fromXContent(p, (RetrieverParserContext) c), + TestRetrieverBuilder.TEST_SPEC.getName().getForRestApiVersion() + ) + ); + entries.add( + new NamedXContentRegistry.Entry( + RetrieverBuilder.class, + new ParseField(LinearRetrieverBuilder.NAME), + (p, c) -> LinearRetrieverBuilder.PARSER.apply(p, (RetrieverParserContext) c) + ) + ); + return new NamedXContentRegistry(entries); + } + + private static ScoreNormalizer randomScoreNormalizer() { + if (randomBoolean()) { + return MinMaxScoreNormalizer.INSTANCE; + } else { + return IdentityScoreNormalizer.INSTANCE; + } + } +} diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/java/org/elasticsearch/xpack/rank/rrf/LinearRankClientYamlTestSuiteIT.java b/x-pack/plugin/rank-rrf/src/yamlRestTest/java/org/elasticsearch/xpack/rank/rrf/LinearRankClientYamlTestSuiteIT.java new file mode 100644 index 0000000000000..8af4ae307a51a --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/java/org/elasticsearch/xpack/rank/rrf/LinearRankClientYamlTestSuiteIT.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.rank.rrf; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; + +/** Runs yaml rest tests. 
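+ * Exercises the linear retriever YAML suite under rest-api-spec/test/linear, selected via the "linear" parameter below.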
*/ +public class LinearRankClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .nodes(2) + .module("mapper-extras") + .module("rank-rrf") + .module("lang-painless") + .module("x-pack-inference") + .setting("xpack.license.self_generated.type", "trial") + .plugin("inference-service-test") + .build(); + + public LinearRankClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return ESClientYamlSuiteTestCase.createParameters(new String[] { "linear" }); + } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml index cd227eec4e227..42d0fa1998246 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml @@ -111,3 +111,43 @@ setup: - match: { status: 403 } - match: { error.type: security_exception } - match: { error.reason: "current license is non-compliant for [Reciprocal Rank Fusion (RRF)]" } + + +--- +"linear retriever invalid license": + - requires: + cluster_features: [ "linear_retriever_supported" ] + reason: "Support for linear retriever" + + - do: + catch: forbidden + search: + index: test + body: + track_total_hits: false + fields: [ "text" ] + retriever: + linear: + retrievers: [ + { + knn: { + field: vector, + query_vector: [ 0.0 ], + k: 3, + num_candidates: 3 + } + }, + { + standard: { + query: { + term: { + text: term + } + } + } + } + ] + + - match: { status: 403 } + - match: { error.type: security_exception } + - match: { error.reason: "current license is non-compliant for [linear retriever]" } diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/linear/10_linear_retriever.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/linear/10_linear_retriever.yml new file mode 100644 index 0000000000000..70db6c1543365 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/linear/10_linear_retriever.yml @@ -0,0 +1,1065 @@ +setup: + - requires: + cluster_features: [ "linear_retriever_supported" ] + reason: "Support for linear retriever" + test_runner_features: close_to + + - do: + indices.create: + index: test + body: + mappings: + properties: + vector: + type: dense_vector + dims: 1 + index: true + similarity: l2_norm + index_options: + type: flat + keyword: + type: keyword + other_keyword: + type: keyword + timestamp: + type: date + + - do: + bulk: + refresh: true + index: test + body: + - '{"index": {"_id": 1 }}' + - '{"vector": [1], "keyword": "one", "other_keyword": "other", "timestamp": "2021-01-01T00:00:00"}' + - '{"index": {"_id": 2 }}' + - '{"vector": [2], "keyword": "two", "timestamp": "2022-01-01T00:00:00"}' + - '{"index": {"_id": 3 }}' + - '{"vector": [3], "keyword": "three", "timestamp": "2023-01-01T00:00:00"}' + - '{"index": {"_id": 4 }}' + - '{"vector": [4], "keyword": "four", "other_keyword": "other", "timestamp": "2024-01-01T00:00:00"}' + +--- +"basic linear weighted combination of a standard and knn retrievers": + - do: + search: + index: test + body: + retriever: + linear: + 
retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + } + } + }, + weight: 0.5 + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + + - match: { hits.total.value: 2 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 5.0 } + - match: { hits.hits.1._id: "4" } + - match: { hits.hits.1._score: 2.0 } + +--- +"basic linear weighted combination - interleaved results": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + # this one will return docs 1 and doc 2 with scores 20 and 10 respectively + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "two" + } + } + }, + boost: 5.0 + } + } + ] + } + } + } + }, + weight: 2 + }, + { + # this one will return docs 3 and doc 4 with scores 15 and 12 respectively + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "three" + } + } + }, + boost: 5.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "four" + } + } + }, + boost: 4.0 + } + } + ] + } + } + } + }, + weight: 3 + } + ] + + - match: { hits.total.value: 4 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 20.0 } + - match: { hits.hits.1._id: "3" } + - match: { hits.hits.1._score: 15.0 } + - match: { hits.hits.2._id: "4" } + - match: { hits.hits.2._score: 12.0 } + - match: { hits.hits.3._id: "2" } + - match: { hits.hits.3._score: 10.0 } + +--- +"should normalize initial scores": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "two" + } + } + }, + boost: 9.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "three" + } + } + }, + boost: 5.0 + } + } + ] + } + } + } + }, + weight: 10.0, + normalizer: "minmax" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + + - match: { hits.total.value: 4 } + - match: { hits.hits.0._id: "1" } + - match: {hits.hits.0._score: 10.0} + - match: { hits.hits.1._id: "2" } + - match: {hits.hits.1._score: 8.0} + - match: { hits.hits.2._id: "4" } + - match: {hits.hits.2._score: 2.0} + - match: { hits.hits.2._score: 2.0 } + - match: { hits.hits.3._id: "3" } + - close_to: { hits.hits.3._score: { value: 0.0, error: 0.001 } } + +--- +"should throw on unknown normalizer": + - do: + catch: /Unknown normalizer \[aardvark\]/ + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + } + } + }, + weight: 1.0, + normalizer: "aardvark" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + +--- +"should throw on negative weights": + - do: + catch: /\[weight\] must be non-negative/ + search: + index: test + body: + retriever: + 
linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + } + } + }, + weight: 1.0 + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: -10 + } + ] + +--- +"pagination within a consistent rank_window_size": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "two" + } + } + }, + boost: 9.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "three" + } + } + }, + boost: 5.0 + } + } + ] + } + } + } + }, + weight: 10.0, + normalizer: "minmax" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + from: 2 + size: 1 + + - match: { hits.total.value: 4 } + - length: { hits.hits: 1 } + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._score: 2.0 } + + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "two" + } + } + }, + boost: 9.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "three" + } + } + }, + boost: 5.0 + } + } + ] + } + } + } + }, + weight: 10.0, + normalizer: "minmax" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + from: 3 + size: 1 + + - match: { hits.total.value: 4 } + - match: { hits.hits.0._id: "3" } + - close_to: { hits.hits.0._score: { value: 0.0, error: 0.001 } } + +--- +"should throw when rank_window_size less than size": + - do: + catch: "/\\[linear\\] requires \\[rank_window_size: 2\\] be greater than or equal to \\[size: 10\\]/" + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + match_all: { } + } + } + }, + weight: 10.0, + normalizer: "minmax" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + rank_window_size: 2 + size: 10 +--- +"should respect rank_window_size for normalization and returned hits": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "two" + } + } + }, + boost: 9.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "three" + } + } + }, + boost: 5.0 + } + } + ] + } + } + } + }, + weight: 1.0, + normalizer: "minmax" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + rank_window_size: 2 + size: 2 + + - match: { hits.total.value: 4 } + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._score: 2.0 } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.1._score: 1.0 } + +--- +"explain 
should provide info on weights and inner retrievers": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + bool: { + should: [ + { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + keyword: { + value: "four" + } + } + }, + boost: 1.0 + } + } + ] + } + }, + _name: "my_standard_retriever" + } + }, + weight: 10.0, + normalizer: "minmax" + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 20.0 + } + ] + explain: true + size: 2 + + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._explanation.description: "/weighted.linear.combination.score:.\\[20.0].computed.for.normalized.scores.\\[.*,.1.0\\].and.weights.\\[10.0,.20.0\\].as.sum.of.\\(weight\\[i\\].*.score\\[i\\]\\).for.each.query./"} + - match: { hits.hits.0._explanation.details.0.value: 0.0 } + - match: { hits.hits.0._explanation.details.0.description: "/.*weighted.score.*result.not.found.in.query.at.index.\\[0\\].\\[my_standard_retriever\\]/" } + - match: { hits.hits.0._explanation.details.1.value: 20.0 } + - match: { hits.hits.0._explanation.details.1.description: "/.*weighted.score.*using.score.normalizer.\\[none\\].*/" } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.1._explanation.description: "/weighted.linear.combination.score:.\\[10.0].computed.for.normalized.scores.\\[1.0,.0.0\\].and.weights.\\[10.0,.20.0\\].as.sum.of.\\(weight\\[i\\].*.score\\[i\\]\\).for.each.query./"} + - match: { hits.hits.1._explanation.details.0.value: 10.0 } + - match: { hits.hits.1._explanation.details.0.description: "/.*weighted.score.*\\[my_standard_retriever\\].*using.score.normalizer.\\[minmax\\].*/" } + - match: { hits.hits.1._explanation.details.1.value: 0.0 } + - match: { hits.hits.1._explanation.details.1.description: "/.*weighted.score.*result.not.found.in.query.at.index.\\[1\\]/" } + +--- +"collapsing results": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + } + } + }, + weight: 0.5 + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + collapse: + field: other_keyword + inner_hits: { + name: sub_hits, + sort: + { + keyword: { + order: desc + } + } + } + - match: { hits.hits.0._id: "1" } + - length: { hits.hits.0.inner_hits.sub_hits.hits.hits : 2 } + - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "1" } + - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.1._id: "4" } + +--- +"multiple nested linear retrievers": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + } + } + }, + weight: 0.5 + }, + { + retriever: { + linear: { + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "two" + } + } + }, + boost: 20.0 + } + } + } + } + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + } + } + ] + } + }, + weight: 2.0 + } + ] + + - match: { hits.total.value: 3 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0._score: 
40.0 } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.1._score: 5.0 } + - match: { hits.hits.2._id: "4" } + - match: { hits.hits.2._score: 2.0 } + +--- +"linear retriever with filters": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + } + } + }, + weight: 0.5 + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + filter: + term: + keyword: "four" + + + - match: { hits.total.value: 1 } + - length: {hits.hits: 1} + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._score: 2.0 } + +--- +"linear retriever with filters on nested retrievers": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + term: { + keyword: { + value: "one" + } + } + }, + boost: 10.0 + } + }, + filter: { + term: { + keyword: "four" + } + } + } + }, + weight: 0.5 + }, + { + retriever: { + knn: { + field: "vector", + query_vector: [ 4 ], + k: 1, + num_candidates: 1 + } + }, + weight: 2.0 + } + ] + + - match: { hits.total.value: 1 } + - length: {hits.hits: 1} + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._score: 2.0 } + + +--- +"linear retriever with custom sort and score for nested retrievers": + - do: + search: + index: test + body: + retriever: + linear: + retrievers: [ + { + retriever: { + standard: { + query: { + constant_score: { + filter: { + bool: { + should: [ + { + term: { + keyword: { + value: "one" # this will give doc 1 a normalized score of 10 because min == max + } + } + }, + { + term: { + keyword: { + value: "two" # this will give doc 2 a normalized score of 10 because min == max + } + } + } ] + } + }, + boost: 10.0 + } + }, + sort: { + timestamp: { + order: "asc" + } + } + } + }, + weight: 1.0, + normalizer: "minmax" + }, + { + # because we're sorting on timestamp and use a rank window size of 3, we will only get to see + # docs 3 and 2. + # their `scores` (which are the timestamps) are: + # doc 3: 1672531200000 (2023-01-01T00:00:00) + # doc 2: 1640995200000 (2022-01-01T00:00:00) + # doc 1: 1609459200000 (2021-01-01T00:00:00) + # and their normalized scores based on the provided conf + # will be: + # normalized(doc3) = 1. 
+                  # normalized(doc2) = 0.5
+                  # normalized(doc1) = 0
+                  retriever: {
+                    standard: {
+                      query: {
+                        function_score: {
+                          query: {
+                            bool: {
+                              should: [
+                                {
+                                  constant_score: {
+                                    filter: {
+                                      term: {
+                                        keyword: {
+                                          value: "one"
+                                        }
+                                      }
+                                    },
+                                    boost: 10.0
+                                  }
+                                },
+                                {
+                                  constant_score: {
+                                    filter: {
+                                      term: {
+                                        keyword: {
+                                          value: "two"
+                                        }
+                                      }
+                                    },
+                                    boost: 9.0
+                                  }
+                                },
+                                {
+                                  constant_score: {
+                                    filter: {
+                                      term: {
+                                        keyword: {
+                                          value: "three"
+                                        }
+                                      }
+                                    },
+                                    boost: 1.0
+                                  }
+                                }
+                              ]
+                            }
+                          },
+                          functions: [ {
+                            script_score: {
+                              script: {
+                                source: "doc['timestamp'].value.millis"
+                              }
+                            }
+                          } ],
+                          "boost_mode": "replace"
+                        }
+                      },
+                      sort: {
+                        timestamp: {
+                          order: "desc"
+                        }
+                      }
+                    }
+                  },
+                  weight: 1.0,
+                  normalizer: "minmax"
+                }
+              ]
+              rank_window_size: 3
+              size: 2
+
+  - match: { hits.total.value: 3 }
+  - length: {hits.hits: 2}
+  - match: { hits.hits.0._id: "2" }
+  - close_to: { hits.hits.0._score: { value: 10.5, error: 0.001 } }
+  - match: { hits.hits.1._id: "1" }
+  - match: { hits.hits.1._score: 10 }

From dc92c837651884173fe74d9372436f23cf7b5aeb Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Tue, 28 Jan 2025 09:35:31 -0800
Subject: [PATCH 147/383] Change Elasticsearch timeouts to 429 response instead of 5xx (#116026)

When a user passes a timeout to an Elasticsearch request, the user is
telling the system to limit how long the request should take. If that
timeout is breached, it is because the user selected a timeout less than
the time the request needed. Therefore, it is up to the user to select a
long enough time. If they don't, it's on the user to adjust their request.
Given the above, a breached timeout is an issue with the user request, and
the http response code should reflect that. This commit changes timeout
exceptions to give a 429 response code instead of the current 5xx.
---
 docs/changelog/116026.yaml | 13 +++++++++++++
 .../ElasticsearchTimeoutException.java | 7 +++++++
 .../ProcessClusterEventTimeoutException.java | 2 +-
 .../search/query/SearchTimeoutException.java | 2 +-
 4 files changed, 22 insertions(+), 2 deletions(-)
 create mode 100644 docs/changelog/116026.yaml

diff --git a/docs/changelog/116026.yaml b/docs/changelog/116026.yaml
new file mode 100644
index 0000000000000..9beb6a1891bb8
--- /dev/null
+++ b/docs/changelog/116026.yaml
@@ -0,0 +1,13 @@
+pr: 116026
+summary: Change Elasticsearch timeouts to 429 response instead of 5xx
+area: Infra/Core
+type: breaking
+issues: []
+breaking:
+  title: Change most Elasticsearch timeouts to 429 response instead of 5xx
+  area: REST API
+  details: When a timeout occurs in most REST requests, whether via a per-request timeout, or a system default, the
+    request would return a 5xx response code. The response code from those APIs when a timeout occurs is now 429.
+  impact: Adjust any code relying on retrying on 5xx responses for timeouts to look for a 429 response code and
+    inspect the response to determine whether a timeout occurred.
+ notable: false diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchTimeoutException.java b/server/src/main/java/org/elasticsearch/ElasticsearchTimeoutException.java index eddce19c77888..06ae43144476e 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchTimeoutException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchTimeoutException.java @@ -10,6 +10,7 @@ package org.elasticsearch; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -34,4 +35,10 @@ public ElasticsearchTimeoutException(String message, Object... args) { public ElasticsearchTimeoutException(String message, Throwable cause, Object... args) { super(message, cause, args); } + + @Override + public RestStatus status() { + // closest thing to "your request took longer than you asked for" + return RestStatus.TOO_MANY_REQUESTS; + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ProcessClusterEventTimeoutException.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ProcessClusterEventTimeoutException.java index 1182faea81ed6..2a273f7f81e0f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ProcessClusterEventTimeoutException.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ProcessClusterEventTimeoutException.java @@ -28,6 +28,6 @@ public ProcessClusterEventTimeoutException(StreamInput in) throws IOException { @Override public RestStatus status() { - return RestStatus.SERVICE_UNAVAILABLE; + return RestStatus.TOO_MANY_REQUESTS; } } diff --git a/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java b/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java index e006f176ff91a..e5caa00537c67 100644 --- a/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java +++ b/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java @@ -31,7 +31,7 @@ public SearchTimeoutException(StreamInput in) throws IOException { @Override public RestStatus status() { - return RestStatus.GATEWAY_TIMEOUT; + return RestStatus.TOO_MANY_REQUESTS; } /** From a4d8a87f8a607e6a9333dc157f8dd5ca01853017 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Tue, 28 Jan 2025 12:43:57 -0500 Subject: [PATCH 148/383] Removing unified feature flag (#121043) --- .../test/cluster/FeatureFlag.java | 3 +-- .../inference/BaseMockEISAuthServerTest.java | 2 -- .../inference/InferenceBaseRestTest.java | 3 +-- .../xpack/inference/InferencePlugin.java | 24 +++++-------------- .../inference/UnifiedCompletionFeature.java | 20 ---------------- 5 files changed, 8 insertions(+), 44 deletions(-) delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnifiedCompletionFeature.java diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index 5630c33ad559c..11787866af0d7 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -18,8 +18,7 @@ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), - 
SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null), - INFERENCE_UNIFIED_API_ENABLED("es.inference_unified_feature_flag_enabled=true", Version.fromString("8.18.0"), null); + SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null); public final String systemProperty; public final Version from; diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java index 230b7ff576296..b78d5f1749396 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ESRestTestCase; import org.junit.ClassRule; @@ -45,7 +44,6 @@ public class BaseMockEISAuthServerTest extends ESRestTestCase { // This plugin is located in the inference/qa/test-service-plugin package, look for TestInferenceServicePlugin .plugin("inference-service-test") .user("x_pack_rest_user", "x-pack-test-password") - .feature(FeatureFlag.INFERENCE_UNIFIED_API_ENABLED) .build(); // The reason we're doing this is to make sure the mock server is initialized first so we can get the address before communicating diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 5174b5bbb8cb4..546eab471a077 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -19,7 +19,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; @@ -50,8 +49,8 @@ public class InferenceBaseRestTest extends ESRestTestCase { .setting("xpack.security.enabled", "true") .plugin("inference-service-test") .user("x_pack_rest_user", "x-pack-test-password") - .feature(FeatureFlag.INFERENCE_UNIFIED_API_ENABLED) .build(); + @ClassRule public static MlModelServer mlModelServer = new MlModelServer(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index b007aa8bfa1f8..a9f920770c3dc 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -131,7 +131,6 @@ import java.util.Map; import java.util.function.Predicate; import java.util.function.Supplier; -import java.util.stream.Stream; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; @@ -193,24 +192,17 @@ public InferencePlugin(Settings settings) { @Override public List> getActions() { - var availableActions = List.of( + return List.of( new ActionHandler<>(InferenceAction.INSTANCE, TransportInferenceAction.class), - new ActionHandler<>(GetInferenceModelAction.INSTANCE, TransportGetInferenceModelAction.class), new ActionHandler<>(PutInferenceModelAction.INSTANCE, TransportPutInferenceModelAction.class), new ActionHandler<>(UpdateInferenceModelAction.INSTANCE, TransportUpdateInferenceModelAction.class), new ActionHandler<>(DeleteInferenceEndpointAction.INSTANCE, TransportDeleteInferenceEndpointAction.class), new ActionHandler<>(XPackUsageFeatureAction.INFERENCE, TransportInferenceUsageAction.class), new ActionHandler<>(GetInferenceDiagnosticsAction.INSTANCE, TransportGetInferenceDiagnosticsAction.class), - new ActionHandler<>(GetInferenceServicesAction.INSTANCE, TransportGetInferenceServicesAction.class) + new ActionHandler<>(GetInferenceServicesAction.INSTANCE, TransportGetInferenceServicesAction.class), + new ActionHandler<>(UnifiedCompletionAction.INSTANCE, TransportUnifiedCompletionInferenceAction.class) ); - - List> conditionalActions = - UnifiedCompletionFeature.UNIFIED_COMPLETION_FEATURE_FLAG.isEnabled() - ? List.of(new ActionHandler<>(UnifiedCompletionAction.INSTANCE, TransportUnifiedCompletionInferenceAction.class)) - : List.of(); - - return Stream.concat(availableActions.stream(), conditionalActions.stream()).toList(); } @Override @@ -225,7 +217,7 @@ public List getRestHandlers( Supplier nodesInCluster, Predicate clusterSupportsFeature ) { - var availableRestActions = List.of( + return List.of( new RestInferenceAction(), new RestStreamInferenceAction(threadPoolSetOnce), new RestGetInferenceModelAction(), @@ -233,13 +225,9 @@ public List getRestHandlers( new RestUpdateInferenceModelAction(), new RestDeleteInferenceEndpointAction(), new RestGetInferenceDiagnosticsAction(), - new RestGetInferenceServicesAction() + new RestGetInferenceServicesAction(), + new RestUnifiedCompletionInferenceAction(threadPoolSetOnce) ); - List conditionalRestActions = UnifiedCompletionFeature.UNIFIED_COMPLETION_FEATURE_FLAG.isEnabled() - ? List.of(new RestUnifiedCompletionInferenceAction(threadPoolSetOnce)) - : List.of(); - - return Stream.concat(availableRestActions.stream(), conditionalRestActions.stream()).toList(); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnifiedCompletionFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnifiedCompletionFeature.java deleted file mode 100644 index 3e13d0c1e39de..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnifiedCompletionFeature.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.common.util.FeatureFlag; - -/** - * Unified Completion feature flag. When the feature is complete, this flag will be removed. - * Enable feature via JVM option: `-Des.inference_unified_feature_flag_enabled=true`. - */ -public class UnifiedCompletionFeature { - public static final FeatureFlag UNIFIED_COMPLETION_FEATURE_FLAG = new FeatureFlag("inference_unified"); - - private UnifiedCompletionFeature() {} -} From e7e1155831f655109661005fef0c59da92a461dd Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 28 Jan 2025 10:11:52 -0800 Subject: [PATCH 149/383] Use links when possible when installing test cluster modules (#121067) When we install modules into test clusters we do a full copy instead of links. This both eats up more IO and disk space unnecessarily. --- .../local/AbstractLocalClusterFactory.java | 13 ++------- .../test/cluster/util/IOUtils.java | 28 +++++++++++++++++++ 2 files changed, 31 insertions(+), 10 deletions(-) diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java index c7007ac60fe57..cfdca56542eb2 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java @@ -350,15 +350,7 @@ private void initializeWorkingDirectory(boolean preserveWorkingDirectory) { IOUtils.deleteWithRetry(distributionDir); } - try { - IOUtils.syncWithLinks(distributionDescriptor.getDistributionDir(), distributionDir); - } catch (IOUtils.LinkCreationException e) { - // Note does not work for network drives, e.g. Vagrant - LOGGER.info("Failed to create working dir using hard links. 
Falling back to copy", e); - // ensure we get a clean copy - IOUtils.deleteWithRetry(distributionDir); - IOUtils.syncWithCopy(distributionDescriptor.getDistributionDir(), distributionDir); - } + IOUtils.syncMaybeWithLinks(distributionDescriptor.getDistributionDir(), distributionDir); } Files.createDirectories(repoDir); Files.createDirectories(dataDir); @@ -773,7 +765,8 @@ private void installModule(String moduleName, DefaultPluginInstallSpec installSp }); - IOUtils.syncWithCopy(modulePath, destination); + IOUtils.syncMaybeWithLinks(modulePath, destination); + try { if (installSpec.entitlementsOverride != null) { Path entitlementsFile = modulePath.resolve(ENTITLEMENT_POLICY_YAML); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/IOUtils.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/IOUtils.java index 43034e502fbfb..b1e2175205594 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/IOUtils.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/IOUtils.java @@ -9,6 +9,9 @@ package org.elasticsearch.test.cluster.util; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + import java.io.File; import java.io.IOException; import java.io.UncheckedIOException; @@ -20,6 +23,7 @@ import java.util.stream.Stream; public final class IOUtils { + private static final Logger LOGGER = LogManager.getLogger(IOUtils.class); private static final int RETRY_DELETE_MILLIS = OS.current() == OS.WINDOWS ? 500 : 0; private static final int MAX_RETRY_DELETE_TIMES = OS.current() == OS.WINDOWS ? 15 : 0; @@ -51,6 +55,30 @@ public static void uncheckedDeleteWithRetry(Path path) { } } + /** + * Attempts to do a copy via linking, falling back to a normal copy if an exception is encountered. + * + * @see #syncWithLinks(Path, Path) + * @see #syncWithCopy(Path, Path) + * @param sourceRoot where to copy from + * @param destinationRoot destination to link to + */ + public static void syncMaybeWithLinks(Path sourceRoot, Path destinationRoot) { + try { + syncWithLinks(sourceRoot, destinationRoot); + } catch (LinkCreationException e) { + // Note does not work for network drives, e.g. Vagrant + LOGGER.info("Failed to sync using hard links. Falling back to copy.", e); + // ensure we get a clean copy + try { + deleteWithRetry(destinationRoot); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } + syncWithCopy(sourceRoot, destinationRoot); + } + } + /** * Does the equivalent of `cp -lr` and `chmod -r a-w` to save space and improve speed. * We remove write permissions to make sure files are note mistakenly edited ( e.x. 
the config file ) and changes From 2c3f663954d3e0b73287fd88c5dd13fe64129525 Mon Sep 17 00:00:00 2001 From: Stanislav Malyshev Date: Tue, 28 Jan 2025 11:30:00 -0700 Subject: [PATCH 150/383] Fix cleanup for skipped test (#121073) --- muted-tests.yml | 6 ------ .../RemoteClusterSecurityEsqlIT.java | 19 +++++++++++++++---- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 381adda7da8ed..fbf289cb2edbb 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -280,12 +280,6 @@ tests: - class: org.elasticsearch.xpack.esql.parser.StatementParserTests method: testNamedFunctionArgumentInMap issue: https://github.com/elastic/elasticsearch/issues/121020 -- class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityEsqlIT - method: testCrossClusterAsyncQuery - issue: https://github.com/elastic/elasticsearch/issues/121021 -- class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityEsqlIT - method: testCrossClusterAsyncQueryStop - issue: https://github.com/elastic/elasticsearch/issues/121021 - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testSuggestProfilesWithName issue: https://github.com/elastic/elasticsearch/issues/121022 diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java index 09dda0f708a86..dcf993ea4ce7a 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java @@ -344,13 +344,24 @@ private static String populateOtherUser() throws IOException { return otherUser; } + private void performRequestWithAdminUserIgnoreNotFound(RestClient targetFulfillingClusterClient, Request request) throws IOException { + try { + performRequestWithAdminUser(targetFulfillingClusterClient, request); + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() != 404) { + throw e; + } + logger.info("Ignored \"not found\" exception", e); + } + } + @After public void wipeData() throws Exception { CheckedConsumer wipe = client -> { - performRequestWithAdminUser(client, new Request("DELETE", "/employees")); - performRequestWithAdminUser(client, new Request("DELETE", "/employees2")); - performRequestWithAdminUser(client, new Request("DELETE", "/employees3")); - performRequestWithAdminUser(client, new Request("DELETE", "/_enrich/policy/countries")); + performRequestWithAdminUserIgnoreNotFound(client, new Request("DELETE", "/employees")); + performRequestWithAdminUserIgnoreNotFound(client, new Request("DELETE", "/employees2")); + performRequestWithAdminUserIgnoreNotFound(client, new Request("DELETE", "/employees3")); + performRequestWithAdminUserIgnoreNotFound(client, new Request("DELETE", "/_enrich/policy/countries")); }; wipe.accept(fulfillingClusterClient); wipe.accept(client()); From 325000d328a0b9a51178edc0d7511e9676e13ab3 Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Tue, 28 Jan 2025 13:41:45 -0500 Subject: [PATCH 151/383] Set default reranker for text similarity reranker (#120551) --- docs/changelog/120551.yaml | 5 +++++ .../TextSimilarityRankRetrieverBuilder.java | 10 ++++++++-- .../TextSimilarityRankRetrieverBuilderTests.java | 3 ++- 3 files 
changed, 15 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/120551.yaml diff --git a/docs/changelog/120551.yaml b/docs/changelog/120551.yaml new file mode 100644 index 0000000000000..171d639be3e89 --- /dev/null +++ b/docs/changelog/120551.yaml @@ -0,0 +1,5 @@ +pr: 120551 +summary: Set default reranker for text similarity reranker to Elastic reranker +area: Ranking +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java index 165c42fdb7d1f..fa6cc3db0ef9f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -29,6 +29,7 @@ import static org.elasticsearch.search.rank.RankBuilder.DEFAULT_RANK_WINDOW_SIZE; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.DEFAULT_RERANK_ID; /** * A {@code RetrieverBuilder} for parsing and constructing a text similarity reranker retriever. @@ -47,10 +48,11 @@ public class TextSimilarityRankRetrieverBuilder extends CompoundRetrieverBuilder public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(TextSimilarityRankBuilder.NAME, args -> { RetrieverBuilder retrieverBuilder = (RetrieverBuilder) args[0]; - String inferenceId = (String) args[1]; + String inferenceId = args[1] == null ? DEFAULT_RERANK_ID : (String) args[1]; String inferenceText = (String) args[2]; String field = (String) args[3]; int rankWindowSize = args[4] == null ? 
DEFAULT_RANK_WINDOW_SIZE : (int) args[4]; + return new TextSimilarityRankRetrieverBuilder(retrieverBuilder, inferenceId, inferenceText, field, rankWindowSize); }); @@ -60,7 +62,7 @@ public class TextSimilarityRankRetrieverBuilder extends CompoundRetrieverBuilder c.trackRetrieverUsage(innerRetriever.getName()); return innerRetriever; }, RETRIEVER_FIELD); - PARSER.declareString(constructorArg(), INFERENCE_ID_FIELD); + PARSER.declareString(optionalConstructorArg(), INFERENCE_ID_FIELD); PARSER.declareString(constructorArg(), INFERENCE_TEXT_FIELD); PARSER.declareString(constructorArg(), FIELD_FIELD); PARSER.declareInt(optionalConstructorArg(), RANK_WINDOW_SIZE_FIELD); @@ -171,6 +173,10 @@ public String getName() { return TextSimilarityRankBuilder.NAME; } + public String inferenceId() { + return inferenceId; + } + public int rankWindowSize() { return rankWindowSize; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java index b6d455dd233ba..93c3ffe5d14fb 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java @@ -31,6 +31,7 @@ import java.util.List; import static org.elasticsearch.search.rank.RankBuilder.DEFAULT_RANK_WINDOW_SIZE; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.DEFAULT_RERANK_ID; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -108,7 +109,6 @@ public void testParserDefaults() throws IOException { } }, "field": "my-field", - "inference_id": "my-inference-id", "inference_text": "my-inference-text" }"""; @@ -118,6 +118,7 @@ public void testParserDefaults() throws IOException { new RetrieverParserContext(new SearchUsage(), nf -> true) ); assertEquals(DEFAULT_RANK_WINDOW_SIZE, parsed.rankWindowSize()); + assertEquals(DEFAULT_RERANK_ID, parsed.inferenceId()); } } From 3df6e34ec0384353b7f0b7effebfc555a214da32 Mon Sep 17 00:00:00 2001 From: Adam Demjen Date: Tue, 28 Jan 2025 13:54:20 -0500 Subject: [PATCH 152/383] [ML] Remove Elastic Inference Service feature flag and deprecated setting (#120842) * Remove EIS feature flag * Remove Inference Service feature flag and deprecated setting * Put back FF temporarily * Fix after rebasing * [CI] Auto commit changes from spotless * Update docs/changelog/120842.yaml --------- Co-authored-by: elasticsearchmachine --- docs/changelog/120842.yaml | 5 + .../inference/BaseMockEISAuthServerTest.java | 2 - ...etModelsWithElasticInferenceServiceIT.java | 17 +-- .../inference/InferenceGetServicesIT.java | 64 +++------- .../xpack/inference/InferencePlugin.java | 109 ++++++------------ .../ElasticInferenceServiceFeature.java | 8 +- .../ElasticInferenceServiceSettings.java | 15 --- .../xpack/inference/InferencePluginTests.java | 65 ----------- 8 files changed, 67 insertions(+), 218 deletions(-) create mode 100644 docs/changelog/120842.yaml delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java diff --git a/docs/changelog/120842.yaml b/docs/changelog/120842.yaml new file mode 100644 index 0000000000000..98227cf399b56 --- /dev/null +++ 
b/docs/changelog/120842.yaml @@ -0,0 +1,5 @@ +pr: 120842 +summary: Remove Elastic Inference Service feature flag and deprecated setting +area: Inference +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java index b78d5f1749396..d0f797e9f8fab 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/BaseMockEISAuthServerTest.java @@ -39,8 +39,6 @@ public class BaseMockEISAuthServerTest extends ESRestTestCase { .setting("xpack.security.enabled", "true") // Adding both settings unless one feature flag is disabled in a particular environment .setting("xpack.inference.elastic.url", mockEISServer::getUrl) - // TODO remove this once we've removed DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG and EIS_GATEWAY_URL - .setting("xpack.inference.eis.gateway.url", mockEISServer::getUrl) // This plugin is located in the inference/qa/test-service-plugin package, look for TestInferenceServicePlugin .plugin("inference-service-test") .user("x_pack_rest_user", "x-pack-test-password") diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java index 76483a5f62fec..3a2a003636b13 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetModelsWithElasticInferenceServiceIT.java @@ -10,7 +10,6 @@ package org.elasticsearch.xpack.inference; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature; import java.io.IOException; @@ -24,17 +23,11 @@ public void testGetDefaultEndpoints() throws IOException { var allModels = getAllModels(); var chatCompletionModels = getModels("_all", TaskType.CHAT_COMPLETION); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - assertThat(allModels, hasSize(4)); - assertThat(chatCompletionModels, hasSize(1)); - - for (var model : chatCompletionModels) { - assertEquals("chat_completion", model.get("task_type")); - } - } else { - assertThat(allModels, hasSize(3)); - assertThat(chatCompletionModels, hasSize(0)); + assertThat(allModels, hasSize(4)); + assertThat(chatCompletionModels, hasSize(1)); + + for (var model : chatCompletionModels) { + assertEquals("chat_completion", model.get("task_type")); } } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java index 
856fdeb6287e9..9d4cec798964a 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceGetServicesIT.java @@ -12,11 +12,8 @@ import org.elasticsearch.client.Request; import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.Map; @@ -28,12 +25,7 @@ public class InferenceGetServicesIT extends BaseMockEISAuthServerTest { @SuppressWarnings("unchecked") public void testGetServicesWithoutTaskType() throws IOException { List services = getAllServices(); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - assertThat(services.size(), equalTo(19)); - } else { - assertThat(services.size(), equalTo(18)); - } + assertThat(services.size(), equalTo(19)); String[] providers = new String[services.size()]; for (int i = 0; i < services.size(); i++) { @@ -41,14 +33,15 @@ public void testGetServicesWithoutTaskType() throws IOException { providers[i] = (String) serviceConfig.get("service"); } - var providerList = new ArrayList<>( - Arrays.asList( + assertArrayEquals( + List.of( "alibabacloud-ai-search", "amazonbedrock", "anthropic", "azureaistudio", "azureopenai", "cohere", + "elastic", "elasticsearch", "googleaistudio", "googlevertexai", @@ -61,13 +54,9 @@ public void testGetServicesWithoutTaskType() throws IOException { "test_service", "text_embedding_test_service", "watsonxai" - ) + ).toArray(), + providers ); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - providerList.add(6, "elastic"); - } - assertArrayEquals(providerList.toArray(), providers); } @SuppressWarnings("unchecked") @@ -130,7 +119,7 @@ public void testGetServicesWithCompletionTaskType() throws IOException { providers[i] = (String) serviceConfig.get("service"); } - var providerList = new ArrayList<>( + assertArrayEquals( List.of( "alibabacloud-ai-search", "amazonbedrock", @@ -141,21 +130,15 @@ public void testGetServicesWithCompletionTaskType() throws IOException { "googleaistudio", "openai", "streaming_completion_test_service" - ) + ).toArray(), + providers ); - - assertArrayEquals(providers, providerList.toArray()); } @SuppressWarnings("unchecked") public void testGetServicesWithChatCompletionTaskType() throws IOException { List services = getServices(TaskType.CHAT_COMPLETION); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - assertThat(services.size(), equalTo(3)); - } else { - assertThat(services.size(), equalTo(2)); - } + assertThat(services.size(), equalTo(3)); String[] providers = new String[services.size()]; for (int i = 0; i < services.size(); i++) { @@ -163,26 +146,13 @@ public void testGetServicesWithChatCompletionTaskType() throws IOException { providers[i] = (String) serviceConfig.get("service"); } - var providerList = new ArrayList<>(List.of("openai", 
"streaming_completion_test_service")); - - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - providerList.addFirst("elastic"); - } - - assertArrayEquals(providers, providerList.toArray()); + assertArrayEquals(List.of("elastic", "openai", "streaming_completion_test_service").toArray(), providers); } @SuppressWarnings("unchecked") public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { List services = getServices(TaskType.SPARSE_EMBEDDING); - - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - assertThat(services.size(), equalTo(5)); - } else { - assertThat(services.size(), equalTo(4)); - } + assertThat(services.size(), equalTo(5)); String[] providers = new String[services.size()]; for (int i = 0; i < services.size(); i++) { @@ -190,12 +160,10 @@ public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { providers[i] = (String) serviceConfig.get("service"); } - var providerList = new ArrayList<>(Arrays.asList("alibabacloud-ai-search", "elasticsearch", "hugging_face", "test_service")); - if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() - || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - providerList.add(1, "elastic"); - } - assertArrayEquals(providers, providerList.toArray()); + assertArrayEquals( + List.of("alibabacloud-ai-search", "elastic", "elasticsearch", "hugging_face", "test_service").toArray(), + providers + ); } private List getAllServices() throws IOException { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index a9f920770c3dc..adea09adb8afc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -133,9 +133,6 @@ import java.util.function.Supplier; import static java.util.Collections.singletonList; -import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; -import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; -import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; public class InferencePlugin extends Plugin implements @@ -252,46 +249,44 @@ public Collection createComponents(PluginServices services) { var inferenceServices = new ArrayList<>(inferenceServiceExtensions); inferenceServices.add(this::getInferenceServiceFactories); - if (isElasticInferenceServiceEnabled()) { - // Create a separate instance of HTTPClientManager with its own SSL configuration (`xpack.inference.elastic.http.ssl.*`). 
- var elasticInferenceServiceHttpClientManager = HttpClientManager.create( - settings, - services.threadPool(), - services.clusterService(), - throttlerManager, - getSslService() - ); - - var elasticInferenceServiceRequestSenderFactory = new HttpRequestSender.Factory( - serviceComponents.get(), - elasticInferenceServiceHttpClientManager, - services.clusterService() - ); - elasicInferenceServiceFactory.set(elasticInferenceServiceRequestSenderFactory); - - ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); - String elasticInferenceUrl = this.getElasticInferenceServiceUrl(inferenceServiceSettings); - - var elasticInferenceServiceComponentsInstance = new ElasticInferenceServiceComponents(elasticInferenceUrl); - elasticInferenceServiceComponents.set(elasticInferenceServiceComponentsInstance); - - var authorizationHandler = new ElasticInferenceServiceAuthorizationHandler( - elasticInferenceServiceComponentsInstance.elasticInferenceServiceUrl(), - services.threadPool() - ); - - inferenceServices.add( - () -> List.of( - context -> new ElasticInferenceService( - elasicInferenceServiceFactory.get(), - serviceComponents.get(), - elasticInferenceServiceComponentsInstance, - modelRegistry, - authorizationHandler - ) + // Create a separate instance of HTTPClientManager with its own SSL configuration (`xpack.inference.elastic.http.ssl.*`). + var elasticInferenceServiceHttpClientManager = HttpClientManager.create( + settings, + services.threadPool(), + services.clusterService(), + throttlerManager, + getSslService() + ); + + var elasticInferenceServiceRequestSenderFactory = new HttpRequestSender.Factory( + serviceComponents.get(), + elasticInferenceServiceHttpClientManager, + services.clusterService() + ); + elasicInferenceServiceFactory.set(elasticInferenceServiceRequestSenderFactory); + + ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); + String elasticInferenceUrl = inferenceServiceSettings.getElasticInferenceServiceUrl(); + + var elasticInferenceServiceComponentsInstance = new ElasticInferenceServiceComponents(elasticInferenceUrl); + elasticInferenceServiceComponents.set(elasticInferenceServiceComponentsInstance); + + var authorizationHandler = new ElasticInferenceServiceAuthorizationHandler( + elasticInferenceServiceComponentsInstance.elasticInferenceServiceUrl(), + services.threadPool() + ); + + inferenceServices.add( + () -> List.of( + context -> new ElasticInferenceService( + elasicInferenceServiceFactory.get(), + serviceComponents.get(), + elasticInferenceServiceComponentsInstance, + modelRegistry, + authorizationHandler ) - ); - } + ) + ); var factoryContext = new InferenceServiceExtension.InferenceServiceFactoryContext( services.client(), @@ -417,11 +412,7 @@ public List> getSettings() { settings.addAll(Truncator.getSettingsDefinitions()); settings.addAll(RequestExecutorServiceSettings.getSettingsDefinitions()); settings.add(SKIP_VALIDATE_AND_START); - - // Register Elastic Inference Service settings definitions if the corresponding feature flag is enabled. 
- if (isElasticInferenceServiceEnabled()) { - settings.addAll(ElasticInferenceServiceSettings.getSettingsDefinitions()); - } + settings.addAll(ElasticInferenceServiceSettings.getSettingsDefinitions()); return settings; } @@ -493,30 +484,6 @@ public Map getHighlighters() { return Map.of(SemanticTextHighlighter.NAME, new SemanticTextHighlighter()); } - // Get Elastic Inference service URL based on feature flags to support transitioning - // to the new Elastic Inference Service URL. - private String getElasticInferenceServiceUrl(ElasticInferenceServiceSettings settings) { - String elasticInferenceUrl = null; - - if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - elasticInferenceUrl = settings.getElasticInferenceServiceUrl(); - } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - log.warn( - "Deprecated flag {} detected for enabling {}. Please use {}.", - ELASTIC_INFERENCE_SERVICE_IDENTIFIER, - DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, - ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG - ); - elasticInferenceUrl = settings.getEisGatewayUrl(); - } - - return elasticInferenceUrl; - } - - protected Boolean isElasticInferenceServiceEnabled() { - return (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() || DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()); - } - protected SSLService getSslService() { return XPackPlugin.getSharedSslService(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java index 623c25222446c..4ec270eef3a62 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java @@ -10,14 +10,12 @@ import org.elasticsearch.common.util.FeatureFlag; /** - * Elastic Inference Service (EIS) feature flag. When the feature is complete, this flag will be removed. - * Enable feature via JVM option: `-Des.elastic_inference_service_feature_flag_enabled=true`. + * Elastic Inference Service feature flag. Not being used anymore, but we'll keep it until the controller is no longer + * passing -Des.elastic_inference_service_feature_flag_enabled=true at startup. 
*/ public class ElasticInferenceServiceFeature { - // TODO when we remove this also look in InferenceGetServicesIT and remove references to the deprecated URL setting @Deprecated - public static final FeatureFlag DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("eis"); - public static final FeatureFlag ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("elastic_inference_service"); + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java index 3b6b1088cc9cf..fd4a70da01fda 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java @@ -20,10 +20,6 @@ */ public class ElasticInferenceServiceSettings { - // TODO when we remove this look at InferenceGetServicesIT and remove the setting there as well - @Deprecated - static final Setting EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); - public static final String ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX = "xpack.inference.elastic.http.ssl."; static final Setting ELASTIC_INFERENCE_SERVICE_URL = Setting.simpleString( @@ -31,14 +27,9 @@ public class ElasticInferenceServiceSettings { Setting.Property.NodeScope ); - // Adjust this variable to be volatile, if the setting can be updated at some point in time - @Deprecated - private final String eisGatewayUrl; - private final String elasticInferenceServiceUrl; public ElasticInferenceServiceSettings(Settings settings) { - eisGatewayUrl = EIS_GATEWAY_URL.get(settings); elasticInferenceServiceUrl = ELASTIC_INFERENCE_SERVICE_URL.get(settings); } @@ -55,7 +46,6 @@ public ElasticInferenceServiceSettings(Settings settings) { public static List> getSettingsDefinitions() { ArrayList> settings = new ArrayList<>(); - settings.add(EIS_GATEWAY_URL); settings.add(ELASTIC_INFERENCE_SERVICE_URL); settings.add(ELASTIC_INFERENCE_SERVICE_SSL_ENABLED); settings.addAll(ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS.getEnabledSettings()); @@ -63,11 +53,6 @@ public static List> getSettingsDefinitions() { return settings; } - @Deprecated - public String getEisGatewayUrl() { - return eisGatewayUrl; - } - public String getElasticInferenceServiceUrl() { return elasticInferenceServiceUrl; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java deleted file mode 100644 index d1db5b8b12cc6..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings; -import org.junit.After; -import org.junit.Before; - -import static org.hamcrest.Matchers.is; - -public class InferencePluginTests extends ESTestCase { - private InferencePlugin inferencePlugin; - - private Boolean elasticInferenceServiceEnabled = true; - - private void setElasticInferenceServiceEnabled(Boolean elasticInferenceServiceEnabled) { - this.elasticInferenceServiceEnabled = elasticInferenceServiceEnabled; - } - - @Before - public void setUp() throws Exception { - super.setUp(); - - Settings settings = Settings.builder().build(); - inferencePlugin = new InferencePlugin(settings) { - @Override - protected Boolean isElasticInferenceServiceEnabled() { - return elasticInferenceServiceEnabled; - } - }; - } - - @After - public void tearDown() throws Exception { - super.tearDown(); - } - - public void testElasticInferenceServiceSettingsPresent() throws Exception { - setElasticInferenceServiceEnabled(true); // enable elastic inference service - boolean anyMatch = inferencePlugin.getSettings() - .stream() - .map(Setting::getKey) - .anyMatch(key -> key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); - - assertThat("xpack.inference.elastic settings are present", anyMatch, is(true)); - } - - public void testElasticInferenceServiceSettingsNotPresent() throws Exception { - setElasticInferenceServiceEnabled(false); // disable elastic inference service - boolean noneMatch = inferencePlugin.getSettings() - .stream() - .map(Setting::getKey) - .noneMatch(key -> key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); - - assertThat("xpack.inference.elastic settings are not present", noneMatch, is(true)); - } -} From 7e6affbce016ba81d3fbf32e06812691736eb584 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Tue, 28 Jan 2025 13:08:37 -0600 Subject: [PATCH 153/383] Automatically opening closed indices when reindexing data streams (#120970) --- .../deprecation/DeprecatedIndexPredicate.java | 5 -- .../core/security/user/InternalUsers.java | 4 ++ .../DataStreamDeprecationCheckerTests.java | 16 ++--- .../IndexDeprecationCheckerTests.java | 19 +----- ...ReindexDataStreamIndexTransportAction.java | 41 ++++++++++++- .../upgrades/DataStreamsUpgradeIT.java | 61 +++++++------------ 6 files changed, 71 insertions(+), 75 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java index 48fb8ebdc577d..cba1df9b79c76 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java @@ -50,7 +50,6 @@ public static Predicate getReindexRequiredPredicate(Metadata metadata, bo public static boolean reindexRequired(IndexMetadata indexMetadata, boolean filterToBlockedStatus) { return creationVersionBeforeMinimumWritableVersion(indexMetadata) && isNotSearchableSnapshot(indexMetadata) - && isNotClosed(indexMetadata) && matchBlockedStatus(indexMetadata, filterToBlockedStatus); } @@ -62,10 +61,6 @@ private static boolean 
creationVersionBeforeMinimumWritableVersion(IndexMetadata return metadata.getCreationVersion().before(MINIMUM_WRITEABLE_VERSION_AFTER_UPGRADE); } - private static boolean isNotClosed(IndexMetadata indexMetadata) { - return indexMetadata.getState().equals(IndexMetadata.State.CLOSE) == false; - } - private static boolean matchBlockedStatus(IndexMetadata indexMetadata, boolean filterToBlockedStatus) { return MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.get(indexMetadata.getSettings()) == filterToBlockedStatus; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java index 52f077b658d02..1229d62dce047 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java @@ -9,9 +9,11 @@ import org.elasticsearch.action.admin.cluster.shards.TransportClusterSearchShardsAction; import org.elasticsearch.action.admin.indices.analyze.TransportReloadAnalyzersAction; +import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; +import org.elasticsearch.action.admin.indices.open.OpenIndexAction; import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; @@ -206,6 +208,8 @@ public class InternalUsers { "indices:admin/data_stream/index/reindex", "indices:admin/index/create_from_source", TransportAddIndexBlockAction.TYPE.name(), + OpenIndexAction.NAME, + TransportCloseIndexAction.NAME, TransportCreateIndexAction.TYPE.name(), TransportClusterSearchShardsAction.TYPE.name(), TransportUpdateSettingsAction.TYPE.name(), diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java index e3c205ff8c740..2c32fb7610a18 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java @@ -75,23 +75,15 @@ public void testOldIndicesCheck() { assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected))); } - public void testOldIndicesCheckWithOnlyClosedOrNewIndices() { + public void testOldIndicesCheckWithOnlyNewIndices() { // This tests what happens when any old indices that we have are closed. We expect no deprecation warning. 
- int oldClosedIndexCount = randomIntBetween(1, 100); int newOpenIndexCount = randomIntBetween(0, 100); int newClosedIndexCount = randomIntBetween(0, 100); Map nameToIndexMetadata = new HashMap<>(); Set expectedIndices = new HashSet<>(); - DataStream dataStream = createTestDataStream( - 0, - oldClosedIndexCount, - newOpenIndexCount, - newClosedIndexCount, - nameToIndexMetadata, - expectedIndices - ); + DataStream dataStream = createTestDataStream(0, 0, newOpenIndexCount, newClosedIndexCount, nameToIndexMetadata, expectedIndices); Metadata metadata = Metadata.builder() .indices(nameToIndexMetadata) @@ -168,7 +160,7 @@ private DataStream createTestDataStream( allIndices.add(createOldIndex(i, false, nameToIndexMetadata, expectedIndices)); } for (int i = 0; i < oldClosedIndexCount; i++) { - allIndices.add(createOldIndex(i, true, nameToIndexMetadata, null)); + allIndices.add(createOldIndex(i, true, nameToIndexMetadata, expectedIndices)); } for (int i = 0; i < newOpenIndexCount; i++) { allIndices.add(createNewIndex(i, false, nameToIndexMetadata)); @@ -218,7 +210,7 @@ private Index createIndex( ) { Settings.Builder settingsBuilder = isOld ? settings(IndexVersion.fromId(7170099)) : settings(IndexVersion.current()); String indexName = (isOld ? "old-" : "new-") + (isClosed ? "closed-" : "") + "data-stream-index-" + suffix; - if (isOld && isClosed == false) { // we only expect warnings on open old indices + if (isOld) { if (expectedIndices.isEmpty() == false && randomIntBetween(0, 2) == 0) { settingsBuilder.put(INDEX_STORE_TYPE_SETTING.getKey(), SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE); } else { diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java index e49a6046c5c64..edbe7562a1560 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java @@ -49,6 +49,7 @@ public void testOldIndicesCheck() { .settings(settings(OLD_VERSION)) .numberOfShards(1) .numberOfReplicas(0) + .state(randomBoolean() ? 
IndexMetadata.State.OPEN : IndexMetadata.State.CLOSE) // does not matter if its open or closed .build(); ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder().put(indexMetadata, true)) @@ -205,24 +206,6 @@ public void testOldIndicesCheckSnapshotIgnored() { assertThat(issuesByIndex.size(), equalTo(0)); } - public void testOldIndicesCheckClosedIgnored() { - Settings.Builder settings = settings(OLD_VERSION); - IndexMetadata indexMetadata = IndexMetadata.builder("test") - .settings(settings) - .numberOfShards(1) - .numberOfReplicas(0) - .state(IndexMetadata.State.CLOSE) - .build(); - ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) - .metadata(Metadata.builder().put(indexMetadata, true)) - .build(); - Map> issuesByIndex = checker.check( - clusterState, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) - ); - assertThat(issuesByIndex.size(), equalTo(0)); - } - public void testOldIndicesIgnoredWarningCheck() { Settings.Builder settings = settings(OLD_VERSION).put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true); IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java index d3fe27006e82e..dfc207fa84d47 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java @@ -11,7 +11,13 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; +import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; +import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.open.OpenIndexAction; +import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; +import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockResponse; import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; @@ -139,18 +145,51 @@ protected void doExecute( listener.onFailure(new ElasticsearchException(errorMessage)); return; } - + final boolean wasClosed = isClosed(sourceIndex); SubscribableListener.newForked(l -> setBlockWrites(sourceIndexName, l, taskId)) + .andThen(l -> openIndexIfClosed(sourceIndexName, wasClosed, l, taskId)) .andThen(l -> refresh(sourceIndexName, l, taskId)) .andThen(l -> deleteDestIfExists(destIndexName, l, taskId)) .andThen(l -> createIndex(sourceIndex, destIndexName, l, taskId)) .andThen(l -> reindex(sourceIndexName, destIndexName, l, taskId)) .andThen(l -> copyOldSourceSettingsToDest(settingsBefore, destIndexName, l, taskId)) .andThen(l -> sanityCheck(sourceIndexName, destIndexName, l, taskId)) + .andThen(l -> closeIndexIfWasClosed(destIndexName, wasClosed, l, taskId)) .andThenApply(ignored -> new 
ReindexDataStreamIndexAction.Response(destIndexName)) .addListener(listener); } + private void openIndexIfClosed(String indexName, boolean isClosed, ActionListener listener, TaskId parentTaskId) { + if (isClosed) { + logger.debug("Opening index [{}]", indexName); + var request = new OpenIndexRequest(indexName); + request.setParentTask(parentTaskId); + client.execute(OpenIndexAction.INSTANCE, request, listener); + } else { + listener.onResponse(null); + } + } + + private void closeIndexIfWasClosed( + String indexName, + boolean wasClosed, + ActionListener listener, + TaskId parentTaskId + ) { + if (wasClosed) { + logger.debug("Closing index [{}]", indexName); + var request = new CloseIndexRequest(indexName); + request.setParentTask(parentTaskId); + client.execute(TransportCloseIndexAction.TYPE, request, listener); + } else { + listener.onResponse(null); + } + } + + private static boolean isClosed(IndexMetadata indexMetadata) { + return indexMetadata.getState().equals(IndexMetadata.State.CLOSE); + } + private void setBlockWrites(String sourceIndexName, ActionListener listener, TaskId parentTaskId) { logger.debug("Setting write block on source index [{}]", sourceIndexName); addBlockToIndex(WRITE, sourceIndexName, new ActionListener<>() { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java index 746c8c926086e..2d229d7ffece5 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java @@ -30,7 +30,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.time.Instant; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -269,15 +268,10 @@ private static void createAndRolloverDataStream(String dataStreamName, int numRo private void upgradeDataStream(String dataStreamName, int numRolloversOnOldCluster) throws Exception { Set indicesNeedingUpgrade = getDataStreamIndices(dataStreamName); - Set closedOldIndices = getClosedIndices(dataStreamName); final int explicitRolloverOnNewClusterCount = randomIntBetween(0, 2); for (int i = 0; i < explicitRolloverOnNewClusterCount; i++) { String oldIndexName = rollover(dataStreamName); if (randomBoolean()) { - if (i == 0) { - // Since this is the first rollover on the new cluster, the old index came from the old cluster - closedOldIndices.add(oldIndexName); - } closeIndex(oldIndexName); } } @@ -305,39 +299,51 @@ private void upgradeDataStream(String dataStreamName, int numRolloversOnOldClust statusResponse.getEntity().getContent(), false ); + String statusResponseString = statusResponseMap.keySet() + .stream() + .map(key -> key + "=" + statusResponseMap.get(key)) + .collect(Collectors.joining(", ", "{", "}")); assertOK(statusResponse); - assertThat(statusResponseMap.get("complete"), equalTo(true)); + assertThat(statusResponseString, statusResponseMap.get("complete"), equalTo(true)); final int originalWriteIndex = 1; if (isOriginalClusterSameMajorVersionAsCurrent()) { assertThat( + statusResponseString, statusResponseMap.get("total_indices_in_data_stream"), equalTo(originalWriteIndex + numRolloversOnOldCluster + explicitRolloverOnNewClusterCount) ); // If the original cluster was the same as this one, we don't want any indices reindexed: - 
assertThat(statusResponseMap.get("total_indices_requiring_upgrade"), equalTo(0)); - assertThat(statusResponseMap.get("successes"), equalTo(0)); + assertThat(statusResponseString, statusResponseMap.get("total_indices_requiring_upgrade"), equalTo(0)); + assertThat(statusResponseString, statusResponseMap.get("successes"), equalTo(0)); } else { // The number of rollovers that will have happened when we call reindex: final int rolloversPerformedByReindex = explicitRolloverOnNewClusterCount == 0 ? 1 : 0; final int expectedTotalIndicesInDataStream = originalWriteIndex + numRolloversOnOldCluster + explicitRolloverOnNewClusterCount + rolloversPerformedByReindex; - assertThat(statusResponseMap.get("total_indices_in_data_stream"), equalTo(expectedTotalIndicesInDataStream)); + assertThat( + statusResponseString, + statusResponseMap.get("total_indices_in_data_stream"), + equalTo(expectedTotalIndicesInDataStream) + ); /* * total_indices_requiring_upgrade is made up of: (the original write index) + numRolloversOnOldCluster. The number of * rollovers on the upgraded cluster is irrelevant since those will not be reindexed. */ assertThat( + statusResponseString, statusResponseMap.get("total_indices_requiring_upgrade"), - equalTo(originalWriteIndex + numRolloversOnOldCluster - closedOldIndices.size()) + equalTo(originalWriteIndex + numRolloversOnOldCluster) ); - assertThat(statusResponseMap.get("successes"), equalTo(numRolloversOnOldCluster + 1 - closedOldIndices.size())); + assertThat(statusResponseString, statusResponseMap.get("successes"), equalTo(numRolloversOnOldCluster + 1)); // We expect all the original indices to have been deleted for (String oldIndex : indicesNeedingUpgrade) { - if (closedOldIndices.contains(oldIndex) == false) { - assertThat(indexExists(oldIndex), equalTo(false)); - } + assertThat(statusResponseString, indexExists(oldIndex), equalTo(false)); } - assertThat(getDataStreamIndices(dataStreamName).size(), equalTo(expectedTotalIndicesInDataStream)); + assertThat( + statusResponseString, + getDataStreamIndices(dataStreamName).size(), + equalTo(expectedTotalIndicesInDataStream) + ); } }, 60, TimeUnit.SECONDS); Request cancelRequest = new Request("POST", "_migration/reindex/" + dataStreamName + "/_cancel"); @@ -356,29 +362,6 @@ private Set getDataStreamIndices(String dataStreamName) throws IOExcepti return indices.stream().map(index -> index.get("index_name").toString()).collect(Collectors.toSet()); } - @SuppressWarnings("unchecked") - private Set getClosedIndices(String dataStreamName) throws IOException { - Set allIndices = getDataStreamIndices(dataStreamName); - Set closedIndices = new HashSet<>(); - Response response = client().performRequest(new Request("GET", "_cluster/state/blocks/indices")); - Map responseMap = XContentHelper.convertToMap(JsonXContent.jsonXContent, response.getEntity().getContent(), false); - Map blocks = (Map) responseMap.get("blocks"); - Map indices = (Map) blocks.get("indices"); - for (Map.Entry indexEntry : indices.entrySet()) { - String indexName = indexEntry.getKey(); - if (allIndices.contains(indexName)) { - Map blocksForIndex = (Map) indexEntry.getValue(); - for (Map.Entry blockEntry : blocksForIndex.entrySet()) { - Map block = (Map) blockEntry.getValue(); - if ("index closed".equals(block.get("description"))) { - closedIndices.add(indexName); - } - } - } - } - return closedIndices; - } - /* * Similar to isOriginalClusterCurrent, but returns true if the major versions of the clusters are the same. So true * for 8.6 and 8.17, but false for 7.17 and 8.18. 
From 376e3f52ed8a03b3f8013676cd9369f47bc01080 Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Tue, 28 Jan 2025 13:30:52 -0600 Subject: [PATCH 154/383] Return 404 if source index missing in ReindexDataStreamIndexAction (#120971) ReindexDataStreamIndexAction currently has an NPE if the source index has been deleted. Instead return a resource_not_found_exception. --- .../ReindexDatastreamIndexTransportActionIT.java | 10 ++++++++++ .../action/ReindexDataStreamIndexTransportAction.java | 6 ++++++ 2 files changed, 16 insertions(+) diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java index 0ad7dc45d4df8..1c9d85af8d5bd 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDatastreamIndexTransportActionIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.migrate.action; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; @@ -152,6 +153,15 @@ public void testSetSourceToBlockWrites() throws Exception { assertHitCount(prepareSearch(sourceIndex).setSize(0), 0); } + public void testMissingSourceIndex() { + var nonExistentSourceIndex = randomAlphaOfLength(20).toLowerCase(Locale.ROOT); + assertThrows( + ResourceNotFoundException.class, + () -> client().execute(ReindexDataStreamIndexAction.INSTANCE, new ReindexDataStreamIndexAction.Request(nonExistentSourceIndex)) + .actionGet() + ); + } + public void testSettingsAddedBeforeReindex() throws Exception { // start with a static setting var numShards = randomIntBetween(1, 10); diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java index dfc207fa84d47..8c12011ca4bb1 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; @@ -123,6 +124,11 @@ protected void doExecute( var destIndexName = generateDestIndexName(sourceIndexName); TaskId taskId = new TaskId(clusterService.localNode().getId(), task.getId()); IndexMetadata sourceIndex = clusterService.state().getMetadata().index(sourceIndexName); + if (sourceIndex == null) { + listener.onFailure(new ResourceNotFoundException("source index [{}] does not exist", sourceIndexName)); + return; + } + Settings settingsBefore = sourceIndex.getSettings(); var hasOldVersion = 
DeprecatedIndexPredicate.getReindexRequiredPredicate(clusterService.state().metadata(), false); From a187d3d27f9c14f381ebc02a384211a96417b83e Mon Sep 17 00:00:00 2001 From: Valentin Crettaz Date: Tue, 28 Jan 2025 20:43:42 +0100 Subject: [PATCH 155/383] Update monitoring mapping to add some new fields (#121062) --- .../src/main/resources/monitoring-es-mb.json | 492 ++++++++++++++++++ .../MonitoringTemplateRegistry.java | 2 +- 2 files changed, 493 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json index 793a8c3035d8e..6d82f6e5295e6 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json @@ -464,6 +464,13 @@ } } }, + "threads": { + "properties": { + "count": { + "type": "long" + } + } + }, "gc": { "properties": { "collectors": { @@ -562,6 +569,20 @@ "type": "long" } } + }, + "fetch_total": { + "properties": { + "count": { + "type": "long" + } + } + }, + "fetch_time": { + "properties": { + "ms": { + "type": "long" + } + } } } }, @@ -586,6 +607,42 @@ } } }, + "get": { + "properties": { + "total": { + "properties": { + "count": { + "type": "long" + } + } + }, + "time": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "merges": { + "properties": { + "total": { + "properties": { + "count": { + "type": "long" + } + } + }, + "total_time": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, "fielddata": { "properties": { "memory": { @@ -594,6 +651,13 @@ "type": "long" } } + }, + "evictions": { + "properties": { + "count": { + "type": "long" + } + } } } }, @@ -651,6 +715,60 @@ } } }, + "translog": { + "properties": { + "operations": { + "properties": { + "count": { + "type": "long" + } + } + }, + "size": { + "properties": { + "bytes": { + "type": "long" + } + } + } + } + }, + "refresh": { + "properties": { + "total": { + "properties": { + "count": { + "type": "long" + } + } + }, + "total_time": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "flush": { + "properties": { + "total": { + "properties": { + "count": { + "type": "long" + } + } + }, + "total_time": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, "segments": { "properties": { "version_map": { @@ -768,6 +886,20 @@ }, "process": { "properties": { + "mem": { + "properties": { + "total_virtual": { + "properties": { + "bytes": { + "type": "long" + } + } + } + } + }, + "open_file_descriptors": { + "type": "long" + }, "cpu": { "properties": { "pct": { @@ -882,6 +1014,88 @@ } } }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + "queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + }, + "force_merge": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + "queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + }, + "flush": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + "queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + }, + "search_worker": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { 
+ "count": { + "type": "long" + } + } + }, "queue": { "properties": { "count": { @@ -900,6 +1114,38 @@ } } }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + "queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + }, + "system_read": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, "queue": { "properties": { "count": { @@ -918,6 +1164,38 @@ } } }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + "queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + }, + "esql_worker": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, "queue": { "properties": { "count": { @@ -936,6 +1214,38 @@ } } }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + "queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + }, + "system_write": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, "queue": { "properties": { "count": { @@ -954,6 +1264,13 @@ } } }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, "queue": { "properties": { "count": { @@ -962,6 +1279,181 @@ } } } + }, + "snapshot": { + "properties": { + "rejected": { + "properties": { + "count": { + "type": "long" + } + } + }, + "active": { + "properties": { + "count": { + "type": "long" + } + } + }, + "queue": { + "properties": { + "count": { + "type": "long" + } + } + } + } + } + } + }, + "transport": { + "properties": { + "tx": { + "properties": { + "size": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "count": { + "type": "long" + } + } + }, + "rx": { + "properties": { + "size": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "count": { + "type": "long" + } + } + } + } + }, + "ingest": { + "properties": { + "total": { + "properties": { + "current": { + "type": "long" + }, + "time_in_millis": { + "type": "long" + }, + "count": { + "type": "long" + }, + "failed": { + "type": "long" + } + } + } + } + }, + "indexing_pressure": { + "properties": { + "memory": { + "properties": { + "current": { + "properties": { + "all": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "coordinating": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "replica": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "combined_coordinating_and_primary": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "primary": { + "properties": { + "bytes": { + "type": "long" + } + } + } + } + }, + "total": { + "properties": { + "all": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "coordinating": { + "properties": { + "rejections": { + "type": "long" + }, + "bytes": { + "type": "long" + } + } + }, + "replica": { + "properties": { + "rejections": { + "type": "long" + }, + "bytes": { + "type": "long" + } + } + }, + "combined_coordinating_and_primary": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "primary": { + "properties": { + "rejections": { + "type": "long" + }, + "bytes": { + "type": "long" + } + } + } + } + }, + "limit_in_bytes": { + "type": "long" + } + } } } }, diff --git 
a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java
index cfd322d04e92f..0605177b2c2e5 100644
--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java
@@ -77,7 +77,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry {
  * writes monitoring data in ECS format as of 8.0. These templates define the ECS schema as well as alias fields for the old monitoring
  * mappings that point to the corresponding ECS fields.
  */
-    public static final int STACK_MONITORING_REGISTRY_VERSION = 8_00_00_99 + 19;
+    public static final int STACK_MONITORING_REGISTRY_VERSION = 8_00_00_99 + 20;
     private static final String STACK_MONITORING_REGISTRY_VERSION_VARIABLE = "xpack.stack.monitoring.template.release.version";
     private static final String STACK_TEMPLATE_VERSION = "8";
     private static final String STACK_TEMPLATE_VERSION_VARIABLE = "xpack.stack.monitoring.template.version";

From e0b168d3f6d201970b5a3c404cfd97138a289804 Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Tue, 28 Jan 2025 12:13:31 -0800
Subject: [PATCH 156/383] Add initial file entitlement check (#120243)

This commit adds FileEntitlement to entitlements. It does not yet add
checks for all file access methods; instead it covers a representative
set of read and write methods as examples.

Each module contains a sorted array of paths with read and write
permissions. Binary search is used to quickly identify the closest
granted path when deciding whether a target path can be read or written.

A FileEntitlement path can refer to either a file or a directory, and a
directory grants permission recursively. The mode is either read or
read_write. All operations such as create or delete are considered
writes.
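The closest-path lookup described above amounts to a binary search over the sorted, normalized path strings: an exact hit is a direct grant, and otherwise the nearest preceding entry is the candidate ancestor directory. A minimal standalone sketch of that idea (illustrative names, not the exact FileAccessTree code added below; the separator check is an extra safeguard so a grant on "/tmp/foo" does not also cover "/tmp/foobar"):

    import java.io.File;
    import java.nio.file.Path;
    import java.util.Arrays;
    import java.util.List;

    class SortedPathLookupSketch {
        // Granted paths, normalized to absolute strings and sorted once up front.
        private final String[] sortedPaths;

        SortedPathLookupSketch(List<Path> grantedPaths) {
            this.sortedPaths = grantedPaths.stream()
                .map(p -> p.toAbsolutePath().normalize().toString())
                .sorted()
                .toArray(String[]::new);
        }

        boolean isGranted(Path path) {
            String target = path.toAbsolutePath().normalize().toString();
            int ndx = Arrays.binarySearch(sortedPaths, target);
            if (ndx >= 0) {
                return true; // exact grant on this file or directory
            }
            int insertionPoint = -ndx - 1;
            if (insertionPoint == 0) {
                return false; // target sorts before every granted path
            }
            // Directories grant access recursively, so the closest preceding grant may cover the target.
            String closest = sortedPaths[insertionPoint - 1];
            return target.startsWith(closest + File.separator);
        }
    }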
relates ES-10354 Co-authored-by: Jack Conradson --- libs/entitlement/bridge/build.gradle | 5 +- .../bridge/EntitlementChecker.java | 46 ++++++-- .../qa/entitled/EntitledActions.java | 8 ++ .../entitlement/qa/test/FileCheckActions.java | 84 +++++++++++++++ .../qa/test/RestEntitlementsCheckAction.java | 12 ++- .../qa/AbstractEntitlementsIT.java | 5 + .../entitlement/qa/EntitlementsTestRule.java | 15 ++- .../api/ElasticsearchEntitlementChecker.java | 50 +++++++++ .../runtime/policy/FileAccessTree.java | 90 ++++++++++++++++ .../runtime/policy/FileEntitlement.java | 68 ++++-------- .../runtime/policy/PolicyManager.java | 101 +++++++++++++++++- .../runtime/policy/FileAccessTreeTests.java | 89 +++++++++++++++ .../runtime/policy/PolicyManagerTests.java | 11 +- .../policy/PolicyParserFailureTests.java | 7 +- .../runtime/policy/PolicyParserTests.java | 4 +- .../runtime/policy/test-policy.yaml | 4 +- 16 files changed, 519 insertions(+), 80 deletions(-) create mode 100644 libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java create mode 100644 libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java create mode 100644 libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java diff --git a/libs/entitlement/bridge/build.gradle b/libs/entitlement/bridge/build.gradle index a9f8f6e3a3b0a..5dec95b4b9bb4 100644 --- a/libs/entitlement/bridge/build.gradle +++ b/libs/entitlement/bridge/build.gradle @@ -19,6 +19,9 @@ tasks.named('jar').configure { } } +// The bridge only uses things within the jdk, but the checker +// needs to have many forbidden apis in its signatures. Suppressing +// each use of forbidden apis would be tedious and not useful. 
tasks.withType(CheckForbiddenApisTask).configureEach { - replaceSignatureFiles 'jdk-signatures' + enabled = false } diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index d509763b3541d..de47e88aa8e95 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.bridge; +import java.io.File; import java.io.InputStream; import java.io.PrintStream; import java.io.PrintWriter; @@ -47,7 +48,9 @@ import java.nio.channels.DatagramChannel; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; +import java.nio.charset.Charset; import java.nio.file.Path; +import java.nio.file.attribute.UserPrincipal; import java.security.cert.CertStoreParameters; import java.util.List; import java.util.Locale; @@ -63,7 +66,7 @@ @SuppressWarnings("unused") // Called from instrumentation code inserted by the Entitlements agent public interface EntitlementChecker { - //////////////////// + /// ///////////////// // // Exit the JVM process // @@ -74,7 +77,7 @@ public interface EntitlementChecker { void check$java_lang_System$$exit(Class callerClass, int status); - //////////////////// + /// ///////////////// // // ClassLoader ctor // @@ -85,7 +88,7 @@ public interface EntitlementChecker { void check$java_lang_ClassLoader$(Class callerClass, String name, ClassLoader parent); - //////////////////// + /// ///////////////// // // SecureClassLoader ctor // @@ -96,7 +99,7 @@ public interface EntitlementChecker { void check$java_security_SecureClassLoader$(Class callerClass, String name, ClassLoader parent); - //////////////////// + /// ///////////////// // // URLClassLoader constructors // @@ -111,7 +114,7 @@ public interface EntitlementChecker { void check$java_net_URLClassLoader$(Class callerClass, String name, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory); - //////////////////// + /// ///////////////// // // "setFactory" methods // @@ -124,7 +127,7 @@ public interface EntitlementChecker { void check$javax_net_ssl_SSLContext$$setDefault(Class callerClass, SSLContext context); - //////////////////// + /// ///////////////// // // Process creation // @@ -133,7 +136,7 @@ public interface EntitlementChecker { void check$java_lang_ProcessBuilder$$startPipeline(Class callerClass, List builders); - //////////////////// + /// ///////////////// // // System Properties and similar // @@ -142,7 +145,7 @@ public interface EntitlementChecker { void check$java_lang_System$$clearProperty(Class callerClass, String key); - //////////////////// + /// ///////////////// // // JVM-wide state changes // @@ -219,7 +222,7 @@ public interface EntitlementChecker { void check$java_net_URLConnection$$setContentHandlerFactory(Class callerClass, ContentHandlerFactory fac); - //////////////////// + /// ///////////////// // // Network access // @@ -416,7 +419,7 @@ public interface EntitlementChecker { void check$sun_nio_ch_DatagramChannelImpl$receive(Class callerClass, DatagramChannel that, ByteBuffer dst); - //////////////////// + /// ///////////////// // // Load native libraries // @@ -484,4 +487,27 @@ public interface EntitlementChecker { void check$java_lang_foreign_SymbolLookup$$libraryLookup(Class callerClass, Path path, Arena arena); 
void check$java_lang_ModuleLayer$Controller$enableNativeAccess(Class callerClass, ModuleLayer.Controller that, Module target); + + /// ///////////////// + // + // File access + // + + void check$java_util_Scanner$(Class callerClass, File source); + + void check$java_util_Scanner$(Class callerClass, File source, String charsetName); + + void check$java_util_Scanner$(Class callerClass, File source, Charset charset); + + void check$java_io_FileOutputStream$(Class callerClass, String name); + + void check$java_io_FileOutputStream$(Class callerClass, String name, boolean append); + + void check$java_io_FileOutputStream$(Class callerClass, File file); + + void check$java_io_FileOutputStream$(Class callerClass, File file, boolean append); + + void check$java_nio_file_Files$$probeContentType(Class callerClass, Path path); + + void check$java_nio_file_Files$$setOwner(Class callerClass, Path path, UserPrincipal principal); } diff --git a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java index 282860e1cdf60..24d7472e07c65 100644 --- a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java +++ b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java @@ -11,6 +11,11 @@ import org.elasticsearch.core.SuppressForbidden; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.attribute.UserPrincipal; + public final class EntitledActions { private EntitledActions() {} @@ -19,4 +24,7 @@ static void System_clearProperty(String key) { System.clearProperty(key); } + public static UserPrincipal getFileOwner(Path path) throws IOException { + return Files.getOwner(path); + } } diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java new file mode 100644 index 0000000000000..6e15ff4d0cdd1 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.qa.test; + +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.qa.entitled.EntitledActions; + +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.UserPrincipal; +import java.util.Scanner; + +@SuppressForbidden(reason = "Explicitly checking APIs that are forbidden") +class FileCheckActions { + + private static Path testRootDir = Paths.get(System.getProperty("es.entitlements.testdir")); + + private static Path readDir() { + return testRootDir.resolve("read_dir"); + } + + private static Path readWriteDir() { + return testRootDir.resolve("read_write_dir"); + } + + private static Path readFile() { + return testRootDir.resolve("read_file"); + } + + private static Path readWriteFile() { + return testRootDir.resolve("read_write_file"); + } + + static void createScannerFile() throws FileNotFoundException { + new Scanner(readFile().toFile()); + } + + static void createScannerFileWithCharset() throws IOException { + new Scanner(readFile().toFile(), StandardCharsets.UTF_8); + } + + static void createScannerFileWithCharsetName() throws FileNotFoundException { + new Scanner(readFile().toFile(), "UTF-8"); + } + + static void createFileOutputStreamString() throws IOException { + new FileOutputStream(readWriteFile().toString()).close(); + } + + static void createFileOutputStreamStringWithAppend() throws IOException { + new FileOutputStream(readWriteFile().toString(), false).close(); + } + + static void createFileOutputStreamFile() throws IOException { + new FileOutputStream(readWriteFile().toFile()).close(); + } + + static void createFileOutputStreamFileWithAppend() throws IOException { + new FileOutputStream(readWriteFile().toFile(), false).close(); + } + + static void filesProbeContentType() throws IOException { + Files.probeContentType(readFile()); + } + + static void filesSetOwner() throws IOException { + UserPrincipal owner = EntitledActions.getFileOwner(readWriteFile()); + Files.setOwner(readWriteFile(), owner); // set to existing owner, just trying to execute the method + } + + private FileCheckActions() {} +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java index c2b6478e561a8..9b8cae1b72d29 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java @@ -200,7 +200,6 @@ static CheckAction alwaysDenied(CheckedRunnable action) { entry("runtime_load_library", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoadLibrary)), entry("system_load", forPlugins(LoadNativeLibrariesCheckActions::systemLoad)), entry("system_load_library", forPlugins(LoadNativeLibrariesCheckActions::systemLoadLibrary)), - entry("enable_native_access", new CheckAction(VersionSpecificNativeChecks::enableNativeAccess, false, 22)), entry("address_target_layout", new CheckAction(VersionSpecificNativeChecks::addressLayoutWithTargetLayout, false, 22)), entry("donwncall_handle", new 
CheckAction(VersionSpecificNativeChecks::linkerDowncallHandle, false, 22)), @@ -213,7 +212,16 @@ static CheckAction alwaysDenied(CheckedRunnable action) { new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpretWithSizeAndCleanup, false, 22) ), entry("symbol_lookup_name", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithName, false, 22)), - entry("symbol_lookup_path", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithPath, false, 22)) + entry("symbol_lookup_path", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithPath, false, 22)), + entry("create_scanner", forPlugins(FileCheckActions::createScannerFile)), + entry("create_scanner_with_charset", forPlugins(FileCheckActions::createScannerFileWithCharset)), + entry("create_scanner_with_charset_name", forPlugins(FileCheckActions::createScannerFileWithCharsetName)), + entry("create_file_output_stream_string", forPlugins(FileCheckActions::createFileOutputStreamString)), + entry("create_file_output_stream_string_with_append", forPlugins(FileCheckActions::createFileOutputStreamStringWithAppend)), + entry("create_file_output_stream_file", forPlugins(FileCheckActions::createFileOutputStreamFile)), + entry("create_file_output_stream_file_with_append", forPlugins(FileCheckActions::createFileOutputStreamFileWithAppend)), + entry("files_probe_content_type", forPlugins(FileCheckActions::filesProbeContentType)), + entry("files_set_owner", forPlugins(FileCheckActions::filesSetOwner)) ) .filter(entry -> entry.getValue().fromJavaVersion() == null || Runtime.version().feature() >= entry.getValue().fromJavaVersion()) .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java index b770b4915a317..487f692ef4488 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/AbstractEntitlementsIT.java @@ -34,6 +34,11 @@ public abstract class AbstractEntitlementsIT extends ESRestTestCase { Map.of("properties", List.of("es.entitlements.checkSetSystemProperty", "es.entitlements.checkClearSystemProperty")) ) ); + + builder.value(Map.of("file", Map.of("path", tempDir.resolve("read_dir"), "mode", "read"))); + builder.value(Map.of("file", Map.of("path", tempDir.resolve("read_write_dir"), "mode", "read_write"))); + builder.value(Map.of("file", Map.of("path", tempDir.resolve("read_file"), "mode", "read"))); + builder.value(Map.of("file", Map.of("path", tempDir.resolve("read_write_file"), "mode", "read_write"))); }; private final String actionName; diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java index 8c9dcb6dd0efe..33d5eeca595ab 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java @@ -15,6 +15,7 @@ import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.yaml.YamlXContent; +import org.junit.rules.ExternalResource; import 
org.junit.rules.RuleChain; import org.junit.rules.TemporaryFolder; import org.junit.rules.TestRule; @@ -23,6 +24,7 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.nio.file.Files; import java.nio.file.Path; class EntitlementsTestRule implements TestRule { @@ -38,6 +40,16 @@ interface PolicyBuilder { @SuppressWarnings("this-escape") EntitlementsTestRule(boolean modular, PolicyBuilder policyBuilder) { testDir = new TemporaryFolder(); + var tempDirSetup = new ExternalResource() { + @Override + protected void before() throws Throwable { + Path testPath = testDir.getRoot().toPath(); + Files.createDirectory(testPath.resolve("read_dir")); + Files.createDirectory(testPath.resolve("read_write_dir")); + Files.writeString(testPath.resolve("read_file"), ""); + Files.writeString(testPath.resolve("read_write_file"), ""); + } + }; cluster = ElasticsearchCluster.local() .module("entitled") .module("entitlement-test-plugin", spec -> setupEntitlements(spec, modular, policyBuilder)) @@ -45,7 +57,7 @@ interface PolicyBuilder { .systemProperty("es.entitlements.testdir", () -> testDir.getRoot().getAbsolutePath()) .setting("xpack.security.enabled", "false") .build(); - ruleChain = RuleChain.outerRule(testDir).around(cluster); + ruleChain = RuleChain.outerRule(testDir).around(tempDirSetup).around(cluster); } @Override @@ -62,6 +74,7 @@ private void setupEntitlements(PluginInstallSpec spec, boolean modular, PolicyBu builder.startObject(); builder.field(moduleName); builder.startArray(); + policyBuilder.build(builder, testDir.getRoot().toPath()); builder.endArray(); builder.endObject(); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 8600dd357c384..48a7400a1db7b 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -9,9 +9,11 @@ package org.elasticsearch.entitlement.runtime.api; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.bridge.EntitlementChecker; import org.elasticsearch.entitlement.runtime.policy.PolicyManager; +import java.io.File; import java.io.InputStream; import java.io.PrintStream; import java.io.PrintWriter; @@ -51,7 +53,9 @@ import java.nio.channels.DatagramChannel; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; +import java.nio.charset.Charset; import java.nio.file.Path; +import java.nio.file.attribute.UserPrincipal; import java.security.cert.CertStoreParameters; import java.util.List; import java.util.Locale; @@ -69,6 +73,7 @@ * API methods for managing the checks. * The trampoline module loads this object via SPI. 
*/ +@SuppressForbidden(reason = "Explicitly checking APIs that are forbidden") public class ElasticsearchEntitlementChecker implements EntitlementChecker { private final PolicyManager policyManager; @@ -868,4 +873,49 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { ) { policyManager.checkLoadingNativeLibraries(callerClass); } + + @Override + public void check$java_util_Scanner$(Class callerClass, File source) { + policyManager.checkFileRead(callerClass, source); + } + + @Override + public void check$java_util_Scanner$(Class callerClass, File source, String charsetName) { + policyManager.checkFileRead(callerClass, source); + } + + @Override + public void check$java_util_Scanner$(Class callerClass, File source, Charset charset) { + policyManager.checkFileRead(callerClass, source); + } + + @Override + public void check$java_io_FileOutputStream$(Class callerClass, String name) { + policyManager.checkFileWrite(callerClass, new File(name)); + } + + @Override + public void check$java_io_FileOutputStream$(Class callerClass, String name, boolean append) { + policyManager.checkFileWrite(callerClass, new File(name)); + } + + @Override + public void check$java_io_FileOutputStream$(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_FileOutputStream$(Class callerClass, File file, boolean append) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_nio_file_Files$$probeContentType(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$setOwner(Class callerClass, Path path, UserPrincipal principal) { + policyManager.checkFileWrite(callerClass, path); + } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java new file mode 100644 index 0000000000000..55813df28b6f8 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.core.SuppressForbidden; + +import java.io.File; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +final class FileAccessTree { + static final FileAccessTree EMPTY = new FileAccessTree(List.of()); + + private final String[] readPaths; + private final String[] writePaths; + + FileAccessTree(List fileEntitlements) { + List readPaths = new ArrayList<>(); + List writePaths = new ArrayList<>(); + for (FileEntitlement fileEntitlement : fileEntitlements) { + var mode = fileEntitlement.mode(); + if (mode == FileEntitlement.Mode.READ_WRITE) { + writePaths.add(fileEntitlement.path()); + } + readPaths.add(fileEntitlement.path()); + } + + readPaths.sort(String::compareTo); + writePaths.sort(String::compareTo); + + this.readPaths = readPaths.toArray(new String[0]); + this.writePaths = writePaths.toArray(new String[0]); + } + + boolean canRead(Path path) { + return checkPath(normalize(path), readPaths); + } + + @SuppressForbidden(reason = "Explicitly checking File apis") + boolean canRead(File file) { + return checkPath(normalize(file.toPath()), readPaths); + } + + boolean canWrite(Path path) { + return checkPath(normalize(path), writePaths); + } + + @SuppressForbidden(reason = "Explicitly checking File apis") + boolean canWrite(File file) { + return checkPath(normalize(file.toPath()), writePaths); + } + + private static String normalize(Path path) { + return path.toAbsolutePath().normalize().toString(); + } + + private static boolean checkPath(String path, String[] paths) { + if (paths.length == 0) { + return false; + } + int ndx = Arrays.binarySearch(paths, path); + if (ndx < -1) { + String maybeParent = paths[-ndx - 2]; + return path.startsWith(maybeParent); + } + return ndx >= 0; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + FileAccessTree that = (FileAccessTree) o; + return Objects.deepEquals(readPaths, that.readPaths) && Objects.deepEquals(writePaths, that.writePaths); + } + + @Override + public int hashCode() { + return Objects.hash(Arrays.hashCode(readPaths), Arrays.hashCode(writePaths)); + } +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java index 4fdbcc93ea6e0..4bd1dc10c85bb 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java @@ -9,62 +9,38 @@ package org.elasticsearch.entitlement.runtime.policy; -import java.util.List; -import java.util.Objects; +import java.nio.file.Paths; /** - * Describes a file entitlement with a path and actions. + * Describes a file entitlement with a path and mode. 
*/ -public class FileEntitlement implements Entitlement { +public record FileEntitlement(String path, Mode mode) implements Entitlement { - public static final int READ_ACTION = 0x1; - public static final int WRITE_ACTION = 0x2; - - public static final String READ = "read"; - public static final String WRITE = "write"; - - private final String path; - private final int actions; - - @ExternalEntitlement(parameterNames = { "path", "actions" }, esModulesOnly = false) - public FileEntitlement(String path, List actionsList) { - this.path = path; - int actionsInt = 0; - - for (String actionString : actionsList) { - if (READ.equals(actionString)) { - if ((actionsInt & READ_ACTION) == READ_ACTION) { - throw new IllegalArgumentException("file action [read] specified multiple times"); - } - actionsInt |= READ_ACTION; - } else if (WRITE.equals(actionString)) { - if ((actionsInt & WRITE_ACTION) == WRITE_ACTION) { - throw new IllegalArgumentException("file action [write] specified multiple times"); - } - actionsInt |= WRITE_ACTION; - } else { - throw new IllegalArgumentException("unknown file action [" + actionString + "]"); - } - } + public enum Mode { + READ, + READ_WRITE + } - this.actions = actionsInt; + public FileEntitlement { + path = normalizePath(path); } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - FileEntitlement that = (FileEntitlement) o; - return actions == that.actions && Objects.equals(path, that.path); + private static String normalizePath(String path) { + return Paths.get(path).toAbsolutePath().normalize().toString(); } - @Override - public int hashCode() { - return Objects.hash(path, actions); + private static Mode parseMode(String mode) { + if (mode.equals("read")) { + return Mode.READ; + } else if (mode.equals("read_write")) { + return Mode.READ_WRITE; + } else { + throw new PolicyValidationException("invalid mode: " + mode + ", valid values: [read, read_write]"); + } } - @Override - public String toString() { - return "FileEntitlement{" + "path='" + path + '\'' + ", actions=" + actions + '}'; + @ExternalEntitlement(parameterNames = { "path", "mode" }, esModulesOnly = false) + public FileEntitlement(String path, String mode) { + this(path, parseMode(mode)); } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index dcdc7d1a47f9f..04942e15d10a4 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -10,13 +10,16 @@ package org.elasticsearch.entitlement.runtime.policy; import org.elasticsearch.core.Strings; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.runtime.api.NotEntitledException; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; +import java.io.File; import java.lang.StackWalker.StackFrame; import java.lang.module.ModuleFinder; import java.lang.module.ModuleReference; +import java.nio.file.Path; import java.util.List; import java.util.Map; import java.util.Optional; @@ -36,15 +39,22 @@ public class PolicyManager { private static final Logger logger = LogManager.getLogger(PolicyManager.class); - record ModuleEntitlements(Map, List> entitlementsByType) { - public static final ModuleEntitlements NONE = 
new ModuleEntitlements(Map.of()); + record ModuleEntitlements(Map, List> entitlementsByType, FileAccessTree fileAccess) { + public static final ModuleEntitlements NONE = new ModuleEntitlements(Map.of(), FileAccessTree.EMPTY); ModuleEntitlements { entitlementsByType = Map.copyOf(entitlementsByType); } public static ModuleEntitlements from(List entitlements) { - return new ModuleEntitlements(entitlements.stream().collect(groupingBy(Entitlement::getClass))); + var fileEntitlements = entitlements.stream() + .filter(e -> e.getClass().equals(FileEntitlement.class)) + .map(e -> (FileEntitlement) e) + .toList(); + return new ModuleEntitlements( + entitlements.stream().collect(groupingBy(Entitlement::getClass)), + new FileAccessTree(fileEntitlements) + ); } public boolean hasEntitlement(Class entitlementClass) { @@ -189,6 +199,91 @@ public void checkChangeNetworkHandling(Class callerClass) { checkChangeJVMGlobalState(callerClass); } + /** + * Check for operations that can access sensitive network information, e.g. secrets, tokens or SSL sessions + */ + public void checkReadSensitiveNetworkInformation(Class callerClass) { + neverEntitled(callerClass, "access sensitive network information"); + } + + @SuppressForbidden(reason = "Explicitly checking File apis") + public void checkFileRead(Class callerClass, File file) { + var requestingClass = requestingClass(callerClass); + if (isTriviallyAllowed(requestingClass)) { + return; + } + + ModuleEntitlements entitlements = getEntitlements(requestingClass); + if (entitlements.fileAccess().canRead(file) == false) { + throw new NotEntitledException( + Strings.format( + "Not entitled: caller [%s], module [%s], entitlement [file], operation [read], path [%s]", + callerClass, + requestingClass.getModule(), + file + ) + ); + } + } + + public void checkFileRead(Class callerClass, Path path) { + var requestingClass = requestingClass(callerClass); + if (isTriviallyAllowed(requestingClass)) { + return; + } + + ModuleEntitlements entitlements = getEntitlements(requestingClass); + if (entitlements.fileAccess().canRead(path) == false) { + throw new NotEntitledException( + Strings.format( + "Not entitled: caller [%s], module [%s], entitlement [file], operation [read], path [%s]", + callerClass, + requestingClass.getModule(), + path + ) + ); + } + } + + @SuppressForbidden(reason = "Explicitly checking File apis") + public void checkFileWrite(Class callerClass, File file) { + var requestingClass = requestingClass(callerClass); + if (isTriviallyAllowed(requestingClass)) { + return; + } + + ModuleEntitlements entitlements = getEntitlements(requestingClass); + if (entitlements.fileAccess().canWrite(file) == false) { + throw new NotEntitledException( + Strings.format( + "Not entitled: caller [%s], module [%s], entitlement [file], operation [write], path [%s]", + callerClass, + requestingClass.getModule(), + file + ) + ); + } + } + + public void checkFileWrite(Class callerClass, Path path) { + var requestingClass = requestingClass(callerClass); + if (isTriviallyAllowed(requestingClass)) { + return; + } + + ModuleEntitlements entitlements = getEntitlements(requestingClass); + if (entitlements.fileAccess().canWrite(path) == false) { + throw new NotEntitledException( + Strings.format( + "Not entitled: caller [%s], module [%s], entitlement [file], operation [write], path [%s]", + callerClass, + requestingClass.getModule(), + path + ) + ); + } + } + /** * Check for operations that can access sensitive network information, e.g. 
secrets, tokens or SSL sessions */ diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java new file mode 100644 index 0000000000000..1521c80341b9d --- /dev/null +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import java.nio.file.Path; +import java.util.List; + +import static org.hamcrest.Matchers.is; + +public class FileAccessTreeTests extends ESTestCase { + + static Path root; + + @BeforeClass + public static void setupRoot() { + root = createTempDir(); + } + + private static Path path(String s) { + return root.resolve(s); + } + + public void testEmpty() { + var tree = new FileAccessTree(List.of()); + assertThat(tree.canRead(path("path")), is(false)); + assertThat(tree.canWrite(path("path")), is(false)); + } + + public void testRead() { + var tree = new FileAccessTree(List.of(entitlement("foo", "read"))); + assertThat(tree.canRead(path("foo")), is(true)); + assertThat(tree.canRead(path("foo/subdir")), is(true)); + assertThat(tree.canWrite(path("foo")), is(false)); + + assertThat(tree.canRead(path("before")), is(false)); + assertThat(tree.canRead(path("later")), is(false)); + } + + public void testWrite() { + var tree = new FileAccessTree(List.of(entitlement("foo", "read_write"))); + assertThat(tree.canWrite(path("foo")), is(true)); + assertThat(tree.canWrite(path("foo/subdir")), is(true)); + assertThat(tree.canRead(path("foo")), is(true)); + + assertThat(tree.canWrite(path("before")), is(false)); + assertThat(tree.canWrite(path("later")), is(false)); + } + + public void testTwoPaths() { + var tree = new FileAccessTree(List.of(entitlement("foo", "read"), entitlement("bar", "read"))); + assertThat(tree.canRead(path("a")), is(false)); + assertThat(tree.canRead(path("bar")), is(true)); + assertThat(tree.canRead(path("bar/subdir")), is(true)); + assertThat(tree.canRead(path("c")), is(false)); + assertThat(tree.canRead(path("foo")), is(true)); + assertThat(tree.canRead(path("foo/subdir")), is(true)); + assertThat(tree.canRead(path("z")), is(false)); + } + + public void testReadWriteUnderRead() { + var tree = new FileAccessTree(List.of(entitlement("foo", "read"), entitlement("foo/bar", "read_write"))); + assertThat(tree.canRead(path("foo")), is(true)); + assertThat(tree.canWrite(path("foo")), is(false)); + assertThat(tree.canRead(path("foo/bar")), is(true)); + assertThat(tree.canWrite(path("foo/bar")), is(true)); + } + + public void testNormalizePath() { + var tree = new FileAccessTree(List.of(entitlement("foo/../bar", "read"))); + assertThat(tree.canRead(path("foo/../bar")), is(true)); + assertThat(tree.canRead(path("foo")), is(false)); + assertThat(tree.canRead(path("")), is(false)); + } + + FileEntitlement entitlement(String path, String mode) { + Path p = 
path(path); + return new FileEntitlement(p.toString(), mode); + } +} diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index f50cd217696de..20035d0bb258b 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -31,10 +31,7 @@ import static java.util.Map.entry; import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED; -import static org.elasticsearch.test.LambdaMatchers.transformedMatch; import static org.hamcrest.Matchers.aMapWithSize; -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; @@ -204,10 +201,8 @@ public void testGetEntitlementsReturnsEntitlementsForPluginModule() throws IOExc var entitlements = policyManager.getEntitlements(mockPluginClass); assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); - assertThat( - entitlements.getEntitlements(FileEntitlement.class).toList(), - contains(transformedMatch(FileEntitlement::toString, containsString("/test/path"))) - ); + // TODO: this can't work on Windows, we need to have the root be unknown + // assertThat(entitlements.fileAccess().canRead("/test/path"), is(true)); } public void testGetEntitlementsResultIsCached() { @@ -324,7 +319,7 @@ private static Policy createPluginPolicy(String... pluginModules) { .map( name -> new Scope( name, - List.of(new FileEntitlement("/test/path", List.of(FileEntitlement.READ)), new CreateClassLoaderEntitlement()) + List.of(new FileEntitlement("/test/path", FileEntitlement.Mode.READ), new CreateClassLoaderEntitlement()) ) ) .toList() diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java index dfcc5d8916f2c..cc8043990930d 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java @@ -55,7 +55,7 @@ public void testEntitlementMissingParameter() { """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( "[4:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " - + "for entitlement type [file]: missing entitlement parameter [actions]", + + "for entitlement type [file]: missing entitlement parameter [mode]", ppe.getMessage() ); } @@ -65,12 +65,11 @@ public void testEntitlementExtraneousParameter() { entitlement-module-name: - file: path: test-path - actions: - - read + mode: read extra: test """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( - "[7:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "[6:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "for entitlement type [file]: extraneous entitlement parameter(s) {extra=test}", ppe.getMessage() ); diff --git 
a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java index 08185c3f82b31..191b3afcdc674 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java @@ -47,7 +47,7 @@ public void testPolicyBuilder() throws IOException { .parsePolicy(); Policy expected = new Policy( "test-policy.yaml", - List.of(new Scope("entitlement-module-name", List.of(new FileEntitlement("test/path/to/file", List.of("read", "write"))))) + List.of(new Scope("entitlement-module-name", List.of(new FileEntitlement("test/path/to/file", "read_write")))) ); assertEquals(expected, parsedPolicy); } @@ -57,7 +57,7 @@ public void testPolicyBuilderOnExternalPlugin() throws IOException { .parsePolicy(); Policy expected = new Policy( "test-policy.yaml", - List.of(new Scope("entitlement-module-name", List.of(new FileEntitlement("test/path/to/file", List.of("read", "write"))))) + List.of(new Scope("entitlement-module-name", List.of(new FileEntitlement("test/path/to/file", "read_write")))) ); assertEquals(expected, parsedPolicy); } diff --git a/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml b/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml index f13f574535bec..bbb926ccdd37d 100644 --- a/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml +++ b/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml @@ -1,6 +1,4 @@ entitlement-module-name: - file: path: "test/path/to/file" - actions: - - "read" - - "write" + mode: "read_write" From ef7325fdcbe264a33e6c2e92be72b0165ce02a5c Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 28 Jan 2025 20:15:14 +0000 Subject: [PATCH 157/383] [ML] Automatically rollover legacy .ml-anomalies indices (#120913) Roll over legacy version 7 .ml-anomalies indices and update the job aliases. 
This is required for anomaly detection jobs to continue to work in 9 --- docs/changelog/120913.yaml | 5 + .../persistence/AnomalyDetectorsIndex.java | 21 +- .../AnomalyDetectorsIndexFields.java | 3 + .../xpack/core/ml/utils/MlIndexAndAlias.java | 52 ++-- .../core/ml/utils/MlIndexAndAliasTests.java | 17 +- .../xpack/ml/MachineLearning.java | 3 +- .../xpack/ml/MlAnomaliesIndexUpdate.java | 235 ++++++++++++++++ .../xpack/ml/MlIndexRollover.java | 7 +- .../xpack/ml/MlAnomaliesIndexUpdateTests.java | 254 ++++++++++++++++++ .../xpack/ml/MlIndexRolloverTests.java | 2 +- 10 files changed, 569 insertions(+), 30 deletions(-) create mode 100644 docs/changelog/120913.yaml create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdate.java create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdateTests.java diff --git a/docs/changelog/120913.yaml b/docs/changelog/120913.yaml new file mode 100644 index 0000000000000..69db6027caa69 --- /dev/null +++ b/docs/changelog/120913.yaml @@ -0,0 +1,5 @@ +pr: 120913 +summary: Automatically rollover legacy .ml-anomalies indices +area: Machine Learning +type: upgrade +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java index 7a098d432f35b..1ab4906ed0d06 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java @@ -38,6 +38,10 @@ public static String jobResultsIndexPrefix() { return AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX; } + public static String jobResultsIndexPattern() { + return AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*"; + } + /** * The name of the alias pointing to the indices where the job's results are stored * @param jobId Job Id @@ -47,15 +51,26 @@ public static String jobResultsAliasedName(String jobId) { return AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + jobId; } + /** + * Extract the job Id from the alias name. 
+     * If this is not a results index alias, null is returned
+     * @param jobResultsAliasedName The alias
+     * @return The job Id
+     */
+    public static String jobIdFromAlias(String jobResultsAliasedName) {
+        if (jobResultsAliasedName.length() < AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX.length()) {
+            return null;
+        }
+        return jobResultsAliasedName.substring(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX.length());
+    }
+
     /**
      * The name of the alias pointing to the write index for a job
      * @param jobId Job Id
      * @return The write alias
      */
     public static String resultsWriteAlias(String jobId) {
-        // ".write" rather than simply "write" to avoid the danger of clashing
-        // with the read alias of a job whose name begins with "write-"
-        return AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + ".write-" + jobId;
+        return AnomalyDetectorsIndexFields.RESULTS_INDEX_WRITE_PREFIX + jobId;
     }

     /**
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndexFields.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndexFields.java
index 504a4b756c979..2a0fff86ba494 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndexFields.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndexFields.java
@@ -11,6 +11,9 @@ public final class AnomalyDetectorsIndexFields {

     public static final String STATE_INDEX_PREFIX = ".ml-state";
     public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-";
+    // ".write" rather than simply "write" to avoid the danger of clashing
+    // with the read alias of a job whose name begins with "write-"
+    public static final String RESULTS_INDEX_WRITE_PREFIX = RESULTS_INDEX_PREFIX + ".write-";
     public static final String RESULTS_INDEX_DEFAULT = "shared";

     private AnomalyDetectorsIndexFields() {}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java
index e85acc159059e..06b2cfbad0105 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java
@@ -31,6 +31,8 @@
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexVersion;
+import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.indices.SystemIndexDescriptor;
 import org.elasticsearch.xcontent.XContentParserConfiguration;
 import org.elasticsearch.xcontent.json.JsonXContent;
@@ -64,27 +66,24 @@ public final class MlIndexAndAlias {
      */
     public static final String BWC_MAPPINGS_VERSION = "8.11.0";

-    private static final Logger logger = LogManager.getLogger(MlIndexAndAlias.class);
+    public static final String FIRST_INDEX_SIX_DIGIT_SUFFIX = "-000001";

-    static final Comparator INDEX_NAME_COMPARATOR = new Comparator<>() {
-
-        private final Predicate HAS_SIX_DIGIT_SUFFIX = Pattern.compile("\\d{6}").asMatchPredicate();
-
-        @Override
-        public int compare(String index1, String index2) {
-            String[] index1Parts = index1.split("-");
-            String index1Suffix = index1Parts[index1Parts.length - 1];
-            boolean index1HasSixDigitsSuffix = HAS_SIX_DIGIT_SUFFIX.test(index1Suffix);
-            String[] index2Parts = index2.split("-");
-            String index2Suffix =
index2Parts[index2Parts.length - 1]; - boolean index2HasSixDigitsSuffix = HAS_SIX_DIGIT_SUFFIX.test(index2Suffix); - if (index1HasSixDigitsSuffix && index2HasSixDigitsSuffix) { - return index1Suffix.compareTo(index2Suffix); - } else if (index1HasSixDigitsSuffix != index2HasSixDigitsSuffix) { - return Boolean.compare(index1HasSixDigitsSuffix, index2HasSixDigitsSuffix); - } else { - return index1.compareTo(index2); - } + private static final Logger logger = LogManager.getLogger(MlIndexAndAlias.class); + private static final Predicate HAS_SIX_DIGIT_SUFFIX = Pattern.compile("\\d{6}").asMatchPredicate(); + + static final Comparator INDEX_NAME_COMPARATOR = (index1, index2) -> { + String[] index1Parts = index1.split("-"); + String index1Suffix = index1Parts[index1Parts.length - 1]; + boolean index1HasSixDigitsSuffix = HAS_SIX_DIGIT_SUFFIX.test(index1Suffix); + String[] index2Parts = index2.split("-"); + String index2Suffix = index2Parts[index2Parts.length - 1]; + boolean index2HasSixDigitsSuffix = HAS_SIX_DIGIT_SUFFIX.test(index2Suffix); + if (index1HasSixDigitsSuffix && index2HasSixDigitsSuffix) { + return index1Suffix.compareTo(index2Suffix); + } else if (index1HasSixDigitsSuffix != index2HasSixDigitsSuffix) { + return Boolean.compare(index1HasSixDigitsSuffix, index2HasSixDigitsSuffix); + } else { + return index1.compareTo(index2); } }; @@ -126,7 +125,7 @@ public static void createIndexAndAliasIfNecessary( String legacyIndexWithoutSuffix = indexPatternPrefix; String indexPattern = indexPatternPrefix + "*"; // The initial index name must be suitable for rollover functionality. - String firstConcreteIndex = indexPatternPrefix + "-000001"; + String firstConcreteIndex = indexPatternPrefix + FIRST_INDEX_SIX_DIGIT_SUFFIX; String[] concreteIndexNames = resolver.concreteIndexNames(clusterState, IndicesOptions.lenientExpandHidden(), indexPattern); Optional indexPointedByCurrentWriteAlias = clusterState.getMetadata().hasAlias(alias) ? clusterState.getMetadata().getIndicesLookup().get(alias).getIndices().stream().map(Index::getName).findFirst() @@ -384,6 +383,10 @@ public static boolean hasIndexTemplate(ClusterState state, String templateName) return state.getMetadata().templatesV2().containsKey(templateName); } + public static boolean has6DigitSuffix(String indexName) { + return HAS_SIX_DIGIT_SUFFIX.test(indexName); + } + /** * Returns the latest index. Latest is the index with the highest * 6 digit suffix. @@ -395,4 +398,11 @@ public static String latestIndex(String[] concreteIndices) { ? 
concreteIndices[0] : Arrays.stream(concreteIndices).max(MlIndexAndAlias.INDEX_NAME_COMPARATOR).get(); } + + /** + * True if the version is read *and* write compatible not just read only compatible + */ + public static boolean indexIsReadWriteCompatibleInV9(IndexVersion version) { + return version.onOrAfter(IndexVersions.V_8_0_0); + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java index 8fc1e55ec0ac5..22ec4551af2a9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -365,8 +366,20 @@ public void testIndexNameComparator() { } public void testLatestIndex() { - var names = new String[] { "index-000001", "index-000002", "index-000003" }; - assertThat(MlIndexAndAlias.latestIndex(names), equalTo("index-000003")); + { + var names = new String[] { "index-000001", "index-000002", "index-000003" }; + assertThat(MlIndexAndAlias.latestIndex(names), equalTo("index-000003")); + } + { + var names = new String[] { "index", "index-000001", "index-000002" }; + assertThat(MlIndexAndAlias.latestIndex(names), equalTo("index-000002")); + } + } + + public void testIndexIsReadWriteCompatibleInV9() { + assertTrue(MlIndexAndAlias.indexIsReadWriteCompatibleInV9(IndexVersion.current())); + assertTrue(MlIndexAndAlias.indexIsReadWriteCompatibleInV9(IndexVersions.V_8_0_0)); + assertFalse(MlIndexAndAlias.indexIsReadWriteCompatibleInV9(IndexVersions.V_7_17_0)); } private void createIndexAndAliasIfNecessary(ClusterState clusterState) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 043a27b7cd147..01127c97ba90c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -1240,7 +1240,8 @@ public Collection createComponents(PluginServices services) { ), indexNameExpressionResolver, client - ) + ), + new MlAnomaliesIndexUpdate(indexNameExpressionResolver, client) ) ); clusterService.addListener(mlAutoUpdateService); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdate.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdate.java new file mode 100644 index 0000000000000..27bce6747b32f --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdate.java @@ -0,0 +1,235 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; +import org.elasticsearch.xpack.core.ml.utils.MlIndexAndAlias; +import org.elasticsearch.xpack.core.ml.utils.MlStrings; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; + +/** + * Rollover the various .ml-anomalies result indices + * updating the read and write aliases + */ +public class MlAnomaliesIndexUpdate implements MlAutoUpdateService.UpdateAction { + + private static final Logger logger = LogManager.getLogger(MlAnomaliesIndexUpdate.class); + + private final IndexNameExpressionResolver expressionResolver; + private final OriginSettingClient client; + + public MlAnomaliesIndexUpdate(IndexNameExpressionResolver expressionResolver, Client client) { + this.expressionResolver = expressionResolver; + this.client = new OriginSettingClient(client, ML_ORIGIN); + } + + @Override + public boolean isMinTransportVersionSupported(TransportVersion minTransportVersion) { + // Automatic rollover does not require any new features + // but wait for all nodes to be upgraded anyway + return minTransportVersion.onOrAfter(TransportVersions.ML_ROLLOVER_LEGACY_INDICES); + } + + @Override + public boolean isAbleToRun(ClusterState latestState) { + // Find the .ml-anomalies-shared and all custom results indices + String[] indices = expressionResolver.concreteIndexNames( + latestState, + IndicesOptions.lenientExpandOpenHidden(), + AnomalyDetectorsIndex.jobResultsIndexPattern() + ); + + for (String index : indices) { + IndexRoutingTable routingTable = latestState.getRoutingTable().index(index); + if (routingTable == null || routingTable.allPrimaryShardsActive() == false) { + return false; + } + } + return true; + } + + @Override + public String getName() { + return "ml_anomalies_index_update"; + } + + @Override + public void runUpdate(ClusterState latestState) { + List failures = new ArrayList<>(); + + // list all indices starting .ml-anomalies- + // this includes the shared index 
and all custom results indices
+        String[] indices = expressionResolver.concreteIndexNames(
+            latestState,
+            IndicesOptions.lenientExpandOpenHidden(),
+            AnomalyDetectorsIndex.jobResultsIndexPattern()
+        );
+
+        for (String index : indices) {
+            boolean isCompatibleIndexVersion = MlIndexAndAlias.indexIsReadWriteCompatibleInV9(
+                latestState.metadata().index(index).getCreationVersion()
+            );
+
+            if (isCompatibleIndexVersion) {
+                continue;
+            }
+
+            PlainActionFuture updated = new PlainActionFuture<>();
+            rollAndUpdateAliases(latestState, index, updated);
+            try {
+                updated.actionGet();
+            } catch (Exception ex) {
+                var message = "failed rolling over legacy ml anomalies index [" + index + "]";
+                logger.warn(message, ex);
+                if (ex instanceof ElasticsearchException elasticsearchException) {
+                    failures.add(new ElasticsearchStatusException(message, elasticsearchException.status(), elasticsearchException));
+                } else {
+                    failures.add(new ElasticsearchStatusException(message, RestStatus.REQUEST_TIMEOUT, ex));
+                }
+
+                break;
+            }
+        }
+
+        if (failures.isEmpty()) {
+            logger.info("legacy ml anomalies indices rolled over and aliases updated");
+            return;
+        }
+
+        var exception = new ElasticsearchStatusException("failed to roll over legacy ml anomalies", RestStatus.CONFLICT);
+        failures.forEach(exception::addSuppressed);
+        throw exception;
+    }
+
+    private void rollAndUpdateAliases(ClusterState clusterState, String index, ActionListener listener) {
+        // Create an alias specifically for rolling over.
+        // The ml-anomalies index has aliases for each job, any one
+        // of which could be used, but that means one alias is
+        // treated differently.
+        // Using a `.` in the alias name avoids any conflicts
+        // as AD job Ids cannot start with `.`
+        String rolloverAlias = index + ".rollover_alias";
+
+        // If the index does not end in a digit then rollover does not know
+        // what to name the new index, so it must be specified in the request.
+        // Otherwise leave null and rollover will calculate the new name
+        String newIndexName = MlIndexAndAlias.has6DigitSuffix(index) ?
null : index + MlIndexAndAlias.FIRST_INDEX_SIX_DIGIT_SUFFIX; + IndicesAliasesRequestBuilder aliasRequestBuilder = client.admin().indices().prepareAliases(); + + SubscribableListener.newForked( + l -> { createAliasForRollover(index, rolloverAlias, l.map(AcknowledgedResponse::isAcknowledged)); } + ).andThen((l, success) -> { + rollover(rolloverAlias, newIndexName, l); + }).andThen((l, newIndexNameResponse) -> { + addIndexAliasesRequests(aliasRequestBuilder, index, newIndexNameResponse, clusterState); + // Delete the new alias created for the rollover action + aliasRequestBuilder.removeAlias(newIndexNameResponse, rolloverAlias); + updateAliases(aliasRequestBuilder, l); + }).addListener(listener); + } + + private void rollover(String alias, @Nullable String newIndexName, ActionListener listener) { + client.admin().indices().rolloverIndex(new RolloverRequest(alias, newIndexName), listener.delegateFailure((l, response) -> { + l.onResponse(response.getNewIndex()); + })); + } + + private void createAliasForRollover(String indexName, String aliasName, ActionListener listener) { + logger.info("creating alias for rollover [{}]", aliasName); + client.admin() + .indices() + .prepareAliases() + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index(indexName).alias(aliasName).isHidden(true)) + .execute(listener); + } + + private void updateAliases(IndicesAliasesRequestBuilder request, ActionListener listener) { + request.execute(listener.delegateFailure((l, response) -> l.onResponse(Boolean.TRUE))); + } + + IndicesAliasesRequestBuilder addIndexAliasesRequests( + IndicesAliasesRequestBuilder aliasRequestBuilder, + String oldIndex, + String newIndex, + ClusterState clusterState + ) { + // Multiple jobs can share the same index each job + // has a read and write alias that needs updating + // after the rollover + var meta = clusterState.metadata().index(oldIndex); + assert meta != null; + if (meta == null) { + return aliasRequestBuilder; + } + + for (var alias : meta.getAliases().values()) { + if (isAnomaliesWriteAlias(alias.alias())) { + aliasRequestBuilder.addAliasAction( + IndicesAliasesRequest.AliasActions.add().index(newIndex).alias(alias.alias()).isHidden(true).writeIndex(true) + ); + aliasRequestBuilder.addAliasAction(IndicesAliasesRequest.AliasActions.remove().index(oldIndex).alias(alias.alias())); + } else if (isAnomaliesReadAlias(alias.alias())) { + String jobId = AnomalyDetectorsIndex.jobIdFromAlias(alias.alias()); + aliasRequestBuilder.addAliasAction( + IndicesAliasesRequest.AliasActions.add() + .index(newIndex) + .alias(alias.alias()) + .isHidden(true) + .filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)) + ); + } + } + + return aliasRequestBuilder; + } + + static boolean isAnomaliesWriteAlias(String aliasName) { + return aliasName.startsWith(AnomalyDetectorsIndexFields.RESULTS_INDEX_WRITE_PREFIX); + } + + static boolean isAnomaliesReadAlias(String aliasName) { + if (aliasName.startsWith(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX) == false) { + return false; + } + + // See {@link AnomalyDetectorsIndex#jobResultsAliasedName} + String jobIdPart = aliasName.substring(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX.length()); + // If this is a write alias it will start with a `.` character + // which is not a valid job id. 
+ return MlStrings.isValidId(jobIdPart); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java index 7dbafdc2676ba..c079e5dfde737 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexRollover.java @@ -117,7 +117,7 @@ public void runUpdate(ClusterState latestState) { } if (failures.isEmpty()) { - logger.info("ML legacy indies rolled over"); + logger.info("ML legacy indices rolled over"); return; } @@ -136,7 +136,10 @@ private void rolloverLegacyIndices(ClusterState clusterState, String indexPatter } String latestIndex = MlIndexAndAlias.latestIndex(concreteIndices); - boolean isCompatibleIndexVersion = isCompatibleIndexVersion(clusterState.metadata().index(latestIndex).getCreationVersion()); + // Indices created before 8.0 are read only in 9 + boolean isCompatibleIndexVersion = MlIndexAndAlias.indexIsReadWriteCompatibleInV9( + clusterState.metadata().index(latestIndex).getCreationVersion() + ); boolean hasAlias = clusterState.getMetadata().hasAlias(alias); if (isCompatibleIndexVersion && hasAlias) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdateTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdateTests.java new file mode 100644 index 0000000000000..b203d756c3214 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAnomaliesIndexUpdateTests.java @@ -0,0 +1,254 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; +import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; +import org.elasticsearch.action.admin.indices.rollover.RolloverAction; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ElasticsearchClient; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.AliasMetadata; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class MlAnomaliesIndexUpdateTests extends ESTestCase { + + public void testIsAnomaliesWriteAlias() { + assertTrue(MlAnomaliesIndexUpdate.isAnomaliesWriteAlias(AnomalyDetectorsIndex.resultsWriteAlias("foo"))); + assertFalse(MlAnomaliesIndexUpdate.isAnomaliesWriteAlias(AnomalyDetectorsIndex.jobResultsAliasedName("foo"))); + assertFalse(MlAnomaliesIndexUpdate.isAnomaliesWriteAlias("some-index")); + } + + public void testIsAnomaliesAlias() { + assertTrue(MlAnomaliesIndexUpdate.isAnomaliesReadAlias(AnomalyDetectorsIndex.jobResultsAliasedName("foo"))); + assertFalse(MlAnomaliesIndexUpdate.isAnomaliesReadAlias(AnomalyDetectorsIndex.resultsWriteAlias("foo"))); + assertFalse(MlAnomaliesIndexUpdate.isAnomaliesReadAlias("some-index")); + } + + public void testIsAbleToRun_IndicesDoNotExist() { + RoutingTable.Builder routingTable = RoutingTable.builder(); + var updater = new MlAnomaliesIndexUpdate(TestIndexNameExpressionResolver.newInstance(), mock(Client.class)); + + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.routingTable(routingTable.build()); + assertTrue(updater.isAbleToRun(csBuilder.build())); + } + + public void testIsAbleToRun_IndicesHaveNoRouting() { + IndexMetadata.Builder indexMetadata = IndexMetadata.builder(".ml-anomalies-shared"); + indexMetadata.settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, 
IndexVersion.current()) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_INDEX_UUID, "_uuid") + ); + + Metadata.Builder metadata = Metadata.builder(); + metadata.put(indexMetadata); + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.routingTable(RoutingTable.builder().build()); // no routing to index + csBuilder.metadata(metadata); + + var updater = new MlAnomaliesIndexUpdate(TestIndexNameExpressionResolver.newInstance(), mock(Client.class)); + + assertFalse(updater.isAbleToRun(csBuilder.build())); + } + + public void testBuildIndexAliasesRequest() { + var anomaliesIndex = ".ml-anomalies-sharedindex"; + var jobs = List.of("job1", "job2"); + IndexMetadata.Builder indexMetadata = createSharedResultsIndex(anomaliesIndex, IndexVersion.current(), jobs); + Metadata.Builder metadata = Metadata.builder(); + metadata.put(indexMetadata); + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.metadata(metadata); + + var updater = new MlAnomaliesIndexUpdate( + TestIndexNameExpressionResolver.newInstance(), + new OriginSettingClient(mock(Client.class), "doesn't matter") + ); + + IndicesAliasesRequestBuilder aliasRequestBuilder = new IndicesAliasesRequestBuilder(mock(ElasticsearchClient.class)); + + var newIndex = anomaliesIndex + "-000001"; + var request = updater.addIndexAliasesRequests(aliasRequestBuilder, anomaliesIndex, newIndex, csBuilder.build()); + var actions = request.request().getAliasActions(); + assertThat(actions, hasSize(6)); + + // The order in which the alias actions are created + // is not preserved so look for the item in the list + for (var job : jobs) { + var expected = new AliasActionMatcher( + AnomalyDetectorsIndex.resultsWriteAlias(job), + newIndex, + IndicesAliasesRequest.AliasActions.Type.ADD + ); + assertThat(actions.stream().filter(expected::matches).count(), equalTo(1L)); + + expected = new AliasActionMatcher( + AnomalyDetectorsIndex.resultsWriteAlias(job), + anomaliesIndex, + IndicesAliasesRequest.AliasActions.Type.REMOVE + ); + assertThat(actions.stream().filter(expected::matches).count(), equalTo(1L)); + + expected = new AliasActionMatcher( + AnomalyDetectorsIndex.jobResultsAliasedName(job), + newIndex, + IndicesAliasesRequest.AliasActions.Type.ADD + ); + assertThat(actions.stream().filter(expected::matches).count(), equalTo(1L)); + } + } + + public void testRunUpdate_UpToDateIndices() { + String indexName = ".ml-anomalies-sharedindex"; + var jobs = List.of("job1", "job2"); + IndexMetadata.Builder indexMetadata = createSharedResultsIndex(indexName, IndexVersion.current(), jobs); + + Metadata.Builder metadata = Metadata.builder(); + metadata.put(indexMetadata); + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.metadata(metadata); + + var client = mock(Client.class); + var updater = new MlAnomaliesIndexUpdate(TestIndexNameExpressionResolver.newInstance(), client); + updater.runUpdate(csBuilder.build()); + // everything up to date so no action for the client + verify(client).settings(); + verify(client).threadPool(); + verifyNoMoreInteractions(client); + } + + public void testRunUpdate_LegacyIndex() { + String indexName = ".ml-anomalies-sharedindex"; + var jobs = List.of("job1", "job2"); + IndexMetadata.Builder indexMetadata = createSharedResultsIndex(indexName, IndexVersions.V_7_17_0, jobs); + + Metadata.Builder metadata = Metadata.builder(); + 
metadata.put(indexMetadata); + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.metadata(metadata); + + var client = mockClientWithRolloverAndAlias(indexName); + var updater = new MlAnomaliesIndexUpdate(TestIndexNameExpressionResolver.newInstance(), client); + + updater.runUpdate(csBuilder.build()); + verify(client).settings(); + verify(client, times(7)).threadPool(); + verify(client, times(2)).execute(same(TransportIndicesAliasesAction.TYPE), any(), any()); // create rollover alias and update + verify(client).execute(same(RolloverAction.INSTANCE), any(), any()); // index rolled over + verifyNoMoreInteractions(client); + } + + private record AliasActionMatcher(String aliasName, String index, IndicesAliasesRequest.AliasActions.Type actionType) { + boolean matches(IndicesAliasesRequest.AliasActions aliasAction) { + return aliasAction.actionType() == actionType + && aliasAction.aliases()[0].equals(aliasName) + && aliasAction.indices()[0].equals(index); + } + } + + private IndexMetadata.Builder createSharedResultsIndex(String indexName, IndexVersion indexVersion, List jobs) { + IndexMetadata.Builder indexMetadata = IndexMetadata.builder(indexName); + indexMetadata.settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, indexVersion) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_INDEX_UUID, "_uuid") + ); + + for (var jobId : jobs) { + indexMetadata.putAlias(AliasMetadata.builder(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)).isHidden(true).build()); + indexMetadata.putAlias( + AliasMetadata.builder(AnomalyDetectorsIndex.resultsWriteAlias(jobId)).writeIndex(true).isHidden(true).build() + ); + } + + return indexMetadata; + } + + @SuppressWarnings("unchecked") + static Client mockClientWithRolloverAndAlias(String indexName) { + var client = mock(Client.class); + + var aliasRequestCount = new AtomicInteger(0); + + doAnswer(invocationOnMock -> { + ActionListener actionListener = (ActionListener) invocationOnMock.getArguments()[2]; + actionListener.onResponse(new RolloverResponse(indexName, indexName + "-new", Map.of(), false, true, true, true, true)); + return null; + }).when(client).execute(same(RolloverAction.INSTANCE), any(RolloverRequest.class), any(ActionListener.class)); + + doAnswer(invocationOnMock -> { + ActionListener actionListener = (ActionListener) invocationOnMock + .getArguments()[2]; + var request = (IndicesAliasesRequest) invocationOnMock.getArguments()[1]; + // Check the rollover alias is create and deleted + if (aliasRequestCount.getAndIncrement() == 0) { + var addAliasAction = new AliasActionMatcher( + indexName + ".rollover_alias", + indexName, + IndicesAliasesRequest.AliasActions.Type.ADD + ); + assertEquals(1L, request.getAliasActions().stream().filter(addAliasAction::matches).count()); + } else { + var removeAliasAction = new AliasActionMatcher( + indexName + ".rollover_alias", + indexName + "-new", + IndicesAliasesRequest.AliasActions.Type.REMOVE + ); + assertEquals(1L, request.getAliasActions().stream().filter(removeAliasAction::matches).count()); + } + + actionListener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); + + return null; + }).when(client).execute(same(TransportIndicesAliasesAction.TYPE), any(IndicesAliasesRequest.class), any(ActionListener.class)); + + var threadPool = mock(ThreadPool.class); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + 
when(client.threadPool()).thenReturn(threadPool); + + return client; + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlIndexRolloverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlIndexRolloverTests.java index aa59028a4cc0d..491b20f0a2d3e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlIndexRolloverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlIndexRolloverTests.java @@ -258,7 +258,7 @@ public void testIsCompatibleIndexVersion() { } @SuppressWarnings("unchecked") - private Client mockClientWithRolloverAndAlias() { + static Client mockClientWithRolloverAndAlias() { var client = mock(Client.class); doAnswer(invocationOnMock -> { From 8654d6ae14c166f41f736dedfd05ed8d1c1984d0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 29 Jan 2025 07:36:41 +1100 Subject: [PATCH 158/383] Mute org.elasticsearch.xpack.security.profile.ProfileIntegTests testProfileAPIsWhenIndexNotCreated #121096 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index fbf289cb2edbb..4c4c80ebc5a11 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -286,6 +286,9 @@ tests: - class: org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilterIT method: testBulkOperations {p0=true} issue: https://github.com/elastic/elasticsearch/issues/120969 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testProfileAPIsWhenIndexNotCreated + issue: https://github.com/elastic/elasticsearch/issues/121096 # Examples: # From 8d057d89d79a2b085082baf1bfd909de41a2f863 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Tue, 28 Jan 2025 12:42:48 -0800 Subject: [PATCH 159/383] Tweak copy_to handling in synthetic _source to account for nested objects (#120974) --- docs/changelog/120974.yaml | 6 ++ .../indices.create/20_synthetic_source.yml | 68 +++++++++++++++++++ .../index/mapper/DocumentParserContext.java | 37 +++++----- .../mapper/IgnoredSourceFieldMapper.java | 30 +------- 4 files changed, 95 insertions(+), 46 deletions(-) create mode 100644 docs/changelog/120974.yaml diff --git a/docs/changelog/120974.yaml b/docs/changelog/120974.yaml new file mode 100644 index 0000000000000..ed52eefd9f5f8 --- /dev/null +++ b/docs/changelog/120974.yaml @@ -0,0 +1,6 @@ +pr: 120974 +summary: Tweak `copy_to` handling in synthetic `_source` to account for nested objects +area: Mapping +type: bug +issues: + - 120831 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml index 096ccbce9a58b..d1c492caf9b48 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml @@ -1602,6 +1602,74 @@ synthetic_source with copy_to pointing inside object: hits.hits.2.fields: c.copy: [ "100", "hello", "zap" ] +--- +synthetic_source with copy_to inside nested object: + - do: + indices.create: + index: test + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + name: + type: keyword + my_values: + type: nested + properties: + k: + type: keyword + copy_to: my_values.copy + second_level: + type: nested + properties: + k2: + type: keyword + copy_to: 
my_values.copy + copy: + type: keyword + dummy: + type: keyword + + - do: + index: + index: test + id: 1 + refresh: true + body: + name: "A" + my_values: + k: "hello" + + - do: + index: + index: test + id: 2 + refresh: true + body: + name: "B" + my_values: + second_level: + k2: "hello" + + - do: + search: + index: test + sort: name + + - match: + hits.hits.0._source: + name: "A" + my_values: + k: "hello" + - match: + hits.hits.1._source: + name: "B" + my_values: + second_level: + k2: "hello" + --- synthetic_source with copy_to pointing to ambiguous field: - do: diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 51e4e9f4c1b5e..ba9e902fee5d9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -367,19 +367,6 @@ public final DocumentParserContext maybeCloneForArray(Mapper mapper) throws IOEx return this; } - /** - * Creates a sub-context from the current {@link DocumentParserContext} to indicate that the source for the sub-context has been - * recorded and avoid duplicate recording for parts of the sub-context. Applies to synthetic source only. - */ - public final DocumentParserContext cloneWithRecordedSource() throws IOException { - if (canAddIgnoredField()) { - DocumentParserContext subcontext = createChildContext(parent()); - subcontext.setRecordedSource(); // Avoids double-storing parts of the source for the same parser subtree. - return subcontext; - } - return this; - } - /** * Add the given {@code field} to the _field_names field * @@ -466,10 +453,6 @@ public boolean isCopyToDestinationField(String name) { return copyToFields.contains(name); } - public Set getCopyToFields() { - return copyToFields; - } - /** * Add a new mapper dynamically created while parsing. * @@ -706,6 +689,26 @@ public LuceneDocument doc() { * @param doc the document to target */ public final DocumentParserContext createCopyToContext(String copyToField, LuceneDocument doc) throws IOException { + /* + Mark field as containing copied data meaning it should not be present + in synthetic _source (to be consistent with stored _source). + Ignored source values take precedence over standard synthetic source implementation + so by adding the `XContentDataHelper.voidValue()` entry we disable the field in synthetic source. + Otherwise, it would be constructed f.e. from doc_values which leads to duplicate values + in copied field after reindexing. + */ + if (mappingLookup.isSourceSynthetic() && indexSettings().getSkipIgnoredSourceWrite() == false) { + ObjectMapper parent = root().findParentMapper(copyToField); + // There are scenarios when this is false: + // 1. all values of the field that is the source of copy_to are null + // 2. copy_to points at a field inside a disabled object + // 3. copy_to points at dynamic field which is not yet applied to mapping, we will process it properly after the dynamic update + if (parent != null) { + int offset = parent.isRoot() ? 
0 : parent.fullPath().length() + 1; + ignoredFieldValues.add(new IgnoredSourceFieldMapper.NameValue(copyToField, offset, XContentDataHelper.voidValue(), doc)); + } + } + ContentPath path = new ContentPath(); XContentParser parser = DotExpandingXContentParser.expandDots(new CopyToParser(copyToField, parser()), path); return new Wrapper(root(), this) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java index 5f553ac8d2252..d8d8200baac31 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java @@ -25,8 +25,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.Set; @@ -161,33 +159,7 @@ public void postParse(DocumentParserContext context) { return; } - Collection ignoredValuesToWrite = context.getIgnoredFieldValues(); - if (context.getCopyToFields().isEmpty() == false && indexSettings.getSkipIgnoredSourceWrite() == false) { - /* - Mark fields as containing copied data meaning they should not be present - in synthetic _source (to be consistent with stored _source). - Ignored source values take precedence over standard synthetic source implementation - so by adding the `XContentDataHelper.voidValue()` entry we disable the field in synthetic source. - Otherwise, it would be constructed f.e. from doc_values which leads to duplicate values - in copied field after reindexing. - */ - var mutableList = new ArrayList<>(ignoredValuesToWrite); - for (String copyToField : context.getCopyToFields()) { - ObjectMapper parent = context.parent().findParentMapper(copyToField); - if (parent == null) { - // There are scenarios when this can happen: - // 1. all values of the field that is the source of copy_to are null - // 2. copy_to points at a field inside a disabled object - // 3. copy_to points at dynamic field which is not yet applied to mapping, we will process it properly on re-parse. - continue; - } - int offset = parent.isRoot() ? 0 : parent.fullPath().length() + 1; - mutableList.add(new IgnoredSourceFieldMapper.NameValue(copyToField, offset, XContentDataHelper.voidValue(), context.doc())); - } - ignoredValuesToWrite = mutableList; - } - - for (NameValue nameValue : ignoredValuesToWrite) { + for (NameValue nameValue : context.getIgnoredFieldValues()) { nameValue.doc().add(new StoredField(NAME, encode(nameValue))); } } From f209c2bf23a39bd35859d91042fd13846a0a55e4 Mon Sep 17 00:00:00 2001 From: John Verwolf Date: Tue, 28 Jan 2025 12:57:22 -0800 Subject: [PATCH 160/383] Reduce Data Loss in System Indices Migration (#120168) This PR removes a potential cause of data loss when migrating system indices. It does this by changing the way we set a "write-block" on the system index to migrate - now using a dedicated transport request rather than a settings update. Furthermore, we no longer delete the write-block prior to deleting the index, as this was another source of potential data loss. Additionally, we now remove the block if the migration fails. 
--- docs/changelog/120168.yaml | 5 + .../AbstractFeatureMigrationIntegTest.java | 37 +++++++- .../migration/FeatureMigrationIT.java | 59 ++++++++++++ .../indices/alias/IndicesAliasesResponse.java | 18 ++++ .../upgrades/SystemIndexMigrator.java | 92 +++++++++++++------ 5 files changed, 180 insertions(+), 31 deletions(-) create mode 100644 docs/changelog/120168.yaml diff --git a/docs/changelog/120168.yaml b/docs/changelog/120168.yaml new file mode 100644 index 0000000000000..d4bb321895160 --- /dev/null +++ b/docs/changelog/120168.yaml @@ -0,0 +1,5 @@ +pr: 120168 +summary: Reduce Data Loss in System Indices Migration +area: Infra/Core +type: bug +issues: [] diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java index 860d63000f124..84e45024b69ff 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java @@ -9,14 +9,17 @@ package org.elasticsearch.migration; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; @@ -28,6 +31,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.AssociatedIndexDescriptor; import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.SystemIndexPlugin; @@ -50,6 +54,10 @@ import java.util.function.BiConsumer; import java.util.function.Function; +import static java.util.Collections.emptySet; +import static java.util.Collections.singletonList; +import static java.util.Collections.unmodifiableSet; +import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; @@ -255,12 +263,18 @@ protected void assertIndexHasCorrectProperties( assertThat(thisIndexStats.getTotal().getDocs().getCount(), is((long) INDEX_DOC_COUNT)); } - public static class TestPlugin extends Plugin implements SystemIndexPlugin { + public static class TestPlugin extends Plugin implements SystemIndexPlugin, ActionPlugin { public final AtomicReference>> preMigrationHook = new AtomicReference<>(); public final AtomicReference>> postMigrationHook = new AtomicReference<>(); + private final BlockingActionFilter blockingActionFilter; public TestPlugin() { + blockingActionFilter = new BlockingActionFilter(); + } + @Override + public List 
getActionFilters() { + return singletonList(blockingActionFilter); } @Override @@ -299,5 +313,26 @@ public void indicesMigrationComplete( postMigrationHook.get().accept(clusterService.state(), preUpgradeMetadata); listener.onResponse(true); } + + public static class BlockingActionFilter extends org.elasticsearch.action.support.ActionFilter.Simple { + private Set blockedActions = emptySet(); + + @Override + protected boolean apply(String action, ActionRequest request, ActionListener listener) { + if (blockedActions.contains(action)) { + throw new ElasticsearchException("force exception on [" + action + "]"); + } + return true; + } + + @Override + public int order() { + return 0; + } + + public void blockActions(String... actions) { + blockedActions = unmodifiableSet(newHashSet(actions)); + } + } } } diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java index cdf817a6b17b8..ee95ce5513820 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java @@ -17,11 +17,14 @@ import org.elasticsearch.action.admin.cluster.migration.PostFeatureUpgradeRequest; import org.elasticsearch.action.admin.cluster.migration.PostFeatureUpgradeResponse; import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.put.PutComponentTemplateAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -36,10 +39,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.migration.AbstractFeatureMigrationIntegTest.TestPlugin.BlockingActionFilter; import org.elasticsearch.painless.PainlessPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.upgrades.FeatureMigrationResults; import org.elasticsearch.upgrades.SingleFeatureMigrationResult; @@ -272,6 +277,60 @@ public void testMigrateIndexWithWriteBlock() throws Exception { }); } + @AwaitsFix(bugUrl = "ES-10666") // This test uncovered an existing issue + public void testIndexBlockIsRemovedWhenAliasRequestFails() throws Exception { + createSystemIndexForDescriptor(INTERNAL_UNMANAGED); + ensureGreen(); + + // Block the alias request to simulate a failure + InternalTestCluster internalTestCluster = internalCluster(); + ActionFilters actionFilters = internalTestCluster.getInstance(ActionFilters.class, internalTestCluster.getMasterName()); + BlockingActionFilter blockingActionFilter = null; + for (ActionFilter 
filter : actionFilters.filters()) { + if (filter instanceof BlockingActionFilter) { + blockingActionFilter = (BlockingActionFilter) filter; + break; + } + } + assertNotNull("BlockingActionFilter should exist", blockingActionFilter); + blockingActionFilter.blockActions(TransportIndicesAliasesAction.NAME); + + // Start the migration + client().execute(PostFeatureUpgradeAction.INSTANCE, new PostFeatureUpgradeRequest(TEST_REQUEST_TIMEOUT)).get(); + + // Wait till the migration fails + assertBusy(() -> { + GetFeatureUpgradeStatusResponse statusResp = client().execute( + GetFeatureUpgradeStatusAction.INSTANCE, + new GetFeatureUpgradeStatusRequest(TEST_REQUEST_TIMEOUT) + ).get(); + logger.info(Strings.toString(statusResp)); + assertThat(statusResp.getUpgradeStatus(), equalTo(GetFeatureUpgradeStatusResponse.UpgradeStatus.ERROR)); + }); + + // Get the settings to see if the write block was removed + var allsettings = client().admin().indices().prepareGetSettings(INTERNAL_UNMANAGED.getIndexPattern()).get().getIndexToSettings(); + var internalUnmanagedOldIndexSettings = allsettings.get(".int-unman-old"); + var writeBlock = internalUnmanagedOldIndexSettings.get(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey()); + assertThat("Write block on old index should be removed on migration ERROR status", writeBlock, equalTo("false")); + + // Unblock the alias request + blockingActionFilter.blockActions(); + + // Retry the migration + client().execute(PostFeatureUpgradeAction.INSTANCE, new PostFeatureUpgradeRequest(TEST_REQUEST_TIMEOUT)).get(); + + // Ensure that the migration is successful after the alias request is unblocked + assertBusy(() -> { + GetFeatureUpgradeStatusResponse statusResp = client().execute( + GetFeatureUpgradeStatusAction.INSTANCE, + new GetFeatureUpgradeStatusRequest(TEST_REQUEST_TIMEOUT) + ).get(); + logger.info(Strings.toString(statusResp)); + assertThat(statusResp.getUpgradeStatus(), equalTo(GetFeatureUpgradeStatusResponse.UpgradeStatus.NO_MIGRATION_NEEDED)); + }); + } + public void testMigrationWillRunAfterError() throws Exception { createSystemIndexForDescriptor(INTERNAL_MANAGED); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java index 69ab9f57d2be7..071e9b42752c0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java @@ -77,6 +77,17 @@ public boolean hasErrors() { return errors; } + /** + * Get a list of all errors from the response. If there are no errors, an empty list is returned. + */ + public List getErrors() { + if (errors == false) { + return List.of(); + } else { + return actionResults.stream().filter(a -> a.getError() != null).map(AliasActionResult::getError).toList(); + } + } + /** * Build a response from a list of action results. Sets the errors boolean based * on whether an of the individual results contain an error. @@ -165,6 +176,13 @@ public static AliasActionResult buildSuccess(List indices, AliasActions return new AliasActionResult(indices, action, null); } + /** + * The error result if the action failed, null if the action succeeded. + */ + public ElasticsearchException getError() { + return error; + } + private int getStatus() { return error == null ? 
200 : error.status().getStatus(); } diff --git a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java index 186618f3662fb..cdd466c567e8b 100644 --- a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java +++ b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java @@ -15,7 +15,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; +import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsClusterStateUpdateRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -32,7 +34,6 @@ import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.metadata.MetadataUpdateSettingsService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -59,6 +60,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction.NO_UPGRADE_REQUIRED_INDEX_VERSION; +import static org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock.WRITE; import static org.elasticsearch.cluster.metadata.IndexMetadata.State.CLOSE; import static org.elasticsearch.core.Strings.format; @@ -448,12 +450,33 @@ private void migrateSingleIndex(ClusterState clusterState, Consumer { + if (aliasesResponse.hasErrors()) { + var e = new ElasticsearchException("Aliases request had errors"); + for (var error : aliasesResponse.getErrors()) { + e.addSuppressed(error); + } + throw e; + } + logger.info( + "Successfully migrated old index [{}] to new index [{}] from feature [{}]", + oldIndexName, + migrationInfo.getNextIndexName(), + migrationInfo.getFeatureName() + ); + delegate2.onResponse(bulkByScrollResponse); + }, e -> { + logger.error( + () -> format( + "An error occurred while changing aliases and removing the old index [%s] from feature [%s]", + oldIndexName, + migrationInfo.getFeatureName() + ), + e + ); + removeReadOnlyBlockOnReindexFailure(oldIndex, delegate2, e); + })); } }, e -> { logger.error( @@ -511,10 +534,7 @@ private void createIndex(SystemIndexMigrationInfo migrationInfo, ActionListener< ); } - private CheckedBiConsumer, AcknowledgedResponse, Exception> setAliasAndRemoveOldIndex( - SystemIndexMigrationInfo migrationInfo, - BulkByScrollResponse bulkByScrollResponse - ) { + private void setAliasAndRemoveOldIndex(SystemIndexMigrationInfo migrationInfo, ActionListener listener) { final IndicesAliasesRequestBuilder aliasesRequest = migrationInfo.createClient(baseClient).admin().indices().prepareAliases(); aliasesRequest.removeIndex(migrationInfo.getCurrentIndexName()); aliasesRequest.addAlias(migrationInfo.getNextIndexName(), migrationInfo.getCurrentIndexName()); @@ -533,30 +553,42 @@ private CheckedBiConsumer, AcknowledgedResp ); }); - // Technically this callback might have a different 
cluster state, but it shouldn't matter - these indices shouldn't be changing - // while we're trying to migrate them. - return (listener, unsetReadOnlyResponse) -> aliasesRequest.execute( - listener.delegateFailureAndWrap((l, deleteIndexResponse) -> l.onResponse(bulkByScrollResponse)) - ); + aliasesRequest.execute(listener); } /** - * Makes the index readonly if it's not set as a readonly yet + * Sets the write block on the index to the given value. */ private void setWriteBlock(Index index, boolean readOnlyValue, ActionListener listener) { - final Settings readOnlySettings = Settings.builder().put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), readOnlyValue).build(); - - metadataUpdateSettingsService.updateSettings( - new UpdateSettingsClusterStateUpdateRequest( - MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT, - TimeValue.ZERO, - readOnlySettings, - UpdateSettingsClusterStateUpdateRequest.OnExisting.OVERWRITE, - UpdateSettingsClusterStateUpdateRequest.OnStaticSetting.REJECT, - index - ), - listener - ); + if (readOnlyValue) { + // Setting the Block with an AddIndexBlockRequest ensures all shards have accounted for the block and all + // in-flight writes are completed before returning. + baseClient.admin() + .indices() + .addBlock( + new AddIndexBlockRequest(WRITE, index.getName()).masterNodeTimeout(MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT), + listener.delegateFailureAndWrap((l, response) -> { + if (response.isAcknowledged() == false) { + throw new ElasticsearchException("Failed to acknowledge read-only block index request"); + } + l.onResponse(response); + }) + ); + } else { + // The only way to remove a Block is via a settings update. + final Settings readOnlySettings = Settings.builder().put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), false).build(); + metadataUpdateSettingsService.updateSettings( + new UpdateSettingsClusterStateUpdateRequest( + MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT, + TimeValue.ZERO, + readOnlySettings, + UpdateSettingsClusterStateUpdateRequest.OnExisting.OVERWRITE, + UpdateSettingsClusterStateUpdateRequest.OnStaticSetting.REJECT, + index + ), + listener + ); + } } private void reindex(SystemIndexMigrationInfo migrationInfo, ActionListener listener) { From 847b8ea2f4e648dde48dbdbfa0e55fde0fa0ae96 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 28 Jan 2025 21:16:01 +0000 Subject: [PATCH 161/383] [ML] Update Inference Update API documentation to use the correct PUT method (#121048) --- docs/changelog/121048.yaml | 5 +++++ .../main/resources/rest-api-spec/api/inference.update.json | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/121048.yaml diff --git a/docs/changelog/121048.yaml b/docs/changelog/121048.yaml new file mode 100644 index 0000000000000..e1a9d665315ff --- /dev/null +++ b/docs/changelog/121048.yaml @@ -0,0 +1,5 @@ +pr: 121048 +summary: Updating Inference Update API documentation to have the correct PUT method +area: Machine Learning +type: bug +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.update.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.update.json index 6c458ce080aa7..133354e3ec5be 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.update.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.update.json @@ -14,7 +14,7 @@ "paths": [ { "path": "/_inference/{inference_id}/_update", - "methods": ["POST"], + "methods": ["PUT"], "parts": { "inference_id": { "type": "string", @@ -24,7 
+24,7 @@ }, { "path": "/_inference/{task_type}/{inference_id}/_update", - "methods": ["POST"], + "methods": ["PUT"], "parts": { "task_type": { "type": "string", From 1277d3a7ac8adb20af37f398ac20d45f3470f1c5 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 28 Jan 2025 21:18:07 +0000 Subject: [PATCH 162/383] [ML] Add _stream path for chat_completions task (#121006) As a replacement for the _unified route --- .../inference/InferenceBaseRestTest.java | 3 +- .../xpack/inference/rest/Paths.java | 1 + .../rest/RestStreamInferenceAction.java | 33 +++++++++++++++++++ 3 files changed, 36 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 546eab471a077..bb3f3e9b46c4d 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -355,7 +355,8 @@ protected Deque unifiedCompletionInferOnMockService( List input, @Nullable Consumer responseConsumerCallback ) throws Exception { - var endpoint = Strings.format("_inference/%s/%s/_unified", taskType, modelId); + var route = randomBoolean() ? "_stream" : "_unified"; // TODO remove unified route + var endpoint = Strings.format("_inference/%s/%s/%s", taskType, modelId, route); return callAsyncUnified(endpoint, input, "user", responseConsumerCallback); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java index 57c06df8d8dfe..7f43676dfb5f0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java @@ -31,6 +31,7 @@ public final class Paths { + INFERENCE_ID + "}/_stream"; + // TODO remove the _unified path public static final String UNIFIED_SUFFIX = "_unified"; static final String UNIFIED_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/" + UNIFIED_SUFFIX; static final String UNIFIED_TASK_TYPE_INFERENCE_ID_PATH = "_inference/{" diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java index 881af435b29b6..518056365d88b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java @@ -9,12 +9,17 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; 
+import java.io.IOException; import java.util.List; import java.util.Objects; @@ -50,4 +55,32 @@ protected InferenceAction.Request prepareInferenceRequest(InferenceAction.Reques protected ActionListener listener(RestChannel channel) { return new ServerSentEventsRestActionListener(channel, threadPool); } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + var params = parseParams(restRequest); + var inferTimeout = parseTimeout(restRequest); + + if (params.taskType() == TaskType.CHAT_COMPLETION) { + UnifiedCompletionAction.Request request; + try (var parser = restRequest.contentParser()) { + request = UnifiedCompletionAction.Request.parseRequest(params.inferenceEntityId(), params.taskType(), inferTimeout, parser); + } + + return channel -> client.execute( + UnifiedCompletionAction.INSTANCE, + request, + new ServerSentEventsRestActionListener(channel, threadPool) + ); + } else { + InferenceAction.Request.Builder requestBuilder; + try (var parser = restRequest.contentParser()) { + requestBuilder = InferenceAction.Request.parseRequest(params.inferenceEntityId(), params.taskType(), parser); + } + + requestBuilder.setInferenceTimeout(inferTimeout); + var request = prepareInferenceRequest(requestBuilder); + return channel -> client.execute(InferenceAction.INSTANCE, request, listener(channel)); + } + } } From e1467c216ddefe476555fd4520d7013594bb13c2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 29 Jan 2025 08:19:06 +1100 Subject: [PATCH 163/383] Mute org.elasticsearch.xpack.security.profile.ProfileIntegTests testGetProfiles #121101 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 4c4c80ebc5a11..0d62ef0530c27 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -289,6 +289,9 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testProfileAPIsWhenIndexNotCreated issue: https://github.com/elastic/elasticsearch/issues/121096 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testGetProfiles + issue: https://github.com/elastic/elasticsearch/issues/121101 # Examples: # From 1a21acedc8f03b40ee62f62879a74c8f5a15ca02 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 29 Jan 2025 08:34:44 +1100 Subject: [PATCH 164/383] Mute org.elasticsearch.xpack.security.authc.service.ServiceAccountSingleNodeTests testAuthenticateWithServiceFileToken #120988 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 0d62ef0530c27..4908ad5c37e56 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -292,6 +292,9 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testGetProfiles issue: https://github.com/elastic/elasticsearch/issues/121101 +- class: org.elasticsearch.xpack.security.authc.service.ServiceAccountSingleNodeTests + method: testAuthenticateWithServiceFileToken + issue: https://github.com/elastic/elasticsearch/issues/120988 # Examples: # From a0f00f23b49c65d0a39edb28c65efaf29554fb32 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 28 Jan 2025 13:55:53 -0800 Subject: [PATCH 165/383] Relax translog assertion for inference fields (#121092) The inference fields are synthesized from doc_values, regardless of whether the synthetic source is enabled. 
This change relaxes the translog assertions, allowing inference fields in the same way as we did with the synthetic source. Relates #120045 Closes #120979 Closes #121007 Closes #120980 Closes #120975 --- muted-tests.yml | 8 ---- .../index/engine/TranslogDirectoryReader.java | 2 +- .../engine/TranslogOperationAsserter.java | 45 +++++++++++++------ 3 files changed, 32 insertions(+), 23 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 4908ad5c37e56..49a74350ac11e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -255,11 +255,6 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test070BindMountCustomPathConfAndJvmOptions issue: https://github.com/elastic/elasticsearch/issues/120910 -- class: org.elasticsearch.xpack.esql.qa.multi_node.RestEsqlIT - issue: https://github.com/elastic/elasticsearch/issues/120948 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level} - issue: https://github.com/elastic/elasticsearch/issues/120950 - class: org.elasticsearch.packaging.test.DockerTests method: test071BindMountCustomPathWithDifferentUID issue: https://github.com/elastic/elasticsearch/issues/120918 @@ -283,9 +278,6 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testSuggestProfilesWithName issue: https://github.com/elastic/elasticsearch/issues/121022 -- class: org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilterIT - method: testBulkOperations {p0=true} - issue: https://github.com/elastic/elasticsearch/issues/120969 - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testProfileAPIsWhenIndexNotCreated issue: https://github.com/elastic/elasticsearch/issues/121096 diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index ac5bf31c2b730..9537fd0703149 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -97,7 +97,7 @@ static DirectoryReader create( // When using synthetic source, the translog operation must always be reindexed into an in-memory Lucene to ensure consistent // output for realtime-get operations. However, this can degrade the performance of realtime-get and update operations. // If slight inconsistencies in realtime-get operations are acceptable, the translog operation can be reindexed lazily. 
- if (mappingLookup.isSourceSynthetic()) { + if (mappingLookup.isSourceSynthetic() || mappingLookup.inferenceFields().isEmpty() == false) { onSegmentCreated.run(); leafReader = createInMemoryReader(shardId, engineConfig, directory, documentParser, mappingLookup, false, operation); } else { diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java index 4170d06c4d6ea..90eaea78b3893 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java @@ -38,7 +38,8 @@ public boolean assertSameIndexOperation(Translog.Index o1, Translog.Index o2) th if (super.assertSameIndexOperation(o1, o2)) { return true; } - if (engineConfig.getIndexSettings().isRecoverySourceSyntheticEnabled()) { + if (engineConfig.getIndexSettings().isRecoverySourceSyntheticEnabled() + || engineConfig.getMapperService().mappingLookup().inferenceFields().isEmpty() == false) { return super.assertSameIndexOperation(synthesizeSource(engineConfig, o1), o2) || super.assertSameIndexOperation(o1, synthesizeSource(engineConfig, o2)); } @@ -60,19 +61,7 @@ static Translog.Index synthesizeSource(EngineConfig engineConfig, Translog.Index TrivialQueryCachingPolicy.NEVER, () -> {} ); - try ( - LuceneSyntheticSourceChangesSnapshot snapshot = new LuceneSyntheticSourceChangesSnapshot( - engineConfig.getMapperService(), - searcher, - LuceneSyntheticSourceChangesSnapshot.DEFAULT_BATCH_SIZE, - Integer.MAX_VALUE, - op.seqNo(), - op.seqNo(), - true, - false, - engineConfig.getIndexSettings().getIndexVersionCreated() - ) - ) { + try (var snapshot = newSnapshot(engineConfig, op, searcher);) { final Translog.Operation normalized = snapshot.next(); assert normalized != null : "expected one operation; got zero"; return (Translog.Index) normalized; @@ -80,6 +69,34 @@ static Translog.Index synthesizeSource(EngineConfig engineConfig, Translog.Index } } + static Translog.Snapshot newSnapshot(EngineConfig engineConfig, Translog.Index op, Engine.Searcher searcher) throws IOException { + if (engineConfig.getIndexSettings().isRecoverySourceSyntheticEnabled()) { + return new LuceneSyntheticSourceChangesSnapshot( + engineConfig.getMapperService(), + searcher, + LuceneSyntheticSourceChangesSnapshot.DEFAULT_BATCH_SIZE, + Integer.MAX_VALUE, + op.seqNo(), + op.seqNo(), + true, + false, + engineConfig.getIndexSettings().getIndexVersionCreated() + ); + } else { + return new LuceneChangesSnapshot( + engineConfig.getMapperService(), + searcher, + LuceneSyntheticSourceChangesSnapshot.DEFAULT_BATCH_SIZE, + op.seqNo(), + op.seqNo(), + true, + false, + false, + engineConfig.getIndexSettings().getIndexVersionCreated() + ); + } + } + public boolean assertSameIndexOperation(Translog.Index o1, Translog.Index o2) throws IOException { return Translog.Index.equalsWithoutAutoGeneratedTimestamp(o1, o2); } From d27a8e0b8934b0a5dbbefc92713c041e3facc365 Mon Sep 17 00:00:00 2001 From: Pawan Kartik Date: Tue, 28 Jan 2025 21:59:08 +0000 Subject: [PATCH 166/383] Add a user-configurable timeout parameter to the `_resolve/cluster` API (#120542) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Previously, should a remote cluster be unresponsive, _resolve/cluster would wait until Netty stepped in and terminated the connection. We initially responded to this issue by switching the disconnect strategy. 
However, this was problematic because it defeated the whole purpose of this API call—re-establish connection if and when possible. We now attempt to respond to it by adding a user-configurable GET parameter. This PR also reverses the problematic disconnect strategy. Example: ``` GET _resolve/cluster/*:*?timeout=5s ``` --- docs/changelog/120542.yaml | 6 ++ .../cluster/ResolveClusterTimeoutIT.java | 91 +++++++++++++++++++ .../org/elasticsearch/TransportVersions.java | 1 + .../resolve/ResolveClusterActionRequest.java | 22 ++++- .../TransportResolveClusterAction.java | 43 +++++++-- .../indices/RestResolveClusterAction.java | 7 ++ 6 files changed, 161 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/120542.yaml create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ResolveClusterTimeoutIT.java diff --git a/docs/changelog/120542.yaml b/docs/changelog/120542.yaml new file mode 100644 index 0000000000000..9e91146fc3366 --- /dev/null +++ b/docs/changelog/120542.yaml @@ -0,0 +1,6 @@ +pr: 120542 +summary: "Feat: add a user-configurable timeout parameter to the `_resolve/cluster`\ + \ API" +area: Search +type: enhancement +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ResolveClusterTimeoutIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ResolveClusterTimeoutIT.java new file mode 100644 index 0000000000000..7236702af12e2 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ResolveClusterTimeoutIT.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.indices.cluster; + +import org.elasticsearch.action.admin.indices.resolve.ResolveClusterActionRequest; +import org.elasticsearch.action.admin.indices.resolve.ResolveClusterActionResponse; +import org.elasticsearch.action.admin.indices.resolve.ResolveClusterInfo; +import org.elasticsearch.action.admin.indices.resolve.TransportResolveClusterAction; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.AbstractMultiClustersTestCase; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.transport.TransportService; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public class ResolveClusterTimeoutIT extends AbstractMultiClustersTestCase { + private static final String REMOTE_CLUSTER_1 = "cluster-a"; + + @Override + protected List remoteClusterAlias() { + return List.of(REMOTE_CLUSTER_1); + } + + public void testTimeoutParameter() { + long maxTimeoutInMillis = 500; + + // First part: we query _resolve/cluster without stalling a remote. 
+ ResolveClusterActionRequest resolveClusterActionRequest; + if (randomBoolean()) { + resolveClusterActionRequest = new ResolveClusterActionRequest(new String[0], IndicesOptions.DEFAULT, true, true); + } else { + resolveClusterActionRequest = new ResolveClusterActionRequest(new String[] { "*:*" }); + } + + // We set a timeout but since we don't stall any cluster, we should always get back response just fine before the timeout. + resolveClusterActionRequest.setTimeout(TimeValue.timeValueSeconds(10)); + ResolveClusterActionResponse clusterActionResponse = safeGet( + client().execute(TransportResolveClusterAction.TYPE, resolveClusterActionRequest) + ); + Map clusterInfo = clusterActionResponse.getResolveClusterInfo(); + + // Remote is connected and error message is null. + assertThat(clusterInfo.get(REMOTE_CLUSTER_1).isConnected(), equalTo(true)); + assertThat(clusterInfo.get(REMOTE_CLUSTER_1).getError(), is(nullValue())); + + // Second part: now we stall the remote and utilise the timeout feature. + CountDownLatch latch = new CountDownLatch(1); + + // Add an override so that the remote cluster receives the TransportResolveClusterAction request but stalls. + for (var nodes : cluster(REMOTE_CLUSTER_1).getNodeNames()) { + ((MockTransportService) cluster(REMOTE_CLUSTER_1).getInstance(TransportService.class, nodes)).addRequestHandlingBehavior( + TransportResolveClusterAction.REMOTE_TYPE.name(), + (requestHandler, transportRequest, transportChannel, transportTask) -> { + // Wait until the TransportResolveRequestAction times out following which the latch is released. + latch.await(); + requestHandler.messageReceived(transportRequest, transportChannel, transportTask); + } + ); + } + + long randomlyChosenTimeout = randomLongBetween(100, maxTimeoutInMillis); + // We now randomly choose a timeout which is guaranteed to hit since the remote is stalled. + resolveClusterActionRequest.setTimeout(TimeValue.timeValueMillis(randomlyChosenTimeout)); + + clusterActionResponse = safeGet(client().execute(TransportResolveClusterAction.TYPE, resolveClusterActionRequest)); + latch.countDown(); + + clusterInfo = clusterActionResponse.getResolveClusterInfo(); + + // Ensure that the request timed out and that the remote is marked as not connected. + assertThat(clusterInfo.get(REMOTE_CLUSTER_1).isConnected(), equalTo(false)); + assertThat( + clusterInfo.get(REMOTE_CLUSTER_1).getError(), + equalTo("Request timed out before receiving a response from the remote cluster") + ); + } +} diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 14078fad9e20d..94e018535908c 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -169,6 +169,7 @@ static TransportVersion def(int id) { public static final TransportVersion INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR = def(8_835_00_0); public static final TransportVersion RESOURCE_DEPRECATION_CHECKS = def(8_836_00_0); public static final TransportVersion LINEAR_RETRIEVER_SUPPORT = def(8_837_00_0); + public static final TransportVersion TIMEOUT_GET_PARAM_FOR_RESOLVE_CLUSTER = def(8_838_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java index 0fcc8e1115209..3f9e0cbf299e6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; @@ -51,6 +52,7 @@ public class ResolveClusterActionRequest extends ActionRequest implements Indice */ private boolean localIndicesRequested = false; private IndicesOptions indicesOptions; + private TimeValue timeout; // true if the user did not provide any index expression - they only want cluster level info, not index matching private final boolean clusterInfoOnly; @@ -89,6 +91,9 @@ public ResolveClusterActionRequest(StreamInput in) throws IOException { this.clusterInfoOnly = false; this.isQueryingCluster = false; } + if (in.getTransportVersion().onOrAfter(TransportVersions.TIMEOUT_GET_PARAM_FOR_RESOLVE_CLUSTER)) { + this.timeout = in.readOptionalTimeValue(); + } } @Override @@ -103,6 +108,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(clusterInfoOnly); out.writeBoolean(isQueryingCluster); } + if (out.getTransportVersion().onOrAfter(TransportVersions.TIMEOUT_GET_PARAM_FOR_RESOLVE_CLUSTER)) { + out.writeOptionalTimeValue(timeout); + } } static String createVersionErrorMessage(TransportVersion versionFound) { @@ -124,12 +132,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ResolveClusterActionRequest request = (ResolveClusterActionRequest) o; - return Arrays.equals(names, request.names) && indicesOptions.equals(request.indicesOptions()); + return Arrays.equals(names, request.names) + && indicesOptions.equals(request.indicesOptions()) + && Objects.equals(timeout, request.timeout); } @Override public int hashCode() { - int result = Objects.hash(indicesOptions); + int result = Objects.hash(indicesOptions, timeout); result = 31 * result + Arrays.hashCode(names); return result; } @@ -139,6 +149,10 @@ public String[] indices() { return names; } + public TimeValue getTimeout() { + return timeout; + } + public boolean clusterInfoOnly() { return clusterInfoOnly; } @@ -202,6 +216,10 @@ boolean localIndicesPresent(String[] indices) { return false; } + public void setTimeout(TimeValue timeout) { + this.timeout = timeout; + } + @Override public String toString() { return "ResolveClusterActionRequest{" diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterAction.java index 9d82b1edff0a9..32ab8bf1220ff 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.ListenerTimeouts; import org.elasticsearch.action.support.RefCountingRunnable; import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.cluster.ClusterState; @@ -29,12 +30,14 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Strings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.search.SearchService; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.TransportService; @@ -60,12 +63,14 @@ public class TransportResolveClusterAction extends HandledTransportAction resultsListener; + TimeValue timeout = request.getTimeout(); + // Wrap the listener with a timeout since a timeout was specified. + if (timeout != null) { + var releaserListener = ActionListener.releaseAfter(remoteListener, refs.acquire()); + resultsListener = ListenerTimeouts.wrapWithTimeout( + threadPool, + timeout, + searchCoordinationExecutor, + releaserListener, + ignored -> releaserListener.onFailure(new ConnectTransportException(null, REMOTE_CONNECTION_TIMEOUT_ERROR)) + ); + } else { + resultsListener = ActionListener.releaseAfter(remoteListener, refs.acquire()); + } + + remoteClusterClient.execute(TransportResolveClusterAction.REMOTE_TYPE, remoteRequest, resultsListener); } } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResolveClusterAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResolveClusterAction.java index 886c4da47d974..3dbbde65c6428 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResolveClusterAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResolveClusterAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestCancellableNodeClient; @@ -71,6 +72,12 @@ protected BaseRestHandler.RestChannelConsumer prepareRequest(RestRequest request clusterInfoOnly, true ); + + String timeout = request.param("timeout"); + if (timeout != null) { + resolveRequest.setTimeout(TimeValue.parseTimeValue(timeout, "timeout")); + } + return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).admin() .indices() .execute(TransportResolveClusterAction.TYPE, resolveRequest, new RestToXContentListener<>(channel)); From 4127f5c92f1ba067fd7c6f69c8127b31258766bb Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 28 Jan 2025 17:08:13 -0500 Subject: [PATCH 167/383] Extend geoip FullClusterRestartIT (#121085) --- .../qa/full-cluster-restart/build.gradle | 2 - .../ingest/geoip/FullClusterRestartIT.java | 85 ++++++++++++++++--- 2 files changed, 75 insertions(+), 12 deletions(-) diff --git 
a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle index e53e0e080cce6..71f95a990c6c8 100644 --- a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle +++ b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle @@ -7,8 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java b/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java index 1dfcb524f46a0..49c4aaea0a728 100644 --- a/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java +++ b/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java @@ -14,6 +14,12 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.WarningsHandler; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Nullable; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; @@ -25,28 +31,46 @@ import org.junit.rules.TestRule; import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Base64; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; -import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.is; public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCase { private static final boolean useFixture = Boolean.getBoolean("geoip_use_service") == false; - private static GeoIpHttpFixture fixture = new GeoIpHttpFixture(useFixture); + private static final GeoIpHttpFixture fixture = new GeoIpHttpFixture(useFixture); - private static ElasticsearchCluster cluster = ElasticsearchCluster.local() + // e.g. use ./gradlew -Dtests.jvm.argline="-Dgeoip_test_with_security=false" ":modules:ingest-geoip:qa:full-cluster-restart:check" + // to set this to false, if you so desire + private static final boolean useSecurity = Boolean.parseBoolean(System.getProperty("geoip_test_with_security", "true")); + + private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) .version(getOldClusterTestVersion()) .nodes(2) .setting("ingest.geoip.downloader.endpoint", () -> fixture.getAddress(), s -> useFixture) - .setting("xpack.security.enabled", "false") + .setting("xpack.security.enabled", useSecurity ? 
"true" : "false") .feature(FeatureFlag.TIME_SERIES_MODE) .build(); + @Override + protected Settings restClientSettings() { + Settings settings = super.restClientSettings(); + if (useSecurity) { + String token = "Basic " + Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8)); + settings = Settings.builder().put(settings).put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + return settings; + } + @ClassRule public static TestRule ruleChain = RuleChain.outerRule(fixture).around(cluster); @@ -60,6 +84,9 @@ protected ElasticsearchCluster getUpgradeCluster() { } public void testGeoIpSystemFeaturesMigration() throws Exception { + final List maybeSecurityIndex = useSecurity ? List.of(".security-7") : List.of(); + final List maybeSecurityIndexReindexed = useSecurity ? List.of(".security-7-reindexed-for-10") : List.of(); + if (isRunningAgainstOldCluster()) { Request enableDownloader = new Request("PUT", "/_cluster/settings"); enableDownloader.setJsonEntity(""" @@ -86,15 +113,28 @@ public void testGeoIpSystemFeaturesMigration() throws Exception { assertBusy(() -> testDatabasesLoaded(), 30, TimeUnit.SECONDS); // the geoip index should be created - assertBusy(() -> testCatIndices(".geoip_databases")); + assertBusy(() -> testCatIndices(List.of(".geoip_databases"), maybeSecurityIndex)); assertBusy(() -> testIndexGeoDoc()); + + // before the upgrade, Kibana should work + assertBusy(() -> testGetStarAsKibana(List.of("my-index-00001"), maybeSecurityIndex)); } else { + // after the upgrade, but before the migration, Kibana should work + assertBusy(() -> testGetStarAsKibana(List.of("my-index-00001"), maybeSecurityIndex)); + + // migrate the system features and give the cluster a moment to settle Request migrateSystemFeatures = new Request("POST", "/_migration/system_features"); assertOK(client().performRequest(migrateSystemFeatures)); + ensureHealth(request -> request.addParameter("wait_for_status", "yellow")); - assertBusy(() -> testCatIndices(".geoip_databases-reindexed-for-10", "my-index-00001")); + assertBusy(() -> testCatIndices(List.of(".geoip_databases-reindexed-for-10", "my-index-00001"), maybeSecurityIndexReindexed)); assertBusy(() -> testIndexGeoDoc()); + // after the migration, Kibana should work + if (useSecurity == false) { // BUT IT DOESN'T if security is enabled + assertBusy(() -> testGetStarAsKibana(List.of("my-index-00001"), maybeSecurityIndexReindexed)); + } + Request disableDownloader = new Request("PUT", "/_cluster/settings"); disableDownloader.setJsonEntity(""" {"persistent": {"ingest.geoip.downloader.enabled": false}} @@ -102,7 +142,7 @@ public void testGeoIpSystemFeaturesMigration() throws Exception { assertOK(client().performRequest(disableDownloader)); // the geoip index should be deleted - assertBusy(() -> testCatIndices("my-index-00001")); + assertBusy(() -> testCatIndices(List.of("my-index-00001"), maybeSecurityIndexReindexed)); Request enableDownloader = new Request("PUT", "/_cluster/settings"); enableDownloader.setJsonEntity(""" @@ -114,7 +154,7 @@ public void testGeoIpSystemFeaturesMigration() throws Exception { assertBusy(() -> testDatabasesLoaded(), 30, TimeUnit.SECONDS); // the geoip index should be recreated - assertBusy(() -> testCatIndices(".geoip_databases", "my-index-00001")); + assertBusy(() -> testCatIndices(List.of(".geoip_databases", "my-index-00001"), maybeSecurityIndexReindexed)); assertBusy(() -> testIndexGeoDoc()); } } @@ -146,11 +186,17 @@ private void testDatabasesLoaded() throws IOException { } } - 
private void testCatIndices(String... indexNames) throws IOException { + private void testCatIndices(List indexNames, @Nullable List additionalIndexNames) throws IOException { Request catIndices = new Request("GET", "_cat/indices/*?s=index&h=index&expand_wildcards=all"); String response = EntityUtils.toString(client().performRequest(catIndices).getEntity()); List indices = List.of(response.trim().split("\\s+")); - assertThat(indices, contains(indexNames)); + + if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) { + indexNames = new ArrayList<>(indexNames); // recopy into a mutable list + indexNames.addAll(additionalIndexNames); + } + + assertThat(new HashSet<>(indices), is(new HashSet<>(indexNames))); } private void testIndexGeoDoc() throws IOException { @@ -165,4 +211,23 @@ private void testIndexGeoDoc() throws IOException { assertNull(doc.evaluate("_source.tags")); assertEquals("Sweden", doc.evaluate("_source.geo.country_name")); } + + private void testGetStarAsKibana(List indexNames, @Nullable List additionalIndexNames) throws IOException { + Request getStar = new Request("GET", "*?expand_wildcards=all"); + getStar.setOptions( + RequestOptions.DEFAULT.toBuilder() + .addHeader("X-elastic-product-origin", "kibana") + .setWarningsHandler(WarningsHandler.PERMISSIVE) // we don't care about warnings, just errors + ); + Response response = client().performRequest(getStar); + assertOK(response); + + if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) { + indexNames = new ArrayList<>(indexNames); // recopy into a mutable list + indexNames.addAll(additionalIndexNames); + } + + Map map = responseAsMap(response); + assertThat(map.keySet(), is(new HashSet<>(indexNames))); + } } From 54317f4d1708a5aac755c163d456e92f69c7e744 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 29 Jan 2025 09:18:10 +1100 Subject: [PATCH 168/383] Mute org.elasticsearch.xpack.security.profile.ProfileIntegTests testUpdateProfileData #121108 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 49a74350ac11e..ec828772c45b0 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -287,6 +287,9 @@ tests: - class: org.elasticsearch.xpack.security.authc.service.ServiceAccountSingleNodeTests method: testAuthenticateWithServiceFileToken issue: https://github.com/elastic/elasticsearch/issues/120988 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testUpdateProfileData + issue: https://github.com/elastic/elasticsearch/issues/121108 # Examples: # From ed22d4e1160b8a2e66116e553e56a51c1cc1f928 Mon Sep 17 00:00:00 2001 From: Adam Demjen Date: Tue, 28 Jan 2025 17:55:44 -0500 Subject: [PATCH 169/383] [Inference API] Pass model ID in sparse model inference request body (#120981) * Pass model ID in Inference Service sparse embedding request * Adapt tests * Fix after rebasing * Fix after rebasing * [CI] Auto commit changes from spotless --------- Co-authored-by: elasticsearchmachine --- ...ferenceServiceSparseEmbeddingsRequest.java | 6 ++++- ...eServiceSparseEmbeddingsRequestEntity.java | 11 +++++--- ...InferenceServiceSparseEmbeddingsModel.java | 27 +------------------ ...erviceSparseEmbeddingsServiceSettings.java | 5 ---- ...ticInferenceServiceActionCreatorTests.java | 14 +++++----- ...iceSparseEmbeddingsRequestEntityTests.java | 22 ++++++++++++--- ...ceServiceSparseEmbeddingsRequestTests.java | 22 +++++++++------ 
...enceServiceSparseEmbeddingsModelTests.java | 9 +++---- ...eSparseEmbeddingsServiceSettingsTests.java | 18 +------------ .../elastic/ElasticInferenceServiceTests.java | 12 ++++----- 10 files changed, 64 insertions(+), 82 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequest.java index 6acaf74a33338..18fc7d9f8c32d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequest.java @@ -55,7 +55,11 @@ public HttpRequest createHttpRequest() { var httpPost = new HttpPost(uri); var usageContext = inputTypeToUsageContext(inputType); var requestEntity = Strings.toString( - new ElasticInferenceServiceSparseEmbeddingsRequestEntity(truncationResult.input(), usageContext) + new ElasticInferenceServiceSparseEmbeddingsRequestEntity( + truncationResult.input(), + model.getServiceSettings().modelId(), + usageContext + ) ); ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java index deecd9186aca5..77ae48e6ccdc2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntity.java @@ -18,14 +18,17 @@ public record ElasticInferenceServiceSparseEmbeddingsRequestEntity( List inputs, + String modelId, @Nullable ElasticInferenceServiceUsageContext usageContext ) implements ToXContentObject { private static final String INPUT_FIELD = "input"; + private static final String MODEL_ID_FIELD = "model_id"; private static final String USAGE_CONTEXT = "usage_context"; public ElasticInferenceServiceSparseEmbeddingsRequestEntity { Objects.requireNonNull(inputs); + Objects.requireNonNull(modelId); } @Override @@ -33,14 +36,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.startArray(INPUT_FIELD); - { - for (String input : inputs) { - builder.value(input); - } + for (String input : inputs) { + builder.value(input); } builder.endArray(); + builder.field(MODEL_ID_FIELD, modelId); + // optional field if ((usageContext == ElasticInferenceServiceUsageContext.UNSPECIFIED) == false) { builder.field(USAGE_CONTEXT, usageContext); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java index 4c1cac4d7a77b..ac6a389914a10 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java @@ -20,15 +20,11 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.elastic.ElasticInferenceServiceActionVisitor; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; -import org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels; import java.net.URI; import java.net.URISyntaxException; -import java.util.Locale; import java.util.Map; -import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; - public class ElasticInferenceServiceSparseEmbeddingsModel extends ElasticInferenceServiceExecutableActionModel { private final URI uri; @@ -95,36 +91,15 @@ public URI uri() { } private URI createUri() throws ElasticsearchStatusException { - String modelId = getServiceSettings().modelId(); - String modelIdUriPath; - - switch (modelId) { - case ElserModels.ELSER_V2_MODEL -> modelIdUriPath = "ELSERv2"; - default -> throw new ElasticsearchStatusException( - String.format( - Locale.ROOT, - "Unsupported model [%s] for service [%s] and task type [%s]", - modelId, - ELASTIC_INFERENCE_SERVICE_IDENTIFIER, - TaskType.SPARSE_EMBEDDING - ), - RestStatus.BAD_REQUEST - ); - } - try { // TODO, consider transforming the base URL into a URI for better error handling. - return new URI( - elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/embed/text/sparse/" + modelIdUriPath - ); + return new URI(elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/embed/text/sparse"); } catch (URISyntaxException e) { throw new ElasticsearchStatusException( "Failed to create URI for service [" + this.getConfigurations().getService() + "] with taskType [" + this.getTaskType() - + "] with model [" - + this.getServiceSettings().modelId() + "]: " + e.getMessage(), RestStatus.BAD_REQUEST, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettings.java index 3af404aeef36b..175f03f14673e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettings.java @@ -17,7 +17,6 @@ import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; -import org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels; import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -61,10 +60,6 @@ public static ElasticInferenceServiceSparseEmbeddingsServiceSettings fromMap( context ); - if (modelId != null && ElserModels.isValidEisModel(modelId) == false) { - validationException.addValidationError("unknown ELSER model id [" + modelId + 
"]"); - } - if (validationException.validationErrors().isEmpty() == false) { throw validationException; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java index b142371ae1b4b..e1d2ee56733e3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreatorTests.java @@ -90,7 +90,7 @@ public void testExecute_ReturnsSuccessfulResponse_ForElserAction() throws IOExce webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id"); var actionCreator = new ElasticInferenceServiceActionCreator( sender, createWithEmptySettings(threadPool), @@ -120,10 +120,11 @@ public void testExecute_ReturnsSuccessfulResponse_ForElserAction() throws IOExce assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - assertThat(requestMap.size(), is(1)); + assertThat(requestMap.size(), is(2)); assertThat(requestMap.get("input"), instanceOf(List.class)); var inputList = (List) requestMap.get("input"); assertThat(inputList, contains("hello world")); + assertThat(requestMap.get("model_id"), is("my-model-id")); } } @@ -151,7 +152,7 @@ public void testSend_FailsFromInvalidResponseFormat_ForElserAction() throws IOEx webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id"); var actionCreator = new ElasticInferenceServiceActionCreator( sender, createWithEmptySettings(threadPool), @@ -174,10 +175,11 @@ public void testSend_FailsFromInvalidResponseFormat_ForElserAction() throws IOEx assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - assertThat(requestMap.size(), is(1)); + assertThat(requestMap.size(), is(2)); assertThat(requestMap.get("input"), instanceOf(List.class)); var inputList = (List) requestMap.get("input"); assertThat(inputList, contains("hello world")); + assertThat(requestMap.get("model_id"), is("my-model-id")); } } @@ -208,7 +210,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating() throws IOExc webServer.enqueue(new MockResponse().setResponseCode(413).setBody(responseJsonContentTooLarge)); webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id"); var actionCreator = new ElasticInferenceServiceActionCreator( sender, createWithEmptySettings(threadPool), @@ -273,7 +275,7 @@ public void 
testExecute_TruncatesInputBeforeSending() throws IOException { webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); // truncated to 1 token = 3 characters - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), 1); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id", 1); var actionCreator = new ElasticInferenceServiceActionCreator( sender, createWithEmptySettings(threadPool), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java index 5920e70cfdd18..c0ebaf8668c5c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestEntityTests.java @@ -24,18 +24,21 @@ public class ElasticInferenceServiceSparseEmbeddingsRequestEntityTests extends E public void testToXContent_SingleInput_UnspecifiedUsageContext() throws IOException { var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity( List.of("abc"), + "my-model-id", ElasticInferenceServiceUsageContext.UNSPECIFIED ); String xContentString = xContentEntityToString(entity); assertThat(xContentString, equalToIgnoringWhitespaceInJsonString(""" { - "input": ["abc"] + "input": ["abc"], + "model_id": "my-model-id" }""")); } public void testToXContent_MultipleInputs_UnspecifiedUsageContext() throws IOException { var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity( List.of("abc", "def"), + "my-model-id", ElasticInferenceServiceUsageContext.UNSPECIFIED ); String xContentString = xContentEntityToString(entity); @@ -44,28 +47,39 @@ public void testToXContent_MultipleInputs_UnspecifiedUsageContext() throws IOExc "input": [ "abc", "def" - ] + ], + "model_id": "my-model-id" } """)); } public void testToXContent_MultipleInputs_SearchUsageContext() throws IOException { - var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity(List.of("abc"), ElasticInferenceServiceUsageContext.SEARCH); + var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity( + List.of("abc"), + "my-model-id", + ElasticInferenceServiceUsageContext.SEARCH + ); String xContentString = xContentEntityToString(entity); assertThat(xContentString, equalToIgnoringWhitespaceInJsonString(""" { "input": ["abc"], + "model_id": "my-model-id", "usage_context": "search" } """)); } public void testToXContent_MultipleInputs_IngestUsageContext() throws IOException { - var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity(List.of("abc"), ElasticInferenceServiceUsageContext.INGEST); + var entity = new ElasticInferenceServiceSparseEmbeddingsRequestEntity( + List.of("abc"), + "my-model-id", + ElasticInferenceServiceUsageContext.INGEST + ); String xContentString = xContentEntityToString(entity); assertThat(xContentString, equalToIgnoringWhitespaceInJsonString(""" { "input": ["abc"], + "model_id": "my-model-id", "usage_context": "ingest" } """)); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java index cb867f15b6d4f..abcc94640981c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceSparseEmbeddingsRequestTests.java @@ -34,8 +34,9 @@ public class ElasticInferenceServiceSparseEmbeddingsRequestTests extends ESTestC public void testCreateHttpRequest_UsageContextSearch() throws IOException { var url = "http://eis-gateway.com"; var input = "input"; + var modelId = "my-model-id"; - var request = createRequest(url, input, InputType.SEARCH); + var request = createRequest(url, modelId, input, InputType.SEARCH); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); @@ -43,16 +44,18 @@ public void testCreateHttpRequest_UsageContextSearch() throws IOException { assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap.size(), equalTo(2)); + assertThat(requestMap.size(), equalTo(3)); assertThat(requestMap.get("input"), is(List.of(input))); + assertThat(requestMap.get("model_id"), is(modelId)); assertThat(requestMap.get("usage_context"), equalTo("search")); } public void testTraceContextPropagatedThroughHTTPHeaders() { var url = "http://eis-gateway.com"; var input = "input"; + var modelId = "my-model-id"; - var request = createRequest(url, input, InputType.UNSPECIFIED); + var request = createRequest(url, modelId, input, InputType.UNSPECIFIED); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); @@ -68,8 +71,9 @@ public void testTraceContextPropagatedThroughHTTPHeaders() { public void testTruncate_ReducesInputTextSizeByHalf() throws IOException { var url = "http://eis-gateway.com"; var input = "abcd"; + var modelId = "my-model-id"; - var request = createRequest(url, input, InputType.UNSPECIFIED); + var request = createRequest(url, modelId, input, InputType.UNSPECIFIED); var truncatedRequest = request.truncate(); var httpRequest = truncatedRequest.createHttpRequest(); @@ -77,15 +81,17 @@ public void testTruncate_ReducesInputTextSizeByHalf() throws IOException { var httpPost = (HttpPost) httpRequest.httpRequestBase(); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap, aMapWithSize(1)); + assertThat(requestMap, aMapWithSize(2)); assertThat(requestMap.get("input"), is(List.of("ab"))); + assertThat(requestMap.get("model_id"), is(modelId)); } public void testIsTruncated_ReturnsTrue() { var url = "http://eis-gateway.com"; var input = "abcd"; + var modelId = "my-model-id"; - var request = createRequest(url, input, InputType.UNSPECIFIED); + var request = createRequest(url, modelId, input, InputType.UNSPECIFIED); assertFalse(request.getTruncationInfo()[0]); var truncatedRequest = request.truncate(); @@ -109,8 +115,8 @@ public void testInputTypeToUsageContext_Unknown_DefaultToUnspecified() { assertThat(inputTypeToUsageContext(InputType.CLUSTERING), 
equalTo(ElasticInferenceServiceUsageContext.UNSPECIFIED)); } - public ElasticInferenceServiceSparseEmbeddingsRequest createRequest(String url, String input, InputType inputType) { - var embeddingsModel = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(url); + public ElasticInferenceServiceSparseEmbeddingsRequest createRequest(String url, String modelId, String input, InputType inputType) { + var embeddingsModel = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(url, modelId); return new ElasticInferenceServiceSparseEmbeddingsRequest( TruncatorTests.createTruncator(), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java index c9f4234331221..02bbbb844c04f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModelTests.java @@ -11,20 +11,19 @@ import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels; public class ElasticInferenceServiceSparseEmbeddingsModelTests extends ESTestCase { - public static ElasticInferenceServiceSparseEmbeddingsModel createModel(String url) { - return createModel(url, null); + public static ElasticInferenceServiceSparseEmbeddingsModel createModel(String url, String modelId) { + return createModel(url, modelId, null); } - public static ElasticInferenceServiceSparseEmbeddingsModel createModel(String url, Integer maxInputTokens) { + public static ElasticInferenceServiceSparseEmbeddingsModel createModel(String url, String modelId, Integer maxInputTokens) { return new ElasticInferenceServiceSparseEmbeddingsModel( "id", TaskType.SPARSE_EMBEDDING, "service", - new ElasticInferenceServiceSparseEmbeddingsServiceSettings(ElserModels.ELSER_V2_MODEL, maxInputTokens, null), + new ElasticInferenceServiceSparseEmbeddingsServiceSettings(modelId, maxInputTokens, null), EmptyTaskSettings.INSTANCE, EmptySecretSettings.INSTANCE, new ElasticInferenceServiceComponents(url) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettingsTests.java index dd205b12408ba..be6057bcddce0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsServiceSettingsTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.inference.services.elastic; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xcontent.XContentBuilder; @@ -23,7 +22,6 @@ import java.util.Map; import 
static org.elasticsearch.xpack.inference.services.elasticsearch.ElserModelsTests.randomElserModel; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; public class ElasticInferenceServiceSparseEmbeddingsServiceSettingsTests extends AbstractWireSerializingTestCase< @@ -47,7 +45,7 @@ protected ElasticInferenceServiceSparseEmbeddingsServiceSettings mutateInstance( } public void testFromMap() { - var modelId = ElserModels.ELSER_V2_MODEL; + var modelId = "my-model-id"; var serviceSettings = ElasticInferenceServiceSparseEmbeddingsServiceSettings.fromMap( new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId)), @@ -57,20 +55,6 @@ public void testFromMap() { assertThat(serviceSettings, is(new ElasticInferenceServiceSparseEmbeddingsServiceSettings(modelId, null, null))); } - public void testFromMap_InvalidElserModelId() { - var invalidModelId = "invalid"; - - ValidationException validationException = expectThrows( - ValidationException.class, - () -> ElasticInferenceServiceSparseEmbeddingsServiceSettings.fromMap( - new HashMap<>(Map.of(ServiceFields.MODEL_ID, invalidModelId)), - ConfigurationParseContext.REQUEST - ) - ); - - assertThat(validationException.getMessage(), containsString(Strings.format("unknown ELSER model id [%s]", invalidModelId))); - } - public void testToXContent_WritesAllFields() throws IOException { var modelId = ElserModels.ELSER_V1_MODEL; var maxInputTokens = 10; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index 5a3a9a29d7564..b46fd4941e6f6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -308,12 +308,12 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists public void testCheckModelConfig_ReturnsNewModelReference() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); try (var service = createService(senderFactory, getUrl(webServer))) { - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id"); PlainActionFuture listener = new PlainActionFuture<>(); service.checkModelConfig(model, listener); var returnedModel = listener.actionGet(TIMEOUT); - assertThat(returnedModel, is(ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer)))); + assertThat(returnedModel, is(ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id"))); } } @@ -457,7 +457,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(eisGatewayUrl); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(eisGatewayUrl, "my-model-id"); PlainActionFuture listener = new PlainActionFuture<>(); service.infer( model, @@ -486,7 +486,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), 
Matchers.equalTo(XContentType.JSON.mediaType())); var requestMap = entityAsMap(request.getBody()); - assertThat(requestMap, is(Map.of("input", List.of("input text"), "usage_context", "search"))); + assertThat(requestMap, is(Map.of("input", List.of("input text"), "model_id", "my-model-id", "usage_context", "search"))); } } @@ -508,7 +508,7 @@ public void testChunkedInfer_PassesThrough() throws IOException { webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(eisGatewayUrl); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(eisGatewayUrl, "my-model-id"); PlainActionFuture> listener = new PlainActionFuture<>(); service.chunkedInfer( model, @@ -544,7 +544,7 @@ public void testChunkedInfer_PassesThrough() throws IOException { ); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - assertThat(requestMap, is(Map.of("input", List.of("input text"), "usage_context", "ingest"))); + assertThat(requestMap, is(Map.of("input", List.of("input text"), "model_id", "my-model-id", "usage_context", "ingest"))); } } From bfeba89e0c0fa778a57cf91597ff1b0c0054d6f5 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 28 Jan 2025 14:56:24 -0800 Subject: [PATCH 170/383] [DOCS] Move ML function reference out of appendix (#121111) --- .../ml/anomaly-detection/functions/ml-count-functions.asciidoc | 2 +- .../ml/anomaly-detection/functions/ml-functions.asciidoc | 1 - .../ml/anomaly-detection/functions/ml-geo-functions.asciidoc | 2 +- .../ml/anomaly-detection/functions/ml-info-functions.asciidoc | 2 +- .../ml/anomaly-detection/functions/ml-metric-functions.asciidoc | 2 +- .../ml/anomaly-detection/functions/ml-rare-functions.asciidoc | 2 +- .../ml/anomaly-detection/functions/ml-sum-functions.asciidoc | 2 +- .../ml/anomaly-detection/functions/ml-time-functions.asciidoc | 2 +- 8 files changed, 7 insertions(+), 8 deletions(-) diff --git a/docs/reference/ml/anomaly-detection/functions/ml-count-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-count-functions.asciidoc index aef8e13bd429e..54298e80b92e2 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-count-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-count-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-count-functions"] +[[ml-count-functions]] = Count functions Count functions detect anomalies when the number of events in a bucket is diff --git a/docs/reference/ml/anomaly-detection/functions/ml-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-functions.asciidoc index ec5e429bfc584..a44e3ceaa3158 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-functions.asciidoc @@ -1,4 +1,3 @@ -[role="xpack"] [[ml-functions]] = Function reference diff --git a/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc index 63a0f047db647..6c5f075ab24b2 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-geo-functions"] +[[ml-geo-functions]] = Geographic functions The geographic functions detect anomalies in the geographic location of the diff --git 
a/docs/reference/ml/anomaly-detection/functions/ml-info-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-info-functions.asciidoc index 7197e535e55e3..d1cbf39cdbe3b 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-info-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-info-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-info-functions"] +[[ml-info-functions]] = Information content functions The information content functions detect anomalies in the amount of information diff --git a/docs/reference/ml/anomaly-detection/functions/ml-metric-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-metric-functions.asciidoc index 31ce07b01570f..bbd9dfc8f09d1 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-metric-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-metric-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-metric-functions"] +[[ml-metric-functions]] = Metric functions The metric functions include functions such as mean, min and max. These values diff --git a/docs/reference/ml/anomaly-detection/functions/ml-rare-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-rare-functions.asciidoc index c993800a9f65b..69378d64f525c 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-rare-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-rare-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-rare-functions"] +[[ml-rare-functions]] = Rare functions The rare functions detect values that occur rarely in time or rarely for a diff --git a/docs/reference/ml/anomaly-detection/functions/ml-sum-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-sum-functions.asciidoc index 423a00154fe88..f0b8b838933b5 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-sum-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-sum-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-sum-functions"] +[[ml-sum-functions]] = Sum functions The sum functions detect anomalies when the sum of a field in a bucket is diff --git a/docs/reference/ml/anomaly-detection/functions/ml-time-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-time-functions.asciidoc index 096fd817ccc4c..7e2301dca0a25 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-time-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-time-functions.asciidoc @@ -1,4 +1,4 @@ -["appendix",role="exclude",id="ml-time-functions"] +[[ml-time-functions]] = Time functions The time functions detect events that happen at unusual times, either of the day From f403e38ef25e0829f6fe67d4556c8ea1e0b23c83 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 29 Jan 2025 09:58:27 +1100 Subject: [PATCH 171/383] Mute org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level} #120950 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index ec828772c45b0..c0cbd07db403d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -290,6 +290,9 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testUpdateProfileData issue: https://github.com/elastic/elasticsearch/issues/121108 +- class: 
org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT + method: test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level} + issue: https://github.com/elastic/elasticsearch/issues/120950 # Examples: # From 3df200384f424558c7f41bf82e6202a3cdb2a4f7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 29 Jan 2025 10:03:47 +1100 Subject: [PATCH 172/383] Mute org.elasticsearch.ingest.geoip.FullClusterRestartIT testGeoIpSystemFeaturesMigration {cluster=UPGRADED} #121115 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index c0cbd07db403d..3c4c555393957 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -293,6 +293,9 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level} issue: https://github.com/elastic/elasticsearch/issues/120950 +- class: org.elasticsearch.ingest.geoip.FullClusterRestartIT + method: testGeoIpSystemFeaturesMigration {cluster=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/121115 # Examples: # From 635a4c21de29b072c60c08c0dddb35a8c480722f Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Tue, 28 Jan 2025 19:44:56 -0600 Subject: [PATCH 173/383] Add docs for reindex data stream REST endpoints (#120653) Add documentation for new REST endpoints related to data stream upgrade. Endpoints: - /_migration/reindex - /_migration/reindex/{index}/_status - /_migration/reindex/{index}/_cancel - /_create_from/{source}/{dest} --- .../apis/create-index-from-source.asciidoc | 142 +++++++ .../apis/data-stream-reindex-cancel.asciidoc | 64 ++++ .../apis/data-stream-reindex-status.asciidoc | 157 ++++++++ .../apis/data-stream-reindex.asciidoc | 358 ++++++++++++++++++ docs/reference/migration/migration.asciidoc | 8 + .../api/indices.cancel_migrate_reindex.json | 2 +- .../api/indices.create_from.json | 2 +- .../indices.get_migrate_reindex_status.json | 2 +- .../api/indices.migrate_reindex.json | 2 +- 9 files changed, 733 insertions(+), 4 deletions(-) create mode 100644 docs/reference/migration/apis/create-index-from-source.asciidoc create mode 100644 docs/reference/migration/apis/data-stream-reindex-cancel.asciidoc create mode 100644 docs/reference/migration/apis/data-stream-reindex-status.asciidoc create mode 100644 docs/reference/migration/apis/data-stream-reindex.asciidoc diff --git a/docs/reference/migration/apis/create-index-from-source.asciidoc b/docs/reference/migration/apis/create-index-from-source.asciidoc new file mode 100644 index 0000000000000..601650c0dea31 --- /dev/null +++ b/docs/reference/migration/apis/create-index-from-source.asciidoc @@ -0,0 +1,142 @@ +[[indices-create-index-from-source]] +=== Create index from source API +++++ +Create index from source +++++ + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[Index APIs]. +-- + +[[indices-create-index-from-source-api-request]] +==== {api-request-title} + +`PUT /_create_from//` + +`POST/_create_from//` + +[[indices-create-index-from-source-api-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have the `manage` +<> for the index. 
+
+[[indices-create-index-from-source-api-desc]]
+==== {api-description-title}
+This API allows you to add a new index to an {es} cluster, using an existing source index as a basis for the new index.
+The settings and mappings from the source index will be copied over to the destination index. You can also provide
+override settings and mappings which will be combined with the source settings and mappings when creating the
+destination index.
+
+[[indices-create-index-from-source-api-path-params]]
+==== {api-path-parms-title}
+
+``::
+(Required, string) Name of the existing source index which will be used as a basis.
+
+``::
+(Required, string) Name of the destination index which will be created.
+
+
+[role="child_attributes"]
+[[indices-create-index-from-source-api-request-body]]
+==== {api-request-body-title}
+
+`settings_override`::
+(Optional, <>) Settings which override the source settings.
+
+`mappings_override`::
+(Optional, <>) Mappings which override the source mappings.
+
+`remove_index_blocks`::
+(Optional, boolean) Filter out any index blocks from the source index when creating the destination index.
+Defaults to `true`.
+
+[[indices-create-index-from-source-api-example]]
+==== {api-examples-title}
+
+Start by creating a source index that we'll copy using this API.
+
+[source,console]
+--------------------------------------------------
+PUT /my-index
+{
+  "settings": {
+    "index": {
+      "number_of_shards": 3,
+      "blocks.write": true
+    }
+  },
+  "mappings": {
+    "properties": {
+      "field1": { "type": "text" }
+    }
+  }
+}
+--------------------------------------------------
+// TESTSETUP
+
+Now we create a destination index from the source index. This new index will have the same mappings and settings
+as the source index.
+
+[source,console]
+--------------------------------------------------
+POST _create_from/my-index/my-new-index
+--------------------------------------------------
+
+
+Alternatively, we could override some of the source's settings and mappings. This will use the source settings
+and mappings as a basis and combine these with the overrides to create the destination settings and mappings.
+
+[source,console]
+--------------------------------------------------
+POST _create_from/my-index/my-new-index
+{
+  "settings_override": {
+    "index": {
+      "number_of_shards": 5
+    }
+  },
+  "mappings_override": {
+    "properties": {
+      "field2": { "type": "boolean" }
+    }
+  }
+}
+--------------------------------------------------
+
+Since the destination index is empty, we very likely will want to write into the index after creation.
+This would not be possible if the source index contains an <> which is copied over to the destination index.
+One way to handle this is to remove the index write block using a settings override. For example, the following
+settings override removes all index blocks.
+
+
+[source,console]
+--------------------------------------------------
+POST _create_from/my-index/my-new-index
+{
+  "settings_override": {
+    "index": {
+      "blocks.write": null,
+      "blocks.read": null,
+      "blocks.read_only": null,
+      "blocks.read_only_allow_delete": null,
+      "blocks.metadata": null
+    }
+  }
+}
+--------------------------------------------------
+
+Since this is a common scenario, index blocks are actually removed by default. This is controlled with the parameter
+`remove_index_blocks`, which defaults to `true`.
If we want the destination index to contains the index blocks from +the source index, we can do the following: + +[source,console] +-------------------------------------------------- +POST _create_from/my-index/my-new-index +{ + "remove_index_blocks": false +} +-------------------------------------------------- diff --git a/docs/reference/migration/apis/data-stream-reindex-cancel.asciidoc b/docs/reference/migration/apis/data-stream-reindex-cancel.asciidoc new file mode 100644 index 0000000000000..8866fc5332a1b --- /dev/null +++ b/docs/reference/migration/apis/data-stream-reindex-cancel.asciidoc @@ -0,0 +1,64 @@ +[role="xpack"] +[[data-stream-reindex-cancel-api]] +=== Reindex data stream cancel API +++++ +Reindex data stream cancel +++++ + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-migration[Migration APIs]. +-- + +include::{es-ref-dir}/migration/apis/shared-migration-apis-tip.asciidoc[] + +Cancels a running data stream reindex task which was started by the <>. +Any backing indices that have already been reindexed and swapped into the data stream will remain in the data stream. +Only backing indices which are currently being reindexed, or pending backing indices which are still waiting to be reindexed, will be cancelled. +Once a data stream reindex task is cancelled it will no longer be accessible through the +<>. If a reindex task is not currently running +this API will return `resource_not_found_exception`. + + +/////////////////////////////////////////////////////////// +[source,console] +------------------------------------------------------ +POST _migration/reindex +{ + "source": { + "index": "my-data-stream" + }, + "mode": "upgrade" +} +------------------------------------------------------ +// TESTSETUP +// TEST[setup:my_data_stream] +/////////////////////////////////////////////////////////// + + +[source,console] +---- +POST _migration/reindex/my-data-stream/_cancel +---- +// TEST[teardown:data_stream_cleanup] + +[[data-stream-reindex-cancel-request]] +==== {api-request-title} + +`GET /_migration/reindex//_cancel` + + +[[data-stream-reindex-cancel-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have the `manage` +<> for the data stream. + +[[data-stream-reindex-cancel-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +Name of data stream to cancel reindexing. + diff --git a/docs/reference/migration/apis/data-stream-reindex-status.asciidoc b/docs/reference/migration/apis/data-stream-reindex-status.asciidoc new file mode 100644 index 0000000000000..6c391d7571a8a --- /dev/null +++ b/docs/reference/migration/apis/data-stream-reindex-status.asciidoc @@ -0,0 +1,157 @@ +[role="xpack"] +[[data-stream-reindex-status-api]] +=== Reindex data stream status API +++++ +Reindex data stream status +++++ + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-migration[Migration APIs]. +-- + +include::{es-ref-dir}/migration/apis/shared-migration-apis-tip.asciidoc[] + +Obtains the current status of a reindex task for the requested data stream. This status is +available while the reindex task is running and for 24 hours after completion of the task, +whether it succeeds or fails. If the task is cancelled, the status is no longer available. +If the task fails, the exception will be listed within the status. 
+ +/////////////////////////////////////////////////////////// +[source,console] +------------------------------------------------------ +POST _migration/reindex +{ + "source": { + "index": "my-data-stream" + }, + "mode": "upgrade" +} +------------------------------------------------------ +// TESTSETUP +// TEST[setup:my_data_stream] + +[source,console] +------------------------------------------------------ +POST /_migration/reindex/my-data-stream/_cancel +DELETE _data_stream/my-data-stream +DELETE _index_template/my-data-stream-template +------------------------------------------------------ +// TEARDOWN +/////////////////////////////////////////////////////////// + + +[[data-stream-reindex-status-api-request]] +==== {api-request-title} + +`GET /_migration/reindex//_status` + + +[[data-stream-reindex-status-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have the `manage` +<> for the data stream. + +[[data-stream-reindex-status-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +Name of data stream to get status for. The reindex task for the +data stream should be currently running or have been completed in the last 24 hours. + + +[role="child_attributes"] +[[data-stream-reindex-status-response-body]] +==== {api-response-body-title} + +`start_time`:: +(Optional, <>) The time when the reindex task started. + +`start_time_millis`:: +(integer) The time when the reindex task started, in milliseconds since the epoch. + +`complete`:: +(boolean) `false` if the reindex task is still running, and `true` if the task has completed with success or failure. + +`total_indices_in_data_stream`:: +(integer) The total number of backing indices in the data stream, including the write index. + +`total_indices_requiring_upgrade`:: +(integer) The number of backing indices that need to be upgraded. These will consist of the indices which have an +older version and are not read-only. + +`successes`:: +(integer) The number of backing indices which have already been successfully upgraded. + +`in_progress`:: +(array of objects) Information on the backing indices which are currently being reindexed. ++ +.Properties of objects in `in_progress` +[%collapsible%open] +===== +`index`:: +(string) The name of the source backing index. + +`total_doc_count`:: +(integer) The number of documents in the source backing index. + +`reindexed_doc_count`:: +(integer) The number of documents which have already been added to the destination backing index. +===== + +`pending`:: +(integer) The number of backing indices which still need to be upgraded and have not yet been started. + +`errors`:: +(array of objects) Information on any errors which have occurred. ++ +.Properties of objects in `errors` +[%collapsible%open] +===== +`index`:: +(string) The name of a backing index which has had an error during reindex. + +`message`:: +(string) Description of the error. +===== + +`exceptions`:: +(Optional, string) +Exception message for a reindex failure if the failure could not be tied to a particular index. 
+ + +[[data-stream-reindex-status-example]] +==== {api-examples-title} + +[source,console] +---- +GET _migration/reindex/my-data-stream/_status +---- + +The following is a typical response: +[source,console-result] +---- +{ + "start_time_millis": 1737676174349, + "complete": false, + "total_indices_in_data_stream": 4, + "total_indices_requiring_upgrade": 3, + "successes": 1, + "in_progress": [ + { + "index": ".ds-my-data-stream-2025.01.23-000002", + "total_doc_count": 10000000, + "reindexed_doc_count": 1000 + } + ], + "pending": 1, + "errors": [] +} +---- +// TEST[skip:cannot easily clean up reindex task between tests] + +For a more in-depth example showing the usage of this API along with the <> and <> APIs, +see this <>. diff --git a/docs/reference/migration/apis/data-stream-reindex.asciidoc b/docs/reference/migration/apis/data-stream-reindex.asciidoc new file mode 100644 index 0000000000000..4641e0fe0911a --- /dev/null +++ b/docs/reference/migration/apis/data-stream-reindex.asciidoc @@ -0,0 +1,358 @@ +[role="xpack"] +[[data-stream-reindex-api]] +=== Reindex data stream API +++++ +Reindex data stream +++++ + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-migration[Migration APIs]. +-- + +include::{es-ref-dir}/migration/apis/shared-migration-apis-tip.asciidoc[] + +The reindex data stream API is used to upgrade the backing indices of a data stream to the most +recent major version. It works by reindexing each backing index into a new index, then replacing the original +backing index with its replacement and deleting the original backing index. The settings and mappings +from the original backing indices are copied to the resulting backing indices. + +This api runs in the background because reindexing all indices in a large data stream +is expected to take a large amount of time and resources. The endpoint will return immediately and a persistent +task will be created to run in the background. The current status of the task can be checked with +the <>. This status will be available for 24 hours after the task completes, whether +it finished successfully or failed. If the status is still available for a task, the task must be cancelled before it can be re-run. +A running or recently completed data stream reindex task can be cancelled using the <>. + +/////////////////////////////////////////////////////////// +[source,console] +------------------------------------------------------ +POST /_migration/reindex/my-data-stream/_cancel +DELETE _data_stream/my-data-stream +DELETE _index_template/my-data-stream-template +------------------------------------------------------ +// TEARDOWN +/////////////////////////////////////////////////////////// + + +[[data-stream-reindex-api-request]] +==== {api-request-title} + +`POST /_migration/reindex` + + +[[data-stream-reindex-api-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have the `manage` +<> for the data stream. + +[[data-stream-reindex-body]] +==== {api-request-body-title} + +`source`:: +`index`::: +(Required, string) The name of the data stream to upgrade. + +`mode`:: +(Required, enum) Set to `upgrade` to upgrade the data stream in-place, using the same source and destination +data stream. Each out-of-date backing index will be reindexed. Then the new backing index is swapped into the data stream and the old index is deleted. +Currently, the only allowed value for this parameter is `upgrade`. 
+
+[[reindex-data-stream-api-settings]]
+==== Settings
+
+You can use the following settings to control the behavior of the reindex data stream API:
+
+[[migrate_max_concurrent_indices_reindexed_per_data_stream-setting]]
+// tag::migrate_max_concurrent_indices_reindexed_per_data_stream-setting-tag[]
+`migrate.max_concurrent_indices_reindexed_per_data_stream`
+(<>)
+The number of backing indices within a given data stream which will be reindexed concurrently. Defaults to `1`.
+// end::migrate_max_concurrent_indices_reindexed_per_data_stream-tag[]
+
+[[migrate_data_stream_reindex_max_request_per_second-setting]]
+// tag::migrate_data_stream_reindex_max_request_per_second-setting-tag[]
+`migrate.data_stream_reindex_max_request_per_second`
+(<>)
+The average maximum number of documents within a given backing index to reindex per second.
+Defaults to `1000`, though can be any decimal number greater than `0`.
+To remove throttling, set to `-1`.
+This setting can be used to throttle the reindex process and manage resource usage.
+Consult the <> for more information.
+// end::migrate_data_stream_reindex_max_request_per_second-tag[]
+
+
+[[reindex-data-stream-api-example]]
+==== {api-examples-title}
+
+Assume we have a data stream `my-data-stream` with the following backing indices, all of which have index major version 7.x.
+
+* .ds-my-data-stream-2025.01.23-000001
+* .ds-my-data-stream-2025.01.23-000002
+* .ds-my-data-stream-2025.01.23-000003
+
+Let's also assume that `.ds-my-data-stream-2025.01.23-000003` is the write index.
+If {es} is version 8.x and we wish to upgrade to major version 9.x, the version 7.x indices must be upgraded in preparation.
+We can use this API to reindex a data stream with version 7.x backing indices and make them version 8 backing indices.
+
+Start by calling the API:
+
+[[reindex-data-stream-start]]
+[source,console]
+----
+POST _migration/reindex
+{
+  "source": {
+    "index": "my-data-stream"
+  },
+  "mode": "upgrade"
+}
+----
+// TEST[setup:my_data_stream]
+
+
+As this task runs in the background, this API will return immediately.
+The task will do the following.
+
+First, the data stream is rolled over. So that no documents are lost during the reindex, we add <>
+ to the existing backing indices before reindexing them. Since a data stream's write index cannot have a write block,
+ the data stream must be rolled over. This will produce a new write index, `.ds-my-data-stream-2025.01.23-000004`, which
+ has an 8.x version and thus does not need to be upgraded.
+
+Once the data stream has a write index with an 8.x version we can proceed with reindexing the old indices.
+For each of the version 7.x indices, we now do the following:
+
+* Add a write block to the source index to guarantee that no writes are lost.
+* Open the source index if it is closed.
+* Delete the destination index if one exists. This is done in case we are retrying after a failure, so that we start with a fresh index.
+* Create the destination index using the <>.
+This copies the settings and mappings from the old backing index to the new backing index.
+* Use the <> to copy the contents of the old backing index to the new backing index.
+* Close the destination index if the source index was originally closed.
+* Replace the old index in the data stream with the new index, using the <>.
+* Finally, the old backing index is deleted.
+
+By default only one backing index will be processed at a time.
+This can be modified using the <>.
+ +While the reindex data stream task is running, we can inspect the current status using the <>: +[source,console] +---- +GET /_migration/reindex/my-data-stream/_status +---- +// TEST[continued] + +For the above example, the following would be a possible status: + +[source,console-result] +---- +{ + "start_time_millis": 1737676174349, + "complete": false, + "total_indices_in_data_stream": 4, + "total_indices_requiring_upgrade": 3, + "successes": 0, + "in_progress": [ + { + "index": ".ds-my-data-stream-2025.01.23-000001", + "total_doc_count": 10000000, + "reindexed_doc_count": 999999 + } + ], + "pending": 2, + "errors": [] +} +---- +// TEST[skip:specific value is part of explanation] + +This output means that the first backing index, `.ds-my-data-stream-2025.01.23-000001`, is currently being processed, +and none of the backing indices have yet completed. Notice that `total_indices_in_data_stream` has a value of `4`, +because after the rollover, there are 4 indices in the data stream. But the new write index has an 8.x version, and +thus doesn't need to be reindexed, so `total_indices_requiring_upgrade` is only 3. + + + +[[reindex-data-stream-cancel-restart]] +===== Cancelling and Restarting +The <> provide a few ways to control the performance and +resource usage of a reindex task. This example shows how we can stop a running reindex task, modify the settings, +and restart the task. + +Continuing with the above example, assume the reindexing task has not yet completed, and the <> +returns the following: + +[source,console-result] +---- +{ + "start_time_millis": 1737676174349, + "complete": false, + "total_indices_in_data_stream": 4, + "total_indices_requiring_upgrade": 3, + "successes": 1, + "in_progress": [ + { + "index": ".ds-my-data-stream-2025.01.23-000002", + "total_doc_count": 10000000, + "reindexed_doc_count": 1000 + } + ], + "pending": 1, + "errors": [] +} +---- +// TEST[skip:specific value is part of explanation] + +Let's assume the task has been running for a long time. By default, we throttle how many requests the reindex operation +can execute per second. This keeps the reindex process from consuming too many resources. +But the default value of `1000` request per second will not be correct for all use cases. +The <> +can be used to increase or decrease the number of requests per second, or to remove the throttle entirely. + +Changing this setting won't have an effect on the backing index that is currently being reindexed. +For example, changing the setting won't have an effect on `.ds-my-data-stream-2025.01.23-000002`, but would have an +effect on the next backing index. + +But in the above status, `.ds-my-data-stream-2025.01.23-000002` has values of 1000 and 10M for the +`reindexed_doc_count` and `total_doc_count`, respectively. This means it has only reindexed 0.01% of the documents in the index. +It might be a good time to cancel the run and optimize some settings without losing much work. +So we call the <>: + +[source,console] +---- +POST /_migration/reindex/my-data-stream/_cancel +---- +// TEST[skip:task will not be present] + +Now we can use the <> to increase the throttle: + +[source,console] +-------------------------------------------------- +PUT /_cluster/settings +{ + "persistent" : { + "migrate.data_stream_reindex_max_request_per_second" : 10000 + } +} +-------------------------------------------------- +// TEST[continued] + +The <> can now be used to restart reindexing. 
+Because the first backing index, `.ds-my-data-stream-2025.01.23-000001`, has already been reindexed and thus is already version 8.x, +it will be skipped. The task will start by reindexing `.ds-my-data-stream-2025.01.23-000002` again from the beginning. + +Later, once all the backing indices have finished, the <> will return something like the following: + +[source,console-result] +---- +{ + "start_time_millis": 1737676174349, + "complete": true, + "total_indices_in_data_stream": 4, + "total_indices_requiring_upgrade": 2, + "successes": 2, + "in_progress": [], + "pending": 0, + "errors": [] +} +---- +// TEST[skip:specific value is part of explanation] + +Notice that the value of `total_indices_requiring_upgrade` is `2`, unlike the previous status, which had a value of `3`. +This is because `.ds-my-data-stream-2025.01.23-000001` was upgraded before the task cancellation. +After the restart, the API sees that it does not need to be upgraded, thus does not include it in `total_indices_requiring_upgrade` or `successes`, +despite the fact that it upgraded successfully. + +The completed status will be accessible from the status API for 24 hours after completion of the task. + +We can now check the data stream to verify that indices were upgraded: + +[source,console] +---- +GET _data_stream/my-data-stream?filter_path=data_streams.indices.index_name +---- +// TEST[continued] + + +which returns: +[source,console-result] +---- +{ + "data_streams": [ + { + "indices": [ + { + "index_name": ".migrated-ds-my-data-stream-2025.01.23-000003" + }, + { + "index_name": ".migrated-ds-my-data-stream-2025.01.23-000002" + }, + { + "index_name": ".migrated-ds-my-data-stream-2025.01.23-000001" + }, + { + "index_name": ".ds-my-data-stream-2025.01.23-000004" + } + ] + } + ] +} +---- +// TEST[skip:did not actually run reindex] + +Index `.ds-my-data-stream-2025.01.23-000004` is the write index and didn't need to be upgraded because it was created with version 8.x. +The other three backing indices are now prefixed with `.migrated` because they have been upgraded. + +We can now check the indices and verify that they have version 8.x: +[source,console] +---- +GET .migrated-ds-my-data-stream-2025.01.23-000001?human&filter_path=*.settings.index.version.created_string +---- +// TEST[skip:migrated index does not exist] + +which returns: +[source,console-result] +---- +{ + ".migrated-ds-my-data-stream-2025.01.23-000001": { + "settings": { + "index": { + "version": { + "created_string": "8.18.0" + } + } + } + } +} +---- +// TEST[skip:migrated index does not exist] + +[[reindex-data-stream-handling-failure]] +===== Handling Failures +Since the reindex data stream API runs in the background, failure information can be obtained through the <>. +For example, if the backing index `.ds-my-data-stream-2025.01.23-000002` was accidentally deleted by a user, we would see a status like the following: + +[source,console-result] +---- +{ + "start_time_millis": 1737676174349, + "complete": false, + "total_indices_in_data_stream": 4, + "total_indices_requiring_upgrade": 3, + "successes": 1, + "in_progress": [], + "pending": 1, + "errors": [ + { + "index": ".ds-my-data-stream-2025.01.23-000002", + "message": "index [.ds-my-data-stream-2025.01.23-000002] does not exist" + } + ] +} +---- +// TEST[skip:result just part of explanation] + +Once the issue has been fixed, the failed reindex task can be re-run. First, the failed run's status must be cleared +using the <>. Then the +<> can be called to pick up where it left off. 
diff --git a/docs/reference/migration/migration.asciidoc b/docs/reference/migration/migration.asciidoc index 57b6c88aefea4..850d1b1edd05d 100644 --- a/docs/reference/migration/migration.asciidoc +++ b/docs/reference/migration/migration.asciidoc @@ -14,6 +14,14 @@ include::apis/shared-migration-apis-tip.asciidoc[] * <> * <> +* <> +* <> +* <> +* <> include::apis/deprecation.asciidoc[] include::apis/feature-migration.asciidoc[] +include::apis/data-stream-reindex.asciidoc[] +include::apis/data-stream-reindex-status.asciidoc[] +include::apis/data-stream-reindex-cancel.asciidoc[] +include::apis/create-index-from-source.asciidoc[] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.cancel_migrate_reindex.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.cancel_migrate_reindex.json index 4b5f53c275d79..31cff458e357a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.cancel_migrate_reindex.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.cancel_migrate_reindex.json @@ -1,7 +1,7 @@ { "indices.cancel_migrate_reindex":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex.html", + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex-cancel-api.html", "description":"This API returns the status of a migration reindex attempt for a data stream or index" }, "stability":"experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_from.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_from.json index 05d64598dd1ee..435430b7a2673 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_from.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create_from.json @@ -1,7 +1,7 @@ { "indices.create_from":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex.html", + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-create-index-from-source.html", "description":"This API creates a destination from a source index. It copies the mappings and settings from the source index while allowing request settings and mappings to override the source values." 
}, "stability":"experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_migrate_reindex_status.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_migrate_reindex_status.json index 12151321ac827..d166f3e99197b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_migrate_reindex_status.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_migrate_reindex_status.json @@ -1,7 +1,7 @@ { "indices.get_migrate_reindex_status":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex.html", + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex-status-api.html", "description":"This API returns the status of a migration reindex attempt for a data stream or index" }, "stability":"experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_reindex.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_reindex.json index 66a73c6438142..28cef97c7360a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_reindex.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.migrate_reindex.json @@ -1,7 +1,7 @@ { "indices.migrate_reindex":{ "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex.html", + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex-api.html", "description":"This API reindexes all legacy backing indices for a data stream. It does this in a persistent task. The persistent task id is returned immediately, and the reindexing work is completed in that task" }, "stability":"experimental", From 26b23d88aa77b43a48cd6a121519c5abc136818f Mon Sep 17 00:00:00 2001 From: John Verwolf Date: Tue, 28 Jan 2025 17:49:38 -0800 Subject: [PATCH 174/383] Revert "Reduce Data Loss in System Indices Migration" (#121119) --- docs/changelog/120168.yaml | 5 - docs/changelog/121119.yaml | 5 + .../AbstractFeatureMigrationIntegTest.java | 37 +------- .../migration/FeatureMigrationIT.java | 59 ------------ .../indices/alias/IndicesAliasesResponse.java | 18 ---- .../upgrades/SystemIndexMigrator.java | 92 ++++++------------- 6 files changed, 36 insertions(+), 180 deletions(-) delete mode 100644 docs/changelog/120168.yaml create mode 100644 docs/changelog/121119.yaml diff --git a/docs/changelog/120168.yaml b/docs/changelog/120168.yaml deleted file mode 100644 index d4bb321895160..0000000000000 --- a/docs/changelog/120168.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 120168 -summary: Reduce Data Loss in System Indices Migration -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/121119.yaml b/docs/changelog/121119.yaml new file mode 100644 index 0000000000000..ad05011affbb5 --- /dev/null +++ b/docs/changelog/121119.yaml @@ -0,0 +1,5 @@ +pr: 121119 +summary: Revert "Reduce Data Loss in System Indices Migration" +area: Infra/Core +type: bug +issues: [] diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java index 84e45024b69ff..860d63000f124 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java +++ 
b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/AbstractFeatureMigrationIntegTest.java @@ -9,17 +9,14 @@ package org.elasticsearch.migration; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; @@ -31,7 +28,6 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.AssociatedIndexDescriptor; import org.elasticsearch.indices.SystemIndexDescriptor; -import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.SystemIndexPlugin; @@ -54,10 +50,6 @@ import java.util.function.BiConsumer; import java.util.function.Function; -import static java.util.Collections.emptySet; -import static java.util.Collections.singletonList; -import static java.util.Collections.unmodifiableSet; -import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; @@ -263,18 +255,12 @@ protected void assertIndexHasCorrectProperties( assertThat(thisIndexStats.getTotal().getDocs().getCount(), is((long) INDEX_DOC_COUNT)); } - public static class TestPlugin extends Plugin implements SystemIndexPlugin, ActionPlugin { + public static class TestPlugin extends Plugin implements SystemIndexPlugin { public final AtomicReference>> preMigrationHook = new AtomicReference<>(); public final AtomicReference>> postMigrationHook = new AtomicReference<>(); - private final BlockingActionFilter blockingActionFilter; public TestPlugin() { - blockingActionFilter = new BlockingActionFilter(); - } - @Override - public List getActionFilters() { - return singletonList(blockingActionFilter); } @Override @@ -313,26 +299,5 @@ public void indicesMigrationComplete( postMigrationHook.get().accept(clusterService.state(), preUpgradeMetadata); listener.onResponse(true); } - - public static class BlockingActionFilter extends org.elasticsearch.action.support.ActionFilter.Simple { - private Set blockedActions = emptySet(); - - @Override - protected boolean apply(String action, ActionRequest request, ActionListener listener) { - if (blockedActions.contains(action)) { - throw new ElasticsearchException("force exception on [" + action + "]"); - } - return true; - } - - @Override - public int order() { - return 0; - } - - public void blockActions(String... 
actions) { - blockedActions = unmodifiableSet(newHashSet(actions)); - } - } } } diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java index ee95ce5513820..cdf817a6b17b8 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java @@ -17,14 +17,11 @@ import org.elasticsearch.action.admin.cluster.migration.PostFeatureUpgradeRequest; import org.elasticsearch.action.admin.cluster.migration.PostFeatureUpgradeResponse; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.put.PutComponentTemplateAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.support.ActionFilter; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -39,12 +36,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.SystemIndexDescriptor; -import org.elasticsearch.migration.AbstractFeatureMigrationIntegTest.TestPlugin.BlockingActionFilter; import org.elasticsearch.painless.PainlessPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.reindex.ReindexPlugin; -import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.upgrades.FeatureMigrationResults; import org.elasticsearch.upgrades.SingleFeatureMigrationResult; @@ -277,60 +272,6 @@ public void testMigrateIndexWithWriteBlock() throws Exception { }); } - @AwaitsFix(bugUrl = "ES-10666") // This test uncovered an existing issue - public void testIndexBlockIsRemovedWhenAliasRequestFails() throws Exception { - createSystemIndexForDescriptor(INTERNAL_UNMANAGED); - ensureGreen(); - - // Block the alias request to simulate a failure - InternalTestCluster internalTestCluster = internalCluster(); - ActionFilters actionFilters = internalTestCluster.getInstance(ActionFilters.class, internalTestCluster.getMasterName()); - BlockingActionFilter blockingActionFilter = null; - for (ActionFilter filter : actionFilters.filters()) { - if (filter instanceof BlockingActionFilter) { - blockingActionFilter = (BlockingActionFilter) filter; - break; - } - } - assertNotNull("BlockingActionFilter should exist", blockingActionFilter); - blockingActionFilter.blockActions(TransportIndicesAliasesAction.NAME); - - // Start the migration - client().execute(PostFeatureUpgradeAction.INSTANCE, new PostFeatureUpgradeRequest(TEST_REQUEST_TIMEOUT)).get(); - - // Wait till the migration fails - assertBusy(() -> { - GetFeatureUpgradeStatusResponse statusResp = client().execute( - GetFeatureUpgradeStatusAction.INSTANCE, - new GetFeatureUpgradeStatusRequest(TEST_REQUEST_TIMEOUT) - ).get(); - logger.info(Strings.toString(statusResp)); - 
assertThat(statusResp.getUpgradeStatus(), equalTo(GetFeatureUpgradeStatusResponse.UpgradeStatus.ERROR)); - }); - - // Get the settings to see if the write block was removed - var allsettings = client().admin().indices().prepareGetSettings(INTERNAL_UNMANAGED.getIndexPattern()).get().getIndexToSettings(); - var internalUnmanagedOldIndexSettings = allsettings.get(".int-unman-old"); - var writeBlock = internalUnmanagedOldIndexSettings.get(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey()); - assertThat("Write block on old index should be removed on migration ERROR status", writeBlock, equalTo("false")); - - // Unblock the alias request - blockingActionFilter.blockActions(); - - // Retry the migration - client().execute(PostFeatureUpgradeAction.INSTANCE, new PostFeatureUpgradeRequest(TEST_REQUEST_TIMEOUT)).get(); - - // Ensure that the migration is successful after the alias request is unblocked - assertBusy(() -> { - GetFeatureUpgradeStatusResponse statusResp = client().execute( - GetFeatureUpgradeStatusAction.INSTANCE, - new GetFeatureUpgradeStatusRequest(TEST_REQUEST_TIMEOUT) - ).get(); - logger.info(Strings.toString(statusResp)); - assertThat(statusResp.getUpgradeStatus(), equalTo(GetFeatureUpgradeStatusResponse.UpgradeStatus.NO_MIGRATION_NEEDED)); - }); - } - public void testMigrationWillRunAfterError() throws Exception { createSystemIndexForDescriptor(INTERNAL_MANAGED); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java index 071e9b42752c0..69ab9f57d2be7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java @@ -77,17 +77,6 @@ public boolean hasErrors() { return errors; } - /** - * Get a list of all errors from the response. If there are no errors, an empty list is returned. - */ - public List getErrors() { - if (errors == false) { - return List.of(); - } else { - return actionResults.stream().filter(a -> a.getError() != null).map(AliasActionResult::getError).toList(); - } - } - /** * Build a response from a list of action results. Sets the errors boolean based * on whether an of the individual results contain an error. @@ -176,13 +165,6 @@ public static AliasActionResult buildSuccess(List indices, AliasActions return new AliasActionResult(indices, action, null); } - /** - * The error result if the action failed, null if the action succeeded. - */ - public ElasticsearchException getError() { - return error; - } - private int getStatus() { return error == null ? 
200 : error.status().getStatus(); } diff --git a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java index cdd466c567e8b..186618f3662fb 100644 --- a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java +++ b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java @@ -15,9 +15,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; -import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsClusterStateUpdateRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -34,6 +32,7 @@ import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.metadata.MetadataUpdateSettingsService; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -60,7 +59,6 @@ import java.util.stream.Collectors; import static org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction.NO_UPGRADE_REQUIRED_INDEX_VERSION; -import static org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock.WRITE; import static org.elasticsearch.cluster.metadata.IndexMetadata.State.CLOSE; import static org.elasticsearch.core.Strings.format; @@ -450,33 +448,12 @@ private void migrateSingleIndex(ClusterState clusterState, Consumer { - if (aliasesResponse.hasErrors()) { - var e = new ElasticsearchException("Aliases request had errors"); - for (var error : aliasesResponse.getErrors()) { - e.addSuppressed(error); - } - throw e; - } - logger.info( - "Successfully migrated old index [{}] to new index [{}] from feature [{}]", - oldIndexName, - migrationInfo.getNextIndexName(), - migrationInfo.getFeatureName() - ); - delegate2.onResponse(bulkByScrollResponse); - }, e -> { - logger.error( - () -> format( - "An error occurred while changing aliases and removing the old index [%s] from feature [%s]", - oldIndexName, - migrationInfo.getFeatureName() - ), - e - ); - removeReadOnlyBlockOnReindexFailure(oldIndex, delegate2, e); - })); + // Successful completion of reindexing - remove read only and delete old index + setWriteBlock( + oldIndex, + false, + delegate2.delegateFailureAndWrap(setAliasAndRemoveOldIndex(migrationInfo, bulkByScrollResponse)) + ); } }, e -> { logger.error( @@ -534,7 +511,10 @@ private void createIndex(SystemIndexMigrationInfo migrationInfo, ActionListener< ); } - private void setAliasAndRemoveOldIndex(SystemIndexMigrationInfo migrationInfo, ActionListener listener) { + private CheckedBiConsumer, AcknowledgedResponse, Exception> setAliasAndRemoveOldIndex( + SystemIndexMigrationInfo migrationInfo, + BulkByScrollResponse bulkByScrollResponse + ) { final IndicesAliasesRequestBuilder aliasesRequest = migrationInfo.createClient(baseClient).admin().indices().prepareAliases(); aliasesRequest.removeIndex(migrationInfo.getCurrentIndexName()); 
aliasesRequest.addAlias(migrationInfo.getNextIndexName(), migrationInfo.getCurrentIndexName()); @@ -553,42 +533,30 @@ private void setAliasAndRemoveOldIndex(SystemIndexMigrationInfo migrationInfo, A ); }); - aliasesRequest.execute(listener); + // Technically this callback might have a different cluster state, but it shouldn't matter - these indices shouldn't be changing + // while we're trying to migrate them. + return (listener, unsetReadOnlyResponse) -> aliasesRequest.execute( + listener.delegateFailureAndWrap((l, deleteIndexResponse) -> l.onResponse(bulkByScrollResponse)) + ); } /** - * Sets the write block on the index to the given value. + * Makes the index readonly if it's not set as a readonly yet */ private void setWriteBlock(Index index, boolean readOnlyValue, ActionListener listener) { - if (readOnlyValue) { - // Setting the Block with an AddIndexBlockRequest ensures all shards have accounted for the block and all - // in-flight writes are completed before returning. - baseClient.admin() - .indices() - .addBlock( - new AddIndexBlockRequest(WRITE, index.getName()).masterNodeTimeout(MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT), - listener.delegateFailureAndWrap((l, response) -> { - if (response.isAcknowledged() == false) { - throw new ElasticsearchException("Failed to acknowledge read-only block index request"); - } - l.onResponse(response); - }) - ); - } else { - // The only way to remove a Block is via a settings update. - final Settings readOnlySettings = Settings.builder().put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), false).build(); - metadataUpdateSettingsService.updateSettings( - new UpdateSettingsClusterStateUpdateRequest( - MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT, - TimeValue.ZERO, - readOnlySettings, - UpdateSettingsClusterStateUpdateRequest.OnExisting.OVERWRITE, - UpdateSettingsClusterStateUpdateRequest.OnStaticSetting.REJECT, - index - ), - listener - ); - } + final Settings readOnlySettings = Settings.builder().put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), readOnlyValue).build(); + + metadataUpdateSettingsService.updateSettings( + new UpdateSettingsClusterStateUpdateRequest( + MasterNodeRequest.INFINITE_MASTER_NODE_TIMEOUT, + TimeValue.ZERO, + readOnlySettings, + UpdateSettingsClusterStateUpdateRequest.OnExisting.OVERWRITE, + UpdateSettingsClusterStateUpdateRequest.OnStaticSetting.REJECT, + index + ), + listener + ); } private void reindex(SystemIndexMigrationInfo migrationInfo, ActionListener listener) { From be4f82d3d87041266d63074bb0baa739fe8b2612 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 28 Jan 2025 22:25:32 -0500 Subject: [PATCH 175/383] Revert "Mute org.elasticsearch.ingest.geoip.FullClusterRestartIT testGeoIpSystemFeaturesMigration {cluster=UPGRADED} #121115" (#121122) This reverts commit 3df200384f424558c7f41bf82e6202a3cdb2a4f7. 
--- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 3c4c555393957..c0cbd07db403d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -293,9 +293,6 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level} issue: https://github.com/elastic/elasticsearch/issues/120950 -- class: org.elasticsearch.ingest.geoip.FullClusterRestartIT - method: testGeoIpSystemFeaturesMigration {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/121115 # Examples: # From 30f35e337283cb56345388044a792e2b10d4b05f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 29 Jan 2025 17:32:29 +1100 Subject: [PATCH 176/383] Mute org.elasticsearch.xpack.shutdown.AllocationFailuresResetOnShutdownIT testResetAllocationFailuresOnNodeShutdown #121129 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index c0cbd07db403d..d3ddc4559a9df 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -293,6 +293,9 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=nodes.stats/11_indices_metrics/indices mappings exact count test for indices level} issue: https://github.com/elastic/elasticsearch/issues/120950 +- class: org.elasticsearch.xpack.shutdown.AllocationFailuresResetOnShutdownIT + method: testResetAllocationFailuresOnNodeShutdown + issue: https://github.com/elastic/elasticsearch/issues/121129 # Examples: # From 5c5c2b1d148d69799ea54aee2e32c254c5c8bacd Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 29 Jan 2025 17:47:40 +1100 Subject: [PATCH 177/383] Mute org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests testActivateProfileForJWT #120983 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index d3ddc4559a9df..19efcabf49273 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -296,6 +296,9 @@ tests: - class: org.elasticsearch.xpack.shutdown.AllocationFailuresResetOnShutdownIT method: testResetAllocationFailuresOnNodeShutdown issue: https://github.com/elastic/elasticsearch/issues/121129 +- class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests + method: testActivateProfileForJWT + issue: https://github.com/elastic/elasticsearch/issues/120983 # Examples: # From 523e9e0ce177c2a8951f268791d11c4a61279661 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 29 Jan 2025 17:57:53 +1100 Subject: [PATCH 178/383] Mute org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT test {yaml=cluster.health/20_request_timeout/cluster health request timeout waiting for active shards} #121130 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 19efcabf49273..8d6d493848fd1 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -299,6 +299,9 @@ tests: - class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests method: testActivateProfileForJWT issue: https://github.com/elastic/elasticsearch/issues/120983 +- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT + method: test {yaml=cluster.health/20_request_timeout/cluster health request timeout 
waiting for active shards} + issue: https://github.com/elastic/elasticsearch/issues/121130 # Examples: # From 8185cafaf26b40a9791850cd240ba3df20fee42d Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 29 Jan 2025 08:48:53 +0100 Subject: [PATCH 179/383] Emit deprecation warning when executing one of the rollup APIs (#113131) Relates to #112690 --- docs/build.gradle | 27 ++++++--- docs/changelog/113131.yaml | 10 ++++ .../reference/rollup/apis/delete-job.asciidoc | 1 + docs/reference/rollup/apis/get-job.asciidoc | 2 + docs/reference/rollup/apis/put-job.asciidoc | 1 + .../rollup/apis/rollup-caps.asciidoc | 4 ++ .../rollup/apis/rollup-index-caps.asciidoc | 3 + .../rollup/apis/rollup-search.asciidoc | 4 ++ docs/reference/rollup/apis/start-job.asciidoc | 3 +- docs/reference/rollup/apis/stop-job.asciidoc | 1 + .../rollup/migrating-to-downsampling.asciidoc | 1 + .../rollup/rollup-getting-started.asciidoc | 5 ++ .../rollup/rollup-search-limitations.asciidoc | 1 + .../test/rest/ESRestTestCase.java | 20 ++++++- x-pack/plugin/build.gradle | 2 + .../ml/integration/DatafeedJobsRestIT.java | 18 ++++-- .../elasticsearch/xpack/rollup/Rollup.java | 4 ++ .../TransportDeleteRollupJobAction.java | 9 ++- .../action/TransportGetRollupCapsAction.java | 8 +++ .../TransportGetRollupIndexCapsAction.java | 8 +++ .../action/TransportGetRollupJobAction.java | 8 +++ .../action/TransportPutRollupJobAction.java | 7 ++- .../action/TransportRollupSearchAction.java | 7 +++ .../action/TransportStartRollupAction.java | 14 +++++ .../action/TransportStopRollupAction.java | 8 +++ .../10_data_stream_resolvability.yml | 4 ++ .../rest-api-spec/test/rollup/delete_job.yml | 29 +++++++++- .../rest-api-spec/test/rollup/get_jobs.yml | 10 +++- .../test/rollup/get_rollup_caps.yml | 16 ++++- .../test/rollup/get_rollup_index_caps.yml | 32 +++++++++- .../rest-api-spec/test/rollup/put_job.yml | 23 ++++++++ .../test/rollup/rollup_search.yml | 58 ++++++++++++++++++- .../rest-api-spec/test/rollup/start_job.yml | 10 +++- .../rest-api-spec/test/rollup/stop_job.yml | 22 ++++++- .../xpack/restart/FullClusterRestartIT.java | 5 +- .../elasticsearch/multi_node/RollupIT.java | 8 +++ 36 files changed, 366 insertions(+), 27 deletions(-) create mode 100644 docs/changelog/113131.yaml diff --git a/docs/build.gradle b/docs/build.gradle index cdb879485ae3c..505bf2fb1ddfb 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -855,6 +855,9 @@ buildRestTests.setups['library'] = ''' ''' buildRestTests.setups['sensor_rollup_job'] = ''' + - requires: + test_runner_features: [ "allowed_warnings" ] + - do: indices.create: index: dummy-rollup-index @@ -885,9 +888,10 @@ buildRestTests.setups['sensor_rollup_job'] = ''' node: type: keyword - do: - raw: - method: PUT - path: _rollup/job/sensor + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." + rollup.put_job: + id: sensor body: > { "index_pattern": "sensor-*", @@ -917,6 +921,9 @@ buildRestTests.setups['sensor_rollup_job'] = ''' } ''' buildRestTests.setups['sensor_started_rollup_job'] = ''' + - requires: + test_runner_features: [ "allowed_warnings" ] + - do: indices.create: index: dummy-rollup-index @@ -966,9 +973,10 @@ buildRestTests.setups['sensor_started_rollup_job'] = ''' {"timestamp": 1516297294000, "temperature": 202, "voltage": 4.0, "node": "c"} - do: - raw: - method: PUT - path: _rollup/job/sensor + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." 
+ rollup.put_job: + id: sensor body: > { "index_pattern": "sensor-*", @@ -997,9 +1005,10 @@ buildRestTests.setups['sensor_started_rollup_job'] = ''' ] } - do: - raw: - method: POST - path: _rollup/job/sensor/_start + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." + rollup.start_job: + id: sensor ''' buildRestTests.setups['sensor_index'] = ''' diff --git a/docs/changelog/113131.yaml b/docs/changelog/113131.yaml new file mode 100644 index 0000000000000..684ec2e64fb5b --- /dev/null +++ b/docs/changelog/113131.yaml @@ -0,0 +1,10 @@ +pr: 113131 +summary: Emit deprecation warning when executing one of the rollup APIs +area: Rollup +type: deprecation +issues: [] +deprecation: + title: Emit deprecation warning when executing one of the rollup APIs + area: Rollup + details: Rollup is already deprecated since 8.11.0 via documentation and since 8.15.0 it is no longer possible to create new rollup jobs in clusters without rollup usage. This change updates the rollup APIs to emit a deprecation warning. + impact: Returning a deprecation warning when using one of the rollup APIs. diff --git a/docs/reference/rollup/apis/delete-job.asciidoc b/docs/reference/rollup/apis/delete-job.asciidoc index 03f5349e15d4f..59d4aa9b395db 100644 --- a/docs/reference/rollup/apis/delete-job.asciidoc +++ b/docs/reference/rollup/apis/delete-job.asciidoc @@ -86,6 +86,7 @@ If we have a rollup job named `sensor`, it can be deleted with: DELETE _rollup/job/sensor -------------------------------------------------- // TEST[setup:sensor_rollup_job] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] Which will return the response: diff --git a/docs/reference/rollup/apis/get-job.asciidoc b/docs/reference/rollup/apis/get-job.asciidoc index 9fff4d665f5fd..6138be8a015a5 100644 --- a/docs/reference/rollup/apis/get-job.asciidoc +++ b/docs/reference/rollup/apis/get-job.asciidoc @@ -95,6 +95,7 @@ job can be retrieved with: GET _rollup/job/sensor -------------------------------------------------- // TEST[setup:sensor_rollup_job] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] The API yields the following response: @@ -198,6 +199,7 @@ PUT _rollup/job/sensor2 <1> GET _rollup/job/_all <2> -------------------------------------------------- // TEST[setup:sensor_rollup_job] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] <1> We create a second job with name `sensor2` <2> Then request all jobs by using `_all` in the GetJobs API diff --git a/docs/reference/rollup/apis/put-job.asciidoc b/docs/reference/rollup/apis/put-job.asciidoc index a60f20a3de5bf..0aed61f629156 100644 --- a/docs/reference/rollup/apis/put-job.asciidoc +++ b/docs/reference/rollup/apis/put-job.asciidoc @@ -287,6 +287,7 @@ PUT _rollup/job/sensor } -------------------------------------------------- // TEST[setup:sensor_index] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] <1> This configuration enables date histograms to be used on the `timestamp` field and `terms` aggregations to be used on the `node` field. 
<2> This configuration defines metrics over two fields: `temperature` and diff --git a/docs/reference/rollup/apis/rollup-caps.asciidoc b/docs/reference/rollup/apis/rollup-caps.asciidoc index be1c3ed171a23..68fc5b0b20b5f 100644 --- a/docs/reference/rollup/apis/rollup-caps.asciidoc +++ b/docs/reference/rollup/apis/rollup-caps.asciidoc @@ -89,6 +89,7 @@ PUT _rollup/job/sensor } -------------------------------------------------- // TEST[setup:sensor_index] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] We can then retrieve the rollup capabilities of that index pattern (`sensor-*`) via the following command: @@ -98,6 +99,7 @@ via the following command: GET _rollup/data/sensor-* -------------------------------------------------- // TEST[continued] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] Which will yield the following response: @@ -170,6 +172,7 @@ We could also retrieve the same information with a request to `_all`: GET _rollup/data/_all -------------------------------------------------- // TEST[continued] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] But note that if we use the concrete index name (`sensor-1`), we'll retrieve no rollup capabilities: @@ -179,6 +182,7 @@ rollup capabilities: GET _rollup/data/sensor-1 -------------------------------------------------- // TEST[continued] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] [source,console-result] ---- diff --git a/docs/reference/rollup/apis/rollup-index-caps.asciidoc b/docs/reference/rollup/apis/rollup-index-caps.asciidoc index 830cc332e8f40..e9d8e5c886e5a 100644 --- a/docs/reference/rollup/apis/rollup-index-caps.asciidoc +++ b/docs/reference/rollup/apis/rollup-index-caps.asciidoc @@ -86,6 +86,7 @@ PUT _rollup/job/sensor } -------------------------------------------------- // TEST[setup:sensor_index] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] If at a later date, we'd like to determine what jobs and capabilities were stored in the `sensor_rollup` index, we can use the get rollup index API: @@ -95,6 +96,7 @@ stored in the `sensor_rollup` index, we can use the get rollup index API: GET /sensor_rollup/_rollup/data -------------------------------------------------- // TEST[continued] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] Note how we are requesting the concrete rollup index name (`sensor_rollup`) as the first part of the URL. This will yield the following response: @@ -170,3 +172,4 @@ instead of explicit indices: GET /*_rollup/_rollup/data -------------------------------------------------- // TEST[continued] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] diff --git a/docs/reference/rollup/apis/rollup-search.asciidoc b/docs/reference/rollup/apis/rollup-search.asciidoc index 088a74973806b..135fa79e8a375 100644 --- a/docs/reference/rollup/apis/rollup-search.asciidoc +++ b/docs/reference/rollup/apis/rollup-search.asciidoc @@ -111,6 +111,7 @@ PUT _rollup/job/sensor } -------------------------------------------------- // TEST[setup:sensor_index] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] 
This rolls up the `sensor-*` pattern and stores the results in `sensor_rollup`. To search this rolled up data, we need to use the `_rollup_search` endpoint. @@ -133,6 +134,7 @@ GET /sensor_rollup/_rollup_search -------------------------------------------------- // TEST[setup:sensor_prefab_data] // TEST[s/_rollup_search/_rollup_search?filter_path=took,timed_out,terminated_early,_shards,hits,aggregations/] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] The query is targeting the `sensor_rollup` data, since this contains the rollup data as configured in the job. A `max` aggregation has been used on the @@ -188,6 +190,7 @@ GET sensor_rollup/_rollup_search -------------------------------------------------- // TEST[continued] // TEST[catch:/illegal_argument_exception/] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] [source,console-result] ---- @@ -231,6 +234,7 @@ GET sensor-1,sensor_rollup/_rollup_search <1> -------------------------------------------------- // TEST[continued] // TEST[s/_rollup_search/_rollup_search?filter_path=took,timed_out,terminated_early,_shards,hits,aggregations/] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] <1> Note the URI now searches `sensor-1` and `sensor_rollup` at the same time When the search is executed, the rollup search endpoint does two things: diff --git a/docs/reference/rollup/apis/start-job.asciidoc b/docs/reference/rollup/apis/start-job.asciidoc index dbeed8b09d1c8..69cdc62f9640a 100644 --- a/docs/reference/rollup/apis/start-job.asciidoc +++ b/docs/reference/rollup/apis/start-job.asciidoc @@ -57,6 +57,7 @@ If we have already created a {rollup-job} named `sensor`, it can be started with POST _rollup/job/sensor/_start -------------------------------------------------- // TEST[setup:sensor_rollup_job] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] Which will return the response: @@ -65,4 +66,4 @@ Which will return the response: { "started": true } ----- \ No newline at end of file +---- diff --git a/docs/reference/rollup/apis/stop-job.asciidoc b/docs/reference/rollup/apis/stop-job.asciidoc index 8c0fd6ab2f3af..4d80b56667b83 100644 --- a/docs/reference/rollup/apis/stop-job.asciidoc +++ b/docs/reference/rollup/apis/stop-job.asciidoc @@ -82,6 +82,7 @@ the indexer has fully stopped. This is accomplished with the POST _rollup/job/sensor/_stop?wait_for_completion=true&timeout=10s -------------------------------------------------- // TEST[setup:sensor_started_rollup_job] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] The parameter blocks the API call from returning until either the job has moved to `STOPPED` or the specified time has elapsed. If the specified time elapses diff --git a/docs/reference/rollup/migrating-to-downsampling.asciidoc b/docs/reference/rollup/migrating-to-downsampling.asciidoc index de0089230cae2..995d2418b52a6 100644 --- a/docs/reference/rollup/migrating-to-downsampling.asciidoc +++ b/docs/reference/rollup/migrating-to-downsampling.asciidoc @@ -51,6 +51,7 @@ PUT _rollup/job/sensor } -------------------------------------------------- // TEST[setup:sensor_index] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] 
The equivalent <> setup that uses downsampling via DSL: diff --git a/docs/reference/rollup/rollup-getting-started.asciidoc b/docs/reference/rollup/rollup-getting-started.asciidoc index a2b3956c47f79..23288618e11f9 100644 --- a/docs/reference/rollup/rollup-getting-started.asciidoc +++ b/docs/reference/rollup/rollup-getting-started.asciidoc @@ -64,6 +64,7 @@ PUT _rollup/job/sensor } -------------------------------------------------- // TEST[setup:sensor_index] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] We give the job the ID of "sensor" (in the url: `PUT _rollup/job/sensor`), and tell it to rollup the index pattern `"sensor-*"`. This job will find and rollup any index that matches that pattern. Rollup summaries are then stored in the `"sensor_rollup"` index. @@ -143,6 +144,7 @@ To start the job, execute this command: POST _rollup/job/sensor/_start -------------------------------------------------- // TEST[setup:sensor_rollup_job] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] [discrete] ==== Searching the rolled results @@ -167,6 +169,7 @@ GET /sensor_rollup/_rollup_search } -------------------------------------------------- // TEST[setup:sensor_prefab_data] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] It's a simple aggregation that calculates the maximum of the `temperature` field. But you'll notice that it is being sent to the `sensor_rollup` index instead of the raw `sensor-*` indices. And you'll also notice that it is using the `_rollup_search` endpoint. Otherwise the syntax @@ -198,6 +201,7 @@ If you were to execute that query, you'd receive a result that looks like a norm ---- // TESTRESPONSE[s/"took" : 102/"took" : $body.$_path/] // TESTRESPONSE[s/"_shards" : \.\.\. /"_shards" : $body.$_path/] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] The only notable difference is that Rollup search results have zero `hits`, because we aren't really searching the original, live data any more. Otherwise it's identical syntax. @@ -244,6 +248,7 @@ GET /sensor_rollup/_rollup_search } -------------------------------------------------- // TEST[setup:sensor_prefab_data] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] Which returns a corresponding response: diff --git a/docs/reference/rollup/rollup-search-limitations.asciidoc b/docs/reference/rollup/rollup-search-limitations.asciidoc index bce90454a19ce..9135716d5eeb6 100644 --- a/docs/reference/rollup/rollup-search-limitations.asciidoc +++ b/docs/reference/rollup/rollup-search-limitations.asciidoc @@ -56,6 +56,7 @@ GET sensor_rollup/_rollup_search -------------------------------------------------- // TEST[setup:sensor_prefab_data] // TEST[catch:/illegal_argument_exception/] +// TEST[warning:The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information.] 
The response will tell you that the field and aggregation were not possible, because no rollup jobs were found which contained them: diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index bedddd4f381f5..5738ab60f47eb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -166,6 +166,20 @@ public abstract class ESRestTestCase extends ESTestCase { private static final Logger SUITE_LOGGER = LogManager.getLogger(ESRestTestCase.class); + private static final String EXPECTED_ROLLUP_WARNING_MESSAGE = + "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information."; + public static final RequestOptions.Builder ROLLUP_REQUESTS_OPTIONS = RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> { + // Either no warning, because of bwc integration test OR + // the expected warning, because on current version + if (warnings.isEmpty()) { + return false; + } else if (warnings.size() == 1 && EXPECTED_ROLLUP_WARNING_MESSAGE.equals(warnings.get(0))) { + return false; + } else { + return true; + } + }); + /** * Convert the entity from a {@link Response} into a map of maps. * Consumes the underlying HttpEntity, releasing any resources it may be holding. @@ -1305,7 +1319,9 @@ private static void wipeClusterSettings() throws IOException { private void wipeRollupJobs() throws IOException { final Response response; try { - response = adminClient().performRequest(new Request("GET", "/_rollup/job/_all")); + var request = new Request("GET", "/_rollup/job/_all"); + request.setOptions(ROLLUP_REQUESTS_OPTIONS); + response = adminClient().performRequest(request); } catch (ResponseException e) { // If we don't see the rollup endpoint (possibly because of running against an older ES version) we just bail if (e.getResponse().getStatusLine().getStatusCode() == RestStatus.NOT_FOUND.getStatus()) { @@ -1325,6 +1341,7 @@ private void wipeRollupJobs() throws IOException { @SuppressWarnings("unchecked") String jobId = (String) ((Map) jobConfig.get("config")).get("id"); Request request = new Request("POST", "/_rollup/job/" + jobId + "/_stop"); + request.setOptions(ROLLUP_REQUESTS_OPTIONS); setIgnoredErrorResponseCodes(request, RestStatus.NOT_FOUND); request.addParameter("wait_for_completion", "true"); request.addParameter("timeout", "10s"); @@ -1336,6 +1353,7 @@ private void wipeRollupJobs() throws IOException { @SuppressWarnings("unchecked") String jobId = (String) ((Map) jobConfig.get("config")).get("id"); Request request = new Request("DELETE", "/_rollup/job/" + jobId); + request.setOptions(ROLLUP_REQUESTS_OPTIONS); setIgnoredErrorResponseCodes(request, RestStatus.NOT_FOUND); // 404s imply someone was racing us to delete this logger.debug("deleting rollup job [{}]", jobId); adminClient().performRequest(request); diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 850dd4bbf0c59..07e4ee9294489 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -102,6 +102,8 @@ tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("esql/190_lookup_join/alias-pattern-multiple", "LOOKUP JOIN does not support index aliases for now") task.skipTest("esql/190_lookup_join/alias-pattern-single", "LOOKUP JOIN does not support index aliases for now") task.skipTest("esql/180_match_operator/match with 
disjunctions", "Disjunctions in full text functions work now") + // Expected deprecation warning to compat yaml tests: + task.addAllowedWarningRegex(".*rollup functionality will be removed in Elasticsearch.*") }) tasks.named('yamlRestCompatTest').configure { diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java index 804033ef531b9..52958f40aa268 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java @@ -1297,19 +1297,28 @@ public void testLookbackOnlyGivenAggregationsWithHistogramAndRollupIndex() throw } ] }"""); + createRollupRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); client().performRequest(createRollupRequest); - client().performRequest(new Request("POST", "/_rollup/job/" + rollupJobId + "/_start")); + var startRequest = new Request("POST", "/_rollup/job/" + rollupJobId + "/_start"); + startRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); + client().performRequest(startRequest); assertBusy(() -> { - Response getRollup = client().performRequest(new Request("GET", "/_rollup/job/" + rollupJobId)); + var getRequest = new Request("GET", "/_rollup/job/" + rollupJobId); + getRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); + Response getRollup = client().performRequest(getRequest); String body = EntityUtils.toString(getRollup.getEntity()); assertThat(body, containsString("\"job_state\":\"started\"")); assertThat(body, containsString("\"rollups_indexed\":4")); }, 60, TimeUnit.SECONDS); - client().performRequest(new Request("POST", "/_rollup/job/" + rollupJobId + "/_stop")); + var stopRequest = new Request("POST", "/_rollup/job/" + rollupJobId + "/_stop"); + stopRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); + client().performRequest(stopRequest); assertBusy(() -> { - Response getRollup = client().performRequest(new Request("GET", "/_rollup/job/" + rollupJobId)); + var getRequest = new Request("GET", "/_rollup/job/" + rollupJobId); + getRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); + Response getRollup = client().performRequest(getRequest); assertThat(EntityUtils.toString(getRollup.getEntity()), containsString("\"job_state\":\"stopped\"")); }, 60, TimeUnit.SECONDS); @@ -1826,6 +1835,7 @@ private Response createJobAndDataFeed(String jobId, String datafeedId) throws IO String rollupJobId = "rollup-" + jobId; Request createRollupRequest = new Request("PUT", "/_rollup/job/" + rollupJobId); + createRollupRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); createRollupRequest.setJsonEntity(""" { "index_pattern": "airline-data-aggs", diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java index 7ede898fa0425..965554023643c 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java @@ -70,6 +70,10 @@ public class Rollup extends Plugin implements ActionPlugin, PersistentTaskPlugin { + public static final String DEPRECATION_MESSAGE = + "The rollup functionality will be removed in Elasticsearch 10.0. 
See docs for more information."; + public static final String DEPRECATION_KEY = "rollup_removal"; + // Introduced in ES version 6.3 public static final int ROLLUP_VERSION_V1 = 1; // Introduced in ES Version 6.4 diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java index 0d5a9c86cc3b8..72036f82cdd74 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java @@ -16,6 +16,8 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.injection.guice.Inject; @@ -31,12 +33,17 @@ import java.util.List; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE; + public class TransportDeleteRollupJobAction extends TransportTasksAction< RollupJobTask, DeleteRollupJobAction.Request, DeleteRollupJobAction.Response, DeleteRollupJobAction.Response> { + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportDeleteRollupJobAction.class); + @Inject public TransportDeleteRollupJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { super( @@ -52,6 +59,7 @@ public TransportDeleteRollupJobAction(TransportService transportService, ActionF @Override protected void doExecute(Task task, DeleteRollupJobAction.Request request, ActionListener listener) { + DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE); final ClusterState state = clusterService.state(); final DiscoveryNodes nodes = state.nodes(); @@ -93,7 +101,6 @@ protected void taskOperation( RollupJobTask jobTask, ActionListener listener ) { - assert jobTask.getConfig().getId().equals(request.getId()); IndexerState state = ((RollupJobStatus) jobTask.getStatus()).getIndexerState(); if (state.equals(IndexerState.STOPPED)) { diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java index b4e468ac0bffe..39ef1b6f4ea0c 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java @@ -14,6 +14,8 @@ import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -32,8 +34,13 @@ import java.util.concurrent.Executor; import 
java.util.stream.Collectors; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE; + public class TransportGetRollupCapsAction extends HandledTransportAction { + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportGetRollupCapsAction.class); + private final ClusterService clusterService; private final Executor managementExecutor; @@ -53,6 +60,7 @@ public TransportGetRollupCapsAction(TransportService transportService, ClusterSe @Override protected void doExecute(Task task, GetRollupCapsAction.Request request, ActionListener listener) { + DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE); // Workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can managementExecutor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(request.getIndexPattern(), l))); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java index c2a81c6bb16ef..062d06a71c10a 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java @@ -14,6 +14,8 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -32,10 +34,15 @@ import java.util.concurrent.Executor; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE; + public class TransportGetRollupIndexCapsAction extends HandledTransportAction< GetRollupIndexCapsAction.Request, GetRollupIndexCapsAction.Response> { + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportGetRollupCapsAction.class); + private final ClusterService clusterService; private final IndexNameExpressionResolver resolver; private final Executor managementExecutor; @@ -66,6 +73,7 @@ protected void doExecute( GetRollupIndexCapsAction.Request request, ActionListener listener ) { + DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE); // Workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can managementExecutor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(request, l))); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java index 12cea1c305020..e52a595c0a1f9 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java @@ -16,6 +16,8 @@ import 
org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.injection.guice.Inject; @@ -34,12 +36,17 @@ import java.util.List; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE; + public class TransportGetRollupJobAction extends TransportTasksAction< RollupJobTask, GetRollupJobsAction.Request, GetRollupJobsAction.Response, GetRollupJobsAction.Response> { + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportGetRollupCapsAction.class); + @Inject public TransportGetRollupJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { super( @@ -55,6 +62,7 @@ public TransportGetRollupJobAction(TransportService transportService, ActionFilt @Override protected void doExecute(Task task, GetRollupJobsAction.Request request, ActionListener listener) { + DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE); final ClusterState state = clusterService.state(); final DiscoveryNodes nodes = state.nodes(); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java index d124d5014c7e1..a399253512503 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java @@ -63,10 +63,13 @@ import java.util.Set; import static org.elasticsearch.xpack.core.ClientHelper.assertNoAuthorizationHeader; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE; public class TransportPutRollupJobAction extends AcknowledgedTransportMasterNodeAction { private static final Logger LOGGER = LogManager.getLogger(TransportPutRollupJobAction.class); + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportPutRollupJobAction.class); private static final XContentParserConfiguration PARSER_CONFIGURATION = XContentParserConfiguration.EMPTY.withFiltering( null, Set.of("_doc._meta._rollup"), @@ -76,7 +79,6 @@ public class TransportPutRollupJobAction extends AcknowledgedTransportMasterNode private final PersistentTasksService persistentTasksService; private final Client client; - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TransportPutRollupJobAction.class); @Inject public TransportPutRollupJobAction( @@ -109,6 +111,7 @@ protected void masterOperation( ClusterState clusterState, ActionListener listener ) { + DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE); XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); checkForDeprecatedTZ(request); @@ -150,7 +153,7 @@ static void checkForDeprecatedTZ(PutRollupJobAction.Request request) { String timeZone = 
request.getConfig().getGroupConfig().getDateHistogram().getTimeZone(); String modernTZ = DateUtils.DEPRECATED_LONG_TIMEZONES.get(timeZone); if (modernTZ != null) { - deprecationLogger.warn( + DEPRECATION_LOGGER.warn( DeprecationCategory.PARSING, "deprecated_timezone", "Creating Rollup job [" diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index 34d788d5f094d..c9294c8080421 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -27,6 +27,8 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.index.query.BoolQueryBuilder; @@ -73,9 +75,13 @@ import java.util.stream.Collectors; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE; public class TransportRollupSearchAction extends TransportAction { + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportRollupSearchAction.class); + private final Client client; private final NamedWriteableRegistry registry; private final BigArrays bigArrays; @@ -115,6 +121,7 @@ public TransportRollupSearchAction( @Override protected void doExecute(Task task, SearchRequest request, ActionListener listener) { + DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE); String[] indices = resolver.concreteIndexNames(clusterService.state(), request); RollupSearchContext rollupSearchContext = separateIndices(indices, clusterService.state().getMetadata().indices()); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java index 24a04cac40092..aa09fb0cd9f1d 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java @@ -13,21 +13,29 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction; import org.elasticsearch.xpack.rollup.job.RollupJobTask; import java.util.List; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY; +import 
static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE; + public class TransportStartRollupAction extends TransportTasksAction< RollupJobTask, StartRollupJobAction.Request, StartRollupJobAction.Response, StartRollupJobAction.Response> { + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportStartRollupAction.class); + @Inject public TransportStartRollupAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { super( @@ -46,6 +54,12 @@ protected List processTasks(StartRollupJobAction.Request request) return TransportTaskHelper.doProcessTasks(request.getId(), taskManager); } + @Override + protected void doExecute(Task task, StartRollupJobAction.Request request, ActionListener listener) { + DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE); + super.doExecute(task, request, listener); + } + @Override protected void taskOperation( CancellableTask actionTask, diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStopRollupAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStopRollupAction.java index e4fe926f18feb..833e2dff9485d 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStopRollupAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStopRollupAction.java @@ -14,6 +14,8 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; import org.elasticsearch.injection.guice.Inject; @@ -29,12 +31,17 @@ import java.util.List; import java.util.function.BooleanSupplier; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_KEY; +import static org.elasticsearch.xpack.rollup.Rollup.DEPRECATION_MESSAGE; + public class TransportStopRollupAction extends TransportTasksAction< RollupJobTask, StopRollupJobAction.Request, StopRollupJobAction.Response, StopRollupJobAction.Response> { + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportStopRollupAction.class); + private final ThreadPool threadPool; @Inject @@ -63,6 +70,7 @@ protected List processTasks(StopRollupJobAction.Request request) @Override protected void doExecute(Task task, StopRollupJobAction.Request request, ActionListener listener) { + DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE); super.doExecute(task, request, listener); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml index f8c3a4d660fee..289b7ee3f93c6 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml @@ -148,6 +148,8 @@ - do: # Should not raise error + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." 
rollup.get_rollup_index_caps: index: "logs*" - do: @@ -473,6 +475,8 @@ index: simple-data-stream1 - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "simple-data-stream1" body: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/delete_job.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/delete_job.yml index bf1a91b5c81fa..9f23ba791b7b3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/delete_job.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/delete_job.yml @@ -1,6 +1,8 @@ setup: + - requires: + test_runner_features: [ "allowed_warnings" ] - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -48,11 +50,16 @@ setup: } ] } + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." --- "Test basic delete_job": - + - requires: + test_runner_features: ["allowed_warnings"] - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo @@ -93,11 +100,15 @@ setup: job_state: "stopped" - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.delete_job: id: foo - is_true: acknowledged - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo - match: @@ -107,6 +118,8 @@ setup: "Test delete job twice": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo @@ -147,11 +160,15 @@ setup: job_state: "stopped" - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.delete_job: id: foo - is_true: acknowledged - do: + allowed_warnings: + - The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information. rollup.get_jobs: id: foo - match: @@ -161,6 +178,8 @@ setup: "Test delete running job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo @@ -201,11 +220,15 @@ setup: job_state: "stopped" - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.start_job: id: foo - is_true: started - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: request rollup.delete_job: id: foo @@ -217,6 +240,8 @@ setup: "Test delete non-existent job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /the task with id \[does_not_exist\] doesn't exist/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml index ff99c39ef9afc..2f14a8d87954b 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml @@ -1,6 +1,6 @@ setup: - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -29,6 +29,8 @@ setup: "Test basic get_jobs": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -55,6 +57,8 @@ setup: - is_true: acknowledged - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo @@ -97,6 +101,8 @@ setup: "Test get with no jobs": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: "_all" @@ -106,6 +112,8 @@ setup: "Test get missing job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_caps.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_caps.yml index 834141343dcbc..61f91f0dd3cea 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_caps.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_caps.yml @@ -1,6 +1,6 @@ setup: - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -46,6 +46,8 @@ setup: my-id: { } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -78,6 +80,8 @@ setup: "Verify one job caps": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_caps: id: "foo" @@ -101,6 +105,8 @@ setup: "Verify two job caps": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -125,6 +131,8 @@ setup: ] } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_caps: id: "foo" @@ -160,6 +168,8 @@ setup: "Verify all caps": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -185,6 +195,8 @@ setup: } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. 
See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -210,6 +222,8 @@ setup: } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_caps: id: "_all" diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml index dca96eb325b87..4245d1efeed4d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml @@ -1,6 +1,6 @@ setup: - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -47,6 +47,8 @@ setup: my-id: { } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -79,6 +81,8 @@ setup: "Verify one job caps by rollup index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_index_caps: index: "foo_rollup" @@ -102,6 +106,8 @@ setup: "Verify two job caps by rollup index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -126,6 +132,8 @@ setup: ] } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_index_caps: index: "foo_rollup" @@ -162,6 +170,8 @@ setup: "Verify two different job caps by rollup index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -186,6 +196,8 @@ setup: ] } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_index_caps: index: "foo_rollup" @@ -209,6 +221,8 @@ setup: "Verify all job caps by rollup index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -233,6 +247,8 @@ setup: ] } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -258,6 +274,8 @@ setup: } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." 
rollup.get_rollup_index_caps: index: "_all" @@ -308,6 +326,8 @@ setup: "Verify job caps by rollup index comma delimited list": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -332,6 +352,8 @@ setup: ] } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -357,6 +379,8 @@ setup: } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_index_caps: index: "foo_rollup2,foo_rollup" @@ -407,6 +431,8 @@ setup: "Verify index pattern": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -431,6 +457,8 @@ setup: ] } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -456,6 +484,8 @@ setup: } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_rollup_index_caps: index: "*_rollup2" diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml index d45c13a2b8adb..6876bb6aff8eb 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml @@ -42,6 +42,8 @@ setup: --- "Test basic put_job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -68,6 +70,8 @@ setup: - is_true: acknowledged - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.get_jobs: id: foo @@ -116,6 +120,8 @@ setup: "Test put_job with existing name": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -142,6 +148,8 @@ setup: - is_true: acknowledged - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Cannot create rollup job \[foo\] because job was previously created \(existing metadata\)/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser @@ -173,6 +181,8 @@ setup: indices.create: index: non-rollup - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Rollup data cannot be added to existing indices that contain non-rollup data/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -203,6 +213,8 @@ setup: "Try to include headers": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /unknown field \[headers\]/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -235,6 +247,8 @@ setup: "Validation failures": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Could not find a \[numeric\] or \[date,date_nanos\] field with name \[field_doesnt_exist\] in any of the indices matching the index pattern/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -268,6 +282,8 @@ setup: index: dummy-rollup-index - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /new rollup jobs are not allowed in clusters that don't have any rollup usage, since rollup has been deprecated/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -297,6 +313,8 @@ setup: "Unknown Metric": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Unsupported metric \[does_not_exist\]/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -338,6 +356,7 @@ setup: - do: allowed_warnings: - "index [foo_rollup] matches multiple legacy templates [global, test], composable templates will only match a single template" + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -413,6 +432,8 @@ setup: reason: Comma delimited index pattern introduced in 8.2.0 - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -439,6 +460,8 @@ setup: - is_true: acknowledged - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." 
rollup.get_jobs: id: bar diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/rollup_search.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/rollup_search.yml index d3f21f16c3a30..65708235f30c7 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/rollup_search.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/rollup_search.yml @@ -1,6 +1,6 @@ setup: - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -29,6 +29,8 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.put_job: id: foo body: > @@ -139,6 +141,8 @@ setup: "Basic Search": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -166,6 +170,8 @@ setup: cluster_features: ["gte_v6.6.0"] reason: rest_total_hits_as_int was introduced in 6.6.0 - do: + allowed_warnings: + - The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information. rollup.rollup_search: index: "foo_rollup" body: @@ -191,6 +197,8 @@ setup: "Formatted Date Histo": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -217,6 +225,8 @@ setup: "Empty aggregation": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: rest_total_hits_as_int: true index: "foo_rollup" @@ -232,6 +242,8 @@ setup: "Empty aggregation with new response format": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -249,6 +261,8 @@ setup: "Search with Metric": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -282,6 +296,8 @@ setup: "Search with Query": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -317,6 +333,8 @@ setup: "Search with MatchAll and Second Job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -427,6 +445,8 @@ setup: - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -460,6 +480,8 @@ setup: "Search with Query and Second Job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser rollup.put_job: @@ -570,6 +592,8 @@ setup: - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -605,6 +629,8 @@ setup: "Search with Query and Second Job different intervals": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -714,6 +740,8 @@ setup: "_rollup.version": 1 - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" body: @@ -749,6 +777,8 @@ setup: "Wildcards matching single rollup index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup*" body: @@ -787,6 +817,8 @@ setup: type: integer - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -837,6 +869,8 @@ setup: name: rollup_alias - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "rollup_alias" body: @@ -875,6 +909,8 @@ setup: type: integer - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -908,6 +944,8 @@ setup: name: rollup_alias - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /RollupSearch currently only supports searching one rollup index at a time\./ rollup.rollup_search: index: "rollup_alias" @@ -943,6 +981,8 @@ setup: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser warnings: - "Creating Rollup job [tz] with timezone [Canada/Mountain], but [Canada/Mountain] has been deprecated by the IANA. Use [America/Edmonton] instead." + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.put_job: id: tz body: > @@ -1012,6 +1052,8 @@ setup: "_rollup.version": 2 - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "tz_rollup" body: @@ -1039,6 +1081,8 @@ setup: - match: { aggregations.histo.buckets.2.the_max.value: 3 } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "tz_rollup" body: @@ -1162,6 +1206,8 @@ setup: "_rollup.version": 2 - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." 
rollup.rollup_search: index: "tz_rollup" body: @@ -1190,6 +1236,8 @@ setup: - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "tz_rollup" body: @@ -1221,6 +1269,8 @@ setup: "Search with typed_keys": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." rollup.rollup_search: index: "foo_rollup" typed_keys: true @@ -1254,6 +1304,8 @@ setup: "Search error against live index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: bad_request rollup.rollup_search: index: "foo" @@ -1270,6 +1322,8 @@ setup: "Search error against rollup and live index": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: bad_request rollup.rollup_search: index: "foo*" @@ -1285,6 +1339,8 @@ setup: "Search error no matching indices": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Must specify at least one concrete index/ rollup.rollup_search: index: "bar*" diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/start_job.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/start_job.yml index 50e6c46016348..a5ded0c138385 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/start_job.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/start_job.yml @@ -1,6 +1,6 @@ setup: - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -25,6 +25,8 @@ setup: my-id: { } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -53,6 +55,8 @@ setup: "Test start nonexistent job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Task for Rollup Job \[does_not_exist\] not found/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -64,6 +68,8 @@ setup: "Test start job twice": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.start_job: @@ -71,6 +77,8 @@ setup: - is_true: started - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser rollup.start_job: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/stop_job.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/stop_job.yml index 187c190a9efef..8f746420e78ae 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/stop_job.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/stop_job.yml @@ -1,6 +1,6 @@ setup: - skip: - features: headers + features: ["allowed_warnings", "headers"] - do: indices.create: index: foo @@ -25,6 +25,8 @@ setup: my-id: { } - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.put_job: @@ -53,6 +55,8 @@ setup: "Test stop nonexistent job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." catch: /Task for Rollup Job \[does_not_exist\] not found/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -64,6 +68,8 @@ setup: "Test stop job twice": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.start_job: @@ -71,6 +77,8 @@ setup: - is_true: started - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.stop_job: @@ -78,6 +86,8 @@ setup: - is_true: stopped - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.stop_job: @@ -88,6 +98,8 @@ setup: "Test stop non-started job": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.stop_job: @@ -98,6 +110,8 @@ setup: --- "Test wait_for_completion default timeout": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.start_job: @@ -105,6 +119,8 @@ setup: - is_true: started - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.stop_job: @@ -115,6 +131,8 @@ setup: --- "Test wait_for_completion with custom timeout": - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." 
headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.start_job: @@ -122,6 +140,8 @@ setup: - is_true: started - do: + allowed_warnings: + - "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information." headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser rollup.stop_job: diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 516dd4759861f..762d8b4ac8655 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -430,6 +430,7 @@ public void testRollupAfterRestart() throws Exception { // create the rollup job final Request createRollupJobRequest = new Request("PUT", "/_rollup/job/rollup-job-test"); + createRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); createRollupJobRequest.setJsonEntity(""" { "index_pattern": "rollup-*", @@ -455,6 +456,7 @@ public void testRollupAfterRestart() throws Exception { // start the rollup job final Request startRollupJobRequest = new Request("POST", "/_rollup/job/rollup-job-test/_start"); + startRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); Map startRollupJobResponse = entityAsMap(client().performRequest(startRollupJobRequest)); assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE)); @@ -823,6 +825,7 @@ private void assertRollUpJob(final String rollupJob) throws Exception { // check that the rollup job is started using the RollUp API final Request getRollupJobRequest = new Request("GET", "_rollup/job/" + rollupJob); + getRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); Map getRollupJobResponse = entityAsMap(client().performRequest(getRollupJobRequest)); Map job = getJob(getRollupJobResponse, rollupJob); assertNotNull(job); @@ -865,7 +868,7 @@ private void assertRollUpJob(final String rollupJob) throws Exception { private void waitForRollUpJob(final String rollupJob, final Matcher expectedStates) throws Exception { assertBusy(() -> { final Request getRollupJobRequest = new Request("GET", "/_rollup/job/" + rollupJob); - + getRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); Response getRollupJobResponse = client().performRequest(getRollupJobRequest); assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); diff --git a/x-pack/qa/multi-node/src/javaRestTest/java/org/elasticsearch/multi_node/RollupIT.java b/x-pack/qa/multi-node/src/javaRestTest/java/org/elasticsearch/multi_node/RollupIT.java index e8fce8e513165..4afcf8c20344a 100644 --- a/x-pack/qa/multi-node/src/javaRestTest/java/org/elasticsearch/multi_node/RollupIT.java +++ b/x-pack/qa/multi-node/src/javaRestTest/java/org/elasticsearch/multi_node/RollupIT.java @@ -128,6 +128,7 @@ public void testBigRollup() throws Exception { // create the rollup job final Request createRollupJobRequest = new Request("PUT", "/_rollup/job/rollup-job-test"); + createRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); int pageSize = randomIntBetween(2, 50); // fast cron so test runs quickly createRollupJobRequest.setJsonEntity(Strings.format(""" @@ -154,11 +155,13 
@@ public void testBigRollup() throws Exception { ] }""", pageSize)); + assertWarnings(); var createRollupJobResponse = responseAsMap(client().performRequest(createRollupJobRequest)); assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE)); // start the rollup job final Request startRollupJobRequest = new Request("POST", "_rollup/job/rollup-job-test/_start"); + startRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); var startRollupJobResponse = responseAsMap(client().performRequest(startRollupJobRequest)); assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE)); @@ -167,6 +170,7 @@ public void testBigRollup() throws Exception { // Wait for the job to finish, by watching how many rollup docs we've indexed assertBusy(() -> { final Request getRollupJobRequest = new Request("GET", "_rollup/job/rollup-job-test"); + getRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); Response getRollupJobResponse = client().performRequest(getRollupJobRequest); assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); @@ -211,6 +215,7 @@ public void testBigRollup() throws Exception { var liveBody = responseAsMap(liveResponse); request = new Request("GET", "results-rollup/_rollup_search"); + request.setOptions(ROLLUP_REQUESTS_OPTIONS); request.setJsonEntity(jsonRequestBody); Response rollupResponse = client().performRequest(request); var rollupBody = responseAsMap(rollupResponse); @@ -223,6 +228,7 @@ public void testBigRollup() throws Exception { request = new Request("GET", "rollup-docs/_rollup_search"); request.setJsonEntity(jsonRequestBody); + request.setOptions(ROLLUP_REQUESTS_OPTIONS); Response liveRollupResponse = client().performRequest(request); var liveRollupBody = responseAsMap(liveRollupResponse); @@ -241,6 +247,7 @@ private void assertRollUpJob(final String rollupJob) throws Exception { // check that the rollup job is started using the RollUp API final Request getRollupJobRequest = new Request("GET", "_rollup/job/" + rollupJob); + getRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); var getRollupJobResponse = responseAsMap(client().performRequest(getRollupJobRequest)); Map job = getJob(getRollupJobResponse, rollupJob); if (job != null) { @@ -286,6 +293,7 @@ private void assertRollUpJob(final String rollupJob) throws Exception { private void waitForRollUpJob(final String rollupJob, String[] expectedStates) throws Exception { assertBusy(() -> { final Request getRollupJobRequest = new Request("GET", "_rollup/job/" + rollupJob); + getRollupJobRequest.setOptions(ROLLUP_REQUESTS_OPTIONS); Response getRollupJobResponse = client().performRequest(getRollupJobRequest); assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); From 63a890e30dba6416b762f5d65605fb69a0e53038 Mon Sep 17 00:00:00 2001 From: Kofi B Date: Wed, 29 Jan 2025 03:46:01 -0500 Subject: [PATCH 180/383] [DOCS] Upsert documentation clarification (#120684) Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> --- docs/reference/docs/update.asciidoc | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index ae9ae8fe73fc6..62201f5748b7d 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -307,23 +307,19 @@ POST test/_update/1 [discrete] ===== Upsert -If the document does not already exist, the contents of the `upsert` element -are inserted as 
a new document. If the document exists, the -`script` is executed: +An upsert operation lets you update an existing document or insert a new one if it doesn't exist, in a single request. + +In this example, if the product with ID `1` exists, its price will be updated to `100`. If the product does not exist, a new document with ID `1` and a price of `50` will be inserted. [source,console] ---- -POST test/_update/1 +POST /test/_update/1 { - "script": { - "source": "ctx._source.counter += params.count", - "lang": "painless", - "params": { - "count": 4 - } + "doc": { + "product_price": 100 }, "upsert": { - "counter": 1 + "product_price": 50 } } ---- From cb2e5cf6f788ad6cc15b5e5c8a317f31e686caa2 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Wed, 29 Jan 2025 00:46:22 -0800 Subject: [PATCH 181/383] Fix matching of half_float and scaled_float values in LogsDB tests (#121098) --- .../logsdb/datageneration/FieldType.java | 8 +++++++- .../datasource/DefaultMappingParametersHandler.java | 3 ++- .../datageneration/matchers/source/SourceMatcher.java | 11 ----------- 3 files changed, 9 insertions(+), 13 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java index 07744851aba3e..96b75f29382e2 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java @@ -13,9 +13,11 @@ import org.elasticsearch.logsdb.datageneration.fields.leaf.ByteFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.DoubleFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.FloatFieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.fields.leaf.HalfFloatFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.IntegerFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.KeywordFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.LongFieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.fields.leaf.ScaledFloatFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.ShortFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.UnsignedLongFieldDataGenerator; @@ -30,7 +32,9 @@ public enum FieldType { SHORT("short"), BYTE("byte"), DOUBLE("double"), - FLOAT("float"); + FLOAT("float"), + HALF_FLOAT("half_float"), + SCALED_FLOAT("scaled_float"); private final String name; @@ -48,6 +52,8 @@ public FieldDataGenerator generator(String fieldName, DataSource dataSource) { case BYTE -> new ByteFieldDataGenerator(fieldName, dataSource); case DOUBLE -> new DoubleFieldDataGenerator(fieldName, dataSource); case FLOAT -> new FloatFieldDataGenerator(fieldName, dataSource); + case HALF_FLOAT -> new HalfFloatFieldDataGenerator(fieldName, dataSource); + case SCALED_FLOAT -> new ScaledFloatFieldDataGenerator(fieldName, dataSource); }; } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java index db13867fe71ad..b639108ea6ad2 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java +++ 
b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java @@ -32,7 +32,8 @@ public DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceReques return new DataSourceResponse.LeafMappingParametersGenerator(switch (request.fieldType()) { case KEYWORD -> keywordMapping(request, map); - case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, UNSIGNED_LONG -> plain(map); + case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, HALF_FLOAT, UNSIGNED_LONG -> plain(map); + case SCALED_FLOAT -> scaledFloatMapping(map); }); } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java index eb62598712f03..d58d081e7c9f9 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java @@ -159,10 +159,6 @@ private Optional matchWithFieldSpecificMatcher(String fieldName, Li } } - if (sourceMatchesExactly(expectedFieldMapping, expectedValues)) { - return Optional.empty(); - } - var fieldSpecificMatcher = fieldSpecificMatchers.get(actualFieldType); if (fieldSpecificMatcher == null) { return Optional.empty(); @@ -177,13 +173,6 @@ private Optional matchWithFieldSpecificMatcher(String fieldName, Li return Optional.of(matched); } - // Checks for scenarios when source is stored exactly and therefore can be compared without special logic. - private boolean sourceMatchesExactly(MappingTransforms.FieldMapping mapping, List expectedValues) { - return mapping.parentMappingParameters().stream().anyMatch(m -> m.getOrDefault("enabled", "true").equals("false")) - || mapping.mappingParameters().getOrDefault("synthetic_source_keep", "none").equals("all") - || expectedValues.size() > 1 && mapping.mappingParameters().getOrDefault("synthetic_source_keep", "none").equals("arrays"); - } - private MatchResult matchWithGenericMatcher(List actualValues, List expectedValues) { var genericListMatcher = new ListEqualMatcher( actualMappings, From 2258911112092f6dc05143366fa1095aaf6e0cc1 Mon Sep 17 00:00:00 2001 From: Kofi B Date: Wed, 29 Jan 2025 03:46:39 -0500 Subject: [PATCH 182/383] [DOCS] Search multiple indices added info (#120572) * [DOCS] Search multiple indices added info * Update docs/reference/search/search-your-data/search-multiple-indices.asciidoc Co-authored-by: George Wallace * Update docs/reference/search/search-your-data/search-multiple-indices.asciidoc Co-authored-by: George Wallace * Update docs/reference/search/search-your-data/search-multiple-indices.asciidoc Co-authored-by: George Wallace * Update docs/reference/search/search-your-data/search-multiple-indices.asciidoc Co-authored-by: George Wallace * Update docs/reference/search/search-your-data/search-multiple-indices.asciidoc Co-authored-by: George Wallace --------- Co-authored-by: George Wallace --- .../search-multiple-indices.asciidoc | 36 ++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/docs/reference/search/search-your-data/search-multiple-indices.asciidoc b/docs/reference/search/search-your-data/search-multiple-indices.asciidoc index 4052097e73c91..f02e10640a6d9 100644 --- a/docs/reference/search/search-your-data/search-multiple-indices.asciidoc +++ b/docs/reference/search/search-your-data/search-multiple-indices.asciidoc @@ -1,5 +1,11 @@ 
[[search-multiple-indices]] -=== Search multiple data streams and indices +=== Search multiple data streams and indices using a query + +There are two main methods for searching across multiple data streams and indices in {es}: + +* *Query Level*: Directly specify indices in the search request path or use index patterns to target multiple indices. + +* *Index level*: Use <>, which act as pointers to one or more backing indices, enabling logical grouping and management of indices. To search multiple data streams and indices, add them as comma-separated values in the <>'s request path. @@ -39,6 +45,34 @@ GET /my-index-*/_search ---- // TEST[setup:my_index] +You can exclude specific indices from a search. The request will retrieve data from all indices starting with `my-index-`, except for `my-index-01`. + +[source,console] +---- +GET /my-index-*/_search +{ + "query": { + "bool": { + "must": [ + { + "match": { + "user.id": "kimchy" + } + } + ], + "must_not": [ + { + "terms": { + "_index": ["my-index-01"] + } + } + ] + } + } +} +---- +// TEST[setup:my_index] + To search all data streams and indices in a cluster, omit the target from the request path. Alternatively, you can use `_all` or `*`. From 5bcd170a0bde0c155a36751e034381de3606208c Mon Sep 17 00:00:00 2001 From: Kofi B Date: Wed, 29 Jan 2025 03:48:25 -0500 Subject: [PATCH 183/383] [DOCS] Added additional context to page (#120569) --- .../search/search-your-data/sort-search-results.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/search/search-your-data/sort-search-results.asciidoc b/docs/reference/search/search-your-data/sort-search-results.asciidoc index 3e32573d7d8ae..e0c1cf910ac1f 100644 --- a/docs/reference/search/search-your-data/sort-search-results.asciidoc +++ b/docs/reference/search/search-your-data/sort-search-results.asciidoc @@ -5,6 +5,8 @@ Allows you to add one or more sorts on specific fields. Each sort can be reversed as well. The sort is defined on a per field level, with special field name for `_score` to sort by score, and `_doc` to sort by index order. +To optimize sorting performance, avoid sorting by <>fields; instead, use <> or <> fields. Additionally, you can improve performance by enabling pre-sorting at index time using <>. While this can speed up query-time sorting, it may reduce indexing performance and increase memory usage. + Assuming the following index mapping: [source,console] From 843f1b8dfc5eb4bdabe4e07b9611a97ef74f03ba Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Wed, 29 Jan 2025 10:57:40 +0100 Subject: [PATCH 184/383] ESQL: Fix LOOKUP JOIN with limit (#120411) For queries like ... | LOOKUP JOIN lookup_index ON key | LIMIT 10 the limit cannot be simply pushed past the join - but it can be duplicated past the join. In such cases, leave an explicit Limit plan node downstream from the Join (in addition to pushing down the limit), but mark it in a way that prevents being duplicated multiple times (which would cause infinite loops). Align the logic for MV_EXPAND, which used to, instead, internalize a limit into the MvExpand node. 
--- .../xpack/esql/EsqlTestUtils.java | 17 + .../src/main/resources/lookup-join.csv-spec | 69 +- .../xpack/esql/action/EsqlCapabilities.java | 5 + .../xpack/esql/analysis/Analyzer.java | 3 +- .../rules/logical/AddDefaultTopN.java | 2 +- .../logical/PushDownAndCombineLimits.java | 64 +- .../xpack/esql/plan/logical/Limit.java | 55 +- .../xpack/esql/plan/logical/MvExpand.java | 24 +- .../esql/planner/mapper/MapperUtils.java | 12 +- .../LocalLogicalPlanOptimizerTests.java | 18 +- .../optimizer/LogicalPlanOptimizerTests.java | 633 ++++++++++++++---- .../esql/parser/StatementParserTests.java | 3 +- .../plan/AbstractNodeSerializationTests.java | 2 +- .../plan/logical/LimitSerializationTests.java | 23 +- 14 files changed, 707 insertions(+), 223 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index f3b2ea0d864ff..217bf6692aa27 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -66,6 +66,7 @@ import org.elasticsearch.xpack.esql.parser.QueryParam; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.Limit; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; @@ -111,6 +112,7 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; +import static org.elasticsearch.test.ESTestCase.assertEquals; import static org.elasticsearch.test.ESTestCase.between; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; import static org.elasticsearch.test.ESTestCase.randomBoolean; @@ -403,6 +405,21 @@ public static T as(Object node, Class type) { return type.cast(node); } + public static Limit asLimit(Object node, Integer limitLiteral) { + return asLimit(node, limitLiteral, null); + } + + public static Limit asLimit(Object node, Integer limitLiteral, Boolean duplicated) { + Limit limit = as(node, Limit.class); + if (limitLiteral != null) { + assertEquals(as(limit.limit(), Literal.class).value(), limitLiteral); + } + if (duplicated != null) { + assertEquals(limit.duplicated(), duplicated); + } + return limit; + } + public static Map loadMapping(String name) { return LoadMapping.loadMapping(name); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index dbeaedd7e0416..d4a98fdc70a9a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -273,6 +273,58 @@ emp_no:integer 10001 ; + +lookupIndexInFromRepeatedRowBug +// Test for https://github.com/elastic/elasticsearch/issues/118852 +required_capability: join_lookup_v12 +FROM languages_lookup_non_unique_key +| WHERE language_code == 1 +| LOOKUP JOIN languages_lookup ON language_code +| KEEP language_code, language_name, country +| SORT language_code, language_name, country +; + +language_code:integer | language_name:keyword | country:text +1 | English 
| Canada +1 | English | United Kingdom +1 | English | United States of America +1 | English | null +; + +nonUniqueRightKeyOnTheCoordinatorLateLimit +required_capability: join_lookup_v12 +required_capability: join_lookup_fix_limit_pushdown + +FROM employees +| SORT emp_no +| EVAL language_code = emp_no % 10 +| LOOKUP JOIN languages_lookup_non_unique_key ON language_code +| KEEP emp_no, language_code, language_name, country +| LIMIT 4 +| SORT country +; + +emp_no:integer | language_code:integer | language_name:keyword | country:text +10001 | 1 | English | Canada +10001 | 1 | null | United Kingdom +10001 | 1 | English | United States of America +10001 | 1 | English | null +; + +nonUniqueRightKeyLateLimitWithEmptyRelation +required_capability: join_lookup_v12 +required_capability: join_lookup_fix_limit_pushdown + +ROW language_code = 1 +| WHERE language_code != 1 +| LOOKUP JOIN languages_lookup_non_unique_key ON language_code +| LIMIT 1 +| KEEP language_code, language_name +; + +language_code:integer | language_name:keyword +; + ########################################################################### # null and multi-value behavior with languages_lookup_non_unique_key index ########################################################################### @@ -1278,23 +1330,6 @@ ignoreOrder:true 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 | QA | null ; -lookupIndexInFromRepeatedRowBug -// Test for https://github.com/elastic/elasticsearch/issues/118852 -required_capability: join_lookup_v12 -FROM languages_lookup_non_unique_key -| WHERE language_code == 1 -| LOOKUP JOIN languages_lookup ON language_code -| KEEP language_code, language_name, country -| SORT language_code, language_name, country -; - -language_code:integer | language_name:keyword | country:text -1 | English | Canada -1 | English | United Kingdom -1 | English | United States of America -1 | English | null -; - lookupIndexQuoting required_capability: join_lookup_v12 FROM languages_lookup_non_unique_key diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 548fb30a51355..b8b911afe7fd4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -711,6 +711,11 @@ public enum Cap { */ JOIN_LOOKUP_SKIP_MV_ON_LOOKUP_KEY(JOIN_LOOKUP_V12.isEnabled()), + /** + * Fix pushing down LIMIT past LOOKUP JOIN in case of multiple matching join keys. + */ + JOIN_LOOKUP_FIX_LIMIT_PUSHDOWN(JOIN_LOOKUP_V12.isEnabled()), + /** * Fix for https://github.com/elastic/elasticsearch/issues/117054 */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 4f5ff35b84054..fd98b2717eae0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -549,8 +549,7 @@ private LogicalPlan resolveMvExpand(MvExpand p, List childrenOutput) resolved, resolved.resolved() ? 
new ReferenceAttribute(resolved.source(), resolved.name(), resolved.dataType(), resolved.nullable(), null, false) - : resolved, - p.limit() + : resolved ); } return p; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/AddDefaultTopN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/AddDefaultTopN.java index 02815d45d2896..ef091686a4b38 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/AddDefaultTopN.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/AddDefaultTopN.java @@ -34,7 +34,7 @@ * | sort first_name * | limit 15 *

- * PushDownAndCombineLimits rule will copy the "limit 15" after "sort emp_no" if there is no filter on the expanded values + * {@link PushDownAndCombineLimits} will copy the "limit 15" after "sort emp_no" if there is no filter on the expanded values * OR if there is no sort between "limit" and "mv_expand". * But, since this type of query has such a filter, the "sort emp_no" will have no limit when it reaches the current rule. */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java index 969a6bb713eca..dca4dfbd533df 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; -import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -21,6 +20,9 @@ import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; +import java.util.ArrayList; +import java.util.List; + public final class PushDownAndCombineLimits extends OptimizerRules.ParameterizedOptimizerRule { public PushDownAndCombineLimits() { @@ -31,27 +33,18 @@ public PushDownAndCombineLimits() { public LogicalPlan rule(Limit limit, LogicalOptimizerContext ctx) { if (limit.child() instanceof Limit childLimit) { var limitSource = limit.limit(); - var l1 = (int) limitSource.fold(ctx.foldCtx()); - var l2 = (int) childLimit.limit().fold(ctx.foldCtx()); - return new Limit(limit.source(), Literal.of(limitSource, Math.min(l1, l2)), childLimit.child()); + var parentLimitValue = (int) limitSource.fold(ctx.foldCtx()); + var childLimitValue = (int) childLimit.limit().fold(ctx.foldCtx()); + // We want to preserve the duplicated() value of the smaller limit, so we'll use replaceChild. + return parentLimitValue < childLimitValue ? limit.replaceChild(childLimit.child()) : childLimit; } else if (limit.child() instanceof UnaryPlan unary) { if (unary instanceof Eval || unary instanceof Project || unary instanceof RegexExtract || unary instanceof Enrich) { return unary.replaceChild(limit.replaceChild(unary.child())); - } else if (unary instanceof MvExpand mvx) { + } else if (unary instanceof MvExpand) { // MV_EXPAND can increase the number of rows, so we cannot just push the limit down // (we also have to preserve the LIMIT afterwards) - // - // To avoid infinite loops, ie. - // | MV_EXPAND | LIMIT -> | LIMIT | MV_EXPAND | LIMIT -> ... | MV_EXPAND | LIMIT - // we add an inner limit to MvExpand and just push down the existing limit, ie. - // | MV_EXPAND | LIMIT N -> | LIMIT N | MV_EXPAND (with limit N) - var limitSource = limit.limit(); - var limitVal = (int) limitSource.fold(ctx.foldCtx()); - Integer mvxLimit = mvx.limit(); - if (mvxLimit == null || mvxLimit > limitVal) { - mvx = new MvExpand(mvx.source(), mvx.child(), mvx.target(), mvx.expanded(), limitVal); - } - return mvx.replaceChild(limit.replaceChild(mvx.child())); + // To avoid repeating this infinitely, we have to set duplicated = true. 
+ return duplicateLimitAsFirstGrandchild(limit); } // check if there's a 'visible' descendant limit lower than the current one // and if so, align the current limit since it adds no value @@ -62,17 +55,15 @@ public LogicalPlan rule(Limit limit, LogicalOptimizerContext ctx) { var l1 = (int) limit.limit().fold(ctx.foldCtx()); var l2 = (int) descendantLimit.limit().fold(ctx.foldCtx()); if (l2 <= l1) { - return new Limit(limit.source(), Literal.of(limit.limit(), l2), limit.child()); + return limit.withLimit(descendantLimit.limit()); } } } - } else if (limit.child() instanceof Join join) { - if (join.config().type() == JoinTypes.LEFT) { - // NOTE! This is only correct because our LEFT JOINs preserve the number of rows from the left hand side. - // This deviates from SQL semantics. In SQL, multiple matches on the right hand side lead to multiple rows in the output. - // For us, multiple matches on the right hand side are collected into multi-values. - return join.replaceChildren(limit.replaceChild(join.left()), join.right()); - } + } else if (limit.child() instanceof Join join && join.config().type() == JoinTypes.LEFT) { + // Left joins increase the number of rows if any join key has multiple matches from the right hand side. + // Therefore, we cannot simply push down the limit - but we can add another limit before the join. + // To avoid repeating this infinitely, we have to set duplicated = true. + return duplicateLimitAsFirstGrandchild(limit); } return limit; } @@ -100,4 +91,27 @@ private static Limit descendantLimit(UnaryPlan unary) { } return null; } + + /** + * Duplicate the limit past its child if it wasn't duplicated yet. The duplicate is placed on top of its leftmost grandchild. + * Idempotent. (Sets {@link Limit#duplicated()} to {@code true} on the limit that remains at the top.) + */ + private static Limit duplicateLimitAsFirstGrandchild(Limit limit) { + if (limit.duplicated()) { + return limit; + } + + List grandChildren = limit.child().children(); + LogicalPlan firstGrandChild = grandChildren.getFirst(); + LogicalPlan newFirstGrandChild = limit.replaceChild(firstGrandChild); + + List newGrandChildren = new ArrayList<>(); + newGrandChildren.add(newFirstGrandChild); + for (int i = 1; i < grandChildren.size(); i++) { + newGrandChildren.add(grandChildren.get(i)); + } + + LogicalPlan newChild = limit.child().replaceChildren(newGrandChildren); + return limit.replaceChild(newChild).withDuplicated(true); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java index ea64b7687f4c0..09879e47859c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java @@ -21,21 +21,52 @@ public class Limit extends UnaryPlan { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Limit", Limit::new); private final Expression limit; - + /** + * Important for optimizations. This should be {@code false} in most cases, which allows this instance to be duplicated past a child + * plan node that increases the number of rows, like for LOOKUP JOIN and MV_EXPAND. + * Needs to be set to {@code true} in {@link org.elasticsearch.xpack.esql.optimizer.rules.logical.PushDownAndCombineLimits} to avoid + * infinite loops from adding a duplicate of the limit past the child over and over again. 
+ */ + private final transient boolean duplicated; + + /** + * Default way to create a new instance. Do not use this to copy an existing instance, as this sets {@link Limit#duplicated} to + * {@code false}. + */ public Limit(Source source, Expression limit, LogicalPlan child) { + this(source, limit, child, false); + } + + public Limit(Source source, Expression limit, LogicalPlan child, boolean duplicated) { super(source, child); this.limit = limit; + this.duplicated = duplicated; } + /** + * Omits reading {@link Limit#duplicated}, c.f. {@link Limit#writeTo}. + */ private Limit(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(LogicalPlan.class)); + this( + Source.readFrom((PlanStreamInput) in), + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(LogicalPlan.class), + false + ); } + /** + * Omits serializing {@link Limit#duplicated} because when sent to a data node, this should always be {@code false}. + * That's because if it's true, this means a copy of this limit was pushed down below an MvExpand or Join, and thus there's + * another pipeline breaker further upstream - we're already on the coordinator node. + */ @Override public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); out.writeNamedWriteable(limit()); out.writeNamedWriteable(child()); + // Let's make sure we notice during tests if we ever serialize a duplicated Limit. + assert duplicated == false; } @Override @@ -45,18 +76,30 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, Limit::new, limit, child()); + return NodeInfo.create(this, Limit::new, limit, child(), duplicated); } @Override public Limit replaceChild(LogicalPlan newChild) { - return new Limit(source(), limit, newChild); + return new Limit(source(), limit, newChild, duplicated); } public Expression limit() { return limit; } + public Limit withLimit(Expression limit) { + return new Limit(source(), limit, child(), duplicated); + } + + public boolean duplicated() { + return duplicated; + } + + public Limit withDuplicated(boolean duplicated) { + return new Limit(source(), limit, child(), duplicated); + } + @Override public String commandName() { return "LIMIT"; @@ -69,7 +112,7 @@ public boolean expressionsResolved() { @Override public int hashCode() { - return Objects.hash(limit, child()); + return Objects.hash(limit, child(), duplicated); } @Override @@ -83,6 +126,6 @@ public boolean equals(Object obj) { Limit other = (Limit) obj; - return Objects.equals(limit, other.limit) && Objects.equals(child(), other.child()); + return Objects.equals(limit, other.limit) && Objects.equals(child(), other.child()) && (duplicated == other.duplicated); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java index 949e4906e5033..9b0168ddd739d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java @@ -27,19 +27,13 @@ public class MvExpand extends UnaryPlan { private final NamedExpression target; private final Attribute expanded; - private final Integer limit; private List output; public MvExpand(Source source, LogicalPlan child, NamedExpression target, Attribute expanded) { - this(source, child, target, expanded, 
null); - } - - public MvExpand(Source source, LogicalPlan child, NamedExpression target, Attribute expanded, Integer limit) { super(source, child); this.target = target; this.expanded = expanded; - this.limit = limit; } private MvExpand(StreamInput in) throws IOException { @@ -47,8 +41,7 @@ private MvExpand(StreamInput in) throws IOException { Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(LogicalPlan.class), in.readNamedWriteable(NamedExpression.class), - in.readNamedWriteable(Attribute.class), - null // we only need this on the coordinator + in.readNamedWriteable(Attribute.class) ); } @@ -58,7 +51,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeNamedWriteable(child()); out.writeNamedWriteable(target()); out.writeNamedWriteable(expanded()); - assert limit == null; } @Override @@ -86,10 +78,6 @@ public Attribute expanded() { return expanded; } - public Integer limit() { - return limit; - } - @Override protected AttributeSet computeReferences() { return target.references(); @@ -105,8 +93,8 @@ public boolean expressionsResolved() { } @Override - public UnaryPlan replaceChild(LogicalPlan newChild) { - return new MvExpand(source(), newChild, target, expanded, limit); + public MvExpand replaceChild(LogicalPlan newChild) { + return new MvExpand(source(), newChild, target, expanded); } @Override @@ -119,12 +107,12 @@ public List output() { @Override protected NodeInfo info() { - return NodeInfo.create(this, MvExpand::new, child(), target, expanded, limit); + return NodeInfo.create(this, MvExpand::new, child(), target, expanded); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), target, expanded, limit); + return Objects.hash(super.hashCode(), target, expanded); } @Override @@ -133,6 +121,6 @@ public boolean equals(Object obj) { return false; } MvExpand other = ((MvExpand) obj); - return Objects.equals(target, other.target) && Objects.equals(expanded, other.expanded) && Objects.equals(limit, other.limit); + return Objects.equals(target, other.target) && Objects.equals(expanded, other.expanded); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java index b8f539ea307c9..f358a77a08aec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java @@ -12,9 +12,6 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -34,7 +31,6 @@ import org.elasticsearch.xpack.esql.plan.physical.EvalExec; import org.elasticsearch.xpack.esql.plan.physical.FilterExec; import org.elasticsearch.xpack.esql.plan.physical.GrokExec; -import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; 
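Taken together, the planner changes above replace MvExpand's ad-hoc inner limit with a single mechanism: the rule copies the LIMIT below the row-multiplying operator (MV_EXPAND or LOOKUP JOIN) and flags the surviving top copy as duplicated so a later pass leaves it alone. The toy sketch below is plain Java with invented names — it is not Elasticsearch code and not part of this patch — and only illustrates why marking the survivor makes the rewrite idempotent.

// LimitDuplicationSketch.java -- illustrative toy model, NOT Elasticsearch code; every name here is invented.
public final class LimitDuplicationSketch {

    // A minimal plan node: `duplicated` mirrors the new transient flag on Limit.
    record Plan(String name, boolean duplicated, Plan child) {}

    // Mimics the rewrite: copy an unmarked LIMIT below a row-multiplying node and mark the survivor.
    static Plan pushDownLimit(Plan plan) {
        if (plan.name().equals("LIMIT") && plan.duplicated() == false
            && plan.child() != null && plan.child().name().equals("MV_EXPAND")) {
            Plan expand = plan.child();
            Plan copiedBelow = new Plan("LIMIT", false, expand.child());                   // the pushed-down duplicate
            return new Plan("LIMIT", true, new Plan("MV_EXPAND", false, copiedBelow));     // marked survivor on top
        }
        return plan; // already marked (or pattern does not apply): nothing to do
    }

    public static void main(String[] args) {
        Plan plan = new Plan("LIMIT", false, new Plan("MV_EXPAND", false, new Plan("SOURCE", false, null)));
        Plan once = pushDownLimit(plan);
        Plan twice = pushDownLimit(once);
        // Records compare structurally, so this prints true: a second application is a no-op.
        System.out.println(once.equals(twice));
    }
}

Without such a flag, a second pass would see the same LIMIT-above-MV_EXPAND shape and duplicate the limit again, which is exactly the infinite-loop scenario the old MvExpand inner limit was working around.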
@@ -99,13 +95,7 @@ static PhysicalPlan mapUnary(UnaryPlan p, PhysicalPlan child) { } if (p instanceof MvExpand mvExpand) { - MvExpandExec result = new MvExpandExec(mvExpand.source(), child, mvExpand.target(), mvExpand.expanded()); - if (mvExpand.limit() != null) { - // MvExpand could have an inner limit - // see PushDownAndCombineLimits rule - return new LimitExec(result.source(), result, new Literal(Source.EMPTY, mvExpand.limit(), DataType.INTEGER)); - } - return result; + return new MvExpandExec(mvExpand.source(), child, mvExpand.target(), mvExpand.expanded()); } return unsupported(p); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index 310d680cfbf41..c9821aea343bf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -66,6 +66,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.THREE; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TWO; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.asLimit; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getFieldAttribute; import static org.elasticsearch.xpack.esql.EsqlTestUtils.greaterThanOf; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; @@ -75,7 +76,6 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -196,10 +196,11 @@ public void testMissingFieldInSort() { /** * Expects - * EsqlProject[[first_name{f}#9, last_name{r}#18]] - * \_MvExpand[last_name{f}#12,last_name{r}#18,1000] - * \_Limit[1000[INTEGER]] - * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] + * EsqlProject[[first_name{f}#7, last_name{r}#17]] + * \_Limit[1000[INTEGER],true] + * \_MvExpand[last_name{f}#10,last_name{r}#17] + * \_Limit[1000[INTEGER],false] + * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] 
*/ public void testMissingFieldInMvExpand() { var plan = plan(""" @@ -215,9 +216,9 @@ public void testMissingFieldInMvExpand() { var projections = project.projections(); assertThat(Expressions.names(projections), contains("first_name", "last_name")); - var mvExpand = as(project.child(), MvExpand.class); - assertThat(mvExpand.limit(), equalTo(1000)); - var limit2 = as(mvExpand.child(), Limit.class); + var limit1 = asLimit(project.child(), 1000, true); + var mvExpand = as(limit1.child(), MvExpand.class); + var limit2 = asLimit(mvExpand.child(), 1000, false); as(limit2.child(), EsRelation.class); } @@ -269,7 +270,6 @@ protected NodeInfo info() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/110150") public void testMissingFieldInNewCommand() { var testStats = statsForMissingField("last_name"); localPlan( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 7ceaaa740b802..c80e374540d09 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -149,6 +149,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.THREE; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TWO; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.asLimit; import static org.elasticsearch.xpack.esql.EsqlTestUtils.emptySource; import static org.elasticsearch.xpack.esql.EsqlTestUtils.fieldAttribute; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getFieldAttribute; @@ -1315,6 +1316,8 @@ public void testCombineLimits() { } public void testPushdownLimitsPastLeftJoin() { + var rule = new PushDownAndCombineLimits(); + var leftChild = emptySource(); var rightChild = new LocalRelation(Source.EMPTY, List.of(fieldAttribute()), LocalSupplier.EMPTY); assertNotEquals(leftChild, rightChild); @@ -1329,9 +1332,16 @@ public void testPushdownLimitsPastLeftJoin() { var limit = new Limit(EMPTY, L(10), join); - var optimizedPlan = new PushDownAndCombineLimits().rule(limit, logicalOptimizerCtx); + var optimizedPlan = rule.apply(limit, logicalOptimizerCtx); + + assertEquals( + new Limit(limit.source(), limit.limit(), join.replaceChildren(limit.replaceChild(join.left()), join.right()), true), + optimizedPlan + ); - assertEquals(join.replaceChildren(limit.replaceChild(join.left()), join.right()), optimizedPlan); + var optimizedTwice = rule.apply(optimizedPlan, logicalOptimizerCtx); + // We mustn't create the limit after the JOIN multiple times when the rule is applied multiple times, that'd lead to infinite loops. + assertEquals(optimizedPlan, optimizedTwice); } public void testMultipleCombineLimits() { @@ -1851,10 +1861,11 @@ public void testDontCombineOrderByThroughMvExpand() { /** * Expected - * MvExpand[x{r}#4,x{r}#18,1000] - * \_EsqlProject[[first_name{f}#9 AS x]] - * \_Limit[1000[INTEGER]] - * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] + * Limit[1000[INTEGER],true] + * \_MvExpand[x{r}#4,x{r}#19] + * \_EsqlProject[[first_name{f}#9 AS x]] + * \_Limit[1000[INTEGER],false] + * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] 
*/ public void testCopyDefaultLimitPastMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -1864,20 +1875,44 @@ public void testCopyDefaultLimitPastMvExpand() { | mv_expand x """); - var mvExpand = as(plan, MvExpand.class); - assertThat(mvExpand.limit(), equalTo(1000)); + var limit = asLimit(plan, 1000, true); + var mvExpand = as(limit.child(), MvExpand.class); var keep = as(mvExpand.child(), EsqlProject.class); - var limitPastMvExpand = as(keep.child(), Limit.class); - assertThat(limitPastMvExpand.limit().fold(FoldContext.small()), equalTo(1000)); + var limitPastMvExpand = asLimit(keep.child(), 1000, false); as(limitPastMvExpand.child(), EsRelation.class); } /** * Expected - * MvExpand[first_name{f}#7,first_name{r}#16,10] - * \_EsqlProject[[first_name{f}#7, last_name{f}#10]] - * \_Limit[1[INTEGER]] - * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] + * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] + * |_EsqlProject[[languages{f}#10 AS language_code]] + * | \_Limit[1000[INTEGER],false] + * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] + */ + public void testCopyDefaultLimitPastLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + from test + | rename languages AS language_code + | keep language_code + | lookup join languages_lookup ON language_code + """); + + var limit = asLimit(plan, 1000, true); + var join = as(limit.child(), Join.class); + var keep = as(join.left(), EsqlProject.class); + var limitPastMvExpand = asLimit(keep.child(), 1000, false); + as(limitPastMvExpand.child(), EsRelation.class); + } + + /** + * Expected + * Limit[10[INTEGER],true] + * \_MvExpand[first_name{f}#7,first_name{r}#17] + * \_EsqlProject[[first_name{f}#7, last_name{f}#10]] + * \_Limit[1[INTEGER],false] + * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] */ public void testDontPushDownLimitPastMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -1885,28 +1920,56 @@ public void testDontPushDownLimitPastMvExpand() { | limit 1 | keep first_name, last_name | mv_expand first_name - | limit 10"""); + | limit 10 + """); - var mvExpand = as(plan, MvExpand.class); - assertThat(mvExpand.limit(), equalTo(10)); + var limit = asLimit(plan, 10, true); + var mvExpand = as(limit.child(), MvExpand.class); var project = as(mvExpand.child(), EsqlProject.class); - var limit = as(project.child(), Limit.class); - assertThat(limit.limit().fold(FoldContext.small()), equalTo(1)); - as(limit.child(), EsRelation.class); + var limit2 = asLimit(project.child(), 1, false); + as(limit2.child(), EsRelation.class); } /** * Expected - * EsqlProject[[emp_no{f}#19, first_name{r}#29, languages{f}#22, lll{r}#9, salary{r}#30]] - * \_TopN[[Order[salary{r}#30,DESC,FIRST]],5[INTEGER]] - * \_MvExpand[salary{f}#24,salary{r}#30,5] - * \_Eval[[languages{f}#22 + 5[INTEGER] AS lll]] - * \_Limit[5[INTEGER]] - * \_Filter[languages{f}#22 > 1[INTEGER]] - * \_MvExpand[first_name{f}#20,first_name{r}#29,10] - * \_TopN[[Order[emp_no{f}#19,DESC,FIRST]],10[INTEGER]] - * \_Filter[emp_no{f}#19 ≤ 10006[INTEGER]] - * \_EsRelation[test][_meta_field{f}#25, emp_no{f}#19, first_name{f}#20, ..] 
+ * Limit[10[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#19]] + * |_EsqlProject[[languages{f}#11 AS language_code, last_name{f}#12]] + * | \_Limit[1[INTEGER],false] + * | \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20] + */ + public void testDontPushDownLimitPastLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + from test + | limit 1 + | rename languages AS language_code + | keep language_code, last_name + | lookup join languages_lookup on language_code + | limit 10 + """); + + var limit = asLimit(plan, 10, true); + var join = as(limit.child(), Join.class); + var project = as(join.left(), EsqlProject.class); + var limit2 = asLimit(project.child(), 1, false); + as(limit2.child(), EsRelation.class); + } + + /** + * Expected + * EsqlProject[[emp_no{f}#19, first_name{r}#30, languages{f}#22, lll{r}#9, salary{r}#31]] + * \_TopN[[Order[salary{r}#31,DESC,FIRST]],5[INTEGER]] + * \_Limit[5[INTEGER],true] + * \_MvExpand[salary{f}#24,salary{r}#31] + * \_Eval[[languages{f}#22 + 5[INTEGER] AS lll]] + * \_Limit[5[INTEGER],false] + * \_Filter[languages{f}#22 > 1[INTEGER]] + * \_Limit[10[INTEGER],true] + * \_MvExpand[first_name{f}#20,first_name{r}#30] + * \_TopN[[Order[emp_no{f}#19,DESC,FIRST]],10[INTEGER]] + * \_Filter[emp_no{f}#19 ≤ 10006[INTEGER]] + * \_EsRelation[test][_meta_field{f}#25, emp_no{f}#19, first_name{f}#20, ..] */ public void testMultipleMvExpandWithSortAndLimit() { LogicalPlan plan = optimizedPlan(""" @@ -1921,25 +1984,86 @@ public void testMultipleMvExpandWithSortAndLimit() { | limit 5 | sort first_name | keep emp_no, first_name, languages, lll, salary - | sort salary desc"""); + | sort salary desc + """); var keep = as(plan, EsqlProject.class); var topN = as(keep.child(), TopN.class); assertThat(topN.limit().fold(FoldContext.small()), equalTo(5)); assertThat(orderNames(topN), contains("salary")); - var mvExp = as(topN.child(), MvExpand.class); - assertThat(mvExp.limit(), equalTo(5)); + var limit5Before = asLimit(topN.child(), 5, true); + var mvExp = as(limit5Before.child(), MvExpand.class); var eval = as(mvExp.child(), Eval.class); - var limit5 = as(eval.child(), Limit.class); + var limit5 = asLimit(eval.child(), 5, false); var filter = as(limit5.child(), Filter.class); - mvExp = as(filter.child(), MvExpand.class); - assertThat(mvExp.limit(), equalTo(10)); + var limit10Before = asLimit(filter.child(), 10, true); + mvExp = as(limit10Before.child(), MvExpand.class); topN = as(mvExp.child(), TopN.class); assertThat(topN.limit().fold(FoldContext.small()), equalTo(10)); filter = as(topN.child(), Filter.class); as(filter.child(), EsRelation.class); } + /** + * Expected + * EsqlProject[[emp_no{f}#24, first_name{f}#25, languages{f}#27, lll{r}#11, salary{f}#29, language_name{f}#38]] + * \_TopN[[Order[salary{f}#29,DESC,FIRST]],5[INTEGER]] + * \_Limit[5[INTEGER],true] + * \_Join[LEFT,[language_code{r}#14],[language_code{r}#14],[language_code{f}#37]] + * |_Project[[_meta_field{f}#30, emp_no{f}#24, first_name{f}#25, gender{f}#26, hire_date{f}#31, job{f}#32, job.raw{f}#33, l + * anguages{f}#27, last_name{f}#28, long_noidx{f}#34, salary{f}#29, language_name{f}#36, lll{r}#11, salary{f}#29 AS language_code]] + * | \_Eval[[languages{f}#27 + 5[INTEGER] AS lll]] + * | \_Limit[5[INTEGER],false] + * | \_Filter[languages{f}#27 > 1[INTEGER]] + * | \_Limit[10[INTEGER],true] + * | 
\_Join[LEFT,[language_code{r}#6],[language_code{r}#6],[language_code{f}#35]] + * | |_Project[[_meta_field{f}#30, emp_no{f}#24, first_name{f}#25, gender{f}#26, hire_date{f}#31, job{f}#32, + * | | | job.raw{f}#33, languages{f}#27, last_name{f}#28, long_noidx{f}#34, salary{f}#29, + * | | | languages{f}#27 AS language_code]] + * | | \_TopN[[Order[emp_no{f}#24,DESC,FIRST]],10[INTEGER]] + * | | \_Filter[emp_no{f}#24 ≤ 10006[INTEGER]] + * | | \_EsRelation[test][_meta_field{f}#30, emp_no{f}#24, first_name{f}#25, ..] + * | \_EsRelation[languages_lookup][LOOKUP][language_code{f}#35, language_name{f}#36] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#37, language_name{f}#38] + */ + public void testMultipleLookupJoinWithSortAndLimit() { + LogicalPlan plan = optimizedPlan(""" + from test + | where emp_no <= 10006 + | sort emp_no desc + | eval language_code = languages + | lookup join languages_lookup on language_code + | limit 10 + | where languages > 1 + | eval lll = languages + 5 + | eval language_code = salary::integer + | lookup join languages_lookup on language_code + | limit 5 + | sort first_name + | keep emp_no, first_name, languages, lll, salary, language_name + | sort salary desc + """); + + var keep = as(plan, EsqlProject.class); + var topN = as(keep.child(), TopN.class); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(5)); + assertThat(orderNames(topN), contains("salary")); + var limit5Before = asLimit(topN.child(), 5, true); + var join = as(limit5Before.child(), Join.class); + var project = as(join.left(), Project.class); + var eval = as(project.child(), Eval.class); + var limit5 = asLimit(eval.child(), 5, false); + var filter = as(limit5.child(), Filter.class); + var limit10Before = asLimit(filter.child(), 10, true); + join = as(limit10Before.child(), Join.class); + project = as(join.left(), Project.class); + topN = as(project.child(), TopN.class); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(10)); + assertThat(orderNames(topN), contains("emp_no")); + filter = as(topN.child(), Filter.class); + as(filter.child(), EsRelation.class); + } + /** * Expected * EsqlProject[[emp_no{f}#350, first_name{f}#351, salary{f}#352]] @@ -2038,12 +2162,13 @@ public void testDontPushDownLimitPastAggregate_AndMvExpand() { * TODO: Push down the filter correctly https://github.com/elastic/elasticsearch/issues/115311 * * Expected - * Limit[5[INTEGER]] - * \_Filter[ISNOTNULL(first_name{r}#22)] - * \_Aggregate[STANDARD,[first_name{r}#22],[MAX(salary{f}#17,true[BOOLEAN]) AS max_s, first_name{r}#22]] - * \_MvExpand[first_name{f}#13,first_name{r}#22,50] - * \_Limit[50[INTEGER]] - * \_EsRelation[test][_meta_field{f}#18, emp_no{f}#12, first_name{f}#13, ..] + * Limit[5[INTEGER],false] + * \_Filter[ISNOTNULL(first_name{r}#23)] + * \_Aggregate[STANDARD,[first_name{r}#23],[MAX(salary{f}#17,true[BOOLEAN]) AS max_s, first_name{r}#23]] + * \_Limit[50[INTEGER],true] + * \_MvExpand[first_name{f}#13,first_name{r}#23] + * \_Limit[50[INTEGER],false] + * \_EsRelation[test][_meta_field{f}#18, emp_no{f}#12, first_name{f}#13, ..] 
*/ public void testPushDown_TheRightLimit_PastMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -2055,14 +2180,48 @@ public void testPushDown_TheRightLimit_PastMvExpand() { | where first_name is not null | limit 5"""); - var limit = as(plan, Limit.class); - assertThat(limit.limit().fold(FoldContext.small()), equalTo(5)); + var limit = asLimit(plan, 5, false); var filter = as(limit.child(), Filter.class); var agg = as(filter.child(), Aggregate.class); - var mvExp = as(agg.child(), MvExpand.class); - assertThat(mvExp.limit(), equalTo(50)); - limit = as(mvExp.child(), Limit.class); - assertThat(limit.limit().fold(FoldContext.small()), equalTo(50)); + var limit50Before = asLimit(agg.child(), 50, true); + var mvExp = as(limit50Before.child(), MvExpand.class); + limit = asLimit(mvExp.child(), 50, false); + as(limit.child(), EsRelation.class); + } + + /** + * TODO: Push down the filter correctly https://github.com/elastic/elasticsearch/issues/115311 + * + * Expected + * Limit[5[INTEGER],false] + * \_Filter[ISNOTNULL(first_name{f}#15)] + * \_Aggregate[STANDARD,[first_name{f}#15],[MAX(salary{f}#19,true[BOOLEAN]) AS max_s, first_name{f}#15]] + * \_Limit[50[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#25]] + * |_EsqlProject[[_meta_field{f}#20, emp_no{f}#14, first_name{f}#15, gender{f}#16, hire_date{f}#21, job{f}#22, job.raw{f}#23, l + * anguages{f}#17 AS language_code, last_name{f}#18, long_noidx{f}#24, salary{f}#19]] + * | \_Limit[50[INTEGER],false] + * | \_EsRelation[test][_meta_field{f}#20, emp_no{f}#14, first_name{f}#15, ..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#25] + */ + public void testPushDown_TheRightLimit_PastLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + from test + | rename languages as language_code + | lookup join languages_lookup on language_code + | limit 50 + | keep emp_no, first_name, salary + | stats max_s = max(salary) by first_name + | where first_name is not null + | limit 5"""); + + var limit = asLimit(plan, 5, false); + var filter = as(limit.child(), Filter.class); + var agg = as(filter.child(), Aggregate.class); + var limit50Before = asLimit(agg.child(), 50, true); + var join = as(limit50Before.child(), Join.class); + var project = as(join.left(), Project.class); + limit = asLimit(project.child(), 50, false); as(limit.child(), EsRelation.class); } @@ -2131,10 +2290,11 @@ public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedField_ResultT /** * Expected * - * MvExpand[first_name{f}#7,first_name{r}#16,10] - * \_TopN[[Order[emp_no{f}#6,DESC,FIRST]],10[INTEGER]] - * \_Filter[emp_no{f}#6 ≤ 10006[INTEGER]] - * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] + * Limit[10[INTEGER],true] + * \_MvExpand[first_name{f}#7,first_name{r}#17] + * \_TopN[[Order[emp_no{f}#6,DESC,FIRST]],10[INTEGER]] + * \_Filter[emp_no{f}#6 ≤ 10006[INTEGER]] + * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] 
*/ public void testFilterWithSortBeforeMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -2144,8 +2304,8 @@ public void testFilterWithSortBeforeMvExpand() { | mv_expand first_name | limit 10"""); - var mvExp = as(plan, MvExpand.class); - assertThat(mvExp.limit(), equalTo(10)); + var limit = asLimit(plan, 10, true); + var mvExp = as(limit.child(), MvExpand.class); var topN = as(mvExp.child(), TopN.class); assertThat(topN.limit().fold(FoldContext.small()), equalTo(10)); assertThat(orderNames(topN), contains("emp_no")); @@ -2153,6 +2313,36 @@ public void testFilterWithSortBeforeMvExpand() { as(filter.child(), EsRelation.class); } + /** + * Expected + * Limit[10[INTEGER],true] + * \_Join[LEFT,[language_code{r}#6],[language_code{r}#6],[language_code{f}#19]] + * |_EsqlProject[[_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, gender{f}#10, hire_date{f}#15, job{f}#16, job.raw{f}#17, lan + * guages{f}#11 AS language_code, last_name{f}#12, long_noidx{f}#18, salary{f}#13]] + * | \_TopN[[Order[emp_no{f}#8,DESC,FIRST]],10[INTEGER]] + * | \_Filter[emp_no{f}#8 ≤ 10006[INTEGER]] + * | \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20] + */ + public void testFilterWithSortBeforeLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + from test + | where emp_no <= 10006 + | sort emp_no desc + | rename languages as language_code + | lookup join languages_lookup on language_code + | limit 10"""); + + var limit = asLimit(plan, 10, true); + var join = as(limit.child(), Join.class); + var project = as(join.left(), Project.class); + var topN = as(project.child(), TopN.class); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(10)); + assertThat(orderNames(topN), contains("emp_no")); + var filter = as(topN.child(), Filter.class); + as(filter.child(), EsRelation.class); + } + /** * Expected * @@ -2184,12 +2374,17 @@ public void testMultiMvExpand_SortDownBelow() { /** * Expected * - * MvExpand[c{r}#7,c{r}#16,10000] - * \_EsqlProject[[c{r}#7, a{r}#3]] - * \_TopN[[Order[a{r}#3,ASC,FIRST]],7300[INTEGER]] - * \_MvExpand[b{r}#5,b{r}#15,7300] - * \_Limit[7300[INTEGER]] - * \_Row[[null[NULL] AS a, 123[INTEGER] AS b, 234[INTEGER] AS c]] + * Limit[10000[INTEGER],true] + * \_MvExpand[c{r}#7,c{r}#16] + * \_EsqlProject[[c{r}#7, a{r}#3]] + * \_TopN[[Order[a{r}#3,ASC,FIRST]],7300[INTEGER]] + * \_Limit[7300[INTEGER],true] + * \_MvExpand[b{r}#5,b{r}#15] + * \_Limit[7300[INTEGER],false] + * \_LocalRelation[[a{r}#3, b{r}#5, c{r}#7],[ConstantNullBlock[positions=1], + * IntVectorBlock[vector=ConstantIntVector[positions=1, value=123]], + * IntVectorBlock[vector=ConstantIntVector[positions=1, value=234]]]] + * */ public void testLimitThenSortBeforeMvExpand() { LogicalPlan plan = optimizedPlan(""" @@ -2200,15 +2395,53 @@ public void testLimitThenSortBeforeMvExpand() { | sort a NULLS FIRST | mv_expand c"""); - var mvExpand = as(plan, MvExpand.class); - assertThat(mvExpand.limit(), equalTo(10000)); + var limit10kBefore = asLimit(plan, 10000, true); + var mvExpand = as(limit10kBefore.child(), MvExpand.class); var project = as(mvExpand.child(), EsqlProject.class); var topN = as(project.child(), TopN.class); assertThat(topN.limit().fold(FoldContext.small()), equalTo(7300)); assertThat(orderNames(topN), contains("a")); - mvExpand = as(topN.child(), MvExpand.class); - var limit = as(mvExpand.child(), Limit.class); - assertThat(limit.limit().fold(FoldContext.small()), equalTo(7300)); + var limit7300Before = asLimit(topN.child(), 
7300, true); + mvExpand = as(limit7300Before.child(), MvExpand.class); + var limit = asLimit(mvExpand.child(), 7300, false); + as(limit.child(), LocalRelation.class); + } + + /** + * Expects + * Limit[10000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#14],[language_code{r}#14],[language_code{f}#18]] + * |_EsqlProject[[c{r}#7 AS language_code, a{r}#3]] + * | \_TopN[[Order[a{r}#3,ASC,FIRST]],7300[INTEGER]] + * | \_Limit[7300[INTEGER],true] + * | \_Join[LEFT,[language_code{r}#5],[language_code{r}#5],[language_code{f}#16]] + * | |_Limit[7300[INTEGER],false] + * | | \_LocalRelation[[a{r}#3, language_code{r}#5, c{r}#7],[ConstantNullBlock[positions=1], + * IntVectorBlock[vector=ConstantIntVector[positions=1, value=123]], + * IntVectorBlock[vector=ConstantIntVector[positions=1, value=234]]]] + * | \_EsRelation[languages_lookup][LOOKUP][language_code{f}#16] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] + */ + public void testLimitThenSortBeforeLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + row a = null, language_code = 123, c = 234 + | lookup join languages_lookup on language_code + | limit 7300 + | keep c, a + | sort a NULLS FIRST + | rename c as language_code + | lookup join languages_lookup on language_code + """); + + var limit10kBefore = asLimit(plan, 10000, true); + var join = as(limit10kBefore.child(), Join.class); + var project = as(join.left(), EsqlProject.class); + var topN = as(project.child(), TopN.class); + assertThat(topN.limit().fold(FoldContext.small()), equalTo(7300)); + assertThat(orderNames(topN), contains("a")); + var limit7300Before = asLimit(topN.child(), 7300, true); + join = as(limit7300Before.child(), Join.class); + var limit = asLimit(join.left(), 7300, false); as(limit.child(), LocalRelation.class); } @@ -2341,27 +2574,51 @@ public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedFieldAlias() /** * Expected: - * MvExpand[a{r}#1402,a{r}#1406,1000] - * \_TopN[[Order[a{r}#1402,ASC,LAST]],1000[INTEGER]] - * \_Row[[1[INTEGER] AS a]] + * Limit[1000[INTEGER],true] + * \_MvExpand[a{r}#3,a{r}#7] + * \_TopN[[Order[a{r}#3,ASC,LAST]],1000[INTEGER]] + * \_LocalRelation[[a{r}#3],[IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]]]] */ public void testSortMvExpand() { LogicalPlan plan = optimizedPlan(""" row a = 1 | sort a - | mv_expand a"""); + | mv_expand a + """); - var expand = as(plan, MvExpand.class); - assertThat(expand.limit(), equalTo(1000)); + var limit = asLimit(plan, 1000, true); + var expand = as(limit.child(), MvExpand.class); var topN = as(expand.child(), TopN.class); var row = as(topN.child(), LocalRelation.class); } /** * Expected: - * MvExpand[emp_no{f}#5,emp_no{r}#15,20] - * \_TopN[[Order[emp_no{f}#5,ASC,LAST]],20[INTEGER]] - * \_EsRelation[test][_meta_field{f}#11, emp_no{f}#5, first_name{f}#6, ge..] 
+ * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#3],[language_code{r}#3],[language_code{f}#6]] + * |_TopN[[Order[language_code{r}#3,ASC,LAST]],1000[INTEGER]] + * | \_LocalRelation[[language_code{r}#3],[IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]]]] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#6, language_name{f}#7] + */ + public void testSortLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + row language_code = 1 + | sort language_code + | lookup join languages_lookup on language_code + """); + + var limit = asLimit(plan, 1000, true); + var join = as(limit.child(), Join.class); + var topN = as(join.left(), TopN.class); + var row = as(topN.child(), LocalRelation.class); + } + + /** + * Expected: + * Limit[20[INTEGER],true] + * \_MvExpand[emp_no{f}#5,emp_no{r}#16] + * \_TopN[[Order[emp_no{f}#5,ASC,LAST]],20[INTEGER]] + * \_EsRelation[test][_meta_field{f}#11, emp_no{f}#5, first_name{f}#6, ge..] */ public void testSortMvExpandLimit() { LogicalPlan plan = optimizedPlan(""" @@ -2370,8 +2627,8 @@ public void testSortMvExpandLimit() { | mv_expand emp_no | limit 20"""); - var expand = as(plan, MvExpand.class); - assertThat(expand.limit(), equalTo(20)); + var limit = asLimit(plan, 20, true); + var expand = as(limit.child(), MvExpand.class); var topN = as(expand.child(), TopN.class); assertThat(topN.limit().fold(FoldContext.small()), is(20)); var row = as(topN.child(), EsRelation.class); @@ -2379,9 +2636,37 @@ public void testSortMvExpandLimit() { /** * Expected: - * MvExpand[b{r}#5,b{r}#9,1000] - * \_Limit[1000[INTEGER]] - * \_Row[[1[INTEGER] AS a, -15[INTEGER] AS b]] + * Limit[20[INTEGER],true] + * \_Join[LEFT,[language_code{r}#5],[language_code{r}#5],[language_code{f}#18]] + * |_EsqlProject[[_meta_field{f}#13, emp_no{f}#7 AS language_code, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, jo + * b.raw{f}#16, languages{f}#10, last_name{f}#11, long_noidx{f}#17, salary{f}#12]] + * | \_TopN[[Order[emp_no{f}#7,ASC,LAST]],20[INTEGER]] + * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] 
+ * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] + */ + public void testSortLookupJoinLimit() { + LogicalPlan plan = optimizedPlan(""" + from test + | sort emp_no + | rename emp_no as language_code + | lookup join languages_lookup on language_code + | limit 20"""); + + var limit = asLimit(plan, 20, true); + var join = as(limit.child(), Join.class); + var project = as(join.left(), Project.class); + var topN = as(project.child(), TopN.class); + assertThat(topN.limit().fold(FoldContext.small()), is(20)); + var row = as(topN.child(), EsRelation.class); + } + + /** + * Expected: + * Limit[1000[INTEGER],true] + * \_MvExpand[b{r}#5,b{r}#9] + * \_Limit[1000[INTEGER],false] + * \_LocalRelation[[a{r}#3, b{r}#5],[IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]], + * IntVectorBlock[vector=ConstantIntVector[positions=1, value=-15]]]] * * see https://github.com/elastic/elasticsearch/issues/102084 */ @@ -2389,15 +2674,90 @@ public void testWhereMvExpand() { LogicalPlan plan = optimizedPlan(""" row a = 1, b = -15 | where b < 3 - | mv_expand b"""); + | mv_expand b + """); + + var limit = asLimit(plan, 1000, true); + var expand = as(limit.child(), MvExpand.class); + var limit2 = asLimit(expand.child(), 1000, false); + var row = as(limit2.child(), LocalRelation.class); + } + + /** + * Expected: + * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#5],[language_code{r}#5],[language_code{f}#8]] + * |_Limit[1000[INTEGER],false] + * | \_LocalRelation[[a{r}#3, language_code{r}#5],[IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]], IntVectorBlock[ve + * ctor=ConstantIntVector[positions=1, value=-15]]]] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#8, language_name{f}#9] + */ + public void testWhereLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + row a = 1, language_code = -15 + | where language_code < 3 + | lookup join languages_lookup on language_code + """); - var expand = as(plan, MvExpand.class); - assertThat(expand.limit(), equalTo(1000)); - var limit2 = as(expand.child(), Limit.class); - assertThat(limit2.limit().fold(FoldContext.small()), is(1000)); + var limit = asLimit(plan, 1000, true); + var join = as(limit.child(), Join.class); + var limit2 = asLimit(join.left(), 1000, false); var row = as(limit2.child(), LocalRelation.class); } + /** + * Expects + * TopN[[Order[language_code{r}#7,ASC,LAST]],1[INTEGER]] + * \_Limit[1[INTEGER],true] + * \_MvExpand[language_code{r}#3,language_code{r}#7] + * \_Limit[1[INTEGER],false] + * \_LocalRelation[[language_code{r}#3],[IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]]]] + * + * Notice that the `TopN` at the very top has limit 1, not 3! 
+ */ + public void testDescendantLimitMvExpand() { + LogicalPlan plan = optimizedPlan(""" + ROW language_code = 1 + | MV_EXPAND language_code + | LIMIT 1 + | SORT language_code + | LIMIT 3 + """); + + var topn = as(plan, TopN.class); + var limitAfter = asLimit(topn.child(), 1, true); + var mvExpand = as(limitAfter.child(), MvExpand.class); + var limitBefore = asLimit(mvExpand.child(), 1, false); + var localRelation = as(limitBefore.child(), LocalRelation.class); + } + + /** + * Expects + * TopN[[Order[language_code{r}#3,ASC,LAST]],1[INTEGER]] + * \_Limit[1[INTEGER],true] + * \_Join[LEFT,[language_code{r}#3],[language_code{r}#3],[language_code{f}#6]] + * |_Limit[1[INTEGER],false] + * | \_LocalRelation[[language_code{r}#3],[IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]]]] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#6, language_name{f}#7] + * + * Notice that the `TopN` at the very top has limit 1, not 3! + */ + public void testDescendantLimitLookupJoin() { + LogicalPlan plan = optimizedPlan(""" + ROW language_code = 1 + | LOOKUP JOIN languages_lookup ON language_code + | LIMIT 1 + | SORT language_code + | LIMIT 3 + """); + + var topn = as(plan, TopN.class); + var limitAfter = asLimit(topn.child(), 1, true); + var join = as(limitAfter.child(), Join.class); + var limitBefore = asLimit(join.left(), 1, false); + var localRelation = as(limitBefore.child(), LocalRelation.class); + } + private static List orderNames(TopN topN) { return topN.order().stream().map(o -> as(o.child(), NamedExpression.class).name()).toList(); } @@ -4930,7 +5290,17 @@ public void testPlanSanityCheck() throws Exception { assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references [salary")); } - public void testPlanSanityCheckWithBinaryPlans() throws Exception { + /** + * Expects + * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#17]] + * |_EsqlProject[[_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, gender{f}#8, hire_date{f}#13, job{f}#14, job.raw{f}#15, lang + * uages{f}#9 AS language_code, last_name{f}#10, long_noidx{f}#16, salary{f}#11]] + * | \_Limit[1000[INTEGER],false] + * | \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] 
+ * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#17, language_name{f}#18] + */ + public void testPlanSanityCheckWithBinaryPlans() { assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); var plan = optimizedPlan(""" @@ -4939,7 +5309,8 @@ public void testPlanSanityCheckWithBinaryPlans() throws Exception { | LOOKUP JOIN languages_lookup ON language_code """); - var join = as(plan, Join.class); + var upperLimit = asLimit(plan, null, true); + var join = as(upperLimit.child(), Join.class); var joinWithInvalidLeftPlan = join.replaceChildren(join.right(), join.right()); IllegalStateException e = expectThrows(IllegalStateException.class, () -> logicalOptimizer.optimize(joinWithInvalidLeftPlan)); @@ -5995,15 +6366,15 @@ public void testLookupStats() { /** * Filter on join keys should be pushed down * Expects - * Project[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang - * uage_code{r}#4, last_name{f}#11, long_noidx{f}#17, salary{f}#12, language_name{f}#19]] - * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] - * |_EsqlProject[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang + * + * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] + * |_EsqlProject[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang * uages{f}#10 AS language_code, last_name{f}#11, long_noidx{f}#17, salary{f}#12]] - * | \_Limit[1000[INTEGER]] - * | \_Filter[languages{f}#10 > 1[INTEGER]] - * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] - * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] + * | \_Limit[1000[INTEGER],false] + * | \_Filter[languages{f}#10 > 1[INTEGER]] + * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] 
+ * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownFilterOnJoinKeyWithRename() { assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); @@ -6016,11 +6387,11 @@ public void testLookupJoinPushDownFilterOnJoinKeyWithRename() { """; var plan = optimizedPlan(query); - var join = as(plan, Join.class); + var upperLimit = asLimit(plan, 1000, true); + var join = as(upperLimit.child(), Join.class); assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); var project = as(join.left(), Project.class); - var limit = as(project.child(), Limit.class); - assertThat(limit.limit().fold(FoldContext.small()), equalTo(1000)); + var limit = asLimit(project.child(), 1000, false); var filter = as(limit.child(), Filter.class); // assert that the rename has been undone var op = as(filter.condition(), GreaterThan.class); @@ -6037,15 +6408,14 @@ public void testLookupJoinPushDownFilterOnJoinKeyWithRename() { /** * Filter on on left side fields (outside the join key) should be pushed down * Expects - * Project[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang - * uage_code{r}#4, last_name{f}#11, long_noidx{f}#17, salary{f}#12, language_name{f}#19]] - * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] - * |_EsqlProject[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang + * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] + * |_EsqlProject[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang * uages{f}#10 AS language_code, last_name{f}#11, long_noidx{f}#17, salary{f}#12]] - * | \_Limit[1000[INTEGER]] - * | \_Filter[emp_no{f}#7 > 1[INTEGER]] - * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] - * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] + * | \_Limit[1000[INTEGER],false] + * | \_Filter[emp_no{f}#7 > 1[INTEGER]] + * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownFilterOnLeftSideField() { assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); @@ -6059,12 +6429,12 @@ public void testLookupJoinPushDownFilterOnLeftSideField() { var plan = optimizedPlan(query); - var join = as(plan, Join.class); + var upperLimit = asLimit(plan, 1000, true); + var join = as(upperLimit.child(), Join.class); assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); var project = as(join.left(), Project.class); - var limit = as(project.child(), Limit.class); - assertThat(limit.limit().fold(FoldContext.small()), equalTo(1000)); + var limit = asLimit(project.child(), 1000, false); var filter = as(limit.child(), Filter.class); var op = as(filter.condition(), GreaterThan.class); var field = as(op.left(), FieldAttribute.class); @@ -6226,14 +6596,16 @@ public void testLookupJoinPushDownDisabledForDisjunctionBetweenLeftAndRightField /** * When dropping lookup fields, the lookup relation shouldn't include them. * At least until we can implement InsertFieldExtract there. 
+ * * Expects - * EsqlProject[[languages{f}#10]] - * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] - * |_Project[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang - * uages{f}#10, last_name{f}#11, long_noidx{f}#17, salary{f}#12, languages{f}#10 AS language_code]] - * | \_Limit[1000[INTEGER]] - * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] - * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18] + * EsqlProject[[languages{f}#21]] + * \_Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#29]] + * |_Project[[_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, gender{f}#20, hire_date{f}#25, job{f}#26, job.raw{f}#27, l + * anguages{f}#21, last_name{f}#22, long_noidx{f}#28, salary{f}#23, languages{f}#21 AS language_code]] + * | \_Limit[1000[INTEGER],false] + * | \_EsRelation[test][_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, ..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#29] */ public void testLookupJoinKeepNoLookupFields() { assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); @@ -6255,7 +6627,9 @@ public void testLookupJoinKeepNoLookupFields() { assertThat(project.projections().size(), equalTo(1)); assertThat(project.projections().get(0).name(), equalTo("languages")); - var join = as(project.child(), Join.class); + var limit = asLimit(project.child(), 1000, true); + + var join = as(limit.child(), Join.class); var joinRightRelation = as(join.right(), EsRelation.class); assertThat(joinRightRelation.output().size(), equalTo(1)); @@ -6266,13 +6640,15 @@ public void testLookupJoinKeepNoLookupFields() { * Ensure a JOIN shadowed by another JOIN doesn't request the shadowed fields. * * Expected - * Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#20]] - * |_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] - * | |_Eval[[languages{f}#10 AS language_code]] - * | | \_Limit[1000[INTEGER]] - * | | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] - * | \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18] - * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#20, language_name{f}#21] + * Limit[1000[INTEGER],true] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#20]] + * |_Limit[1000[INTEGER],true] + * | \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] + * | |_Eval[[languages{f}#10 AS language_code]] + * | | \_Limit[1000[INTEGER],false] + * | | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] 
+ * | \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#20, language_name{f}#21] */ public void testMultipleLookupShadowing() { assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()); @@ -6286,18 +6662,25 @@ public void testMultipleLookupShadowing() { var plan = optimizedPlan(query); - var finalJoin = as(plan, Join.class); + var limit1 = asLimit(plan, 1000, true); + + var finalJoin = as(limit1.child(), Join.class); var finalJoinRightRelation = as(finalJoin.right(), EsRelation.class); assertThat(finalJoinRightRelation.output().size(), equalTo(2)); assertThat(finalJoinRightRelation.output().get(0).name(), equalTo("language_code")); assertThat(finalJoinRightRelation.output().get(1).name(), equalTo("language_name")); - var initialJoin = as(finalJoin.left(), Join.class); + var limit2 = asLimit(finalJoin.left(), 1000, true); + + var initialJoin = as(limit2.child(), Join.class); var initialJoinRightRelation = as(initialJoin.right(), EsRelation.class); assertThat(initialJoinRightRelation.output().size(), equalTo(1)); assertThat(initialJoinRightRelation.output().get(0).name(), equalTo("language_code")); + + var eval = as(initialJoin.left(), Eval.class); + var limit3 = asLimit(eval.child(), 1000, false); } // diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index dcc549057b77a..af0a9c2f97961 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -1695,8 +1695,7 @@ public void testParamForIdentifier() { List.of(new Order(EMPTY, attribute("f.11..f.12.*"), Order.OrderDirection.ASC, Order.NullsPosition.LAST)) ), attribute("f.*.13.f.14*"), - attribute("f.*.13.f.14*"), - null + attribute("f.*.13.f.14*") ), statement( """ diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/AbstractNodeSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/AbstractNodeSerializationTests.java index e6faa9a253d76..998b895a4e005 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/AbstractNodeSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/AbstractNodeSerializationTests.java @@ -51,7 +51,7 @@ public static List randomFieldAttributes(int min, int max, boolean on } @Override - protected final T copyInstance(T instance, TransportVersion version) throws IOException { + protected T copyInstance(T instance, TransportVersion version) throws IOException { return copyInstance( instance, getNamedWriteableRegistry(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/LimitSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/LimitSerializationTests.java index 5d994eb2880ba..b1ffb9c5f8ba8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/LimitSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/LimitSerializationTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.plan.logical; +import org.elasticsearch.TransportVersion; import org.elasticsearch.xpack.esql.core.expression.Expression; import 
org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.FieldAttributeTests; @@ -19,23 +20,33 @@ protected Limit createTestInstance() { Source source = randomSource(); Expression limit = FieldAttributeTests.createFieldAttribute(0, false); LogicalPlan child = randomChild(0); - return new Limit(source, limit, child); + return new Limit(source, limit, child, randomBoolean()); } @Override protected Limit mutateInstance(Limit instance) throws IOException { Expression limit = instance.limit(); LogicalPlan child = instance.child(); - if (randomBoolean()) { - limit = randomValueOtherThan(limit, () -> FieldAttributeTests.createFieldAttribute(0, false)); - } else { - child = randomValueOtherThan(child, () -> randomChild(0)); + boolean duplicated = instance.duplicated(); + switch (randomIntBetween(0, 2)) { + case 0 -> limit = randomValueOtherThan(limit, () -> FieldAttributeTests.createFieldAttribute(0, false)); + case 1 -> child = randomValueOtherThan(child, () -> randomChild(0)); + case 2 -> duplicated = duplicated == false; + default -> throw new IllegalStateException("Should never reach here"); } - return new Limit(instance.source(), limit, child); + return new Limit(instance.source(), limit, child, duplicated); } @Override protected boolean alwaysEmptySource() { return true; } + + @Override + protected Limit copyInstance(Limit instance, TransportVersion version) throws IOException { + // Limit#duplicated() is ALWAYS false when being serialized and we assert that in Limit#writeTo(). + // So, we need to manually simulate this situation. + Limit deserializedCopy = super.copyInstance(instance.withDuplicated(false), version); + return deserializedCopy.withDuplicated(instance.duplicated()); + } } From 31597b3897c907643f15ebdacf6435a003fc6478 Mon Sep 17 00:00:00 2001 From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Wed, 29 Jan 2025 03:14:36 -0700 Subject: [PATCH 185/383] (Doc+) System Index definition (#120327) --- docs/reference/api-conventions.asciidoc | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index 28933eb75050d..545b50df009d3 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -254,6 +254,16 @@ as they contain data essential to the operation of the system. IMPORTANT: Direct access to system indices is deprecated and will no longer be allowed in a future major version. +To view system indices within cluster: + +[source,console] +-------------------------------------------------- +GET _cluster/state/metadata?filter_path=metadata.indices.*.system +-------------------------------------------------- + +WARNING: When overwriting current cluster state, system indices should be restored +as part of their {ref}/snapshot-restore.html#feature-state[feature state]. 
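For readers who want to script the system-index check above rather than run it from the console, the same request can be issued through the low-level Java REST client used elsewhere in this patch series. This is a minimal illustrative sketch, not part of the documentation change above; the localhost:9200 endpoint and the ListSystemIndices class name are assumptions made only for the example.

import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class ListSystemIndices {
    public static void main(String[] args) throws Exception {
        // Assumption: a node is reachable on localhost:9200 without security enabled.
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
            Request request = new Request("GET", "/_cluster/state/metadata");
            // Same filter_path as the console example above: only the per-index "system" flag is returned.
            request.addParameter("filter_path", "metadata.indices.*.system");
            Response response = client.performRequest(request);
            System.out.println(EntityUtils.toString(response.getEntity()));
        }
    }
}

Running the sketch prints a JSON object keyed by index name in which system indices carry "system": true.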
+ [discrete] [[api-conventions-parameters]] === Parameters From d2814d5b09051b085e44c8ac7ddcc125c39b81b9 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Wed, 29 Jan 2025 11:19:45 +0100 Subject: [PATCH 186/383] Fix ChangeDetectorTests::testMultipleChanges (#121137) --- .../xpack/ml/aggs/changepoint/ChangeDetectorTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangeDetectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangeDetectorTests.java index 36076bbb0ec25..9a0338b90156b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangeDetectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangeDetectorTests.java @@ -192,7 +192,7 @@ public void testMultipleChanges() { ChangeType type = new ChangeDetector(bucketValues).detect(0.05); tp += type instanceof ChangeType.TrendChange ? 1 : 0; } - assertThat(tp, greaterThan(90)); + assertThat(tp, greaterThan(80)); } public void testProblemDistributionChange() { From f7bf7275702a93adbfc6cb64424e0098ab67db6a Mon Sep 17 00:00:00 2001 From: Pete Gillin Date: Wed, 29 Jan 2025 10:49:15 +0000 Subject: [PATCH 187/383] Remove much remaining frozen indices code (#120539) This removes the transport action and the index setting. The deprecation check has to go because the setting it uses to detect frozen indices has gone. We are issueing a critical deprecation warning in 8.last telling users to unfreeze them. A lot of integration tests under `x-pack/plugin/sql` also have to go, because they rely on being able to freeze indices. ES-9736 #comment Removed the transport action and the index setting and so on in https://github.com/elastic/elasticsearch/pull/120539 --- .../core/src/main/java/module-info.java | 1 - .../index/engine/frozen/FrozenEngine.java | 8 +- .../core/frozen/action/FreezeIndexAction.java | 20 -- .../deprecation/IndexDeprecationChecker.java | 20 -- .../IndexDeprecationCheckerTests.java | 25 -- .../xpack/frozen/FrozenIndices.java | 11 - .../action/TransportFreezeIndexAction.java | 233 ------------------ .../xpack/security/operator/Constants.java | 1 - .../multi-cluster-with-security/build.gradle | 1 - .../SqlTestClusterWithRemote.java | 2 - .../sql/qa/server/multi-node/build.gradle | 1 - .../sql/qa/multi_node/SqlTestCluster.java | 1 - .../sql/qa/server/security/build.gradle | 1 - .../sql/qa/server/single-node/build.gradle | 1 - .../xpack/sql/qa/single_node/CliErrorsIT.java | 2 +- .../sql/qa/single_node/CliExplainIT.java | 2 +- .../sql/qa/single_node/CliFetchSizeIT.java | 2 +- .../sql/qa/single_node/CliLenientIT.java | 2 +- .../qa/single_node/CliPartialResultsIT.java | 2 +- .../xpack/sql/qa/single_node/CliSelectIT.java | 2 +- .../xpack/sql/qa/single_node/CliShowIT.java | 2 +- .../ConsistentFunctionArgHandlingIT.java | 2 +- .../qa/single_node/CustomDateFormatIT.java | 2 +- .../sql/qa/single_node/FieldExtractorIT.java | 2 +- .../sql/qa/single_node/GeoJdbcCsvSpecIT.java | 4 +- .../sql/qa/single_node/GeoJdbcSqlSpecIT.java | 4 +- .../sql/qa/single_node/JdbcCsvSpecIT.java | 4 +- .../single_node/JdbcDatabaseMetaDataIT.java | 2 +- .../sql/qa/single_node/JdbcDocCsvSpecIT.java | 4 +- .../single_node/JdbcDocFrozenCsvSpecIT.java | 80 ------ .../qa/single_node/JdbcFrozenCsvSpecIT.java | 51 ---- .../qa/single_node/JdbcShardFailureIT.java | 2 +- .../sql/qa/single_node/JdbcShowTablesIT.java | 2 
+- .../sql/qa/single_node/JdbcSqlSpecIT.java | 4 +- .../qa/single_node/RestSqlDeprecationIT.java | 2 +- .../xpack/sql/qa/single_node/RestSqlIT.java | 2 +- .../qa/single_node/RestSqlPaginationIT.java | 2 +- .../sql/qa/single_node/RestSqlUsageIT.java | 2 +- .../sql/qa/single_node/SqlProtocolIT.java | 2 +- .../sql/qa/single_node/SqlTestCluster.java | 6 +- .../sql/qa/single_node/SysColumnsIT.java | 2 +- .../xpack/sql/qa/jdbc/DataLoader.java | 20 +- .../qa/jdbc/SpecBaseIntegrationTestCase.java | 2 +- .../multi-cluster-command.csv-spec | 15 -- .../src/main/resources/slow/frozen.csv-spec | 65 ----- x-pack/qa/freeze-plugin/build.gradle | 24 -- .../plugin/freeze/FreezeIndexPlugin.java | 93 ------- 47 files changed, 38 insertions(+), 702 deletions(-) delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/action/FreezeIndexAction.java delete mode 100644 x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java delete mode 100644 x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocFrozenCsvSpecIT.java delete mode 100644 x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java delete mode 100644 x-pack/plugin/sql/qa/server/src/main/resources/slow/frozen.csv-spec delete mode 100644 x-pack/qa/freeze-plugin/build.gradle delete mode 100644 x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java diff --git a/x-pack/plugin/core/src/main/java/module-info.java b/x-pack/plugin/core/src/main/java/module-info.java index 55b3428907c40..ca5f8406fc97c 100644 --- a/x-pack/plugin/core/src/main/java/module-info.java +++ b/x-pack/plugin/core/src/main/java/module-info.java @@ -66,7 +66,6 @@ exports org.elasticsearch.xpack.core.esql; exports org.elasticsearch.xpack.core.esql.action; exports org.elasticsearch.xpack.core.esql.action.internal; // TODO: qualify to esql when modularized - exports org.elasticsearch.xpack.core.frozen.action; exports org.elasticsearch.xpack.core.frozen; exports org.elasticsearch.xpack.core.graph.action; exports org.elasticsearch.xpack.core.graph; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/FrozenEngine.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/FrozenEngine.java index 3b242ca94ac61..998603d8f4bc8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/FrozenEngine.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/FrozenEngine.java @@ -17,7 +17,6 @@ import org.apache.lucene.store.Directory; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; -import org.elasticsearch.common.settings.Setting; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.index.engine.Engine; @@ -55,12 +54,7 @@ * stats in order to obtain the number of reopens. 
*/ public final class FrozenEngine extends ReadOnlyEngine { - public static final Setting INDEX_FROZEN = Setting.boolSetting( - "index.frozen", - false, - Setting.Property.IndexScope, - Setting.Property.PrivateIndex - ); + private final SegmentsStats segmentsStats; private final DocsStats docsStats; private volatile ElasticsearchDirectoryReader lastOpenedReader; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/action/FreezeIndexAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/action/FreezeIndexAction.java deleted file mode 100644 index d4d76200c25be..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/frozen/action/FreezeIndexAction.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.core.frozen.action; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.protocol.xpack.frozen.FreezeResponse; - -public class FreezeIndexAction extends ActionType { - - public static final FreezeIndexAction INSTANCE = new FreezeIndexAction(); - public static final String NAME = "indices:admin/freeze"; - - private FreezeIndexAction() { - super(NAME); - } -} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java index 46e634e6d3899..778e4d176ca0f 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecker.java @@ -15,7 +15,6 @@ import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.engine.frozen.FrozenEngine; import org.elasticsearch.xpack.core.deprecation.DeprecatedIndexPredicate; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; @@ -71,7 +70,6 @@ private List> indexSet IndexDeprecationChecker::translogRetentionSettingCheck, IndexDeprecationChecker::checkIndexDataPath, IndexDeprecationChecker::storeTypeSettingCheck, - IndexDeprecationChecker::frozenIndexSettingCheck, IndexDeprecationChecker::deprecatedCamelCasePattern, IndexDeprecationChecker::legacyRoutingSettingCheck ); @@ -189,24 +187,6 @@ private static DeprecationIssue storeTypeSettingCheck(IndexMetadata indexMetadat return null; } - private static DeprecationIssue frozenIndexSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { - Boolean isIndexFrozen = FrozenEngine.INDEX_FROZEN.get(indexMetadata.getSettings()); - if (Boolean.TRUE.equals(isIndexFrozen)) { - String indexName = indexMetadata.getIndex().getName(); - return new DeprecationIssue( - DeprecationIssue.Level.WARNING, - "index [" - + indexName - + "] is a frozen index. The frozen indices feature is deprecated and will be removed in a future version", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/frozen-indices.html", - "Frozen indices no longer offer any advantages. 
Consider cold or frozen tiers in place of frozen indices.", - false, - null - ); - } - return null; - } - private static DeprecationIssue legacyRoutingSettingCheck(IndexMetadata indexMetadata, ClusterState clusterState) { List deprecatedSettings = LegacyTiersDetection.getDeprecatedFilteredAllocationSettings(indexMetadata.getSettings()); if (deprecatedSettings.isEmpty()) { diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java index edbe7562a1560..dc73ba0c5bcb3 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.engine.frozen.FrozenEngine; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.snapshots.SearchableSnapshotsSettings; import org.elasticsearch.test.ESTestCase; @@ -328,30 +327,6 @@ public void testSimpleFSSetting() { ); } - public void testFrozenIndex() { - Settings.Builder settings = settings(IndexVersion.current()); - settings.put(FrozenEngine.INDEX_FROZEN.getKey(), true); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); - Map> issuesByIndex = checker.check( - state, - new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) - ); - assertThat( - issuesByIndex.get("test"), - contains( - new DeprecationIssue( - DeprecationIssue.Level.WARNING, - "index [test] is a frozen index. The frozen indices feature is deprecated and will be removed in a future version", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/frozen-indices.html", - "Frozen indices no longer offer any advantages. 
Consider cold or frozen tiers in place of frozen indices.", - false, - null - ) - ) - ); - } - public void testCamelCaseDeprecation() { String simpleMapping = "{\n\"_doc\": {" + "\"properties\" : {\n" diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java index 5e534295c06c5..860a4a1915ea1 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java @@ -8,30 +8,19 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.index.engine.frozen.FrozenEngine; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; -import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction; -import org.elasticsearch.xpack.frozen.action.TransportFreezeIndexAction; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; public class FrozenIndices extends Plugin implements ActionPlugin { - @Override - public List> getSettings() { - return Arrays.asList(FrozenEngine.INDEX_FROZEN); - } - @Override public List> getActions() { List> actions = new ArrayList<>(); actions.add(new ActionHandler<>(XPackUsageFeatureAction.FROZEN_INDICES, FrozenIndicesUsageTransportAction.class)); - actions.add(new ActionHandler<>(FreezeIndexAction.INSTANCE, TransportFreezeIndexAction.class)); return actions; } } diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java deleted file mode 100644 index 248902b4b7a9e..0000000000000 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.frozen.action; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.close.CloseIndexClusterStateUpdateRequest; -import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; -import org.elasticsearch.action.admin.indices.open.OpenIndexClusterStateUpdateRequest; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.ActiveShardCount; -import org.elasticsearch.action.support.DestructiveOperations; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateUpdateTask; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.block.ClusterBlocks; -import org.elasticsearch.cluster.metadata.IndexAbstraction; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.MetadataIndexStateService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Priority; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.engine.frozen.FrozenEngine; -import org.elasticsearch.injection.guice.Inject; -import org.elasticsearch.protocol.xpack.frozen.FreezeRequest; -import org.elasticsearch.protocol.xpack.frozen.FreezeResponse; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.SortedMap; - -public final class TransportFreezeIndexAction extends TransportMasterNodeAction { - - private static final Logger logger = LogManager.getLogger(TransportFreezeIndexAction.class); - - private final DestructiveOperations destructiveOperations; - private final IndexNameExpressionResolver indexNameExpressionResolver; - private final MetadataIndexStateService indexStateService; - - @Inject - public TransportFreezeIndexAction( - MetadataIndexStateService indexStateService, - TransportService transportService, - ClusterService clusterService, - ThreadPool threadPool, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - DestructiveOperations destructiveOperations - ) { - super( - FreezeIndexAction.NAME, - transportService, - clusterService, - threadPool, - actionFilters, - FreezeRequest::new, - FreezeResponse::new, - EsExecutors.DIRECT_EXECUTOR_SERVICE - ); - this.indexStateService = indexStateService; - this.indexNameExpressionResolver = indexNameExpressionResolver; - this.destructiveOperations = destructiveOperations; - } - - @Override - protected void doExecute(Task task, FreezeRequest request, ActionListener listener) { - 
destructiveOperations.failDestructive(request.indices()); - super.doExecute(task, request, listener); - } - - private Index[] resolveIndices(FreezeRequest request, ClusterState state) { - List indices = new ArrayList<>(); - for (Index index : indexNameExpressionResolver.concreteIndices(state, request)) { - IndexMetadata metadata = state.metadata().index(index); - Settings settings = metadata.getSettings(); - // only unfreeze if we are frozen and only freeze if we are not frozen already. - // this prevents all indices that are already frozen that match a pattern to - // go through the cycles again. - if ((request.freeze() && FrozenEngine.INDEX_FROZEN.get(settings) == false) - || (request.freeze() == false && FrozenEngine.INDEX_FROZEN.get(settings))) { - indices.add(index); - } - } - if (indices.isEmpty() && request.indicesOptions().allowNoIndices() == false) { - throw new ResourceNotFoundException("no index found to " + (request.freeze() ? "freeze" : "unfreeze")); - } - return indices.toArray(Index.EMPTY_ARRAY); - } - - @Override - protected void masterOperation(Task task, FreezeRequest request, ClusterState state, ActionListener listener) { - final Index[] concreteIndices = resolveIndices(request, state); - if (concreteIndices.length == 0) { - listener.onResponse(new FreezeResponse(true, true)); - return; - } - - final CloseIndexClusterStateUpdateRequest closeRequest = new CloseIndexClusterStateUpdateRequest( - request.masterNodeTimeout(), - request.ackTimeout(), - task.getId(), - ActiveShardCount.DEFAULT, - concreteIndices - ); - - indexStateService.closeIndices(closeRequest, new ActionListener<>() { - @Override - public void onResponse(final CloseIndexResponse response) { - if (response.isAcknowledged()) { - toggleFrozenSettings(concreteIndices, request, listener); - } else { - // TODO improve FreezeResponse so that it also reports failures from the close index API - listener.onResponse(new FreezeResponse(false, false)); - } - } - - @Override - public void onFailure(final Exception t) { - logger.debug(() -> "failed to close indices [" + Arrays.toString(concreteIndices) + "]", t); - listener.onFailure(t); - } - }); - } - - private void toggleFrozenSettings( - final Index[] concreteIndices, - final FreezeRequest request, - final ActionListener listener - ) { - submitUnbatchedTask( - "toggle-frozen-settings", - new AckedClusterStateUpdateTask(Priority.URGENT, request, listener.delegateFailure((delegate, acknowledgedResponse) -> { - OpenIndexClusterStateUpdateRequest updateRequest = new OpenIndexClusterStateUpdateRequest( - request.masterNodeTimeout(), - request.ackTimeout(), - request.waitForActiveShards(), - concreteIndices - ); - indexStateService.openIndices( - updateRequest, - delegate.safeMap( - openIndexClusterStateUpdateResponse -> new FreezeResponse( - openIndexClusterStateUpdateResponse.isAcknowledged(), - openIndexClusterStateUpdateResponse.isShardsAcknowledged() - ) - ) - ); - })) { - @Override - public ClusterState execute(ClusterState currentState) { - List writeIndices = new ArrayList<>(); - SortedMap lookup = currentState.metadata().getIndicesLookup(); - for (Index index : concreteIndices) { - IndexAbstraction ia = lookup.get(index.getName()); - if (ia != null && ia.getParentDataStream() != null && ia.getParentDataStream().getWriteIndex().equals(index)) { - writeIndices.add(index.getName()); - } - } - if (writeIndices.size() > 0) { - throw new IllegalArgumentException( - "cannot freeze the following data stream write indices [" - + 
Strings.collectionToCommaDelimitedString(writeIndices) - + "]" - ); - } - - final Metadata.Builder builder = Metadata.builder(currentState.metadata()); - ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - for (Index index : concreteIndices) { - final IndexMetadata indexMetadata = currentState.metadata().getIndexSafe(index); - if (indexMetadata.getState() != IndexMetadata.State.CLOSE) { - throw new IllegalStateException("index [" + index.getName() + "] is not closed"); - } - final Settings.Builder settingsBuilder = Settings.builder().put(indexMetadata.getSettings()); - if (request.freeze()) { - settingsBuilder.put(FrozenEngine.INDEX_FROZEN.getKey(), true); - settingsBuilder.put(IndexSettings.INDEX_SEARCH_THROTTLED.getKey(), true); - settingsBuilder.put("index.blocks.write", true); - blocks.addIndexBlock(index.getName(), IndexMetadata.INDEX_WRITE_BLOCK); - } else { - settingsBuilder.remove(FrozenEngine.INDEX_FROZEN.getKey()); - settingsBuilder.remove(IndexSettings.INDEX_SEARCH_THROTTLED.getKey()); - if (indexMetadata.isSearchableSnapshot() == false) { - settingsBuilder.remove("index.blocks.write"); - blocks.removeIndexBlock(index.getName(), IndexMetadata.INDEX_WRITE_BLOCK); - } - } - builder.put( - IndexMetadata.builder(indexMetadata) - .settingsVersion(indexMetadata.getSettingsVersion() + 1) - .settings(settingsBuilder) - .build(), - true - ); - } - return ClusterState.builder(currentState).blocks(blocks).metadata(builder).build(); - } - } - ); - } - - @Override - protected ClusterBlockException checkBlock(FreezeRequest request, ClusterState state) { - return state.blocks() - .indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, indexNameExpressionResolver.concreteIndexNames(state, request)); - } - - @SuppressForbidden(reason = "legacy usage of unbatched task") // TODO add support for batching here - private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String source, ClusterStateUpdateTask task) { - clusterService.submitUnbatchedStateUpdateTask(source, task); - } -} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 5a2d24e1aa3ce..5d3b13b9d451a 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -518,7 +518,6 @@ public class Constants { "indices:admin/flush", "indices:admin/flush[s]", "indices:admin/forcemerge", - "indices:admin/freeze", "indices:admin/get", "indices:admin/analyze_disk_usage", "indices:admin/ilm/explain", diff --git a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle index 04f25f7175451..2fdbd4b903959 100644 --- a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle +++ b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle @@ -1,6 +1,5 @@ dependencies { javaRestTestImplementation project(path: xpackModule('ql:test-fixtures')) - clusterPlugins project(':x-pack:qa:freeze-plugin') } tasks.named("check").configure {dependsOn("javaRestTest") } // run these tests as part of the "check" task diff --git 
a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/SqlTestClusterWithRemote.java b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/SqlTestClusterWithRemote.java index 0608e61488e38..00901b014954c 100644 --- a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/SqlTestClusterWithRemote.java +++ b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_cluster_with_security/SqlTestClusterWithRemote.java @@ -43,7 +43,6 @@ private static ElasticsearchCluster clusterSettings(String remoteAddress) { .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.autoconfiguration.enabled", "false") .user(USER_NAME, PASSWORD) - .plugin("freeze-plugin") .build(); } @@ -58,7 +57,6 @@ private static ElasticsearchCluster remoteClusterSettings() { .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.autoconfiguration.enabled", "false") .user(USER_NAME, PASSWORD) - .plugin("freeze-plugin") .build(); } diff --git a/x-pack/plugin/sql/qa/server/multi-node/build.gradle b/x-pack/plugin/sql/qa/server/multi-node/build.gradle index e7a558ba68dd9..15332fa1ef113 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/build.gradle +++ b/x-pack/plugin/sql/qa/server/multi-node/build.gradle @@ -7,5 +7,4 @@ description = 'Run a subset of SQL tests against multiple nodes' */ dependencies { - clusterPlugins project(':x-pack:qa:freeze-plugin') } diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlTestCluster.java b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlTestCluster.java index 4f740f22393a7..9c2764397ecb6 100644 --- a/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlTestCluster.java +++ b/x-pack/plugin/sql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/multi_node/SqlTestCluster.java @@ -20,7 +20,6 @@ public static ElasticsearchCluster getCluster() { .setting("xpack.watcher.enabled", "false") .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") - .plugin("freeze-plugin") .build(); } } diff --git a/x-pack/plugin/sql/qa/server/security/build.gradle b/x-pack/plugin/sql/qa/server/security/build.gradle index e00989cbaa89c..2923dfb58780c 100644 --- a/x-pack/plugin/sql/qa/server/security/build.gradle +++ b/x-pack/plugin/sql/qa/server/security/build.gradle @@ -46,7 +46,6 @@ subprojects { user username: "user1", password: 'x-pack-test-password', role: "user1" user username: "user2", password: 'x-pack-test-password', role: "user2" user username: "manage_user", password: 'x-pack-test-password', role: "manage_user" - plugin ':x-pack:qa:freeze-plugin' } File testArtifactsDir = project.file("$buildDir/testArtifacts") diff --git a/x-pack/plugin/sql/qa/server/single-node/build.gradle b/x-pack/plugin/sql/qa/server/single-node/build.gradle index e4376edc683d1..08a196080e54e 100644 --- a/x-pack/plugin/sql/qa/server/single-node/build.gradle +++ b/x-pack/plugin/sql/qa/server/single-node/build.gradle @@ -2,5 +2,4 @@ apply plugin: 'elasticsearch.internal-test-artifact' dependencies { - clusterPlugins 
project(':x-pack:qa:freeze-plugin') } diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java index 3a92dd675203f..02cdfc993c12c 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliErrorsIT.java @@ -12,7 +12,7 @@ public class CliErrorsIT extends ErrorsTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java index ac4bffdb951d5..46e16418e0642 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java @@ -17,7 +17,7 @@ public class CliExplainIT extends CliIntegrationTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliFetchSizeIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliFetchSizeIT.java index 83daeccab0b0e..9811142d3611c 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliFetchSizeIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliFetchSizeIT.java @@ -12,7 +12,7 @@ public class CliFetchSizeIT extends FetchSizeTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java index ea7f793dd56ee..99895823adc7f 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java @@ -12,7 +12,7 @@ public class CliLenientIT extends LenientTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git 
a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliPartialResultsIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliPartialResultsIT.java index 0d6f3fd530d22..8baa265780f40 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliPartialResultsIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliPartialResultsIT.java @@ -12,7 +12,7 @@ public class CliPartialResultsIT extends PartialResultsTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliSelectIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliSelectIT.java index bbc0c16393cb7..ecdd41a203ad3 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliSelectIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliSelectIT.java @@ -12,7 +12,7 @@ public class CliSelectIT extends SelectTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliShowIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliShowIT.java index 0c663be1e8706..2f9deffa48f08 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliShowIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CliShowIT.java @@ -12,7 +12,7 @@ public class CliShowIT extends ShowTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/ConsistentFunctionArgHandlingIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/ConsistentFunctionArgHandlingIT.java index 3db713b5ed4db..de502bf886ff3 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/ConsistentFunctionArgHandlingIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/ConsistentFunctionArgHandlingIT.java @@ -52,7 +52,7 @@ */ public class ConsistentFunctionArgHandlingIT extends JdbcIntegrationTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() 
{ diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CustomDateFormatIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CustomDateFormatIT.java index fb312a75bcc9c..4a91372abe5d4 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CustomDateFormatIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/CustomDateFormatIT.java @@ -13,7 +13,7 @@ public class CustomDateFormatIT extends CustomDateFormatTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/FieldExtractorIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/FieldExtractorIT.java index daaa7e81154b4..ac967710e360c 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/FieldExtractorIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/FieldExtractorIT.java @@ -13,7 +13,7 @@ public class FieldExtractorIT extends FieldExtractorTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java index 3763169977873..c83f1597e1884 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java @@ -23,11 +23,11 @@ public class GeoJdbcCsvSpecIT extends GeoCsvSpecTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDatasetIntoEs(client, false); + DataLoader.loadDatasetIntoEs(client); } @Override diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java index a2a8cc87f62bc..903caf6990126 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java @@ -15,11 +15,11 @@ public class GeoJdbcSqlSpecIT extends GeoSqlSpecTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); 
+ public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDatasetIntoEs(client, false); + DataLoader.loadDatasetIntoEs(client); } @Override diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java index 8f661fa037e25..c78a922d7f6e4 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java @@ -22,11 +22,11 @@ public class JdbcCsvSpecIT extends CsvSpecTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDatasetIntoEs(client, false); + DataLoader.loadDatasetIntoEs(client); } @Override diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDatabaseMetaDataIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDatabaseMetaDataIT.java index 1a7337255fc78..35aa5c3701391 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDatabaseMetaDataIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDatabaseMetaDataIT.java @@ -12,7 +12,7 @@ public class JdbcDatabaseMetaDataIT extends DatabaseMetaDataTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java index fb8a96ca4ea7c..6147d0759f459 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocCsvSpecIT.java @@ -42,7 +42,7 @@ */ public class JdbcDocCsvSpecIT extends SpecBaseIntegrationTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { @@ -58,7 +58,7 @@ protected String indexName() { @Override protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDocsDatasetIntoEs(client, false); + DataLoader.loadDocsDatasetIntoEs(client); } @ParametersFactory(shuffle = false, argumentFormatting = SqlSpecTestCase.PARAM_FORMATTING) diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocFrozenCsvSpecIT.java 
b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocFrozenCsvSpecIT.java deleted file mode 100644 index 2276db4cff105..0000000000000 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcDocFrozenCsvSpecIT.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.sql.qa.single_node; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.apache.logging.log4j.Logger; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.xpack.sql.qa.jdbc.DataLoader; -import org.elasticsearch.xpack.sql.qa.jdbc.JdbcAssert; -import org.elasticsearch.xpack.sql.qa.jdbc.SpecBaseIntegrationTestCase; -import org.elasticsearch.xpack.sql.qa.jdbc.SqlSpecTestCase; -import org.junit.ClassRule; - -import java.sql.Connection; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.List; - -import static org.elasticsearch.xpack.ql.CsvSpecReader.CsvTestCase; -import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; -import static org.elasticsearch.xpack.ql.SpecReader.Parser; -import static org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.csvConnection; -import static org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.executeCsvQuery; - -public class JdbcDocFrozenCsvSpecIT extends SpecBaseIntegrationTestCase { - @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(true); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - private final CsvTestCase testCase; - - @Override - protected String indexName() { - return "library"; - } - - @Override - protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDocsDatasetIntoEs(client, true); - } - - @ParametersFactory(shuffle = false, argumentFormatting = SqlSpecTestCase.PARAM_FORMATTING) - public static List readScriptSpec() throws Exception { - Parser parser = specParser(); - return readScriptSpec("/docs/docs-frozen.csv-spec", parser); - } - - public JdbcDocFrozenCsvSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { - super(fileName, groupName, testName, lineNumber); - this.testCase = testCase; - } - - @Override - protected void assertResults(ResultSet expected, ResultSet elastic) throws SQLException { - Logger log = logEsResultSet() ? 
logger : null; - - JdbcAssert.assertResultSets(expected, elastic, log, true, true); - } - - @Override - protected final void doTest() throws Throwable { - try (Connection csv = csvConnection(testCase); Connection es = esJdbc()) { - - // pass the testName as table for debugging purposes (in case the underlying reader is missing) - ResultSet expected = executeCsvQuery(csv, testName); - ResultSet elasticResults = executeJdbcQuery(es, testCase.query); - assertResults(expected, elasticResults); - } - } -} diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java deleted file mode 100644 index 11146bfb9aa28..0000000000000 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.sql.qa.single_node; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.xpack.sql.qa.jdbc.CsvSpecTestCase; -import org.junit.ClassRule; - -import java.util.List; -import java.util.Properties; -import java.util.concurrent.TimeUnit; - -import static org.elasticsearch.xpack.ql.CsvSpecReader.CsvTestCase; -import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; - -public class JdbcFrozenCsvSpecIT extends CsvSpecTestCase { - @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(true); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - @ParametersFactory(argumentFormatting = PARAM_FORMATTING) - public static List readScriptSpec() throws Exception { - return readScriptSpec("/slow/frozen.csv-spec", specParser()); - } - - @Override - protected Properties connectionProperties() { - Properties props = new Properties(super.connectionProperties()); - String timeout = String.valueOf(TimeUnit.MINUTES.toMillis(5)); - props.setProperty("connect.timeout", timeout); - props.setProperty("network.timeout", timeout); - props.setProperty("query.timeout", timeout); - props.setProperty("page.timeout", timeout); - - return props; - } - - public JdbcFrozenCsvSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { - super(fileName, groupName, testName, lineNumber, testCase); - } -} diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java index 91f3ab029f55c..f7d08ba4e22dd 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java @@ -25,7 +25,7 @@ public class JdbcShardFailureIT extends JdbcIntegrationTestCase { @ClassRule - public static final ElasticsearchCluster 
cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); private String nodeAddresses; diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShowTablesIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShowTablesIT.java index e555448d3284d..40b90e1a42c6c 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShowTablesIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShowTablesIT.java @@ -12,7 +12,7 @@ public class JdbcShowTablesIT extends ShowTablesTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcSqlSpecIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcSqlSpecIT.java index 1c9d029063b12..b8c9a7dbb9007 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcSqlSpecIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcSqlSpecIT.java @@ -14,11 +14,11 @@ public class JdbcSqlSpecIT extends SqlSpecTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDatasetIntoEs(client, false); + DataLoader.loadDatasetIntoEs(client); } @Override diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlDeprecationIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlDeprecationIT.java index df5d43f079de3..7074091f4f166 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlDeprecationIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlDeprecationIT.java @@ -20,7 +20,7 @@ public class RestSqlDeprecationIT extends BaseRestSqlTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java index 3cc9844e6664e..167cc212685d7 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java @@ -24,7 +24,7 @@ */ public class RestSqlIT extends RestSqlTestCase 
{ @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlPaginationIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlPaginationIT.java index 088f5af1e0aef..6ef56274cdbb0 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlPaginationIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlPaginationIT.java @@ -13,7 +13,7 @@ public class RestSqlPaginationIT extends RestSqlPaginationTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlUsageIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlUsageIT.java index f50865979bc1b..297302c534030 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlUsageIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlUsageIT.java @@ -13,7 +13,7 @@ public class RestSqlUsageIT extends RestSqlUsageTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlProtocolIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlProtocolIT.java index 5acf570b0a5da..e59a8392f7335 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlProtocolIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlProtocolIT.java @@ -13,7 +13,7 @@ public class SqlProtocolIT extends SqlProtocolTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlTestCluster.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlTestCluster.java index 2ce6452bb8d93..7907bffd3b99e 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlTestCluster.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SqlTestCluster.java @@ -13,7 +13,7 @@ public class SqlTestCluster { public static String CLUSTER_NAME = "javaRestTest"; - 
public static ElasticsearchCluster getCluster(boolean enableFreezing) { + public static ElasticsearchCluster getCluster() { var settings = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) .name(CLUSTER_NAME) @@ -22,10 +22,6 @@ public static ElasticsearchCluster getCluster(boolean enableFreezing) { .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial"); - if (enableFreezing) { - settings = settings.plugin("freeze-plugin"); - } - return settings.build(); } } diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SysColumnsIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SysColumnsIT.java index 1fce9bfa18b49..928916b3c40ae 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SysColumnsIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/SysColumnsIT.java @@ -13,7 +13,7 @@ public class SysColumnsIT extends SysColumnsTestCase { @ClassRule - public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(false); + public static final ElasticsearchCluster cluster = SqlTestCluster.getCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java index 07bf55919b44a..49397f6999c24 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java @@ -32,14 +32,14 @@ public class DataLoader { public static void main(String[] args) throws Exception { try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) { - loadEmpDatasetIntoEs(client, true); - loadDocsDatasetIntoEs(client, true); + loadEmpDatasetIntoEs(client); + loadDocsDatasetIntoEs(client); LogManager.getLogger(DataLoader.class).info("Data loaded"); } } - public static void loadDatasetIntoEs(RestClient client, boolean includeFrozenIndices) throws Exception { - loadEmpDatasetIntoEs(client, includeFrozenIndices); + public static void loadDatasetIntoEs(RestClient client) throws Exception { + loadEmpDatasetIntoEs(client); } public static void createEmptyIndex(RestClient client, String index) throws Exception { @@ -62,7 +62,7 @@ public static void createEmptyIndex(RestClient client, String index) throws Exce client.performRequest(request); } - private static void loadEmpDatasetIntoEs(RestClient client, boolean includeFrozenIndices) throws Exception { + private static void loadEmpDatasetIntoEs(RestClient client) throws Exception { loadEmpDatasetIntoEs(client, "test_emp", "employees"); loadEmpDatasetWithExtraIntoEs(client, "test_emp_copy", "employees"); loadAppsDatasetIntoEs(client, "apps", "apps"); @@ -71,10 +71,6 @@ private static void loadEmpDatasetIntoEs(RestClient client, boolean includeFroze loadLogUnsignedLongIntoEs(client, "logs_unsigned_long", "logs_unsigned_long"); makeAlias(client, "test_alias", "test_emp", "test_emp_copy"); makeAlias(client, "test_alias_emp", "test_emp", "test_emp_copy"); - if (includeFrozenIndices) { - loadEmpDatasetIntoEs(client, "frozen_emp", "employees"); - freeze(client, "frozen_emp"); - } loadNoColsDatasetIntoEs(client, 
"empty_mapping"); } @@ -91,14 +87,10 @@ private static void loadNoColsDatasetIntoEs(RestClient client, String index) thr client.performRequest(request); } - public static void loadDocsDatasetIntoEs(RestClient client, boolean includeFrozenIndices) throws Exception { + public static void loadDocsDatasetIntoEs(RestClient client) throws Exception { loadEmpDatasetIntoEs(client, "emp", "employees"); loadLibDatasetIntoEs(client, "library"); makeAlias(client, "employees", "emp"); - if (includeFrozenIndices) { - loadLibDatasetIntoEs(client, "archive"); - freeze(client, "archive"); - } } public static void createString(String name, XContentBuilder builder) throws Exception { diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SpecBaseIntegrationTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SpecBaseIntegrationTestCase.java index b45930a9f0d06..3b97938838840 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SpecBaseIntegrationTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/SpecBaseIntegrationTestCase.java @@ -60,7 +60,7 @@ protected String indexName() { } protected void loadDataset(RestClient client) throws Exception { - DataLoader.loadDatasetIntoEs(client, true); + DataLoader.loadDatasetIntoEs(client); } @Override diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/multi-cluster-with-security/multi-cluster-command.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/multi-cluster-with-security/multi-cluster-command.csv-spec index be615e6d7df9f..806b1fe5fd620 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/multi-cluster-with-security/multi-cluster-command.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/multi-cluster-with-security/multi-cluster-command.csv-spec @@ -46,21 +46,6 @@ my_remote_cluster|test_emp |TABLE |INDEX my_remote_cluster|test_emp_copy |TABLE |INDEX ; -showTablesWithFrozen -SHOW TABLES CATALOG 'my_remote_cluster' INCLUDE FROZEN; - - catalog | name | type | kind ------------------+-------------------+---------------+--------------- -my_remote_cluster|apps |TABLE |INDEX -my_remote_cluster|empty_mapping |TABLE |INDEX -my_remote_cluster|frozen_emp |TABLE |INDEX -my_remote_cluster|logs |TABLE |INDEX -my_remote_cluster|logs_nanos |TABLE |INDEX -my_remote_cluster|logs_unsigned_long |TABLE |INDEX -my_remote_cluster|test_emp |TABLE |INDEX -my_remote_cluster|test_emp_copy |TABLE |INDEX -; - showTablesSimpleLike SHOW TABLES CATALOG 'my_remote_cluster' LIKE 'test_emp'; diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/slow/frozen.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/slow/frozen.csv-spec deleted file mode 100644 index d2bb5c5b4200b..0000000000000 --- a/x-pack/plugin/sql/qa/server/src/main/resources/slow/frozen.csv-spec +++ /dev/null @@ -1,65 +0,0 @@ -// To mute tests follow example in file: example.csv-spec - -// -// Frozen indices tests -// - -showTables -SHOW TABLES INCLUDE FROZEN; - - catalog | name | type | kind -javaRestTest |apps |TABLE |INDEX -javaRestTest |empty_mapping |TABLE |INDEX -javaRestTest |frozen_emp |TABLE |FROZEN INDEX -javaRestTest |logs |TABLE |INDEX -javaRestTest |logs_nanos |TABLE |INDEX -javaRestTest |logs_unsigned_long |TABLE |INDEX -javaRestTest |test_alias |VIEW |ALIAS -javaRestTest |test_alias_emp |VIEW |ALIAS -javaRestTest |test_emp |TABLE |INDEX -javaRestTest |test_emp_copy |TABLE |INDEX -; - -columnFromFrozen -SELECT gender 
FROM FROZEN frozen_emp ORDER BY gender LIMIT 5; - -gender:s -F -F -F -F -F -; - -percentileFrozen -SELECT gender, PERCENTILE(emp_no, 92.45) p1 FROM FROZEN frozen_emp GROUP BY gender; - -gender:s | p1:d -null |10018.745 -F |10096.336 -M |10091.393 -; - -countFromFrozen -SELECT gender, COUNT(*) AS c FROM FROZEN frozen_emp GROUP BY gender; - -gender:s | c:l -null |10 -F |33 -M |57 -; - -sum -SELECT SUM(salary) FROM FROZEN frozen_emp; - - SUM(salary):l ---------------- -4824855 -; - -kurtosisAndSkewnessNoGroup -SELECT KURTOSIS(emp_no) k, SKEWNESS(salary) s FROM FROZEN frozen_emp; - -k:d | s:d -1.7997599759975997 | 0.2707722118423227 -; diff --git a/x-pack/qa/freeze-plugin/build.gradle b/x-pack/qa/freeze-plugin/build.gradle deleted file mode 100644 index 03704dcc57a6f..0000000000000 --- a/x-pack/qa/freeze-plugin/build.gradle +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - - -apply plugin: 'elasticsearch.base-internal-es-plugin' - -esplugin { - name = 'freeze-plugin' - description = 'Provides freeze-index endpoint for testing purposes only' - classname = 'org.elasticsearch.plugin.freeze.FreezeIndexPlugin' - extendedPlugins = ['x-pack-core'] -} - -dependencies { - compileOnly project(":server") - compileOnly project(path: xpackModule('core')) -} - -tasks.named('test').configure { enabled = false } - diff --git a/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java b/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java deleted file mode 100644 index af9a741b0aef1..0000000000000 --- a/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.plugin.freeze; - -import org.elasticsearch.action.support.ActiveShardCount; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.IndexScopedSettings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.plugins.ActionPlugin; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.protocol.xpack.frozen.FreezeRequest; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction; - -import java.util.List; -import java.util.function.Predicate; -import java.util.function.Supplier; - -import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestUtils.getAckTimeout; -import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; - -/** - * Restores the REST endpoint for freezing indices so that the JDBC tests can still freeze indices - * for testing purposes until frozen indices are no longer supported. - */ -public class FreezeIndexPlugin extends Plugin implements ActionPlugin { - - @Override - public List getRestHandlers( - Settings settings, - NamedWriteableRegistry namedWriteableRegistry, - RestController restController, - ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, - SettingsFilter settingsFilter, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier nodesInCluster, - Predicate clusterSupportsFeature - ) { - return List.of(new FreezeIndexRestEndpoint()); - } - - /** - * Used by the {@link FreezeIndexPlugin} above. 
- */ - static class FreezeIndexRestEndpoint extends BaseRestHandler { - @Override - public String getName() { - return "freeze-for-testing-only"; - } - - @Override - public List routes() { - return List.of(new Route(POST, "/{index}/_freeze")); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - boolean freeze = request.path().endsWith("/_freeze"); - FreezeRequest freezeRequest = new FreezeRequest( - getMasterNodeTimeout(request), - getAckTimeout(request), - Strings.splitStringByCommaToArray(request.param("index")) - ); - freezeRequest.indicesOptions(IndicesOptions.fromRequest(request, freezeRequest.indicesOptions())); - String waitForActiveShards = request.param("wait_for_active_shards"); - if (waitForActiveShards != null) { - freezeRequest.waitForActiveShards(ActiveShardCount.parseString(waitForActiveShards)); - } - freezeRequest.setFreeze(freeze); - return channel -> client.execute(FreezeIndexAction.INSTANCE, freezeRequest, new RestToXContentListener<>(channel)); - } - } - -} From f6d3a2cb9dea2d98c2b2d29cde205778d78e57b8 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Wed, 29 Jan 2025 10:51:02 +0000 Subject: [PATCH 188/383] Move lucene CI job messages to a dedicated notification channel (#121145) --- catalog-info.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/catalog-info.yaml b/catalog-info.yaml index b688e043eb2eb..b001eee61e699 100644 --- a/catalog-info.yaml +++ b/catalog-info.yaml @@ -123,7 +123,7 @@ spec: pipeline_file: .buildkite/pipelines/lucene-snapshot/build-snapshot.yml env: ELASTIC_SLACK_NOTIFICATIONS_ENABLED: "true" - SLACK_NOTIFICATIONS_CHANNEL: "#lucene" + SLACK_NOTIFICATIONS_CHANNEL: "#lucene-ci" SLACK_NOTIFICATIONS_ALL_BRANCHES: "true" branch_configuration: lucene_snapshot default_branch: lucene_snapshot @@ -167,7 +167,7 @@ spec: pipeline_file: .buildkite/pipelines/lucene-snapshot/update-branch.yml env: ELASTIC_SLACK_NOTIFICATIONS_ENABLED: "true" - SLACK_NOTIFICATIONS_CHANNEL: "#lucene" + SLACK_NOTIFICATIONS_CHANNEL: "#lucene-ci" SLACK_NOTIFICATIONS_ALL_BRANCHES: "true" default_branch: lucene_snapshot teams: @@ -210,7 +210,7 @@ spec: pipeline_file: .buildkite/pipelines/lucene-snapshot/run-tests.yml env: ELASTIC_SLACK_NOTIFICATIONS_ENABLED: "true" - SLACK_NOTIFICATIONS_CHANNEL: "#lucene" + SLACK_NOTIFICATIONS_CHANNEL: "#lucene-ci" SLACK_NOTIFICATIONS_ALL_BRANCHES: "true" branch_configuration: lucene_snapshot default_branch: lucene_snapshot From a744aef0da1cd8b1568444f2ce7b0c6f4bc945dd Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 29 Jan 2025 22:24:01 +1100 Subject: [PATCH 189/383] Mute org.elasticsearch.xpack.security.profile.ProfileIntegTests testProfileIndexAutoCreation #120987 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8d6d493848fd1..8335577dfe016 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -302,6 +302,9 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cluster.health/20_request_timeout/cluster health request timeout waiting for active shards} issue: https://github.com/elastic/elasticsearch/issues/121130 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testProfileIndexAutoCreation + issue: https://github.com/elastic/elasticsearch/issues/120987 # Examples: # From
9cc5f780b91afcffbfc7f6e1dbd1174da040a147 Mon Sep 17 00:00:00 2001 From: Pete Gillin Date: Wed, 29 Jan 2025 11:59:41 +0000 Subject: [PATCH 190/383] Test index deprecations when closed (#120528) This extends `IndexDeprecationChecksTests` so that it tests two cases for each check: the regular one where the index is open, and one where the index is closed (i.e. the state is changed and a block added). ES-10544 #comment Unit test added in https://github.com/elastic/elasticsearch/pull/120528 --- .../IndexDeprecationCheckerTests.java | 130 +++++++++++++++--- 1 file changed, 113 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java index dc73ba0c5bcb3..ea30c93bfb5b8 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java @@ -7,7 +7,11 @@ package org.elasticsearch.xpack.deprecation; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamMetadata; import org.elasticsearch.cluster.metadata.DataStreamOptions; @@ -38,6 +42,17 @@ public class IndexDeprecationCheckerTests extends ESTestCase { + private final IndexMetadata.State indexMetdataState; + + public IndexDeprecationCheckerTests(@Name("indexMetadataState") IndexMetadata.State indexMetdataState) { + this.indexMetdataState = indexMetdataState; + } + + @ParametersFactory + public static List createParameters() { + return List.of(new Object[] { IndexMetadata.State.OPEN }, new Object[] { IndexMetadata.State.CLOSE }); + } + private static final IndexVersion OLD_VERSION = IndexVersion.fromId(7170099); private final IndexNameExpressionResolver indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance(); @@ -48,10 +63,11 @@ public void testOldIndicesCheck() { .settings(settings(OLD_VERSION)) .numberOfShards(1) .numberOfReplicas(0) - .state(randomBoolean() ? 
IndexMetadata.State.OPEN : IndexMetadata.State.CLOSE) // does not matter if its open or closed + .state(indexMetdataState) .build(); ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) .build(); DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.CRITICAL, @@ -72,7 +88,10 @@ public void testOldIndicesCheck() { public void testOldTransformIndicesCheck() { var checker = new IndexDeprecationChecker(indexNameExpressionResolver, Map.of("test", List.of("test-transform"))); var indexMetadata = indexMetadata("test", OLD_VERSION); - var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); var expected = new DeprecationIssue( DeprecationIssue.Level.CRITICAL, "Old index with a compatibility version < 9.0", @@ -91,7 +110,10 @@ public void testOldIndicesCheckWithMultipleTransforms() { Map.of("test", List.of("test-transform1", "test-transform2")) ); var indexMetadata = indexMetadata("test", OLD_VERSION); - var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); var expected = new DeprecationIssue( DeprecationIssue.Level.CRITICAL, "Old index with a compatibility version < 9.0", @@ -113,6 +135,7 @@ public void testMultipleOldIndicesCheckWithTransforms() { var indexMetadata2 = indexMetadata("test2", OLD_VERSION); var clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder().put(indexMetadata1, true).put(indexMetadata2, true)) + .blocks(clusterBlocksForIndices(indexMetadata1, indexMetadata2)) .build(); var expected = Map.of( "test1", @@ -143,7 +166,12 @@ public void testMultipleOldIndicesCheckWithTransforms() { } private IndexMetadata indexMetadata(String indexName, IndexVersion indexVersion) { - return IndexMetadata.builder(indexName).settings(settings(indexVersion)).numberOfShards(1).numberOfReplicas(0).build(); + return IndexMetadata.builder(indexName) + .settings(settings(indexVersion)) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); } public void testOldIndicesCheckDataStreamIndex() { @@ -151,6 +179,7 @@ public void testOldIndicesCheckDataStreamIndex() { .settings(settings(OLD_VERSION).put("index.hidden", true)) .numberOfShards(1) .numberOfReplicas(0) + .state(indexMetdataState) .build(); DataStream dataStream = new DataStream( randomAlphaOfLength(10), @@ -182,6 +211,7 @@ public void testOldIndicesCheckDataStreamIndex() { ) ) ) + .blocks(clusterBlocksForIndices(indexMetadata)) .build(); Map> issuesByIndex = checker.check( clusterState, @@ -193,9 +223,15 @@ public void testOldIndicesCheckDataStreamIndex() { public void testOldIndicesCheckSnapshotIgnored() { Settings.Builder settings = settings(OLD_VERSION); settings.put(INDEX_STORE_TYPE_SETTING.getKey(), SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); + IndexMetadata indexMetadata = 
IndexMetadata.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) .build(); Map> issuesByIndex = checker.check( @@ -207,9 +243,15 @@ public void testOldIndicesCheckSnapshotIgnored() { public void testOldIndicesIgnoredWarningCheck() { Settings.Builder settings = settings(OLD_VERSION).put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); + IndexMetadata indexMetadata = IndexMetadata.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) .build(); DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.WARNING, @@ -231,8 +273,16 @@ public void testTranslogRetentionSettings() { Settings.Builder settings = settings(IndexVersion.current()); settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), randomPositiveTimeValue()); settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), between(1, 1024) + "b"); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + IndexMetadata indexMetadata = IndexMetadata.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); Map> issuesByIndex = checker.check( state, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) @@ -266,8 +316,16 @@ public void testDefaultTranslogRetentionSettings() { settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), between(1, 1024) + "b"); settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false); } - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + IndexMetadata indexMetadata = IndexMetadata.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); Map> issuesByIndex = checker.check( state, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) @@ -278,8 +336,16 @@ public void testDefaultTranslogRetentionSettings() { public void testIndexDataPathSetting() { Settings.Builder settings = settings(IndexVersion.current()); settings.put(IndexMetadata.INDEX_DATA_PATH_SETTING.getKey(), createTempDir()); - IndexMetadata indexMetadata = 
IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + IndexMetadata indexMetadata = IndexMetadata.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); Map> issuesByIndex = checker.check( state, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) @@ -304,8 +370,16 @@ public void testIndexDataPathSetting() { public void testSimpleFSSetting() { Settings.Builder settings = settings(IndexVersion.current()); settings.put(INDEX_STORE_TYPE_SETTING.getKey(), "simplefs"); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + IndexMetadata indexMetadata = IndexMetadata.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); Map> issuesByIndex = checker.check( state, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) @@ -343,8 +417,12 @@ public void testCamelCaseDeprecation() { .numberOfShards(1) .numberOfReplicas(1) .putMapping(simpleMapping) + .state(indexMetdataState) + .build(); + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(simpleIndex, true)) + .blocks(clusterBlocksForIndices(simpleIndex)) .build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(simpleIndex, true)).build(); Map> issuesByIndex = checker.check( state, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) @@ -366,8 +444,16 @@ public void testLegacyTierIndex() { String filter = randomFrom("include", "exclude", "require"); String tier = randomFrom("hot", "warm", "cold", "frozen"); settings.put("index.routing.allocation." + filter + ".data", tier); - IndexMetadata indexMetadata = IndexMetadata.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(Metadata.builder().put(indexMetadata, true)).build(); + IndexMetadata indexMetadata = IndexMetadata.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); + ClusterState state = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); Map> issuesByIndex = checker.check( state, new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS) @@ -388,4 +474,14 @@ public void testLegacyTierIndex() { ) ); } + + private ClusterBlocks clusterBlocksForIndices(IndexMetadata... 
indicesMetadatas) { + ClusterBlocks.Builder builder = ClusterBlocks.builder(); + for (IndexMetadata indexMetadata : indicesMetadatas) { + if (indexMetadata.getState() == IndexMetadata.State.CLOSE) { + builder.addIndexBlock(indexMetadata.getIndex().getName(), MetadataIndexStateService.INDEX_CLOSED_BLOCK); + } + } + return builder.build(); + } } From e1207398c72fe930dceab40e6d995ae220ad0c71 Mon Sep 17 00:00:00 2001 From: "Jihyun(Brian) Jeong" Date: Wed, 29 Jan 2025 23:03:11 +1100 Subject: [PATCH 191/383] (Doc+) Clarify dimension field requirements for time_series aggregation (#119442) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * (Doc+) Clarify dimension field requirements for time_series aggregation 👋 howdy, team! This PR adds a note explaining that time series indices require: - index.mode set to "time_series" - at least one dimension field with time_series_dimension: true - a routing_path array listing those dimension fields Without these settings, the time_series aggregation may return empty buckets or behave unexpectedly. By emphasizing the dimension field requirement, we help users configure their time series indices correctly and see meaningful aggregation results. * Apply suggestions from code review Co-authored-by: shainaraskas <58563081+shainaraskas@users.noreply.github.com> --------- Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Co-authored-by: shainaraskas <58563081+shainaraskas@users.noreply.github.com> --- .../aggregations/bucket/time-series-aggregation.asciidoc | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc b/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc index 1fb527cd645f0..86e8355b69882 100644 --- a/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc @@ -6,9 +6,14 @@ preview::[] -The time series aggregation queries data created using a time series index. This is typically data such as metrics +The time series aggregation queries data created using a <>. This is typically data such as metrics or other data streams with a time component, and requires creating an index using the time series mode. +[NOTE] +==== +Refer to the <> to learn more about the key differences from regular data streams. +==== + ////////////////////////// Creating a time series mapping From 15b93fefdb781ba4bd58a93af51a7356c4b894b5 Mon Sep 17 00:00:00 2001 From: Valeriy Khakhutskyy <1292899+valeriy42@users.noreply.github.com> Date: Wed, 29 Jan 2025 13:03:42 +0100 Subject: [PATCH 192/383] Extend documentation note. (#121146) --- docs/reference/ml/ml-shared.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index af384c2c90011..590a1a3089f90 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -918,7 +918,7 @@ An array of index names. Wildcards are supported. For example: `["it_ops_metrics", "server*"]`. + -- -NOTE: If any indices are in remote clusters then the {ml} nodes need to have the +NOTE: If any indices are in remote clusters then the master nodes and the {ml} nodes need to have the `remote_cluster_client` role. 
-- From 60935e82c1b78115302876dfe9420e90b79c3df7 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 29 Jan 2025 13:20:35 +0100 Subject: [PATCH 193/383] ESQL: Implement a MetricsAware interface (#121074) * ESQL: Implement a MetricsAware interface (#120527) This implements an interface that exports the names of the plan nodes and functions that need to be counted in the metrics. Also, the metrics are now counted from within the parser. This should allow correct accounting for the cases where some nodes can appear either standalone or as part of other nodes' children (like Aggregate being a child of INLINESTATS, so no STATS counting should occur). The function counting now also validates that behind a name there is actually a function registered. Closes #115992. (cherry picked from commit a4482d4c4c2d24418553510afa6a11a5f316983a) * Drop the HashSet gating when counting commands The telemetry accounting is no longer done in just one place in the parser, but split, so that no HashSet is required to discard duplicate accounting of the same node. This lowers the memory requirements. --- docs/changelog/121074.yaml                   |  5 ++ .../xpack/esql/EsqlTestUtils.java            |  2 +- .../xpack/esql/action/TelemetryIT.java       | 72 +++++++++++++++---- .../xpack/esql/analysis/Analyzer.java        |  8 +-- .../xpack/esql/analysis/Verifier.java        |  4 +- .../esql/capabilities/TelemetryAware.java    | 23 ++++++ .../xpack/esql/execution/PlanExecutor.java   | 20 +++--- .../function/EsqlFunctionRegistry.java       | 15 ++++ .../xpack/esql/parser/AstBuilder.java        |  4 +- .../xpack/esql/parser/EsqlParser.java        | 13 +++- .../xpack/esql/parser/ExpressionBuilder.java | 39 ++++++---- .../xpack/esql/parser/LogicalPlanBuilder.java | 25 ++++--- .../xpack/esql/plan/logical/Aggregate.java   |  5 +- .../xpack/esql/plan/logical/Dissect.java     |  8 +-- .../xpack/esql/plan/logical/Drop.java        |  7 +- .../xpack/esql/plan/logical/Enrich.java      |  7 +- .../xpack/esql/plan/logical/EsRelation.java  |  5 -- .../xpack/esql/plan/logical/Eval.java        |  8 +-- .../xpack/esql/plan/logical/Explain.java     |  8 +-- .../xpack/esql/plan/logical/Filter.java      |  5 +- .../xpack/esql/plan/logical/Grok.java        |  8 +-- .../xpack/esql/plan/logical/InlineStats.java |  8 +-- .../xpack/esql/plan/logical/Keep.java        |  8 +-- .../xpack/esql/plan/logical/Limit.java       |  8 +-- .../xpack/esql/plan/logical/LogicalPlan.java |  2 - .../xpack/esql/plan/logical/Lookup.java      |  8 +-- .../xpack/esql/plan/logical/MvExpand.java    |  5 +- .../xpack/esql/plan/logical/OrderBy.java     |  5 +- .../xpack/esql/plan/logical/Project.java     |  8 --- .../xpack/esql/plan/logical/Rename.java      |  8 +-- .../xpack/esql/plan/logical/Row.java         |  8 +-- .../xpack/esql/plan/logical/TopN.java        |  7 -- .../esql/plan/logical/UnresolvedRelation.java | 19 ++++- .../xpack/esql/plan/logical/join/Join.java   |  5 -- .../esql/plan/logical/join/LookupJoin.java   |  8 ++- .../esql/plan/logical/join/StubRelation.java |  5 -- .../plan/logical/local/LocalRelation.java    |  8 --- .../esql/plan/logical/show/ShowInfo.java     |  5 +- .../xpack/esql/session/EsqlSession.java      | 11 ++- .../xpack/esql/stats/PlanningMetrics.java    | 41 ----------- .../{stats => telemetry}/FeatureMetric.java  |  2 +- .../esql/{stats => telemetry}/Metrics.java   |  2 +- .../xpack/esql/telemetry/PlanTelemetry.java  | 59 +++++++++++++++ .../PlanTelemetryManager.java}               | 12 ++-- .../{stats => telemetry}/QueryMetric.java    |  2 +- .../elasticsearch/xpack/esql/CsvTests.java   |  4 +- .../function/CheckLicenseTests.java          |  2 +- .../LocalLogicalPlanOptimizerTests.java      |  5 -- .../LocalPhysicalPlanOptimizerTests.java     |  2 +-
.../esql/planner/QueryTranslatorTests.java | 2 +- .../PlanExecutorMetricsTests.java | 2 +- .../VerifierMetricsTests.java | 36 +++++----- 52 files changed, 332 insertions(+), 266 deletions(-) create mode 100644 docs/changelog/121074.yaml create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{stats => telemetry}/FeatureMetric.java (98%) rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{stats => telemetry}/Metrics.java (99%) create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{stats/PlanningMetricsManager.java => telemetry/PlanTelemetryManager.java} (89%) rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{stats => telemetry}/QueryMetric.java (93%) rename x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/{stats => telemetry}/PlanExecutorMetricsTests.java (99%) rename x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/{stats => telemetry}/VerifierMetricsTests.java (93%) diff --git a/docs/changelog/121074.yaml b/docs/changelog/121074.yaml new file mode 100644 index 0000000000000..6e07ab295ea66 --- /dev/null +++ b/docs/changelog/121074.yaml @@ -0,0 +1,5 @@ +pr: 121074 +summary: Implement a `MetricsAware` interface +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 217bf6692aa27..919a963f7fc98 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -74,8 +74,8 @@ import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; -import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchStats; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import org.elasticsearch.xpack.versionfield.Version; import org.junit.Assert; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java index 25603acece3cb..7036216ebbbcf 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java @@ -20,7 +20,7 @@ import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; -import org.elasticsearch.xpack.esql.stats.PlanningMetricsManager; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetryManager; import org.junit.Before; import java.util.Collection; @@ -113,6 +113,41 @@ public static Iterable parameters() { Map.ofEntries(Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)), true ) }, + new Object[] { + new Test( + // Using the `::` cast operator and a function alias + """ + 
ROW host = "1.1.1.1" + | EVAL ip = host::ip::string, y = to_str(host) + """, + Map.ofEntries(Map.entry("ROW", 1), Map.entry("EVAL", 1)), + Map.ofEntries(Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)), + true + ) }, + new Object[] { + new Test( + // Using the `::` cast operator and a function alias + """ + FROM idx + | EVAL ip = host::ip::string, y = to_str(host) + """, + Map.ofEntries(Map.entry("FROM", 1), Map.entry("EVAL", 1)), + Map.ofEntries(Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)), + true + ) }, + new Object[] { + new Test( + """ + FROM idx + | EVAL y = to_str(host) + | LOOKUP JOIN lookup_idx ON host + """, + Build.current().isSnapshot() + ? Map.ofEntries(Map.entry("FROM", 1), Map.entry("EVAL", 1), Map.entry("LOOKUP JOIN", 1)) + : Collections.emptyMap(), + Build.current().isSnapshot() ? Map.ofEntries(Map.entry("TO_STRING", 1)) : Collections.emptyMap(), + Build.current().isSnapshot() + ) }, new Object[] { new Test( "METRICS idx | LIMIT 10", @@ -123,9 +158,7 @@ public static Iterable parameters() { new Object[] { new Test( "METRICS idx max(id) BY host | LIMIT 10", - Build.current().isSnapshot() - ? Map.ofEntries(Map.entry("METRICS", 1), Map.entry("LIMIT", 1), Map.entry("FROM TS", 1)) - : Collections.emptyMap(), + Build.current().isSnapshot() ? Map.ofEntries(Map.entry("METRICS", 1), Map.entry("LIMIT", 1)) : Collections.emptyMap(), Build.current().isSnapshot() ? Map.ofEntries(Map.entry("MAX", 1)) : Collections.emptyMap(), Build.current().isSnapshot() ) } @@ -138,7 +171,7 @@ public static Iterable parameters() { // | EVAL ip = to_ip(host), x = to_string(host), y = to_string(host) // | INLINESTATS max(id) // """, - // Build.current().isSnapshot() ? Map.of("FROM", 1, "EVAL", 1, "INLINESTATS", 1, "STATS", 1) : Collections.emptyMap(), + // Build.current().isSnapshot() ? Map.of("FROM", 1, "EVAL", 1, "INLINESTATS", 1) : Collections.emptyMap(), // Build.current().isSnapshot() // ? 
Map.ofEntries(Map.entry("MAX", 1), Map.entry("TO_IP", 1), Map.entry("TO_STRING", 2)) // : Collections.emptyMap(), @@ -186,19 +219,19 @@ private static void testQuery( client(dataNode.getName()).execute(EsqlQueryAction.INSTANCE, request, ActionListener.running(() -> { try { // test total commands used - final List commandMeasurementsAll = measurements(plugin, PlanningMetricsManager.FEATURE_METRICS_ALL); + final List commandMeasurementsAll = measurements(plugin, PlanTelemetryManager.FEATURE_METRICS_ALL); assertAllUsages(expectedCommands, commandMeasurementsAll, iteration, success); // test num of queries using a command - final List commandMeasurements = measurements(plugin, PlanningMetricsManager.FEATURE_METRICS); + final List commandMeasurements = measurements(plugin, PlanTelemetryManager.FEATURE_METRICS); assertUsageInQuery(expectedCommands, commandMeasurements, iteration, success); // test total functions used - final List functionMeasurementsAll = measurements(plugin, PlanningMetricsManager.FUNCTION_METRICS_ALL); + final List functionMeasurementsAll = measurements(plugin, PlanTelemetryManager.FUNCTION_METRICS_ALL); assertAllUsages(expectedFunctions, functionMeasurementsAll, iteration, success); // test number of queries using a function - final List functionMeasurements = measurements(plugin, PlanningMetricsManager.FUNCTION_METRICS); + final List functionMeasurements = measurements(plugin, PlanTelemetryManager.FUNCTION_METRICS); assertUsageInQuery(expectedFunctions, functionMeasurements, iteration, success); } finally { latch.countDown(); @@ -216,8 +249,8 @@ private static void assertAllUsages(Map expected, List found = featureNames(metrics); assertThat(found, is(expected.keySet())); for (Measurement metric : metrics) { - assertThat(metric.attributes().get(PlanningMetricsManager.SUCCESS), is(success)); - String featureName = (String) metric.attributes().get(PlanningMetricsManager.FEATURE_NAME); + assertThat(metric.attributes().get(PlanTelemetryManager.SUCCESS), is(success)); + String featureName = (String) metric.attributes().get(PlanTelemetryManager.FEATURE_NAME); assertThat(metric.getLong(), is(iteration * expected.get(featureName))); } } @@ -227,7 +260,7 @@ private static void assertUsageInQuery(Map expected, List measurements(TestTelemetryPlugin plugin, String private static Set featureNames(List functionMeasurements) { return functionMeasurements.stream() - .map(x -> x.attributes().get(PlanningMetricsManager.FEATURE_NAME)) + .map(x -> x.attributes().get(PlanTelemetryManager.FEATURE_NAME)) .map(String.class::cast) .collect(Collectors.toSet()); } @@ -268,6 +301,19 @@ private static void loadData(String nodeName) { } client().admin().indices().prepareRefresh("idx").get(); + + assertAcked( + client().admin() + .indices() + .prepareCreate("lookup_idx") + .setSettings( + Settings.builder() + .put("index.routing.allocation.require._name", nodeName) + .put("index.mode", "lookup") + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + ) + .setMapping("ip", "type=ip", "host", "type=keyword") + ); } private DiscoveryNode randomDataNode() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index fd98b2717eae0..1351b5ce51f44 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -92,7 +92,7 @@ import 
org.elasticsearch.xpack.esql.rule.Rule; import org.elasticsearch.xpack.esql.rule.RuleExecutor; import org.elasticsearch.xpack.esql.session.Configuration; -import org.elasticsearch.xpack.esql.stats.FeatureMetric; +import org.elasticsearch.xpack.esql.telemetry.FeatureMetric; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.time.Duration; @@ -133,7 +133,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.core.type.DataType.isTemporalAmount; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.LIMIT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.maybeParseTemporalAmount; /** @@ -220,7 +220,7 @@ private LogicalPlan resolveIndex(UnresolvedRelation plan, IndexResolution indexR plan.metadataFields(), plan.indexMode(), indexResolutionMessage, - plan.commandName() + plan.telemetryLabel() ); } IndexPattern table = plan.indexPattern(); @@ -233,7 +233,7 @@ private LogicalPlan resolveIndex(UnresolvedRelation plan, IndexResolution indexR plan.metadataFields(), plan.indexMode(), "invalid [" + table + "] resolution to [" + indexResolution + "]", - plan.commandName() + plan.telemetryLabel() ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index b59a112b1adb6..c2663650685eb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -32,8 +32,8 @@ import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.Project; -import org.elasticsearch.xpack.esql.stats.FeatureMetric; -import org.elasticsearch.xpack.esql.stats.Metrics; +import org.elasticsearch.xpack.esql.telemetry.FeatureMetric; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import java.util.ArrayList; import java.util.BitSet; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java new file mode 100644 index 0000000000000..9116c18b7a9bc --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TelemetryAware.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.capabilities; + +import java.util.Locale; + +/** + * Interface for plan nodes that need to be accounted in the statistics + */ +public interface TelemetryAware { + + /** + * @return the label reported in the telemetry data. Only needs to be overwritten if the label doesn't match the class name. 
+ */ + default String telemetryLabel() { + return getClass().getSimpleName().toUpperCase(Locale.ROOT); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index 94913581f696d..81f63fd9d37a6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -26,10 +26,10 @@ import org.elasticsearch.xpack.esql.session.IndexResolver; import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; import org.elasticsearch.xpack.esql.session.Result; -import org.elasticsearch.xpack.esql.stats.Metrics; -import org.elasticsearch.xpack.esql.stats.PlanningMetrics; -import org.elasticsearch.xpack.esql.stats.PlanningMetricsManager; -import org.elasticsearch.xpack.esql.stats.QueryMetric; +import org.elasticsearch.xpack.esql.telemetry.Metrics; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetryManager; +import org.elasticsearch.xpack.esql.telemetry.QueryMetric; import static org.elasticsearch.action.ActionListener.wrap; @@ -41,7 +41,7 @@ public class PlanExecutor { private final Mapper mapper; private final Metrics metrics; private final Verifier verifier; - private final PlanningMetricsManager planningMetricsManager; + private final PlanTelemetryManager planTelemetryManager; public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry, XPackLicenseState licenseState) { this.indexResolver = indexResolver; @@ -50,7 +50,7 @@ public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry, XP this.mapper = new Mapper(); this.metrics = new Metrics(functionRegistry); this.verifier = new Verifier(metrics, licenseState); - this.planningMetricsManager = new PlanningMetricsManager(meterRegistry); + this.planTelemetryManager = new PlanTelemetryManager(meterRegistry); } public void esql( @@ -65,7 +65,7 @@ public void esql( QueryBuilderResolver queryBuilderResolver, ActionListener listener ) { - final PlanningMetrics planningMetrics = new PlanningMetrics(); + final PlanTelemetry planTelemetry = new PlanTelemetry(functionRegistry); final var session = new EsqlSession( sessionId, cfg, @@ -76,7 +76,7 @@ public void esql( new LogicalPlanOptimizer(new LogicalOptimizerContext(cfg, foldContext)), mapper, verifier, - planningMetrics, + planTelemetry, indicesExpressionGrouper, queryBuilderResolver ); @@ -84,12 +84,12 @@ public void esql( metrics.total(clientId); ActionListener executeListener = wrap(x -> { - planningMetricsManager.publish(planningMetrics, true); + planTelemetryManager.publish(planTelemetry, true); listener.onResponse(x); }, ex -> { // TODO when we decide if we will differentiate Kibana from REST, this String value will likely come from the request metrics.failed(clientId); - planningMetricsManager.publish(planningMetrics, false); + planTelemetryManager.publish(planTelemetry, false); listener.onFailure(ex); }); // Wrap it in a listener so that if we have any exceptions during execution, the listener picks it up diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index d1622daaa5e33..a614a473ebe41 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -224,6 +224,7 @@ public class EsqlFunctionRegistry { // it has with the alias name associated to the FunctionDefinition instance private final Map defs = new LinkedHashMap<>(); private final Map aliases = new HashMap<>(); + private final Map, String> names = new HashMap<>(); private SnapshotFunctionRegistry snapshotRegistry = null; @@ -258,6 +259,12 @@ public boolean functionExists(String functionName) { return defs.containsKey(functionName); } + public String functionName(Class clazz) { + String name = names.get(clazz); + Check.notNull(name, "Cannot find function by class {}", clazz); + return name; + } + public Collection listFunctions() { // It is worth double checking if we need this copy. These are immutable anyway. return defs.values(); @@ -758,6 +765,14 @@ void register(FunctionDefinition... functions) { } aliases.put(alias, f.name()); } + Check.isTrue( + names.containsKey(f.clazz()) == false, + "function type [{}} is registered twice with names [{}] and [{}]", + f.clazz(), + names.get(f.clazz()), + f.name() + ); + names.put(f.clazz(), f.name()); } // sort the temporary map by key name and add it to the global map of functions defs.putAll( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java index 3b39e6a9d1fdb..ec23783fe1a2c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AstBuilder.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.esql.parser; public class AstBuilder extends LogicalPlanBuilder { - public AstBuilder(QueryParams params) { - super(params); + public AstBuilder(ParsingContext context) { + super(context); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java index 9538e3ba495db..5912f1fe58bcd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java @@ -18,7 +18,9 @@ import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.xpack.esql.core.util.StringUtils; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import java.util.BitSet; import java.util.function.BiFunction; @@ -52,20 +54,27 @@ public void setEsqlConfig(EsqlConfig config) { this.config = config; } + // testing utility public LogicalPlan createStatement(String query) { return createStatement(query, new QueryParams()); } + // testing utility public LogicalPlan createStatement(String query, QueryParams params) { + return createStatement(query, params, new PlanTelemetry(new EsqlFunctionRegistry())); + } + + public LogicalPlan createStatement(String query, QueryParams params, PlanTelemetry metrics) { if (log.isDebugEnabled()) { log.debug("Parsing as statement: {}", query); } - return invokeParser(query, params, EsqlBaseParser::singleStatement, AstBuilder::plan); + return 
invokeParser(query, params, metrics, EsqlBaseParser::singleStatement, AstBuilder::plan); } private T invokeParser( String query, QueryParams params, + PlanTelemetry metrics, Function parseFunction, BiFunction result ) { @@ -99,7 +108,7 @@ private T invokeParser( log.trace("Parse tree: {}", tree.toStringTree()); } - return result.apply(new AstBuilder(params), tree); + return result.apply(new AstBuilder(new ExpressionBuilder.ParsingContext(params, metrics)), tree); } catch (StackOverflowError e) { throw new ParsingException("ESQL statement is too large, causing stack overflow when generating the parsing tree: [{}]", query); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 114fcda1e634a..78c3044257f9f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -62,6 +62,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.math.BigInteger; @@ -115,10 +116,12 @@ public abstract class ExpressionBuilder extends IdentifierBuilder { */ public static final int MAX_EXPRESSION_DEPTH = 400; - protected final QueryParams params; + protected final ParsingContext context; - ExpressionBuilder(QueryParams params) { - this.params = params; + public record ParsingContext(QueryParams params, PlanTelemetry telemetry) {} + + ExpressionBuilder(ParsingContext context) { + this.context = context; } protected Expression expression(ParseTree ctx) { @@ -621,7 +624,9 @@ public Expression visitFunctionExpression(EsqlBaseParser.FunctionExpressionConte @Override public String visitFunctionName(EsqlBaseParser.FunctionNameContext ctx) { - return visitIdentifierOrParameter(ctx.identifierOrParameter()); + var name = visitIdentifierOrParameter(ctx.identifierOrParameter()); + context.telemetry().function(name); + return name; } @Override @@ -683,7 +688,9 @@ private Expression castToType(Source source, ParseTree parseTree, EsqlBaseParser throw new ParsingException(source, "Unsupported conversion to type [{}]", dataType); } Expression expr = expression(parseTree); - return converterToFactory.apply(source, expr); + var convertFunction = converterToFactory.apply(source, expr); + context.telemetry().function(convertFunction.getClass()); + return convertFunction; } @Override @@ -915,10 +922,10 @@ QueryParam paramByToken(TerminalNode node) { return null; } Token token = node.getSymbol(); - if (params.contains(token) == false) { + if (context.params().contains(token) == false) { throw new ParsingException(source(node), "Unexpected parameter"); } - return params.get(token); + return context.params().get(token); } QueryParam paramByNameOrPosition(TerminalNode node) { @@ -929,26 +936,28 @@ QueryParam paramByNameOrPosition(TerminalNode node) { String nameOrPosition = token.getText().substring(1); if (isInteger(nameOrPosition)) { int index = Integer.parseInt(nameOrPosition); - if (params.get(index) == null) { + if (context.params().get(index) == null) { String message = ""; - int np 
= params.size(); + int np = context.params().size(); if (np > 0) { message = ", did you mean " + (np == 1 ? "position 1?" : "any position between 1 and " + np + "?"); } - params.addParsingError(new ParsingException(source(node), "No parameter is defined for position " + index + message)); + context.params() + .addParsingError(new ParsingException(source(node), "No parameter is defined for position " + index + message)); } - return params.get(index); + return context.params().get(index); } else { - if (params.contains(nameOrPosition) == false) { + if (context.params().contains(nameOrPosition) == false) { String message = ""; - List potentialMatches = StringUtils.findSimilar(nameOrPosition, params.namedParams().keySet()); + List potentialMatches = StringUtils.findSimilar(nameOrPosition, context.params().namedParams().keySet()); if (potentialMatches.size() > 0) { message = ", did you mean " + (potentialMatches.size() == 1 ? "[" + potentialMatches.get(0) + "]?" : "any of " + potentialMatches + "?"); } - params.addParsingError(new ParsingException(source(node), "Unknown query parameter [" + nameOrPosition + "]" + message)); + context.params() + .addParsingError(new ParsingException(source(node), "Unknown query parameter [" + nameOrPosition + "]" + message)); } - return params.get(nameOrPosition); + return context.params().get(nameOrPosition); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 7ddd3dafd2784..46c1de31bb471 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -18,6 +18,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ -94,15 +95,15 @@ interface PlanFactory extends Function {} */ public static final int MAX_QUERY_DEPTH = 500; - public LogicalPlanBuilder(QueryParams params) { - super(params); + public LogicalPlanBuilder(ParsingContext context) { + super(context); } private int queryDepth = 0; protected LogicalPlan plan(ParseTree ctx) { LogicalPlan p = ParserUtils.typedParsing(this, ctx, LogicalPlan.class); - var errors = this.params.parsingErrors(); + var errors = this.context.params().parsingErrors(); if (errors.hasNext() == false) { return p; } else { @@ -126,7 +127,9 @@ protected List plans(List ctxs) { @Override public LogicalPlan visitSingleStatement(EsqlBaseParser.SingleStatementContext ctx) { - return plan(ctx.query()); + var plan = plan(ctx.query()); + telemetryAccounting(plan); + return plan; } @Override @@ -141,6 +144,7 @@ public LogicalPlan visitCompositeQuery(EsqlBaseParser.CompositeQueryContext ctx) } try { LogicalPlan input = plan(ctx.query()); + telemetryAccounting(input); PlanFactory makePlan = typedParsing(this, ctx.processingCommand(), PlanFactory.class); return makePlan.apply(input); } finally { @@ -148,6 +152,13 @@ public LogicalPlan visitCompositeQuery(EsqlBaseParser.CompositeQueryContext ctx) } } + private LogicalPlan telemetryAccounting(LogicalPlan node) { + if (node instanceof TelemetryAware ma) { + 
this.context.telemetry().command(ma); + } + return node; + } + @Override public PlanFactory visitEvalCommand(EsqlBaseParser.EvalCommandContext ctx) { return p -> new Eval(source(ctx), p, visitFields(ctx.fields())); @@ -482,8 +493,7 @@ public LogicalPlan visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) false, List.of(new MetadataAttribute(source, MetadataAttribute.TSID_FIELD, DataType.KEYWORD, false)), IndexMode.TIME_SERIES, - null, - "FROM TS" + null ); return new Aggregate(source, relation, Aggregate.AggregateType.METRICS, stats.groupings, stats.aggregates); } @@ -543,8 +553,7 @@ public PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { false, emptyList(), IndexMode.LOOKUP, - null, - "???" + null ); var condition = ctx.joinCondition(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java index 0111d23fac281..5c40bfce32064 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -39,7 +40,7 @@ import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; import static org.elasticsearch.xpack.esql.plan.logical.Filter.checkFilterConditionDataType; -public class Aggregate extends UnaryPlan implements PostAnalysisVerificationAware { +public class Aggregate extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "Aggregate", @@ -142,7 +143,7 @@ public List aggregates() { } @Override - public String commandName() { + public String telemetryLabel() { return switch (aggregateType) { case STANDARD -> "STATS"; case METRICS -> "METRICS"; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java index a83e102e51005..9200850b2f9db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.dissect.DissectParser; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; @@ -25,7 +26,7 @@ import java.util.List; import java.util.Objects; -public class Dissect extends RegexExtract { +public class Dissect extends RegexExtract implements TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Dissect", 
Dissect::new); private final Parser parser; @@ -123,11 +124,6 @@ public boolean equals(Object o) { return Objects.equals(parser, dissect.parser); } - @Override - public String commandName() { - return "DISSECT"; - } - @Override public int hashCode() { return Objects.hash(super.hashCode(), parser); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java index add5a2d576c00..483c3508013ab 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -16,7 +17,7 @@ import java.util.List; import java.util.Objects; -public class Drop extends UnaryPlan { +public class Drop extends UnaryPlan implements TelemetryAware { private final List removals; public Drop(Source source, LogicalPlan child, List removals) { @@ -38,10 +39,6 @@ public List removals() { return removals; } - public String commandName() { - return "DROP"; - } - @Override public boolean expressionsResolved() { return Resolvables.resolved(removals); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java index 9b81060349815..4e9fc87318029 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java @@ -18,6 +18,7 @@ import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -48,7 +49,7 @@ import static org.elasticsearch.xpack.esql.core.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; -public class Enrich extends UnaryPlan implements GeneratingPlan, PostAnalysisPlanVerificationAware { +public class Enrich extends UnaryPlan implements GeneratingPlan, PostAnalysisPlanVerificationAware, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "Enrich", @@ -202,10 +203,6 @@ protected AttributeSet computeReferences() { return matchField.references(); } - public String commandName() { - return "ENRICH"; - } - @Override public boolean expressionsResolved() { return policyName.resolved() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java index 90b3aa8625087..448085df1e831 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java @@ -172,11 +172,6 @@ public Set concreteIndices() { return indexNameWithModes.keySet(); } - @Override - public String commandName() { - return "FROM"; - } - @Override public boolean expressionsResolved() { // For unresolved expressions to exist in EsRelation is fine, as long as they are not used in later operations diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java index cbd79011032df..7c437dac03409 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -37,7 +38,7 @@ import static org.elasticsearch.xpack.esql.core.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; -public class Eval extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware { +public class Eval extends UnaryPlan implements GeneratingPlan, PostAnalysisVerificationAware, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Eval", Eval::new); private final List fields; @@ -131,11 +132,6 @@ private List renameAliases(List originalAttributes, List n return newFieldsWithUpdatedRefs; } - @Override - public String commandName() { - return "EVAL"; - } - @Override public boolean expressionsResolved() { return Resolvables.resolved(fields); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java index 38e7c19522df6..bd49ed04881cc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -17,7 +18,7 @@ import java.util.List; import java.util.Objects; -public class Explain extends LeafPlan { +public class Explain extends LeafPlan implements TelemetryAware { public enum Type { PARSED, @@ -69,11 +70,6 @@ public List output() { ); } - @Override - public String commandName() { - return "EXPLAIN"; - } - @Override public boolean expressionsResolved() { return true; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java index 0fae5e5831fc7..6931c320007fe 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -28,7 +29,7 @@ * {@code SELECT x FROM y WHERE z ..} the "WHERE" clause is a Filter. A * {@code Filter} has a "condition" Expression that does the filtering. */ -public class Filter extends UnaryPlan implements PostAnalysisVerificationAware { +public class Filter extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Filter", Filter::new); private final Expression condition; @@ -69,7 +70,7 @@ public Expression condition() { } @Override - public String commandName() { + public String telemetryLabel() { return "WHERE"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java index fcfd1ac0f04da..1fab2cbecd034 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java @@ -15,6 +15,7 @@ import org.elasticsearch.grok.GrokCaptureType; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; @@ -31,7 +32,7 @@ import java.util.Objects; import java.util.stream.Collectors; -public class Grok extends RegexExtract { +public class Grok extends RegexExtract implements TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Grok", Grok::readFrom); public record Parser(String pattern, org.elasticsearch.grok.Grok grok) { @@ -148,11 +149,6 @@ public boolean equals(Object o) { return Objects.equals(parser, grok.parser); } - @Override - public String commandName() { - return "GROK"; - } - @Override public int hashCode() { return Objects.hash(super.hashCode(), parser); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java index 4211f8a0d45b6..527ba28d377f1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import 
org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; @@ -36,7 +37,7 @@ * underlying aggregate. *

*/ -public class InlineStats extends UnaryPlan implements NamedWriteable, SurrogateLogicalPlan { +public class InlineStats extends UnaryPlan implements NamedWriteable, SurrogateLogicalPlan, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( LogicalPlan.class, "InlineStats", @@ -80,11 +81,6 @@ public Aggregate aggregate() { return aggregate; } - @Override - public String commandName() { - return "INLINESTATS"; - } - @Override public boolean expressionsResolved() { return aggregate.expressionsResolved(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java index 4c03d68e6e6f7..67108afb94668 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.plan.logical; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -14,7 +15,7 @@ import java.util.List; import java.util.Objects; -public class Keep extends Project { +public class Keep extends Project implements TelemetryAware { public Keep(Source source, LogicalPlan child, List projections) { super(source, child, projections); @@ -44,9 +45,4 @@ public int hashCode() { public boolean equals(Object obj) { return super.equals(obj); } - - @Override - public String commandName() { - return "KEEP"; - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java index 09879e47859c9..a59433e94f965 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -17,7 +18,7 @@ import java.io.IOException; import java.util.Objects; -public class Limit extends UnaryPlan { +public class Limit extends UnaryPlan implements TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Limit", Limit::new); private final Expression limit; @@ -100,11 +101,6 @@ public Limit withDuplicated(boolean duplicated) { return new Limit(source(), limit, child(), duplicated); } - @Override - public String commandName() { - return "LIMIT"; - } - @Override public boolean expressionsResolved() { return limit.resolved(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java index e845c25bd3b32..ac4baea8bc853 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java @@ -75,8 +75,6 @@ public boolean resolved() { return lazyResolved; } - public abstract String commandName(); - public abstract boolean expressionsResolved(); @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java index 6e7f421003292..1c05ceb124529 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -31,7 +32,7 @@ * Looks up values from the associated {@code tables}. * The class is supposed to be substituted by a {@link Join}. */ -public class Lookup extends UnaryPlan implements SurrogateLogicalPlan { +public class Lookup extends UnaryPlan implements SurrogateLogicalPlan, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Lookup", Lookup::new); private final Expression tableName; @@ -117,11 +118,6 @@ public JoinConfig joinConfig() { return new JoinConfig(JoinTypes.LEFT, matchFields, leftFields, rightFields); } - @Override - public String commandName() { - return "LOOKUP"; - } - @Override public boolean expressionsResolved() { return tableName.resolved() && Resolvables.resolved(matchFields) && localRelation != null; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java index 9b0168ddd739d..e700ad90afdab 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -22,7 +23,7 @@ import java.util.List; import java.util.Objects; -public class MvExpand extends UnaryPlan { +public class MvExpand extends UnaryPlan implements TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "MvExpand", MvExpand::new); private final NamedExpression target; @@ -83,7 +84,7 @@ protected AttributeSet computeReferences() { return target.references(); } - public String commandName() { + public String telemetryLabel() { return "MV_EXPAND"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java index 
d927d78701c65..051e2c7769bde 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -24,7 +25,7 @@ import static org.elasticsearch.xpack.esql.common.Failure.fail; -public class OrderBy extends UnaryPlan implements PostAnalysisVerificationAware { +public class OrderBy extends UnaryPlan implements PostAnalysisVerificationAware, TelemetryAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "OrderBy", OrderBy::new); private final List order; @@ -69,7 +70,7 @@ public List order() { } @Override - public String commandName() { + public String telemetryLabel() { return "SORT"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java index 841e7fbe81896..e12a8cb557fde 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java @@ -78,14 +78,6 @@ public boolean resolved() { return super.resolved() && Expressions.anyMatch(projections, Functions::isAggregate) == false; } - @Override - public String commandName() { - // this could represent multiple commands (KEEP, DROP, RENAME) - // and should not be present in a pre-analyzed plan. - // maybe it should throw exception? 
- return ""; - } - @Override public boolean expressionsResolved() { return Resolvables.resolved(projections); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java index 773d3fd015e5f..7887d8ed66b99 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.analysis.Analyzer.ResolveRefs; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expressions; @@ -20,7 +21,7 @@ import java.util.List; import java.util.Objects; -public class Rename extends UnaryPlan { +public class Rename extends UnaryPlan implements TelemetryAware { private final List renamings; @@ -51,11 +52,6 @@ public List output() { return Expressions.asAttributes(projectionsAfterResolution); } - @Override - public String commandName() { - return "RENAME"; - } - @Override public boolean expressionsResolved() { for (var alias : renamings) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java index 65d1adf5e2799..005ca45d19131 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -23,7 +24,7 @@ import static org.elasticsearch.xpack.esql.common.Failure.fail; -public class Row extends LeafPlan implements PostAnalysisVerificationAware { +public class Row extends LeafPlan implements PostAnalysisVerificationAware, TelemetryAware { private final List fields; @@ -51,11 +52,6 @@ public List output() { return Expressions.asAttributes(fields); } - @Override - public String commandName() { - return "ROW"; - } - @Override public boolean expressionsResolved() { return Resolvables.resolved(fields); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java index d6e0e4334bd47..a9a5dbddc544f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java @@ -55,13 +55,6 @@ public String getWriteableName() { return ENTRY.name; } - @Override - public String commandName() { - // this is the result of optimizations, it will never appear in a pre-analyzed plan - // maybe we should throw exception? 
- return ""; - } - @Override public boolean expressionsResolved() { return limit.resolved() && Resolvables.resolved(order); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java index 0a20e1dd9080d..5d22a86b2cdf7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java @@ -8,11 +8,13 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.plan.IndexPattern; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import java.util.Collections; import java.util.List; @@ -20,7 +22,7 @@ import static java.util.Collections.singletonList; -public class UnresolvedRelation extends LeafPlan implements Unresolvable { +public class UnresolvedRelation extends LeafPlan implements Unresolvable, TelemetryAware { private final IndexPattern indexPattern; private final boolean frozen; @@ -56,6 +58,17 @@ public UnresolvedRelation( this.commandName = commandName; } + public UnresolvedRelation( + Source source, + IndexPattern table, + boolean frozen, + List metadataFields, + IndexMode indexMode, + String unresolvedMessage + ) { + this(source, table, frozen, metadataFields, indexMode, unresolvedMessage, null); + } + @Override public void writeTo(StreamOutput out) { throw new UnsupportedOperationException("not serialized"); @@ -86,7 +99,7 @@ public boolean resolved() { /** * - * This is used by {@link org.elasticsearch.xpack.esql.stats.PlanningMetrics} to collect query statistics + * This is used by {@link PlanTelemetry} to collect query statistics * It can return *
    *
  • "FROM" if this a |FROM idx command
  • @@ -95,7 +108,7 @@ public boolean resolved() { *
*/ @Override - public String commandName() { + public String telemetryLabel() { return commandName; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java index a541142f952e0..997bff70663bd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java @@ -189,11 +189,6 @@ public Join replaceChildren(LogicalPlan left, LogicalPlan right) { return new Join(source(), left, right, config); } - @Override - public String commandName() { - return "JOIN"; - } - @Override public int hashCode() { return Objects.hash(config, left(), right()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java index c29cf0ec7f414..5f1f569e3671b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java @@ -9,6 +9,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -27,7 +28,7 @@ /** * Lookup join - specialized LEFT (OUTER) JOIN between the main left side and a lookup index (index_mode = lookup) on the right. 
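The hunks above replace the old commandName() plumbing with the TelemetryAware capability: nodes such as Filter ("WHERE"), OrderBy ("SORT") and MvExpand ("MV_EXPAND") override telemetryLabel() explicitly, while the many deleted overrides suggest a default label derived from the class name. A minimal standalone sketch of that counting model, using hypothetical stand-in types rather than the real ES|QL classes (the default-label-from-class-name behaviour is inferred from the deletions in this diff, not spelled out here):

    import java.util.HashMap;
    import java.util.Locale;
    import java.util.Map;

    public class TelemetryLabelSketch {

        // Stand-in for TelemetryAware: the label is assumed to default to the class name,
        // which would explain why most commandName() overrides could simply be removed.
        interface LabelAware {
            default String label() {
                return getClass().getSimpleName().toUpperCase(Locale.ROOT);
            }
        }

        // Stand-in for PlanTelemetry#command(TelemetryAware): one increment per plan node.
        static final class CommandCounter {
            private final Map<String, Integer> commands = new HashMap<>();

            void command(LabelAware node) {
                commands.compute(node.label(), (k, count) -> count == null ? 1 : count + 1);
            }

            Map<String, Integer> commands() {
                return commands;
            }
        }

        // Two toy nodes: Limit keeps the default label, Filter reports "WHERE" like the real class.
        static final class Limit implements LabelAware {}

        static final class Filter implements LabelAware {
            @Override
            public String label() {
                return "WHERE";
            }
        }

        public static void main(String[] args) {
            CommandCounter telemetry = new CommandCounter();
            telemetry.command(new Limit());
            telemetry.command(new Filter());
            telemetry.command(new Filter());
            System.out.println(telemetry.commands()); // prints {LIMIT=1, WHERE=2} (order may vary)
        }
    }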
*/ -public class LookupJoin extends Join implements SurrogateLogicalPlan, PostAnalysisVerificationAware { +public class LookupJoin extends Join implements SurrogateLogicalPlan, PostAnalysisVerificationAware, TelemetryAware { public LookupJoin(Source source, LogicalPlan left, LogicalPlan right, List joinFields) { this(source, left, right, new UsingJoinType(LEFT, joinFields), emptyList(), emptyList(), emptyList()); @@ -77,6 +78,11 @@ protected NodeInfo info() { ); } + @Override + public String telemetryLabel() { + return "LOOKUP JOIN"; + } + @Override public void postAnalysisVerification(Failures failures) { super.postAnalysisVerification(failures); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java index 4f04024d61d46..33e1f385f9eec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/StubRelation.java @@ -67,11 +67,6 @@ protected NodeInfo info() { return NodeInfo.create(this, StubRelation::new, output); } - @Override - public String commandName() { - return ""; - } - @Override public int hashCode() { return Objects.hash(StubRelation.class, output); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java index 07432481d2341..d6106bae6b6b8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java @@ -63,14 +63,6 @@ public LocalSupplier supplier() { return supplier; } - @Override - public String commandName() { - // this colud be an empty source, a lookup table or something else - // but it should not be present in a pre-analyzed plan - // maybe we sholud throw exception? 
- return ""; - } - @Override public boolean expressionsResolved() { return true; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java index fa432537d27e3..99c917ba803a9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Build; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -22,7 +23,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; -public class ShowInfo extends LeafPlan { +public class ShowInfo extends LeafPlan implements TelemetryAware { private final List attributes; @@ -59,7 +60,7 @@ public List> values() { } @Override - public String commandName() { + public String telemetryLabel() { return "SHOW"; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 0505955e450d7..8c95992cf9f5a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -73,7 +73,7 @@ import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.mapper.Mapper; -import org.elasticsearch.xpack.esql.stats.PlanningMetrics; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import java.util.ArrayList; import java.util.Arrays; @@ -112,7 +112,7 @@ public interface PlanRunner { private final Mapper mapper; private final PhysicalPlanOptimizer physicalPlanOptimizer; - private final PlanningMetrics planningMetrics; + private final PlanTelemetry planTelemetry; private final IndicesExpressionGrouper indicesExpressionGrouper; private final QueryBuilderResolver queryBuilderResolver; @@ -126,7 +126,7 @@ public EsqlSession( LogicalPlanOptimizer logicalPlanOptimizer, Mapper mapper, Verifier verifier, - PlanningMetrics planningMetrics, + PlanTelemetry planTelemetry, IndicesExpressionGrouper indicesExpressionGrouper, QueryBuilderResolver queryBuilderResolver ) { @@ -140,7 +140,7 @@ public EsqlSession( this.mapper = mapper; this.logicalPlanOptimizer = logicalPlanOptimizer; this.physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(configuration)); - this.planningMetrics = planningMetrics; + this.planTelemetry = planTelemetry; this.indicesExpressionGrouper = indicesExpressionGrouper; this.queryBuilderResolver = queryBuilderResolver; } @@ -280,7 +280,7 @@ private LocalRelation resultToPlan(LogicalPlan plan, Result result) { } private LogicalPlan parse(String query, QueryParams params) { - var parsed = new EsqlParser().createStatement(query, params); + var parsed = new EsqlParser().createStatement(query, params, planTelemetry); LOGGER.debug("Parsed logical plan:\n{}", parsed); return parsed; } @@ -297,7 +297,6 @@ public void analyzedPlan( } Function 
analyzeAction = (l) -> { - planningMetrics.gatherPreAnalysisMetrics(parsed); Analyzer analyzer = new Analyzer( new AnalyzerContext(configuration, functionRegistry, l.indices, l.lookupIndices, l.enrichResolution), verifier diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java deleted file mode 100644 index 7b452e50fd525..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetrics.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.stats; - -import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; - -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; - -/** - * This class is responsible for collecting metrics related to ES|QL planning. - */ -public class PlanningMetrics { - private Map commands = new HashMap<>(); - private Map functions = new HashMap<>(); - - public void gatherPreAnalysisMetrics(LogicalPlan plan) { - plan.forEachDown(p -> add(commands, p.commandName())); - plan.forEachExpressionDown(UnresolvedFunction.class, p -> add(functions, p.name().toUpperCase(Locale.ROOT))); - } - - private void add(Map map, String key) { - Integer cmd = map.get(key); - map.put(key, cmd == null ? 1 : cmd + 1); - } - - public Map commands() { - return commands; - } - - public Map functions() { - return functions; - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/FeatureMetric.java similarity index 98% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/FeatureMetric.java index 4cae2a9c247f3..3a36f5b0d7c04 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/FeatureMetric.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/Metrics.java similarity index 99% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/Metrics.java index 092fecb3142db..b8962b47809a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/Metrics.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/Metrics.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.util.Maps; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java new file mode 100644 index 0000000000000..10b48c243d3b1 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetry.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.telemetry; + +import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; +import org.elasticsearch.xpack.esql.core.expression.function.Function; +import org.elasticsearch.xpack.esql.core.util.Check; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; + +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +/** + * This class is responsible for collecting metrics related to ES|QL planning. + */ +public class PlanTelemetry { + private final EsqlFunctionRegistry functionRegistry; + private final Map commands = new HashMap<>(); + private final Map functions = new HashMap<>(); + + public PlanTelemetry(EsqlFunctionRegistry functionRegistry) { + this.functionRegistry = functionRegistry; + } + + private void add(Map map, String key) { + map.compute(key.toUpperCase(Locale.ROOT), (k, count) -> count == null ? 1 : count + 1); + } + + public void command(TelemetryAware command) { + Check.notNull(command.telemetryLabel(), "TelemetryAware [{}] has no telemetry label", command); + add(commands, command.telemetryLabel()); + } + + public void function(String name) { + var functionName = functionRegistry.resolveAlias(name); + if (functionRegistry.functionExists(functionName)) { + // The metrics have been collected initially with their uppercase spelling + add(functions, functionName); + } + } + + public void function(Class clazz) { + add(functions, functionRegistry.functionName(clazz)); + } + + public Map commands() { + return commands; + } + + public Map functions() { + return functions; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetricsManager.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetryManager.java similarity index 89% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetricsManager.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetryManager.java index a2d00a1f530e9..2cd536daf389c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/PlanningMetricsManager.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/PlanTelemetryManager.java @@ -5,7 +5,7 @@ * 2.0. 
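PlanTelemetry above normalizes function names before counting them: the raw name coming out of the parser is resolved through the EsqlFunctionRegistry, names the registry does not know are ignored, and the surviving keys are upper-cased so counting stays case-insensitive. A self-contained sketch of that normalization, with a small hypothetical alias table and function list standing in for the real registry:

    import java.util.HashMap;
    import java.util.Locale;
    import java.util.Map;
    import java.util.Set;

    public class FunctionTelemetrySketch {

        // Hypothetical stand-ins for EsqlFunctionRegistry#resolveAlias and #functionExists.
        private static final Map<String, String> ALIASES = Map.of("STDDEV", "STD_DEV");
        private static final Set<String> KNOWN_FUNCTIONS = Set.of("TO_STRING", "STD_DEV");

        private final Map<String, Integer> functions = new HashMap<>();

        void function(String rawName) {
            String name = rawName.toUpperCase(Locale.ROOT);
            name = ALIASES.getOrDefault(name, name);          // alias resolution
            if (KNOWN_FUNCTIONS.contains(name) == false) {
                return;                                       // unknown names never reach the counters
            }
            functions.compute(name, (k, count) -> count == null ? 1 : count + 1);
        }

        public static void main(String[] args) {
            FunctionTelemetrySketch telemetry = new FunctionTelemetrySketch();
            telemetry.function("to_string");
            telemetry.function("TO_STRING");
            telemetry.function("stddev");
            System.out.println(telemetry.functions); // TO_STRING counted twice, STD_DEV once
        }
    }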
*/ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.telemetry.metric.LongCounter; import org.elasticsearch.telemetry.metric.MeterRegistry; @@ -17,7 +17,7 @@ * * @see METERING */ -public class PlanningMetricsManager { +public class PlanTelemetryManager { // APM counters private final LongCounter featuresCounter; @@ -59,7 +59,7 @@ public class PlanningMetricsManager { */ public static final String SUCCESS = "success"; - public PlanningMetricsManager(MeterRegistry meterRegistry) { + public PlanTelemetryManager(MeterRegistry meterRegistry) { featuresCounter = meterRegistry.registerLongCounter( FEATURE_METRICS, "ESQL features, total number of queries that use them", @@ -77,9 +77,9 @@ public PlanningMetricsManager(MeterRegistry meterRegistry) { /** * Publishes the collected metrics to the meter registry */ - public void publish(PlanningMetrics metrics, boolean success) { - metrics.commands().entrySet().forEach(x -> incCommand(x.getKey(), x.getValue(), success)); - metrics.functions().entrySet().forEach(x -> incFunction(x.getKey(), x.getValue(), success)); + public void publish(PlanTelemetry metrics, boolean success) { + metrics.commands().forEach((key, value) -> incCommand(key, value, success)); + metrics.functions().forEach((key, value) -> incFunction(key, value, success)); } private void incCommand(String name, int count, boolean success) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/QueryMetric.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/QueryMetric.java similarity index 93% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/QueryMetric.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/QueryMetric.java index e862006d058ac..567b4b0a84937 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/QueryMetric.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/telemetry/QueryMetric.java @@ -5,7 +5,7 @@ * 2.0. 
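PlanTelemetryManager (the renamed PlanningMetricsManager) then publishes every collected counter together with a success/failure attribute once the query finishes. A rough standalone sketch of that publish step, with a plain map standing in for the APM LongCounter and its attributes:

    import java.util.HashMap;
    import java.util.Map;

    public class PublishSketch {

        // Counts gathered during parsing/planning, e.g. by the PlanTelemetry sketch above.
        private final Map<String, Integer> commands = Map.of("FROM", 1, "WHERE", 2);

        // Stand-in for a LongCounter keyed by attributes: "feature|success" -> total.
        private final Map<String, Long> published = new HashMap<>();

        void publish(boolean success) {
            commands.forEach((name, count) -> published.merge(name + "|success=" + success, (long) count, Long::sum));
        }

        public static void main(String[] args) {
            PublishSketch sketch = new PublishSketch();
            sketch.publish(true);
            System.out.println(sketch.published); // {FROM|success=true=1, WHERE|success=true=2} (order may vary)
        }
    }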
*/ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import java.util.Locale; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 350befc219f6e..bae20bb9b26d3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -90,7 +90,7 @@ import org.elasticsearch.xpack.esql.session.EsqlSession.PlanRunner; import org.elasticsearch.xpack.esql.session.Result; import org.elasticsearch.xpack.esql.stats.DisabledSearchStats; -import org.elasticsearch.xpack.esql.stats.PlanningMetrics; +import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import org.junit.After; import org.junit.Before; import org.mockito.Mockito; @@ -514,7 +514,7 @@ private ActualResults executePlan(BigArrays bigArrays) throws Exception { new LogicalPlanOptimizer(new LogicalOptimizerContext(configuration, foldCtx)), mapper, TEST_VERIFIER, - new PlanningMetrics(), + new PlanTelemetry(functionRegistry), null, EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java index e507640c7b23c..cf2de30e44456 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Limit; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.stats.Metrics; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import java.util.List; import java.util.Objects; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index c9821aea343bf..98f3d1d2d8d8e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -249,11 +249,6 @@ public UnaryPlan replaceChild(LogicalPlan newChild) { return new MockFieldAttributeCommand(source(), newChild, field); } - @Override - public String commandName() { - return "MOCK"; - } - @Override public boolean expressionsResolved() { return true; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index aae2d012fc3a6..8bdd7a4e1645f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -74,9 +74,9 @@ import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.rule.Rule; import org.elasticsearch.xpack.esql.session.Configuration; -import 
org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchContextStats; import org.elasticsearch.xpack.esql.stats.SearchStats; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import org.elasticsearch.xpack.kql.query.KqlQueryBuilder; import org.junit.Before; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java index 57210fda07f2b..f9732272dbd74 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.xpack.esql.optimizer.TestPlannerOptimizer; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; -import org.elasticsearch.xpack.esql.stats.Metrics; +import org.elasticsearch.xpack.esql.telemetry.Metrics; import org.hamcrest.Matcher; import org.junit.BeforeClass; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java similarity index 99% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java index a3c5cd9168b4f..4c2913031271f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java similarity index 93% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java index eda906b147956..de377fe78588c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/VerifierMetricsTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.stats; +package org.elasticsearch.xpack.esql.telemetry; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; @@ -22,23 +22,23 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyzer; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.DISSECT; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.DROP; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.ENRICH; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.EVAL; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.FROM; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.GROK; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.KEEP; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.MV_EXPAND; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.RENAME; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.ROW; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.SHOW; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.SORT; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.STATS; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.WHERE; -import static org.elasticsearch.xpack.esql.stats.Metrics.FPREFIX; -import static org.elasticsearch.xpack.esql.stats.Metrics.FUNC_PREFIX; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.DISSECT; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.DROP; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.ENRICH; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.EVAL; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.FROM; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.GROK; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.KEEP; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.MV_EXPAND; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.RENAME; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.ROW; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.SHOW; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.SORT; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.STATS; +import static org.elasticsearch.xpack.esql.telemetry.FeatureMetric.WHERE; +import static org.elasticsearch.xpack.esql.telemetry.Metrics.FPREFIX; +import static org.elasticsearch.xpack.esql.telemetry.Metrics.FUNC_PREFIX; public class VerifierMetricsTests extends ESTestCase { From 348a0bc04efded1770df5ec968ab0e1acf58d14c Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Wed, 29 Jan 2025 13:40:16 +0100 Subject: [PATCH 194/383] Link generated classes to sources (#121038) --- ...AggregatorFunctionSupplierImplementer.java | 2 +- .../compute/gen/AggregatorImplementer.java | 2 +- .../gen/ConvertEvaluatorImplementer.java | 2 +- .../compute/gen/EvaluatorImplementer.java | 2 +- .../gen/GroupingAggregatorImplementer.java | 2 +- .../compute/gen/MvEvaluatorImplementer.java | 2 +- .../aggregation/BooleanArrayState.java | 2 +- .../BooleanFallibleArrayState.java | 2 +- .../aggregation/BooleanFallibleState.java | 2 +- 
.../compute/aggregation/BooleanState.java | 2 +- .../compute/aggregation/DoubleArrayState.java | 2 +- .../aggregation/DoubleFallibleArrayState.java | 2 +- .../aggregation/DoubleFallibleState.java | 2 +- .../compute/aggregation/DoubleState.java | 2 +- .../compute/aggregation/FloatArrayState.java | 2 +- .../aggregation/FloatFallibleArrayState.java | 2 +- .../aggregation/FloatFallibleState.java | 2 +- .../compute/aggregation/FloatState.java | 2 +- .../compute/aggregation/IntArrayState.java | 2 +- .../aggregation/IntFallibleArrayState.java | 2 +- .../compute/aggregation/IntFallibleState.java | 2 +- .../compute/aggregation/IntState.java | 2 +- .../compute/aggregation/LongArrayState.java | 2 +- .../aggregation/LongFallibleArrayState.java | 2 +- .../aggregation/LongFallibleState.java | 2 +- .../compute/aggregation/LongState.java | 2 +- .../blockhash/BytesRefBlockHash.java | 2 +- .../blockhash/DoubleBlockHash.java | 2 +- .../aggregation/blockhash/IntBlockHash.java | 2 +- .../aggregation/blockhash/LongBlockHash.java | 2 +- .../compute/data/BooleanArrayBlock.java | 2 +- .../compute/data/BooleanArrayVector.java | 2 +- .../compute/data/BooleanBigArrayBlock.java | 2 +- .../compute/data/BooleanBigArrayVector.java | 2 +- .../compute/data/BooleanBlock.java | 2 +- .../compute/data/BooleanBlockBuilder.java | 2 +- .../compute/data/BooleanLookup.java | 2 +- .../compute/data/BooleanVector.java | 2 +- .../compute/data/BooleanVectorBlock.java | 2 +- .../compute/data/BooleanVectorBuilder.java | 2 +- .../data/BooleanVectorFixedBuilder.java | 2 +- .../compute/data/BytesRefArrayBlock.java | 2 +- .../compute/data/BytesRefArrayVector.java | 2 +- .../compute/data/BytesRefBlock.java | 2 +- .../compute/data/BytesRefBlockBuilder.java | 2 +- .../compute/data/BytesRefLookup.java | 2 +- .../compute/data/BytesRefVector.java | 2 +- .../compute/data/BytesRefVectorBlock.java | 2 +- .../compute/data/BytesRefVectorBuilder.java | 2 +- .../compute/data/ConstantBooleanVector.java | 2 +- .../compute/data/ConstantBytesRefVector.java | 2 +- .../compute/data/ConstantDoubleVector.java | 2 +- .../compute/data/ConstantFloatVector.java | 2 +- .../compute/data/ConstantIntVector.java | 2 +- .../compute/data/ConstantLongVector.java | 2 +- .../compute/data/DoubleArrayBlock.java | 2 +- .../compute/data/DoubleArrayVector.java | 2 +- .../compute/data/DoubleBigArrayBlock.java | 2 +- .../compute/data/DoubleBigArrayVector.java | 2 +- .../compute/data/DoubleBlock.java | 2 +- .../compute/data/DoubleBlockBuilder.java | 2 +- .../compute/data/DoubleLookup.java | 2 +- .../compute/data/DoubleVector.java | 2 +- .../compute/data/DoubleVectorBlock.java | 2 +- .../compute/data/DoubleVectorBuilder.java | 2 +- .../data/DoubleVectorFixedBuilder.java | 2 +- .../compute/data/FloatArrayBlock.java | 2 +- .../compute/data/FloatArrayVector.java | 2 +- .../compute/data/FloatBigArrayBlock.java | 2 +- .../compute/data/FloatBigArrayVector.java | 2 +- .../compute/data/FloatBlock.java | 2 +- .../compute/data/FloatBlockBuilder.java | 2 +- .../compute/data/FloatLookup.java | 2 +- .../compute/data/FloatVector.java | 2 +- .../compute/data/FloatVectorBlock.java | 2 +- .../compute/data/FloatVectorBuilder.java | 2 +- .../compute/data/FloatVectorFixedBuilder.java | 2 +- .../compute/data/IntArrayBlock.java | 2 +- .../compute/data/IntArrayVector.java | 2 +- .../compute/data/IntBigArrayBlock.java | 2 +- .../compute/data/IntBigArrayVector.java | 2 +- .../elasticsearch/compute/data/IntBlock.java | 2 +- .../compute/data/IntBlockBuilder.java | 2 +- 
.../elasticsearch/compute/data/IntLookup.java | 2 +- .../elasticsearch/compute/data/IntVector.java | 2 +- .../compute/data/IntVectorBlock.java | 2 +- .../compute/data/IntVectorBuilder.java | 2 +- .../compute/data/IntVectorFixedBuilder.java | 2 +- .../compute/data/LongArrayBlock.java | 2 +- .../compute/data/LongArrayVector.java | 2 +- .../compute/data/LongBigArrayBlock.java | 2 +- .../compute/data/LongBigArrayVector.java | 2 +- .../elasticsearch/compute/data/LongBlock.java | 2 +- .../compute/data/LongBlockBuilder.java | 2 +- .../compute/data/LongLookup.java | 2 +- .../compute/data/LongVector.java | 2 +- .../compute/data/LongVectorBlock.java | 2 +- .../compute/data/LongVectorBuilder.java | 2 +- .../compute/data/LongVectorFixedBuilder.java | 2 +- ...ountDistinctBooleanAggregatorFunction.java | 2 +- ...inctBooleanAggregatorFunctionSupplier.java | 2 +- ...inctBooleanGroupingAggregatorFunction.java | 2 +- ...untDistinctBytesRefAggregatorFunction.java | 2 +- ...nctBytesRefAggregatorFunctionSupplier.java | 2 +- ...nctBytesRefGroupingAggregatorFunction.java | 2 +- ...CountDistinctDoubleAggregatorFunction.java | 2 +- ...tinctDoubleAggregatorFunctionSupplier.java | 2 +- ...tinctDoubleGroupingAggregatorFunction.java | 2 +- .../CountDistinctFloatAggregatorFunction.java | 2 +- ...stinctFloatAggregatorFunctionSupplier.java | 2 +- ...stinctFloatGroupingAggregatorFunction.java | 2 +- .../CountDistinctIntAggregatorFunction.java | 2 +- ...DistinctIntAggregatorFunctionSupplier.java | 2 +- ...DistinctIntGroupingAggregatorFunction.java | 2 +- .../CountDistinctLongAggregatorFunction.java | 2 +- ...istinctLongAggregatorFunctionSupplier.java | 2 +- ...istinctLongGroupingAggregatorFunction.java | 2 +- .../MaxBooleanAggregatorFunction.java | 2 +- .../MaxBooleanAggregatorFunctionSupplier.java | 2 +- .../MaxBooleanGroupingAggregatorFunction.java | 2 +- .../MaxBytesRefAggregatorFunction.java | 2 +- ...MaxBytesRefAggregatorFunctionSupplier.java | 2 +- ...MaxBytesRefGroupingAggregatorFunction.java | 2 +- .../MaxDoubleAggregatorFunction.java | 2 +- .../MaxDoubleAggregatorFunctionSupplier.java | 2 +- .../MaxDoubleGroupingAggregatorFunction.java | 2 +- .../MaxFloatAggregatorFunction.java | 2 +- .../MaxFloatAggregatorFunctionSupplier.java | 2 +- .../MaxFloatGroupingAggregatorFunction.java | 2 +- .../aggregation/MaxIntAggregatorFunction.java | 2 +- .../MaxIntAggregatorFunctionSupplier.java | 2 +- .../MaxIntGroupingAggregatorFunction.java | 2 +- .../aggregation/MaxIpAggregatorFunction.java | 2 +- .../MaxIpAggregatorFunctionSupplier.java | 2 +- .../MaxIpGroupingAggregatorFunction.java | 2 +- .../MaxLongAggregatorFunction.java | 2 +- .../MaxLongAggregatorFunctionSupplier.java | 2 +- .../MaxLongGroupingAggregatorFunction.java | 2 +- ...luteDeviationDoubleAggregatorFunction.java | 2 +- ...ationDoubleAggregatorFunctionSupplier.java | 2 +- ...ationDoubleGroupingAggregatorFunction.java | 2 +- ...oluteDeviationFloatAggregatorFunction.java | 2 +- ...iationFloatAggregatorFunctionSupplier.java | 2 +- ...iationFloatGroupingAggregatorFunction.java | 2 +- ...bsoluteDeviationIntAggregatorFunction.java | 2 +- ...eviationIntAggregatorFunctionSupplier.java | 2 +- ...eviationIntGroupingAggregatorFunction.java | 2 +- ...soluteDeviationLongAggregatorFunction.java | 2 +- ...viationLongAggregatorFunctionSupplier.java | 2 +- ...viationLongGroupingAggregatorFunction.java | 2 +- .../MinBooleanAggregatorFunction.java | 2 +- .../MinBooleanAggregatorFunctionSupplier.java | 2 +- .../MinBooleanGroupingAggregatorFunction.java | 2 +- 
.../MinBytesRefAggregatorFunction.java | 2 +- ...MinBytesRefAggregatorFunctionSupplier.java | 2 +- ...MinBytesRefGroupingAggregatorFunction.java | 2 +- .../MinDoubleAggregatorFunction.java | 2 +- .../MinDoubleAggregatorFunctionSupplier.java | 2 +- .../MinDoubleGroupingAggregatorFunction.java | 2 +- .../MinFloatAggregatorFunction.java | 2 +- .../MinFloatAggregatorFunctionSupplier.java | 2 +- .../MinFloatGroupingAggregatorFunction.java | 2 +- .../aggregation/MinIntAggregatorFunction.java | 2 +- .../MinIntAggregatorFunctionSupplier.java | 2 +- .../MinIntGroupingAggregatorFunction.java | 2 +- .../aggregation/MinIpAggregatorFunction.java | 2 +- .../MinIpAggregatorFunctionSupplier.java | 2 +- .../MinIpGroupingAggregatorFunction.java | 2 +- .../MinLongAggregatorFunction.java | 2 +- .../MinLongAggregatorFunctionSupplier.java | 2 +- .../MinLongGroupingAggregatorFunction.java | 2 +- .../PercentileDoubleAggregatorFunction.java | 2 +- ...ntileDoubleAggregatorFunctionSupplier.java | 2 +- ...ntileDoubleGroupingAggregatorFunction.java | 2 +- .../PercentileFloatAggregatorFunction.java | 2 +- ...entileFloatAggregatorFunctionSupplier.java | 2 +- ...entileFloatGroupingAggregatorFunction.java | 2 +- .../PercentileIntAggregatorFunction.java | 2 +- ...rcentileIntAggregatorFunctionSupplier.java | 2 +- ...rcentileIntGroupingAggregatorFunction.java | 2 +- .../PercentileLongAggregatorFunction.java | 2 +- ...centileLongAggregatorFunctionSupplier.java | 2 +- ...centileLongGroupingAggregatorFunction.java | 2 +- .../RateDoubleAggregatorFunctionSupplier.java | 2 +- .../RateDoubleGroupingAggregatorFunction.java | 2 +- .../RateFloatAggregatorFunctionSupplier.java | 2 +- .../RateFloatGroupingAggregatorFunction.java | 2 +- .../RateIntAggregatorFunctionSupplier.java | 2 +- .../RateIntGroupingAggregatorFunction.java | 2 +- .../RateLongAggregatorFunctionSupplier.java | 2 +- .../RateLongGroupingAggregatorFunction.java | 2 +- .../StdDevDoubleAggregatorFunction.java | 2 +- ...tdDevDoubleAggregatorFunctionSupplier.java | 2 +- ...tdDevDoubleGroupingAggregatorFunction.java | 2 +- .../StdDevFloatAggregatorFunction.java | 2 +- ...StdDevFloatAggregatorFunctionSupplier.java | 2 +- ...StdDevFloatGroupingAggregatorFunction.java | 2 +- .../StdDevIntAggregatorFunction.java | 2 +- .../StdDevIntAggregatorFunctionSupplier.java | 2 +- .../StdDevIntGroupingAggregatorFunction.java | 2 +- .../StdDevLongAggregatorFunction.java | 2 +- .../StdDevLongAggregatorFunctionSupplier.java | 2 +- .../StdDevLongGroupingAggregatorFunction.java | 2 +- .../SumDoubleAggregatorFunction.java | 2 +- .../SumDoubleAggregatorFunctionSupplier.java | 2 +- .../SumDoubleGroupingAggregatorFunction.java | 2 +- .../SumFloatAggregatorFunction.java | 2 +- .../SumFloatAggregatorFunctionSupplier.java | 2 +- .../SumFloatGroupingAggregatorFunction.java | 2 +- .../aggregation/SumIntAggregatorFunction.java | 2 +- .../SumIntAggregatorFunctionSupplier.java | 2 +- .../SumIntGroupingAggregatorFunction.java | 2 +- .../SumLongAggregatorFunction.java | 2 +- .../SumLongAggregatorFunctionSupplier.java | 2 +- .../SumLongGroupingAggregatorFunction.java | 2 +- .../TopBooleanAggregatorFunction.java | 2 +- .../TopBooleanAggregatorFunctionSupplier.java | 2 +- .../TopBooleanGroupingAggregatorFunction.java | 2 +- .../TopBytesRefAggregatorFunction.java | 2 +- ...TopBytesRefAggregatorFunctionSupplier.java | 2 +- ...TopBytesRefGroupingAggregatorFunction.java | 2 +- .../TopDoubleAggregatorFunction.java | 2 +- .../TopDoubleAggregatorFunctionSupplier.java | 2 +- .../TopDoubleGroupingAggregatorFunction.java 
| 2 +- .../TopFloatAggregatorFunction.java | 2 +- .../TopFloatAggregatorFunctionSupplier.java | 2 +- .../TopFloatGroupingAggregatorFunction.java | 2 +- .../aggregation/TopIntAggregatorFunction.java | 2 +- .../TopIntAggregatorFunctionSupplier.java | 2 +- .../TopIntGroupingAggregatorFunction.java | 2 +- .../aggregation/TopIpAggregatorFunction.java | 2 +- .../TopIpAggregatorFunctionSupplier.java | 2 +- .../TopIpGroupingAggregatorFunction.java | 2 +- .../TopLongAggregatorFunction.java | 2 +- .../TopLongAggregatorFunctionSupplier.java | 2 +- .../TopLongGroupingAggregatorFunction.java | 2 +- .../ValuesBooleanAggregatorFunction.java | 2 +- ...luesBooleanAggregatorFunctionSupplier.java | 2 +- ...luesBooleanGroupingAggregatorFunction.java | 2 +- .../ValuesBytesRefAggregatorFunction.java | 2 +- ...uesBytesRefAggregatorFunctionSupplier.java | 2 +- ...uesBytesRefGroupingAggregatorFunction.java | 2 +- .../ValuesDoubleAggregatorFunction.java | 2 +- ...aluesDoubleAggregatorFunctionSupplier.java | 2 +- ...aluesDoubleGroupingAggregatorFunction.java | 2 +- .../ValuesFloatAggregatorFunction.java | 2 +- ...ValuesFloatAggregatorFunctionSupplier.java | 2 +- ...ValuesFloatGroupingAggregatorFunction.java | 2 +- .../ValuesIntAggregatorFunction.java | 2 +- .../ValuesIntAggregatorFunctionSupplier.java | 2 +- .../ValuesIntGroupingAggregatorFunction.java | 2 +- .../ValuesLongAggregatorFunction.java | 2 +- .../ValuesLongAggregatorFunctionSupplier.java | 2 +- .../ValuesLongGroupingAggregatorFunction.java | 2 +- ...esianPointDocValuesAggregatorFunction.java | 2 +- ...ntDocValuesAggregatorFunctionSupplier.java | 2 +- ...ntDocValuesGroupingAggregatorFunction.java | 2 +- ...anPointSourceValuesAggregatorFunction.java | 2 +- ...ourceValuesAggregatorFunctionSupplier.java | 2 +- ...ourceValuesGroupingAggregatorFunction.java | 2 +- ...idGeoPointDocValuesAggregatorFunction.java | 2 +- ...ntDocValuesAggregatorFunctionSupplier.java | 2 +- ...ntDocValuesGroupingAggregatorFunction.java | 2 +- ...eoPointSourceValuesAggregatorFunction.java | 2 +- ...ourceValuesAggregatorFunctionSupplier.java | 2 +- ...ourceValuesGroupingAggregatorFunction.java | 2 +- ...esianPointDocValuesAggregatorFunction.java | 2 +- ...ntDocValuesAggregatorFunctionSupplier.java | 2 +- ...ntDocValuesGroupingAggregatorFunction.java | 2 +- ...anPointSourceValuesAggregatorFunction.java | 2 +- ...ourceValuesAggregatorFunctionSupplier.java | 2 +- ...ourceValuesGroupingAggregatorFunction.java | 2 +- ...esianShapeDocValuesAggregatorFunction.java | 2 +- ...peDocValuesAggregatorFunctionSupplier.java | 2 +- ...peDocValuesGroupingAggregatorFunction.java | 2 +- ...anShapeSourceValuesAggregatorFunction.java | 2 +- ...ourceValuesAggregatorFunctionSupplier.java | 2 +- ...ourceValuesGroupingAggregatorFunction.java | 2 +- ...ntGeoPointDocValuesAggregatorFunction.java | 2 +- ...ntDocValuesAggregatorFunctionSupplier.java | 2 +- ...ntDocValuesGroupingAggregatorFunction.java | 2 +- ...eoPointSourceValuesAggregatorFunction.java | 2 +- ...ourceValuesAggregatorFunctionSupplier.java | 2 +- ...ourceValuesGroupingAggregatorFunction.java | 2 +- ...ntGeoShapeDocValuesAggregatorFunction.java | 2 +- ...peDocValuesAggregatorFunctionSupplier.java | 2 +- ...peDocValuesGroupingAggregatorFunction.java | 2 +- ...eoShapeSourceValuesAggregatorFunction.java | 2 +- ...ourceValuesAggregatorFunctionSupplier.java | 2 +- ...ourceValuesGroupingAggregatorFunction.java | 2 +- .../compute/aggregation/X-ArrayState.java.st | 2 +- .../aggregation/X-FallibleArrayState.java.st | 2 +- 
.../aggregation/X-FallibleState.java.st | 2 +- .../compute/aggregation/X-State.java.st | 2 +- .../aggregation/blockhash/X-BlockHash.java.st | 2 +- .../compute/data/X-ArrayBlock.java.st | 2 +- .../compute/data/X-ArrayVector.java.st | 2 +- .../compute/data/X-BigArrayBlock.java.st | 2 +- .../compute/data/X-BigArrayVector.java.st | 2 +- .../compute/data/X-Block.java.st | 2 +- .../compute/data/X-BlockBuilder.java.st | 2 +- .../compute/data/X-ConstantVector.java.st | 2 +- .../compute/data/X-Lookup.java.st | 2 +- .../compute/data/X-Vector.java.st | 2 +- .../compute/data/X-VectorBlock.java.st | 2 +- .../compute/data/X-VectorBuilder.java.st | 2 +- .../compute/data/X-VectorFixedBuilder.java.st | 2 +- .../operator/logical/NotEvaluator.java | 2 +- .../conditional/GreatestBooleanEvaluator.java | 2 +- .../GreatestBytesRefEvaluator.java | 2 +- .../conditional/GreatestDoubleEvaluator.java | 2 +- .../conditional/GreatestIntEvaluator.java | 2 +- .../conditional/GreatestLongEvaluator.java | 2 +- .../conditional/LeastBooleanEvaluator.java | 2 +- .../conditional/LeastBytesRefEvaluator.java | 2 +- .../conditional/LeastDoubleEvaluator.java | 2 +- .../scalar/conditional/LeastIntEvaluator.java | 2 +- .../conditional/LeastLongEvaluator.java | 2 +- .../scalar/convert/FromBase64Evaluator.java | 2 +- .../scalar/convert/ToBase64Evaluator.java | 2 +- .../convert/ToBooleanFromDoubleEvaluator.java | 2 +- .../convert/ToBooleanFromIntEvaluator.java | 2 +- .../convert/ToBooleanFromLongEvaluator.java | 2 +- .../convert/ToBooleanFromStringEvaluator.java | 2 +- .../ToBooleanFromUnsignedLongEvaluator.java | 2 +- .../ToCartesianPointFromStringEvaluator.java | 2 +- .../ToCartesianShapeFromStringEvaluator.java | 2 +- .../ToDateNanosFromDatetimeEvaluator.java | 2 +- .../ToDateNanosFromDoubleEvaluator.java | 2 +- .../convert/ToDateNanosFromLongEvaluator.java | 2 +- .../ToDateNanosFromStringEvaluator.java | 2 +- .../ToDatetimeFromDateNanosEvaluator.java | 2 +- .../ToDatetimeFromStringEvaluator.java | 2 +- .../scalar/convert/ToDegreesEvaluator.java | 2 +- .../convert/ToDoubleFromBooleanEvaluator.java | 2 +- .../convert/ToDoubleFromIntEvaluator.java | 2 +- .../convert/ToDoubleFromLongEvaluator.java | 2 +- .../convert/ToDoubleFromStringEvaluator.java | 2 +- .../ToDoubleFromUnsignedLongEvaluator.java | 2 +- .../ToGeoPointFromStringEvaluator.java | 2 +- .../ToGeoShapeFromStringEvaluator.java | 2 +- .../convert/ToIPFromStringEvaluator.java | 2 +- .../ToIntegerFromBooleanEvaluator.java | 2 +- .../convert/ToIntegerFromDoubleEvaluator.java | 2 +- .../convert/ToIntegerFromLongEvaluator.java | 2 +- .../convert/ToIntegerFromStringEvaluator.java | 2 +- .../ToIntegerFromUnsignedLongEvaluator.java | 2 +- .../convert/ToLongFromBooleanEvaluator.java | 2 +- .../convert/ToLongFromDoubleEvaluator.java | 2 +- .../convert/ToLongFromIntEvaluator.java | 2 +- .../convert/ToLongFromStringEvaluator.java | 2 +- .../ToLongFromUnsignedLongEvaluator.java | 2 +- .../scalar/convert/ToRadiansEvaluator.java | 2 +- .../convert/ToStringFromBooleanEvaluator.java | 2 +- .../ToStringFromCartesianPointEvaluator.java | 2 +- .../ToStringFromCartesianShapeEvaluator.java | 2 +- .../ToStringFromDateNanosEvaluator.java | 2 +- .../ToStringFromDatetimeEvaluator.java | 2 +- .../convert/ToStringFromDoubleEvaluator.java | 2 +- .../ToStringFromGeoPointEvaluator.java | 2 +- .../ToStringFromGeoShapeEvaluator.java | 2 +- .../convert/ToStringFromIPEvaluator.java | 2 +- .../convert/ToStringFromIntEvaluator.java | 2 +- .../convert/ToStringFromLongEvaluator.java | 2 +- 
.../ToStringFromUnsignedLongEvaluator.java | 2 +- .../convert/ToStringFromVersionEvaluator.java | 2 +- .../ToUnsignedLongFromBooleanEvaluator.java | 2 +- .../ToUnsignedLongFromDoubleEvaluator.java | 2 +- .../ToUnsignedLongFromIntEvaluator.java | 2 +- .../ToUnsignedLongFromLongEvaluator.java | 2 +- .../ToUnsignedLongFromStringEvaluator.java | 2 +- .../convert/ToVersionFromStringEvaluator.java | 2 +- .../date/DateDiffConstantEvaluator.java | 168 ---------------- .../date/DateDiffConstantMillisEvaluator.java | 2 +- .../DateDiffConstantMillisNanosEvaluator.java | 2 +- .../date/DateDiffConstantNanosEvaluator.java | 2 +- .../DateDiffConstantNanosMillisEvaluator.java | 2 +- .../scalar/date/DateDiffEvaluator.java | 190 ------------------ .../scalar/date/DateDiffMillisEvaluator.java | 2 +- .../date/DateDiffMillisNanosEvaluator.java | 2 +- .../scalar/date/DateDiffNanosEvaluator.java | 2 +- .../date/DateDiffNanosMillisEvaluator.java | 2 +- .../date/DateExtractConstantEvaluator.java | 137 ------------- .../DateExtractConstantMillisEvaluator.java | 2 +- .../DateExtractConstantNanosEvaluator.java | 2 +- .../scalar/date/DateExtractEvaluator.java | 169 ---------------- .../date/DateExtractMillisEvaluator.java | 2 +- .../date/DateExtractNanosEvaluator.java | 2 +- .../DateFormatMillisConstantEvaluator.java | 2 +- .../date/DateFormatMillisEvaluator.java | 2 +- .../DateFormatNanosConstantEvaluator.java | 2 +- .../scalar/date/DateFormatNanosEvaluator.java | 2 +- .../date/DateParseConstantEvaluator.java | 2 +- .../scalar/date/DateParseEvaluator.java | 2 +- .../date/DateTruncDateNanosEvaluator.java | 2 +- .../date/DateTruncDatetimeEvaluator.java | 2 +- .../function/scalar/date/NowEvaluator.java | 2 +- .../scalar/ip/CIDRMatchEvaluator.java | 2 +- .../function/scalar/ip/IpPrefixEvaluator.java | 2 +- .../scalar/math/AbsDoubleEvaluator.java | 2 +- .../function/scalar/math/AbsIntEvaluator.java | 2 +- .../scalar/math/AbsLongEvaluator.java | 2 +- .../function/scalar/math/AcosEvaluator.java | 2 +- .../function/scalar/math/AsinEvaluator.java | 2 +- .../function/scalar/math/Atan2Evaluator.java | 2 +- .../function/scalar/math/AtanEvaluator.java | 2 +- .../scalar/math/CastIntToDoubleEvaluator.java | 2 +- .../scalar/math/CastIntToLongEvaluator.java | 2 +- .../math/CastIntToUnsignedLongEvaluator.java | 2 +- .../math/CastLongToDoubleEvaluator.java | 2 +- .../math/CastLongToUnsignedLongEvaluator.java | 2 +- .../CastUnsignedLongToDoubleEvaluator.java | 2 +- .../scalar/math/CbrtDoubleEvaluator.java | 2 +- .../scalar/math/CbrtIntEvaluator.java | 2 +- .../scalar/math/CbrtLongEvaluator.java | 2 +- .../math/CbrtUnsignedLongEvaluator.java | 2 +- .../scalar/math/CeilDoubleEvaluator.java | 2 +- .../function/scalar/math/CosEvaluator.java | 2 +- .../function/scalar/math/CoshEvaluator.java | 2 +- .../scalar/math/ExpDoubleEvaluator.java | 2 +- .../function/scalar/math/ExpIntEvaluator.java | 2 +- .../scalar/math/ExpLongEvaluator.java | 2 +- .../scalar/math/ExpUnsignedLongEvaluator.java | 2 +- .../scalar/math/FloorDoubleEvaluator.java | 2 +- .../function/scalar/math/HypotEvaluator.java | 2 +- .../scalar/math/Log10DoubleEvaluator.java | 2 +- .../scalar/math/Log10IntEvaluator.java | 2 +- .../scalar/math/Log10LongEvaluator.java | 2 +- .../math/Log10UnsignedLongEvaluator.java | 2 +- .../scalar/math/LogConstantEvaluator.java | 2 +- .../function/scalar/math/LogEvaluator.java | 2 +- .../function/scalar/math/PowEvaluator.java | 2 +- .../scalar/math/RoundDoubleEvaluator.java | 2 +- .../math/RoundDoubleNoDecimalsEvaluator.java | 2 +- 
.../scalar/math/RoundIntEvaluator.java | 2 +- .../scalar/math/RoundLongEvaluator.java | 2 +- .../math/RoundUnsignedLongEvaluator.java | 2 +- .../scalar/math/SignumDoubleEvaluator.java | 2 +- .../scalar/math/SignumIntEvaluator.java | 2 +- .../scalar/math/SignumLongEvaluator.java | 2 +- .../math/SignumUnsignedLongEvaluator.java | 2 +- .../function/scalar/math/SinEvaluator.java | 2 +- .../function/scalar/math/SinhEvaluator.java | 2 +- .../scalar/math/SqrtDoubleEvaluator.java | 2 +- .../scalar/math/SqrtIntEvaluator.java | 2 +- .../scalar/math/SqrtLongEvaluator.java | 2 +- .../math/SqrtUnsignedLongEvaluator.java | 2 +- .../function/scalar/math/TanEvaluator.java | 2 +- .../function/scalar/math/TanhEvaluator.java | 2 +- .../multivalue/MvAppendBooleanEvaluator.java | 2 +- .../multivalue/MvAppendBytesRefEvaluator.java | 2 +- .../multivalue/MvAppendDoubleEvaluator.java | 2 +- .../multivalue/MvAppendIntEvaluator.java | 2 +- .../multivalue/MvAppendLongEvaluator.java | 2 +- .../multivalue/MvAvgDoubleEvaluator.java | 2 +- .../scalar/multivalue/MvAvgIntEvaluator.java | 2 +- .../scalar/multivalue/MvAvgLongEvaluator.java | 2 +- .../MvAvgUnsignedLongEvaluator.java | 2 +- .../multivalue/MvFirstBooleanEvaluator.java | 2 +- .../multivalue/MvFirstBytesRefEvaluator.java | 2 +- .../multivalue/MvFirstDoubleEvaluator.java | 2 +- .../multivalue/MvFirstIntEvaluator.java | 2 +- .../multivalue/MvFirstLongEvaluator.java | 2 +- .../multivalue/MvLastBooleanEvaluator.java | 2 +- .../multivalue/MvLastBytesRefEvaluator.java | 2 +- .../multivalue/MvLastDoubleEvaluator.java | 2 +- .../scalar/multivalue/MvLastIntEvaluator.java | 2 +- .../multivalue/MvLastLongEvaluator.java | 2 +- .../multivalue/MvMaxBooleanEvaluator.java | 2 +- .../multivalue/MvMaxBytesRefEvaluator.java | 2 +- .../multivalue/MvMaxDoubleEvaluator.java | 2 +- .../scalar/multivalue/MvMaxIntEvaluator.java | 2 +- .../scalar/multivalue/MvMaxLongEvaluator.java | 2 +- ...edianAbsoluteDeviationDoubleEvaluator.java | 2 +- ...MvMedianAbsoluteDeviationIntEvaluator.java | 2 +- ...vMedianAbsoluteDeviationLongEvaluator.java | 2 +- ...bsoluteDeviationUnsignedLongEvaluator.java | 2 +- .../multivalue/MvMedianDoubleEvaluator.java | 2 +- .../multivalue/MvMedianIntEvaluator.java | 2 +- .../multivalue/MvMedianLongEvaluator.java | 2 +- .../MvMedianUnsignedLongEvaluator.java | 2 +- .../multivalue/MvMinBooleanEvaluator.java | 2 +- .../multivalue/MvMinBytesRefEvaluator.java | 2 +- .../multivalue/MvMinDoubleEvaluator.java | 2 +- .../scalar/multivalue/MvMinIntEvaluator.java | 2 +- .../scalar/multivalue/MvMinLongEvaluator.java | 2 +- .../MvPSeriesWeightedSumDoubleEvaluator.java | 2 +- .../MvPercentileDoubleEvaluator.java | 2 +- .../MvPercentileIntegerEvaluator.java | 2 +- .../multivalue/MvPercentileLongEvaluator.java | 2 +- .../multivalue/MvSliceBooleanEvaluator.java | 2 +- .../multivalue/MvSliceBytesRefEvaluator.java | 2 +- .../multivalue/MvSliceDoubleEvaluator.java | 2 +- .../multivalue/MvSliceIntEvaluator.java | 2 +- .../multivalue/MvSliceLongEvaluator.java | 2 +- .../multivalue/MvSumDoubleEvaluator.java | 2 +- .../scalar/multivalue/MvSumIntEvaluator.java | 2 +- .../scalar/multivalue/MvSumLongEvaluator.java | 2 +- .../MvSumUnsignedLongEvaluator.java | 2 +- .../scalar/multivalue/MvZipEvaluator.java | 2 +- ...ianPointDocValuesAndConstantEvaluator.java | 2 +- ...esianPointDocValuesAndSourceEvaluator.java | 2 +- ...nsCartesianSourceAndConstantEvaluator.java | 2 +- ...ainsCartesianSourceAndSourceEvaluator.java | 2 +- ...GeoPointDocValuesAndConstantEvaluator.java | 2 +- 
...nsGeoPointDocValuesAndSourceEvaluator.java | 2 +- ...ContainsGeoSourceAndConstantEvaluator.java | 2 +- ...alContainsGeoSourceAndSourceEvaluator.java | 2 +- ...ianPointDocValuesAndConstantEvaluator.java | 2 +- ...esianPointDocValuesAndSourceEvaluator.java | 2 +- ...ntCartesianSourceAndConstantEvaluator.java | 2 +- ...ointCartesianSourceAndSourceEvaluator.java | 2 +- ...GeoPointDocValuesAndConstantEvaluator.java | 2 +- ...ntGeoPointDocValuesAndSourceEvaluator.java | 2 +- ...DisjointGeoSourceAndConstantEvaluator.java | 2 +- ...alDisjointGeoSourceAndSourceEvaluator.java | 2 +- ...ianPointDocValuesAndConstantEvaluator.java | 2 +- ...esianPointDocValuesAndSourceEvaluator.java | 2 +- ...tsCartesianSourceAndConstantEvaluator.java | 2 +- ...ectsCartesianSourceAndSourceEvaluator.java | 2 +- ...GeoPointDocValuesAndConstantEvaluator.java | 2 +- ...tsGeoPointDocValuesAndSourceEvaluator.java | 2 +- ...tersectsGeoSourceAndConstantEvaluator.java | 2 +- ...IntersectsGeoSourceAndSourceEvaluator.java | 2 +- ...ianPointDocValuesAndConstantEvaluator.java | 2 +- ...esianPointDocValuesAndSourceEvaluator.java | 2 +- ...inCartesianSourceAndConstantEvaluator.java | 2 +- ...thinCartesianSourceAndSourceEvaluator.java | 2 +- ...GeoPointDocValuesAndConstantEvaluator.java | 2 +- ...inGeoPointDocValuesAndSourceEvaluator.java | 2 +- ...alWithinGeoSourceAndConstantEvaluator.java | 2 +- ...tialWithinGeoSourceAndSourceEvaluator.java | 2 +- ...ianPointDocValuesAndConstantEvaluator.java | 2 +- ...esianPointDocValuesAndSourceEvaluator.java | 2 +- ...ceCartesianSourceAndConstantEvaluator.java | 2 +- ...anceCartesianSourceAndSourceEvaluator.java | 2 +- ...GeoPointDocValuesAndConstantEvaluator.java | 2 +- ...ceGeoPointDocValuesAndSourceEvaluator.java | 2 +- ...DistanceGeoSourceAndConstantEvaluator.java | 2 +- ...StDistanceGeoSourceAndSourceEvaluator.java | 2 +- .../spatial/StEnvelopeFromWKBEvaluator.java | 2 +- .../StEnvelopeFromWKBGeoEvaluator.java | 2 +- .../scalar/spatial/StXFromWKBEvaluator.java | 2 +- .../spatial/StXMaxFromWKBEvaluator.java | 2 +- .../spatial/StXMaxFromWKBGeoEvaluator.java | 2 +- .../spatial/StXMinFromWKBEvaluator.java | 2 +- .../spatial/StXMinFromWKBGeoEvaluator.java | 2 +- .../scalar/spatial/StYFromWKBEvaluator.java | 2 +- .../spatial/StYMaxFromWKBEvaluator.java | 2 +- .../spatial/StYMaxFromWKBGeoEvaluator.java | 2 +- .../spatial/StYMinFromWKBEvaluator.java | 2 +- .../spatial/StYMinFromWKBGeoEvaluator.java | 2 +- .../scalar/string/AutomataMatchEvaluator.java | 2 +- .../scalar/string/BitLengthEvaluator.java | 2 +- .../scalar/string/ByteLengthEvaluator.java | 2 +- .../scalar/string/ChangeCaseEvaluator.java | 2 +- .../scalar/string/ConcatEvaluator.java | 2 +- .../scalar/string/EndsWithEvaluator.java | 2 +- .../scalar/string/HashConstantEvaluator.java | 2 +- .../function/scalar/string/HashEvaluator.java | 2 +- .../scalar/string/LTrimEvaluator.java | 2 +- .../function/scalar/string/LeftEvaluator.java | 2 +- .../scalar/string/LengthEvaluator.java | 2 +- .../scalar/string/LocateEvaluator.java | 2 +- .../scalar/string/LocateNoStartEvaluator.java | 2 +- .../scalar/string/RTrimEvaluator.java | 2 +- .../string/RepeatConstantEvaluator.java | 2 +- .../scalar/string/RepeatEvaluator.java | 2 +- .../string/ReplaceConstantEvaluator.java | 2 +- .../scalar/string/ReplaceEvaluator.java | 2 +- .../scalar/string/ReverseEvaluator.java | 2 +- .../scalar/string/RightEvaluator.java | 2 +- .../scalar/string/SpaceEvaluator.java | 2 +- .../string/SplitSingleByteEvaluator.java | 2 +- .../scalar/string/SplitVariableEvaluator.java | 2 
+- .../scalar/string/StartsWithEvaluator.java | 2 +- .../scalar/string/SubstringEvaluator.java | 2 +- .../string/SubstringNoLengthEvaluator.java | 2 +- .../function/scalar/string/TrimEvaluator.java | 2 +- .../arithmetic/AddDateNanosEvaluator.java | 2 +- .../arithmetic/AddDatetimesEvaluator.java | 2 +- .../arithmetic/AddDoublesEvaluator.java | 2 +- .../operator/arithmetic/AddIntsEvaluator.java | 2 +- .../arithmetic/AddLongsEvaluator.java | 2 +- .../arithmetic/AddUnsignedLongsEvaluator.java | 2 +- .../arithmetic/DivDoublesEvaluator.java | 2 +- .../operator/arithmetic/DivIntsEvaluator.java | 2 +- .../arithmetic/DivLongsEvaluator.java | 2 +- .../arithmetic/DivUnsignedLongsEvaluator.java | 2 +- .../arithmetic/ModDoublesEvaluator.java | 2 +- .../operator/arithmetic/ModIntsEvaluator.java | 2 +- .../arithmetic/ModLongsEvaluator.java | 2 +- .../arithmetic/ModUnsignedLongsEvaluator.java | 2 +- .../arithmetic/MulDoublesEvaluator.java | 2 +- .../operator/arithmetic/MulIntsEvaluator.java | 2 +- .../arithmetic/MulLongsEvaluator.java | 2 +- .../arithmetic/MulUnsignedLongsEvaluator.java | 2 +- .../arithmetic/NegDoublesEvaluator.java | 2 +- .../operator/arithmetic/NegIntsEvaluator.java | 2 +- .../arithmetic/NegLongsEvaluator.java | 2 +- .../arithmetic/SubDateNanosEvaluator.java | 2 +- .../arithmetic/SubDatetimesEvaluator.java | 2 +- .../arithmetic/SubDoublesEvaluator.java | 2 +- .../operator/arithmetic/SubIntsEvaluator.java | 2 +- .../arithmetic/SubLongsEvaluator.java | 2 +- .../arithmetic/SubUnsignedLongsEvaluator.java | 2 +- .../comparison/EqualsBoolsEvaluator.java | 2 +- .../comparison/EqualsDoublesEvaluator.java | 2 +- .../comparison/EqualsGeometriesEvaluator.java | 2 +- .../comparison/EqualsIntsEvaluator.java | 2 +- .../comparison/EqualsKeywordsEvaluator.java | 2 +- .../comparison/EqualsLongsEvaluator.java | 2 +- .../EqualsMillisNanosEvaluator.java | 2 +- .../EqualsNanosMillisEvaluator.java | 2 +- .../GreaterThanDoublesEvaluator.java | 2 +- .../comparison/GreaterThanIntsEvaluator.java | 2 +- .../GreaterThanKeywordsEvaluator.java | 2 +- .../comparison/GreaterThanLongsEvaluator.java | 2 +- .../GreaterThanMillisNanosEvaluator.java | 2 +- .../GreaterThanNanosMillisEvaluator.java | 2 +- .../GreaterThanOrEqualDoublesEvaluator.java | 2 +- .../GreaterThanOrEqualIntsEvaluator.java | 2 +- .../GreaterThanOrEqualKeywordsEvaluator.java | 2 +- .../GreaterThanOrEqualLongsEvaluator.java | 2 +- ...reaterThanOrEqualMillisNanosEvaluator.java | 2 +- ...reaterThanOrEqualNanosMillisEvaluator.java | 2 +- .../InsensitiveEqualsConstantEvaluator.java | 2 +- .../InsensitiveEqualsEvaluator.java | 2 +- .../comparison/LessThanDoublesEvaluator.java | 2 +- .../comparison/LessThanIntsEvaluator.java | 2 +- .../comparison/LessThanKeywordsEvaluator.java | 2 +- .../comparison/LessThanLongsEvaluator.java | 2 +- .../LessThanMillisNanosEvaluator.java | 2 +- .../LessThanNanosMillisEvaluator.java | 2 +- .../LessThanOrEqualDoublesEvaluator.java | 2 +- .../LessThanOrEqualIntsEvaluator.java | 2 +- .../LessThanOrEqualKeywordsEvaluator.java | 2 +- .../LessThanOrEqualLongsEvaluator.java | 2 +- .../LessThanOrEqualMillisNanosEvaluator.java | 2 +- .../LessThanOrEqualNanosMillisEvaluator.java | 2 +- .../comparison/NotEqualsBoolsEvaluator.java | 2 +- .../comparison/NotEqualsDoublesEvaluator.java | 2 +- .../NotEqualsGeometriesEvaluator.java | 2 +- .../comparison/NotEqualsIntsEvaluator.java | 2 +- .../NotEqualsKeywordsEvaluator.java | 2 +- .../comparison/NotEqualsLongsEvaluator.java | 2 +- .../NotEqualsMillisNanosEvaluator.java | 2 +- 
.../NotEqualsNanosMillisEvaluator.java | 2 +- 648 files changed, 644 insertions(+), 1308 deletions(-) delete mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java delete mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java delete mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java delete mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java index f11ccbced6fbe..15fc75a990c42 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java @@ -87,7 +87,7 @@ public JavaFile sourceFile() { private TypeSpec type() { TypeSpec.Builder builder = TypeSpec.classBuilder(implementation); builder.addJavadoc("{@link $T} implementation for {@link $T}.\n", AGGREGATOR_FUNCTION_SUPPLIER, declarationType); - builder.addJavadoc("This class is generated. Do not edit it."); + builder.addJavadoc("This class is generated. Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.addSuperinterface(AGGREGATOR_FUNCTION_SUPPLIER); diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java index 4589ab13a4e39..46881bf337c89 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java @@ -218,7 +218,7 @@ public JavaFile sourceFile() { private TypeSpec type() { TypeSpec.Builder builder = TypeSpec.classBuilder(implementation); builder.addJavadoc("{@link $T} implementation for {@link $T}.\n", AGGREGATOR_FUNCTION, declarationType); - builder.addJavadoc("This class is generated. Do not edit it."); + builder.addJavadoc("This class is generated. 
Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.addSuperinterface(AGGREGATOR_FUNCTION); builder.addField( diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java index f875cd7e6480e..c58684c5d06c9 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java @@ -83,7 +83,7 @@ public JavaFile sourceFile() { private TypeSpec type() { TypeSpec.Builder builder = TypeSpec.classBuilder(implementation); builder.addJavadoc("{@link $T} implementation for {@link $T}.\n", EXPRESSION_EVALUATOR, declarationType); - builder.addJavadoc("This class is generated. Do not edit it."); + builder.addJavadoc("This class is generated. Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.superclass(ABSTRACT_CONVERT_FUNCTION_EVALUATOR); diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java index b4a0cf9127f23..f0044ae4774f8 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java @@ -93,7 +93,7 @@ public JavaFile sourceFile() { private TypeSpec type() { TypeSpec.Builder builder = TypeSpec.classBuilder(implementation); builder.addJavadoc("{@link $T} implementation for {@link $T}.\n", EXPRESSION_EVALUATOR, declarationType); - builder.addJavadoc("This class is generated. Do not edit it."); + builder.addJavadoc("This class is generated. Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.addSuperinterface(EXPRESSION_EVALUATOR); builder.addType(factory()); diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java index bae8800d3d62f..8224c73936b90 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java @@ -156,7 +156,7 @@ public JavaFile sourceFile() { private TypeSpec type() { TypeSpec.Builder builder = TypeSpec.classBuilder(implementation); builder.addJavadoc("{@link $T} implementation for {@link $T}.\n", GROUPING_AGGREGATOR_FUNCTION, declarationType); - builder.addJavadoc("This class is generated. Do not edit it."); + builder.addJavadoc("This class is generated. 
Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.addSuperinterface(GROUPING_AGGREGATOR_FUNCTION); builder.addField( diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java index 30ca69b1651de..1872012500ea7 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java @@ -128,7 +128,7 @@ public JavaFile sourceFile() { private TypeSpec type() { TypeSpec.Builder builder = TypeSpec.classBuilder(implementation); builder.addJavadoc("{@link $T} implementation for {@link $T}.\n", EXPRESSION_EVALUATOR, declarationType); - builder.addJavadoc("This class is generated. Do not edit it."); + builder.addJavadoc("This class is generated. Edit {@code " + getClass().getSimpleName() + "} instead."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); if (warnExceptions.isEmpty()) { builder.superclass(ABSTRACT_MULTIVALUE_FUNCTION_EVALUATOR); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanArrayState.java index 793e6cc1b37ef..314bb5fea28a6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanArrayState.java @@ -27,7 +27,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayState.java.st} instead. *
</p>
*/ final class BooleanArrayState extends AbstractArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleArrayState.java index 6367fdfb6617e..d8d21a79f2141 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleArrayState.java @@ -28,7 +28,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleArrayState.java.st} instead. *
</p>
*/ final class BooleanFallibleArrayState extends AbstractFallibleArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleState.java index 073f31c390a6f..e75d0ddd76787 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanFallibleState.java @@ -13,7 +13,7 @@ /** * Aggregator state for a single boolean. * It stores a third boolean to store if the aggregation failed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleState.java.st} instead. */ final class BooleanFallibleState implements AggregatorState { private boolean value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanState.java index ba4d133dee553..cb0f4f7c3c603 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/BooleanState.java @@ -12,7 +12,7 @@ /** * Aggregator state for a single boolean. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-State.java.st} instead. */ final class BooleanState implements AggregatorState { private boolean value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java index 953b7172a2862..b2f080e0484fd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java @@ -27,7 +27,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayState.java.st} instead. *
</p>
*/ final class DoubleArrayState extends AbstractArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleArrayState.java index dd1d60f7bd246..f8d25c65c2f03 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleArrayState.java @@ -28,7 +28,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleArrayState.java.st} instead. *
</p>
*/ final class DoubleFallibleArrayState extends AbstractFallibleArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleState.java index 4cdeddec724bf..a59aff16f9ae5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleState.java @@ -13,7 +13,7 @@ /** * Aggregator state for a single double. * It stores a third boolean to store if the aggregation failed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleState.java.st} instead. */ final class DoubleFallibleState implements AggregatorState { private double value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleState.java index 90ecc2c1d3c03..c224a7d02479e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleState.java @@ -12,7 +12,7 @@ /** * Aggregator state for a single double. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-State.java.st} instead. */ final class DoubleState implements AggregatorState { private double value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatArrayState.java index b3767828f00db..db30dd1bec9ed 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatArrayState.java @@ -27,7 +27,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayState.java.st} instead. *
</p>
*/ final class FloatArrayState extends AbstractArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleArrayState.java index 055cf345033c5..d8c1cde11e1f1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleArrayState.java @@ -28,7 +28,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleArrayState.java.st} instead. *
</p>
*/ final class FloatFallibleArrayState extends AbstractFallibleArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleState.java index b050c86258dcd..bfaef7e6c291c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatFallibleState.java @@ -13,7 +13,7 @@ /** * Aggregator state for a single float. * It stores a third boolean to store if the aggregation failed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleState.java.st} instead. */ final class FloatFallibleState implements AggregatorState { private float value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatState.java index 6f608271b6e42..b271ed042d993 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FloatState.java @@ -12,7 +12,7 @@ /** * Aggregator state for a single float. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-State.java.st} instead. */ final class FloatState implements AggregatorState { private float value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java index 034ed72d08c17..c1451ce76d710 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java @@ -27,7 +27,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayState.java.st} instead. *
</p>
*/ final class IntArrayState extends AbstractArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleArrayState.java index e45d84720ca1a..b79e7114e7eb5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleArrayState.java @@ -28,7 +28,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleArrayState.java.st} instead. *
</p>
*/ final class IntFallibleArrayState extends AbstractFallibleArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleState.java index 360f3fdb009e4..91e952bf412d9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntFallibleState.java @@ -13,7 +13,7 @@ /** * Aggregator state for a single int. * It stores a third boolean to store if the aggregation failed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleState.java.st} instead. */ final class IntFallibleState implements AggregatorState { private int value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntState.java index c539c576ef36d..52b3cf18a1263 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntState.java @@ -12,7 +12,7 @@ /** * Aggregator state for a single int. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-State.java.st} instead. */ final class IntState implements AggregatorState { private int value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java index 9ff7e3f536484..2fc2bac5783ce 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java @@ -27,7 +27,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayState.java.st} instead. *
</p>
*/ final class LongArrayState extends AbstractArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleArrayState.java index cb69579906871..155248275133f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleArrayState.java @@ -28,7 +28,7 @@ * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
<p>
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleArrayState.java.st} instead. *
</p>
*/ final class LongFallibleArrayState extends AbstractFallibleArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleState.java index 98669ef627d04..b6f472a9feb78 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongFallibleState.java @@ -13,7 +13,7 @@ /** * Aggregator state for a single long. * It stores a third boolean to store if the aggregation failed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleState.java.st} instead. */ final class LongFallibleState implements AggregatorState { private long value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongState.java index e9d97dcfe7fc1..64b227145bc98 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongState.java @@ -12,7 +12,7 @@ /** * Aggregator state for a single long. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-State.java.st} instead. */ final class LongState implements AggregatorState { private long value; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java index b8ea7658a8247..9297e76d5c0be 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java @@ -29,7 +29,7 @@ /** * Maps a {@link BytesRefBlock} column to group ids. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockHash.java.st} instead. */ final class BytesRefBlockHash extends BlockHash { private final int channel; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java index c9c672112a630..ede268271d9dd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java @@ -28,7 +28,7 @@ /** * Maps a {@link DoubleBlock} column to group ids. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockHash.java.st} instead. 
*/ final class DoubleBlockHash extends BlockHash { private final int channel; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java index 13b60c6f1fec5..3a3267af3c983 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java @@ -26,7 +26,7 @@ /** * Maps a {@link IntBlock} column to group ids. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockHash.java.st} instead. */ final class IntBlockHash extends BlockHash { private final int channel; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java index 5252bd742ec51..c5152ae3cd22d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java @@ -28,7 +28,7 @@ /** * Maps a {@link LongBlock} column to group ids. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockHash.java.st} instead. */ final class LongBlockHash extends BlockHash { private final int channel; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index 896662dddf1eb..47d386d0bd690 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -18,7 +18,7 @@ /** * Block implementation that stores values in a {@link BooleanArrayVector}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. */ final class BooleanArrayBlock extends AbstractArrayBlock implements BooleanBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index f761ed5806a06..cde163a2d3bc5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -19,7 +19,7 @@ /** * Vector implementation that stores an array of boolean values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. 
*/ final class BooleanArrayVector extends AbstractVector implements BooleanVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java index f353512eb93b7..ed93cc2cc21b8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java @@ -20,7 +20,7 @@ /** * Block implementation that stores values in a {@link BooleanBigArrayVector}. Does not take ownership of the given * {@link BitArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayBlock.java.st} instead. */ public final class BooleanBigArrayBlock extends AbstractArrayBlock implements BooleanBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java index a1ccfc487cca9..fd7df3663afa8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java @@ -20,7 +20,7 @@ /** * Vector implementation that defers to an enclosed {@link BitArray}. * Does not take ownership of the array and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayVector.java.st} instead. */ public final class BooleanBigArrayVector extends AbstractVector implements BooleanVector, Releasable { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index b08b80acc6976..c0e0ed0595c0f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -19,7 +19,7 @@ /** * Block that stores boolean values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. */ public sealed interface BooleanBlock extends Block permits BooleanArrayBlock, BooleanVectorBlock, ConstantNullBlock, BooleanBigArrayBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index 7f4705ddecb27..1fe75bff6e1a5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -16,7 +16,7 @@ /** * Block build of BooleanBlocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. 
*/ final class BooleanBlockBuilder extends AbstractBlockBuilder implements BooleanBlock.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanLookup.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanLookup.java index f969e164eef68..10de16af922f6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanLookup.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanLookup.java @@ -14,7 +14,7 @@ /** * Generic {@link Block#lookup} implementation {@link BooleanBlock}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. */ final class BooleanLookup implements ReleasableIterator { private final BooleanBlock values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java index f2d6b5fbd4ce9..813f7cd757207 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java @@ -17,7 +17,7 @@ /** * Vector that stores boolean values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. */ public sealed interface BooleanVector extends Vector permits ConstantBooleanVector, BooleanArrayVector, BooleanBigArrayVector, ConstantNullVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index 1544cc3355cd0..56cfc725801ab 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -13,7 +13,7 @@ /** * Block view of a {@link BooleanVector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. */ public final class BooleanVectorBlock extends AbstractVectorBlock implements BooleanBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java index effb90267702f..340ead5a42894 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java @@ -11,7 +11,7 @@ /** * Builder for {@link BooleanVector}s that grows as needed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBuilder.java.st} instead. 
*/ final class BooleanVectorBuilder extends AbstractVectorBuilder implements BooleanVector.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java index c84029b4ceeb4..21835281393a5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java @@ -13,7 +13,7 @@ * Builder for {@link BooleanVector}s that never grows. Prefer this to * {@link BooleanVectorBuilder} if you know the precise size up front because * it's faster. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorFixedBuilder.java.st} instead. */ public final class BooleanVectorFixedBuilder implements BooleanVector.FixedBuilder { private final BlockFactory blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index 5bcb1b0ec5095..a85b75d8fdc2a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -21,7 +21,7 @@ /** * Block implementation that stores values in a {@link BytesRefArrayVector}. * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. */ final class BytesRefArrayBlock extends AbstractArrayBlock implements BytesRefBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index e72c900718735..509ee7e583e4c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -21,7 +21,7 @@ /** * Vector implementation that stores an array of BytesRef values. * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. */ final class BytesRefArrayVector extends AbstractVector implements BytesRefVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 6661895722725..2353012ef314b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -20,7 +20,7 @@ /** * Block that stores BytesRef values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. 
*/ public sealed interface BytesRefBlock extends Block permits BytesRefArrayBlock, BytesRefVectorBlock, ConstantNullBlock, OrdinalBytesRefBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index 0a2b350780405..2d724df2d3275 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -15,7 +15,7 @@ /** * Block build of BytesRefBlocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. */ final class BytesRefBlockBuilder extends AbstractBlockBuilder implements BytesRefBlock.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefLookup.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefLookup.java index 3ec62902fe048..98967fdac3fbe 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefLookup.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefLookup.java @@ -15,7 +15,7 @@ /** * Generic {@link Block#lookup} implementation {@link BytesRefBlock}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. */ final class BytesRefLookup implements ReleasableIterator { private final BytesRef firstScratch = new BytesRef(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java index a3432a2913fde..1bca89f531c14 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java @@ -18,7 +18,7 @@ /** * Vector that stores BytesRef values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. */ public sealed interface BytesRefVector extends Vector permits ConstantBytesRefVector, BytesRefArrayVector, ConstantNullVector, OrdinalBytesRefVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index 6c0334e147aa0..10cc1b5503a64 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -14,7 +14,7 @@ /** * Block view of a {@link BytesRefVector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. 
*/ public final class BytesRefVectorBlock extends AbstractVectorBlock implements BytesRefBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java index cd5851e9e49ef..1962784d2a1e3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java @@ -14,7 +14,7 @@ /** * Builder for {@link BytesRefVector}s that grows as needed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBuilder.java.st} instead. */ final class BytesRefVectorBuilder extends AbstractVectorBuilder implements BytesRefVector.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java index f36fbd7a20316..6ef344b8cc40d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java @@ -13,7 +13,7 @@ /** * Vector implementation that stores a constant boolean value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. */ final class ConstantBooleanVector extends AbstractVector implements BooleanVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java index 216b7db26b6fb..4bb8ee4a5a392 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java @@ -15,7 +15,7 @@ /** * Vector implementation that stores a constant BytesRef value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. */ final class ConstantBytesRefVector extends AbstractVector implements BytesRefVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java index 3c91eaf7171b5..b2f145e6918e1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java @@ -13,7 +13,7 @@ /** * Vector implementation that stores a constant double value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. 
*/ final class ConstantDoubleVector extends AbstractVector implements DoubleVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantFloatVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantFloatVector.java index a9ee5e811da64..09b34f0b57494 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantFloatVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantFloatVector.java @@ -13,7 +13,7 @@ /** * Vector implementation that stores a constant float value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. */ final class ConstantFloatVector extends AbstractVector implements FloatVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java index 20c1b07a3f102..1131096edf036 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java @@ -13,7 +13,7 @@ /** * Vector implementation that stores a constant int value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. */ final class ConstantIntVector extends AbstractVector implements IntVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java index 6e3decb9ad540..a7e22ee58526b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java @@ -13,7 +13,7 @@ /** * Vector implementation that stores a constant long value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. */ final class ConstantLongVector extends AbstractVector implements LongVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index 20bd42da98c71..83c7b85a7ff5a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -18,7 +18,7 @@ /** * Block implementation that stores values in a {@link DoubleArrayVector}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. 
*/ final class DoubleArrayBlock extends AbstractArrayBlock implements DoubleBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index 6426f8f39ecd9..5c375634011c6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -19,7 +19,7 @@ /** * Vector implementation that stores an array of double values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. */ final class DoubleArrayVector extends AbstractVector implements DoubleVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java index 8dc9efa755c17..9ae9e9c98ad98 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java @@ -20,7 +20,7 @@ /** * Block implementation that stores values in a {@link DoubleBigArrayVector}. Does not take ownership of the given * {@link DoubleArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayBlock.java.st} instead. */ public final class DoubleBigArrayBlock extends AbstractArrayBlock implements DoubleBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java index e98d0da358cdf..9c811e4aec056 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java @@ -20,7 +20,7 @@ /** * Vector implementation that defers to an enclosed {@link DoubleArray}. * Does not take ownership of the array and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayVector.java.st} instead. */ public final class DoubleBigArrayVector extends AbstractVector implements DoubleVector, Releasable { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index 04df6253662a9..d5511246afb34 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -19,7 +19,7 @@ /** * Block that stores double values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. 
*/ public sealed interface DoubleBlock extends Block permits DoubleArrayBlock, DoubleVectorBlock, ConstantNullBlock, DoubleBigArrayBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index 8ecc9b91e0ffe..5896bbd2c51e5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -16,7 +16,7 @@ /** * Block build of DoubleBlocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. */ final class DoubleBlockBuilder extends AbstractBlockBuilder implements DoubleBlock.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleLookup.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleLookup.java index bcb8a414f7c57..e8d69edb92c20 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleLookup.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleLookup.java @@ -14,7 +14,7 @@ /** * Generic {@link Block#lookup} implementation {@link DoubleBlock}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. */ final class DoubleLookup implements ReleasableIterator { private final DoubleBlock values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java index ec4b9cadc074e..b478c5ffbe043 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java @@ -17,7 +17,7 @@ /** * Vector that stores double values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. */ public sealed interface DoubleVector extends Vector permits ConstantDoubleVector, DoubleArrayVector, DoubleBigArrayVector, ConstantNullVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index 7e680f8e59ff0..f6350bd4586ca 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -13,7 +13,7 @@ /** * Block view of a {@link DoubleVector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. 
*/ public final class DoubleVectorBlock extends AbstractVectorBlock implements DoubleBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java index f4e7be406e1ca..c5c180003b071 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java @@ -11,7 +11,7 @@ /** * Builder for {@link DoubleVector}s that grows as needed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBuilder.java.st} instead. */ final class DoubleVectorBuilder extends AbstractVectorBuilder implements DoubleVector.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java index e84040578acf7..2ce356220f257 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java @@ -13,7 +13,7 @@ * Builder for {@link DoubleVector}s that never grows. Prefer this to * {@link DoubleVectorBuilder} if you know the precise size up front because * it's faster. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorFixedBuilder.java.st} instead. */ public final class DoubleVectorFixedBuilder implements DoubleVector.FixedBuilder { private final BlockFactory blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayBlock.java index c0941557dc4fe..749041d80d668 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayBlock.java @@ -18,7 +18,7 @@ /** * Block implementation that stores values in a {@link FloatArrayVector}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. */ final class FloatArrayBlock extends AbstractArrayBlock implements FloatBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayVector.java index fd47f40463552..f10e9dc39bbfd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatArrayVector.java @@ -19,7 +19,7 @@ /** * Vector implementation that stores an array of float values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. 
*/ final class FloatArrayVector extends AbstractVector implements FloatVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayBlock.java index a2191ba9fc69a..4a67a2e6ec12f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayBlock.java @@ -20,7 +20,7 @@ /** * Block implementation that stores values in a {@link FloatBigArrayVector}. Does not take ownership of the given * {@link FloatArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayBlock.java.st} instead. */ public final class FloatBigArrayBlock extends AbstractArrayBlock implements FloatBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayVector.java index 4e481bc6bcaaf..1aa5aa82e3447 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBigArrayVector.java @@ -20,7 +20,7 @@ /** * Vector implementation that defers to an enclosed {@link FloatArray}. * Does not take ownership of the array and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayVector.java.st} instead. */ public final class FloatBigArrayVector extends AbstractVector implements FloatVector, Releasable { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java index 0679e38b63219..61cda16381940 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java @@ -19,7 +19,7 @@ /** * Block that stores float values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. */ public sealed interface FloatBlock extends Block permits FloatArrayBlock, FloatVectorBlock, ConstantNullBlock, FloatBigArrayBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java index 8504912adc057..809f74899c9c2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java @@ -16,7 +16,7 @@ /** * Block build of FloatBlocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. 
*/ final class FloatBlockBuilder extends AbstractBlockBuilder implements FloatBlock.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatLookup.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatLookup.java index 9e0018e527c4d..25e39a649e948 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatLookup.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatLookup.java @@ -14,7 +14,7 @@ /** * Generic {@link Block#lookup} implementation {@link FloatBlock}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. */ final class FloatLookup implements ReleasableIterator { private final FloatBlock values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVector.java index 5f49efd2779ea..30fd4d69f221f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVector.java @@ -17,7 +17,7 @@ /** * Vector that stores float values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. */ public sealed interface FloatVector extends Vector permits ConstantFloatVector, FloatArrayVector, FloatBigArrayVector, ConstantNullVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBlock.java index d5df1e8ea6e57..6c2846183cd2d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBlock.java @@ -13,7 +13,7 @@ /** * Block view of a {@link FloatVector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. */ public final class FloatVectorBlock extends AbstractVectorBlock implements FloatBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBuilder.java index 9cec6355ec982..72db1bef16996 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorBuilder.java @@ -11,7 +11,7 @@ /** * Builder for {@link FloatVector}s that grows as needed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBuilder.java.st} instead. 
*/ final class FloatVectorBuilder extends AbstractVectorBuilder implements FloatVector.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorFixedBuilder.java index 9c4f2b3986c7e..d18d24809301f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorFixedBuilder.java @@ -13,7 +13,7 @@ * Builder for {@link FloatVector}s that never grows. Prefer this to * {@link FloatVectorBuilder} if you know the precise size up front because * it's faster. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorFixedBuilder.java.st} instead. */ public final class FloatVectorFixedBuilder implements FloatVector.FixedBuilder { private final BlockFactory blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index 8ced678bc90b0..0be8b6db78343 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -18,7 +18,7 @@ /** * Block implementation that stores values in a {@link IntArrayVector}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. */ final class IntArrayBlock extends AbstractArrayBlock implements IntBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index c3d6691dec3ca..9db51c61bbf1d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -19,7 +19,7 @@ /** * Vector implementation that stores an array of int values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. */ final class IntArrayVector extends AbstractVector implements IntVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java index 247720489a3fd..93555c913af1b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java @@ -20,7 +20,7 @@ /** * Block implementation that stores values in a {@link IntBigArrayVector}. Does not take ownership of the given * {@link IntArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayBlock.java.st} instead. 
*/ public final class IntBigArrayBlock extends AbstractArrayBlock implements IntBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java index b6c73205ad12e..3480b11bfe257 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java @@ -20,7 +20,7 @@ /** * Vector implementation that defers to an enclosed {@link IntArray}. * Does not take ownership of the array and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayVector.java.st} instead. */ public final class IntBigArrayVector extends AbstractVector implements IntVector, Releasable { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index 6af61695929df..a0677816d10ab 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -19,7 +19,7 @@ /** * Block that stores int values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. */ public sealed interface IntBlock extends Block permits IntArrayBlock, IntVectorBlock, ConstantNullBlock, IntBigArrayBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index 31449b6f1cd72..cf8f84d7449ee 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -16,7 +16,7 @@ /** * Block build of IntBlocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. */ final class IntBlockBuilder extends AbstractBlockBuilder implements IntBlock.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntLookup.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntLookup.java index b7ea15cd9d818..83a6d92f43586 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntLookup.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntLookup.java @@ -14,7 +14,7 @@ /** * Generic {@link Block#lookup} implementation {@link IntBlock}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. 
*/ final class IntLookup implements ReleasableIterator { private final IntBlock values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index da1a92b21c657..afd7aea269772 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -17,7 +17,7 @@ /** * Vector that stores int values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. */ public sealed interface IntVector extends Vector permits ConstantIntVector, IntArrayVector, IntBigArrayVector, ConstantNullVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index 57e69a3aa9acc..a18b2e8ab2384 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -13,7 +13,7 @@ /** * Block view of a {@link IntVector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. */ public final class IntVectorBlock extends AbstractVectorBlock implements IntBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java index 09bbb32cefe79..96301b1ab8d1c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java @@ -11,7 +11,7 @@ /** * Builder for {@link IntVector}s that grows as needed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBuilder.java.st} instead. */ final class IntVectorBuilder extends AbstractVectorBuilder implements IntVector.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java index 6af564735c073..56f92f9d0eb6e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java @@ -13,7 +13,7 @@ * Builder for {@link IntVector}s that never grows. Prefer this to * {@link IntVectorBuilder} if you know the precise size up front because * it's faster. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorFixedBuilder.java.st} instead. 
*/ public final class IntVectorFixedBuilder implements IntVector.FixedBuilder { private final BlockFactory blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java index fb631ab326ce7..9b9b7a694ebb2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -18,7 +18,7 @@ /** * Block implementation that stores values in a {@link LongArrayVector}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. */ final class LongArrayBlock extends AbstractArrayBlock implements LongBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index 748f25a0005fb..ff9179343536e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -19,7 +19,7 @@ /** * Vector implementation that stores an array of long values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. */ final class LongArrayVector extends AbstractVector implements LongVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java index ec600d02bc720..a9e98c82c8b5d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java @@ -20,7 +20,7 @@ /** * Block implementation that stores values in a {@link LongBigArrayVector}. Does not take ownership of the given * {@link LongArray} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayBlock.java.st} instead. */ public final class LongBigArrayBlock extends AbstractArrayBlock implements LongBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java index dae27331afc1e..a05a76b1d1d4c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java @@ -20,7 +20,7 @@ /** * Vector implementation that defers to an enclosed {@link LongArray}. * Does not take ownership of the array and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayVector.java.st} instead. 
*/ public final class LongBigArrayVector extends AbstractVector implements LongVector, Releasable { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index 090efd9a31579..9fae8d3cbcddd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -19,7 +19,7 @@ /** * Block that stores long values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. */ public sealed interface LongBlock extends Block permits LongArrayBlock, LongVectorBlock, ConstantNullBlock, LongBigArrayBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index bf25347edd989..58d3dbfe0cb38 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -16,7 +16,7 @@ /** * Block build of LongBlocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. */ final class LongBlockBuilder extends AbstractBlockBuilder implements LongBlock.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongLookup.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongLookup.java index ca1b06d70b1d1..3422784c4df60 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongLookup.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongLookup.java @@ -14,7 +14,7 @@ /** * Generic {@link Block#lookup} implementation {@link LongBlock}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. */ final class LongLookup implements ReleasableIterator { private final LongBlock values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java index 3773e602b8d03..3b3badab91a40 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java @@ -17,7 +17,7 @@ /** * Vector that stores long values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. 
*/ public sealed interface LongVector extends Vector permits ConstantLongVector, LongArrayVector, LongBigArrayVector, ConstantNullVector { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index edf58dae1c756..26a2cab5704b5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -13,7 +13,7 @@ /** * Block view of a {@link LongVector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. */ public final class LongVectorBlock extends AbstractVectorBlock implements LongBlock { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java index eb4e54781a020..8709b2ca6bdef 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java @@ -11,7 +11,7 @@ /** * Builder for {@link LongVector}s that grows as needed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBuilder.java.st} instead. */ final class LongVectorBuilder extends AbstractVectorBuilder implements LongVector.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java index 33cf0e5dc82e2..2fe289de7fd77 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java @@ -13,7 +13,7 @@ * Builder for {@link LongVector}s that never grows. Prefer this to * {@link LongVectorBuilder} if you know the precise size up front because * it's faster. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorFixedBuilder.java.st} instead. */ public final class LongVectorFixedBuilder implements LongVector.FixedBuilder { private final BlockFactory blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java index ca5cd1bda44d0..90ca8e567e8be 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java @@ -18,7 +18,7 @@ /** * {@link AggregatorFunction} implementation for {@link CountDistinctBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class CountDistinctBooleanAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java index 9512f4e76c49c..6ea78052c5f5b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class CountDistinctBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java index 4cdecd9944f7b..fb172567d7021 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class CountDistinctBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java index 38dadda1eba0c..77c386bd99175 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link CountDistinctBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class CountDistinctBytesRefAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java index b05c529c2ce9b..9191b7d7cfa5a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class CountDistinctBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java index 2261a60ff247e..5d4096de08417 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class CountDistinctBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java index 1d985fbd1dff6..f42fcfafef2d7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link CountDistinctDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class CountDistinctDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java index 0a15ebb07ecf4..08153afd30d8e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class CountDistinctDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java index c769a157e5ecb..d6d335dc6d0f0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class CountDistinctDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunction.java index 36d2aaf3e3d4f..5a65d460ad27e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link CountDistinctFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class CountDistinctFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionSupplier.java index 4c2aad00a7a72..a107f38d07a55 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class CountDistinctFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunction.java index 0b1c93aad5e2b..d9e745113689f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class CountDistinctFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java index 05bebca924f7e..44c9751f2bee7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link CountDistinctIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class CountDistinctIntAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java index fec5b7df48a21..891b2f7f553ed 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class CountDistinctIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java index 7642ca7dcc6a0..ae06526aa5317 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class CountDistinctIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java index 9e62525fa2bb0..864051e7b3197 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link CountDistinctLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class CountDistinctLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java index e3cc788215d39..b9b171c45f883 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class CountDistinctLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java index 00d0e955ba88a..685cb2f0e5dcf 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class CountDistinctLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunction.java index 01763200f2d2c..30d61d45f0759 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunction.java @@ -18,7 +18,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class MaxBooleanAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionSupplier.java index e5bbf63ddee07..d000f49920a3d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MaxBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanGroupingAggregatorFunction.java index dd7760273bfa6..915e481f2661f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MaxBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunction.java index 73b927cd9c521..936b18f35aae0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class MaxBytesRefAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionSupplier.java index 7c8af2e0c7e6d..9c97ce88c0063 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MaxBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunction.java index fcb87428e9b7d..f15976bcdc61d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MaxBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java index 04d24d49cbff8..3af21d1c19dff 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class MaxDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java index af878fc778985..df4d2749c4361 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MaxDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java index 42588ea81367c..c8f28f0dfd865 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MaxDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunction.java index ce22983bff72b..e16e28e182aaa 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class MaxFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionSupplier.java index a3aa44f432430..70628ace17f37 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MaxFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunction.java index 006ee147b15e1..89d3c8dae28ba 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MaxFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java index 6a91b574da769..a4c0fbfb75b9a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class MaxIntAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java index 5e0a4e2172696..c2c6fdcb8e1a7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MaxIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java index faea5a63eac93..4d86001ac669d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MaxIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunction.java index 7f6d47ce1c876..c0beac5a31aa8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxIpAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MaxIpAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionSupplier.java index 1fb734c243477..cd08981c7b2ab 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxIpAggregator}. 
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MaxIpAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunction.java index f5715949094f7..8b25dcc293159 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxIpAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MaxIpGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java index 97d12d1ef6852..71fa9b04b4937 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MaxLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MaxLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java index f4d17da186d58..0a56f31076008 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MaxLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MaxLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java index a5f115ad0d2b1..9573945dc7d53 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MaxLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MaxLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java index 611314318eba7..a442e49a8ad6e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MedianAbsoluteDeviationDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java index 4720ce08fa282..b78346f4b57b2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java index bdb56e9e3b1a0..291737cf1c21b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MedianAbsoluteDeviationDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunction.java index e20badf2ce38a..2bcd5be4fd0d3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MedianAbsoluteDeviationFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionSupplier.java index 1fad0faafad4e..069f125c0347d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MedianAbsoluteDeviationFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunction.java index 12b4c12591169..fe44034fbd6b8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MedianAbsoluteDeviationFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java index df0d24d442283..058eba52c6cdd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MedianAbsoluteDeviationIntAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java index e72918359b2f6..147809fae080c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MedianAbsoluteDeviationIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java index 6f0f18c0dea00..b79c1829c2c22 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MedianAbsoluteDeviationIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java index e0ace94a1da49..56d21b5877c73 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MedianAbsoluteDeviationLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java index aa79691ba220e..1246c96941c37 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MedianAbsoluteDeviationLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java index 910678df997d1..acefb4a683811 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MedianAbsoluteDeviationLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunction.java index 4d91d3794aecb..c6645a1945783 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunction.java @@ -18,7 +18,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MinBooleanAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionSupplier.java index f66dc6e67e0fd..02d2fda11ff7a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MinBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanGroupingAggregatorFunction.java index 71e636001cd5f..ffad2a58c5d41 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MinBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunction.java index 01ee21f82ab53..5a421b5c1fe4f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MinBytesRefAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionSupplier.java index cb6ab0d06d401..65e7b4b58e94d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MinBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunction.java index 1650c6c513fdd..01adcea310b46 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MinBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java index a436cdcdbef6d..a92183e3e7af7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MinDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java index f91cdfe54c89e..18aadce9baa58 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MinDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java index 729c77a225049..5c19d45fa16b8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MinDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunction.java index ec6757e59d074..439f463ce5665 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MinFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionSupplier.java index a8ccc70f9996a..04d08ed6ea4b6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class MinFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunction.java index d3d59935e62d5..e92d9b44d18c8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MinFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java index f76dcec81d871..4fbb44cca85a5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MinIntAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java index e09102a1a88ec..51761433e0254 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MinIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java index 7095608ca50cc..7cf3a99f15e2e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinIntAggregator}. 
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class MinIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunction.java index 795299d9332fc..ec20f57699760 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinIpAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MinIpAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionSupplier.java index 591a8501f874d..dd066820b50e7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinIpAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MinIpAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunction.java index 0cb4154009a90..abb8e1cd89fcd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinIpAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class MinIpGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java index 4fc968bab2eff..20852b134ea32 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link MinLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class MinLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java index 85805767c9168..58311b65589f3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link MinLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class MinLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java index 0498c4b8d866b..87971c66fcda8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link MinLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class MinLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java index 9ece01135e0a9..51cbd31e56b85 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link PercentileDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class PercentileDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java index 996cebd805aa8..28fe487f99197 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link PercentileDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class PercentileDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java index 7767f4228bdcb..d455c4ad0a15a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link PercentileDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class PercentileDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunction.java index 434989adf47b2..b4b36ec039b72 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link PercentileFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class PercentileFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionSupplier.java index 1d1678f15448c..4288d062ec238 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link PercentileFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class PercentileFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunction.java index b65876df06031..5b2649a57167a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link PercentileFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class PercentileFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java index eb4ae96f5dea5..730c27ea27bf7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link PercentileIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class PercentileIntAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java index 7ce62dd7b600b..3a9996aed0d8c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link PercentileIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class PercentileIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java index dbbb85b4728f4..2aef3e76b00f4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link PercentileIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class PercentileIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java index 837f7efb32441..510b69cb6fc89 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link PercentileLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class PercentileLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java index 7e32bfc9d9937..d3cdf57a1862f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link PercentileLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class PercentileLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java index 57934ba6e6fe9..3330e630235da 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link PercentileLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class PercentileLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java index 8806e1ed865c2..92d73864fa772 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link RateDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class RateDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java index 2fca5c1d19c5e..5a2f4203cf49f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link RateDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class RateDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatAggregatorFunctionSupplier.java index 4b1546314a9cb..d4914ba36e803 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link RateFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class RateFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatGroupingAggregatorFunction.java index 628503f12900e..1048c93223b9e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateFloatGroupingAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link RateFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class RateFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java index a98f0217ef90e..6c0fd0ed21957 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link RateIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class RateIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java index 2f030544da612..00ea9986de165 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link RateIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class RateIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java index b8100dbbe4455..311616effba37 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link RateLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class RateLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java index fd272e47fa6a3..30cc10bf67077 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link RateLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class RateLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java index dd6cc89401a99..3b08c42b7acbf 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link AggregatorFunction} implementation for {@link StdDevDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class StdDevDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java index 313eed4ae97ae..caf53dad23b0d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link StdDevDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class StdDevDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java index da49c254e353a..d15c35f9324a9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link StdDevDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class StdDevDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java index bf8c4854f6b93..4f2937c2db07d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link AggregatorFunction} implementation for {@link StdDevFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class StdDevFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java index 25dfa54895eda..c807c1582e1ca 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link StdDevFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class StdDevFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java index bf994aaf2840e..ec7a319cd0752 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link StdDevFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class StdDevFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java index 4a5585a7dd454..9fe0798f53d76 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java @@ -23,7 +23,7 @@ /** * {@link AggregatorFunction} implementation for {@link StdDevIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class StdDevIntAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java index 5a762d6606a25..36560af8557e2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link StdDevIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class StdDevIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java index 139cc24d3541f..747d0a53c139c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link StdDevIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class StdDevIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java index b5ed31116a90c..60cee4bca2c14 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link AggregatorFunction} implementation for {@link StdDevLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class StdDevLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java index 09b996201ef16..dc6ed063031ed 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link StdDevLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class StdDevLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java index da7a5f4bdea0d..dff7e3a204732 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link StdDevLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class StdDevLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java index 4d24579203df1..70c3191a2236b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link SumDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class SumDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java index b68bed30013c6..a88b6ddc3bf5b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link SumDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class SumDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java index 71b282c58aca2..ca7b452e121d0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SumDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class SumDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunction.java index 50f41b5edc05f..d2259391a2b43 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link SumFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class SumFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionSupplier.java index 515122ec08ac0..5a01eaeaafd39 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link SumFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class SumFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunction.java index 664f616acee9d..ea5a876b2432b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatGroupingAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SumFloatAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class SumFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java index 95bd95ac474ad..6d645c5e5c1ce 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link AggregatorFunction} implementation for {@link SumIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
*/ public final class SumIntAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java index dcb48944dc557..bef192a06c3df 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link SumIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class SumIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java index 2f369374d8cdb..d734f42df7038 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SumIntAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class SumIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java index fac21d99bf713..5852a590d265d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link SumLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class SumLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java index b4d36aa526075..9f5f3d7d493aa 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link SumLongAggregator}. 
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class SumLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java index c8c0990de4e54..a2feb10cff580 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SumLongAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class SumLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunction.java index b8d06787f7f68..c3424b6c982b5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunction.java @@ -18,7 +18,7 @@ /** * {@link AggregatorFunction} implementation for {@link TopBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class TopBooleanAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionSupplier.java index 74beed084543f..aebe53c4c4fbf 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link TopBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class TopBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanGroupingAggregatorFunction.java index cd35595eeadb0..108e0a0704ec8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanGroupingAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link TopBooleanAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class TopBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunction.java index 9ef460be5796b..a0927f5835a8d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunction.java @@ -20,7 +20,7 @@ /** * {@link AggregatorFunction} implementation for {@link TopBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class TopBytesRefAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionSupplier.java index 8c77d2116bf69..9108cfcef1892 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link TopBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
*/ public final class TopBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunction.java index aa2d6094c8c3f..45514ee343668 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBytesRefGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link TopBytesRefAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final class TopBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java index 210bc76483a81..ca80c91c58cb0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java @@ -19,7 +19,7 @@ /** * {@link AggregatorFunction} implementation for {@link TopDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class TopDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java index 36a8763b4a870..3e65be2efb210 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java @@ -12,7 +12,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link TopDoubleAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
  */
 public final class TopDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java
index 6b76ff7772ad1..956c7b8ae5595 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java
@@ -20,7 +20,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link TopDoubleAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class TopDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java
index f7fdb406acadb..b569590b781bd 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java
@@ -19,7 +19,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link TopFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class TopFloatAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java
index e01df8329a315..79561a349cef1 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java
@@ -12,7 +12,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link TopFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java
index ffaf858645440..712277798e65c 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java
@@ -20,7 +20,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link TopFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class TopFloatGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java
index 1ea40134f7260..0d279e672dd9d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java
@@ -19,7 +19,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link TopIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class TopIntAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java
index 4481f2d5afaa8..cd7690f189007 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java
@@ -12,7 +12,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link TopIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java
index a3453126e055e..0581c0f244964 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java
@@ -18,7 +18,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link TopIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class TopIntGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunction.java
index 8c216c90504c1..e723dc2451b5e 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunction.java
@@ -20,7 +20,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link TopIpAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class TopIpAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionSupplier.java
index 8f630c0306170..9b137b39d8e89 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunctionSupplier.java
@@ -12,7 +12,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link TopIpAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopIpAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunction.java
index 74a6987962b78..bfcdadfa54814 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpGroupingAggregatorFunction.java
@@ -21,7 +21,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link TopIpAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class TopIpGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java
index 85df0f7edc843..30f115c85ba1b 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java
@@ -19,7 +19,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link TopLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class TopLongAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java
index 1a39c7b5580ec..8fd7f59135986 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java
@@ -12,7 +12,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link TopLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class TopLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java
index b4a4b7154e626..d5ebeca1f174c 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java
@@ -20,7 +20,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link TopLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class TopLongGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java
index abf73c07d4ab6..d3dfad9786561 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java
@@ -18,7 +18,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link ValuesBooleanAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class ValuesBooleanAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java
index 20d5a5fda7726..b15dd0ed696ab 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java
@@ -12,7 +12,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link ValuesBooleanAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class ValuesBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java
index b51da118e0f8d..81da1f65e9bee 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java
@@ -20,7 +20,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link ValuesBooleanAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class ValuesBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java
index ecc6424ba8501..6a4d7ad713f46 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java
@@ -20,7 +20,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link ValuesBytesRefAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class ValuesBytesRefAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java
index 31de817edf868..c09331bf19709 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java
@@ -12,7 +12,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link ValuesBytesRefAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class ValuesBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java
index bdce606f92168..6db44ffce8faf 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java
@@ -21,7 +21,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link ValuesBytesRefAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class ValuesBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java
index 2fa8ed31ec427..17252bd237f49 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java
@@ -19,7 +19,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link ValuesDoubleAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class ValuesDoubleAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java
index 049deda37c460..c8f93159eb3c1 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java
@@ -12,7 +12,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link ValuesDoubleAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class ValuesDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java
index 5b8c2ac802663..893d8fcd2ea5d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java
@@ -20,7 +20,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link ValuesDoubleAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class ValuesDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunction.java
index 8b61c6d07eed6..55c58fbc2af16 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunction.java
@@ -19,7 +19,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link ValuesFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class ValuesFloatAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionSupplier.java
index b4b0c2f1a0444..7802a06a6935f 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunctionSupplier.java
@@ -12,7 +12,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link ValuesFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class ValuesFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunction.java
index f50c5a67d15a5..8afd75384aa87 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatGroupingAggregatorFunction.java
@@ -20,7 +20,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link ValuesFloatAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class ValuesFloatGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java
index 7f12bbc18b202..a65e8b1db8fe7 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java
@@ -19,7 +19,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link ValuesIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class ValuesIntAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java
index 21402b5913813..a86b3838d7c92 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java
@@ -12,7 +12,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link ValuesIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class ValuesIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java
index c90fcedb291cf..468320a69fc98 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java
@@ -18,7 +18,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link ValuesIntAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class ValuesIntGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java
index 7e8c256d90f93..7781392f8c29c 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java
@@ -19,7 +19,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link ValuesLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class ValuesLongAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java
index a025bd0ade17a..dd302cc4eb69e 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java
@@ -12,7 +12,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link ValuesLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class ValuesLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java
index 8a79cd7d942ee..cc6e7121c5afb 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java
@@ -20,7 +20,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link ValuesLongAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class ValuesLongGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java
index a205c728db5fc..5beffde65284e 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java
@@ -23,7 +23,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link SpatialCentroidCartesianPointDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class SpatialCentroidCartesianPointDocValuesAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java
index 3228340beeb43..19139c22863d9 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier.java
@@ -13,7 +13,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link SpatialCentroidCartesianPointDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java
index cc2fb38bb925c..80c5643ea0a6c 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java
@@ -25,7 +25,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link SpatialCentroidCartesianPointDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java
index e20a3fb1cfa35..fd5b519bc3d53 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java
@@ -26,7 +26,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link SpatialCentroidCartesianPointSourceValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class SpatialCentroidCartesianPointSourceValuesAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java
index 9b0d7c5f64cd7..b43fb64f6730b 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier.java
@@ -13,7 +13,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link SpatialCentroidCartesianPointSourceValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java
index 6ae2b444efe98..f767d5e39d1d1 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java
@@ -28,7 +28,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link SpatialCentroidCartesianPointSourceValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java
index b2c237a904796..22e3a6fa69801 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java
@@ -23,7 +23,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link SpatialCentroidGeoPointDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class SpatialCentroidGeoPointDocValuesAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java
index 46c2777e8c77a..34414a9e9c5c3 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier.java
@@ -13,7 +13,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link SpatialCentroidGeoPointDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java
index 0cce9b7cf1cd5..ce99c9086cca3 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java
@@ -25,7 +25,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link SpatialCentroidGeoPointDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java
index db61420fb8cbe..f583accfb9f71 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java
@@ -26,7 +26,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link SpatialCentroidGeoPointSourceValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class SpatialCentroidGeoPointSourceValuesAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java
index 02b975f03890f..80f608a10a6fb 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier.java
@@ -13,7 +13,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link SpatialCentroidGeoPointSourceValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java
index 6c4d6635846df..bb07444e913ae 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java
@@ -28,7 +28,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link SpatialCentroidGeoPointSourceValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java
index 21306036fbf50..18d466442f750 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java
@@ -23,7 +23,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianPointDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class SpatialExtentCartesianPointDocValuesAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java
index 751ea3b4c4a9d..c9447dfce0f19 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java
@@ -13,7 +13,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianPointDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java
index a5191e57959b8..c528cef4d3863 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java
@@ -23,7 +23,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianPointDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java
index 6610168e1df21..e7068b708a984 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java
@@ -24,7 +24,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianPointSourceValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class SpatialExtentCartesianPointSourceValuesAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java
index 7f4d1d69ae928..d0cd2e33fe0f8 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java
@@ -13,7 +13,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianPointSourceValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java
index 4e06158952fc3..7ee441fe88f16 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java
@@ -24,7 +24,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianPointSourceValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunction.java
index 3471aafc3a53b..4d1bd972434b1 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunction.java
@@ -21,7 +21,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianShapeDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class SpatialExtentCartesianShapeDocValuesAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier.java
index b53d779912fc9..2fa68f5226488 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier.java
@@ -13,7 +13,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianShapeDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SpatialExtentCartesianShapeDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.java
index aa3c1a7ba56ae..48161b3ea4bf3 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction.java
@@ -21,7 +21,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianShapeDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class SpatialExtentCartesianShapeDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunction.java
index 014a2d454f576..62440eba29355 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunction.java
@@ -24,7 +24,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianShapeSourceValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class SpatialExtentCartesianShapeSourceValuesAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier.java
index c8b1372d44b68..822a10fbe4794 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier.java
@@ -13,7 +13,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianShapeSourceValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SpatialExtentCartesianShapeSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.java
index d932038a26ec7..77893dd350b86 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction.java
@@ -24,7 +24,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianShapeSourceValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
  */
 public final class SpatialExtentCartesianShapeSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java
index 4e76d3dbe0298..49b9ca1bad69d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java
@@ -23,7 +23,7 @@
 /**
  * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoPointDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorImplementer} instead.
  */
 public final class SpatialExtentGeoPointDocValuesAggregatorFunction implements AggregatorFunction {
   private static final List INTERMEDIATE_STATE_DESC = List.of(
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java
index f72a4cc648ec8..3c5d7c8355133 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java
@@ -13,7 +13,7 @@
 /**
  * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoPointDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
  */
 public final class SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
   private final List channels;
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java
index 9a97a37b22ca1..235bd10c3e8e2 100644
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java
@@ -23,7 +23,7 @@
 /**
  * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoPointDocValuesAggregator}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
*/ public final class SpatialExtentGeoPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java index 05bcc79db4f34..7d31bea612321 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class SpatialExtentGeoPointSourceValuesAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java index 1af20d72d08b0..8018b7d8d829b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java @@ -13,7 +13,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java index 1231e24382887..d1c715d5b5f35 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoPointSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunction.java index fefef6edf6dc7..d181ae1305c7e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoShapeDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class SpatialExtentGeoShapeDocValuesAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier.java index d104c74bc5072..cd36ee8fd14a2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier.java @@ -13,7 +13,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoShapeDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class SpatialExtentGeoShapeDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.java index 7d286eba12ffc..00df4fe3282e6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction.java @@ -21,7 +21,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoShapeDocValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class SpatialExtentGeoShapeDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunction.java index a16f8911d7816..b72a8f23eb5ca 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoShapeSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorImplementer} instead. */ public final class SpatialExtentGeoShapeSourceValuesAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier.java index 1eeb17367d852..95aa4f3d30070 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier.java @@ -13,7 +13,7 @@ /** * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoShapeSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. */ public final class SpatialExtentGeoShapeSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.java index 8c768496e5905..c9be8deaf649c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction.java @@ -24,7 +24,7 @@ /** * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoShapeSourceValuesAggregator}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
*/ public final class SpatialExtentGeoShapeSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st index ad0ffc1d7e993..58a0a35e79d5d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st @@ -39,7 +39,7 @@ import org.elasticsearch.core.Releasables; * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code X-ArrayState.java.st} instead.
*/ final class $Type$ArrayState extends AbstractArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleArrayState.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleArrayState.java.st index 3c57ab948a79f..5235e308ddf47 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleArrayState.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleArrayState.java.st @@ -40,7 +40,7 @@ import org.elasticsearch.core.Releasables; * call {@link #enableGroupIdTracking} to transition the state into a mode * where it'll track which {@code groupIds} have been written. *
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code X-FallibleArrayState.java.st} instead.
*/ final class $Type$FallibleArrayState extends AbstractFallibleArrayState implements GroupingAggregatorState { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleState.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleState.java.st index 27609383e4f61..9eab53b1edcf2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleState.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-FallibleState.java.st @@ -13,7 +13,7 @@ import org.elasticsearch.compute.operator.DriverContext; /** * Aggregator state for a single $type$. * It stores a third boolean to store if the aggregation failed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-FallibleState.java.st} instead. */ final class $Type$FallibleState implements AggregatorState { private $type$ value; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-State.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-State.java.st index 7e0949c86faaa..8894dc9c80cff 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-State.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-State.java.st @@ -12,7 +12,7 @@ import org.elasticsearch.compute.operator.DriverContext; /** * Aggregator state for a single $type$. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-State.java.st} instead. */ final class $Type$State implements AggregatorState { private $type$ value; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st index 2a3d1143236ac..d87ff9ba66442 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st @@ -59,7 +59,7 @@ import java.util.BitSet; $endif$ /** * Maps a {@link $Type$Block} column to group ids. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockHash.java.st} instead. */ final class $Type$BlockHash extends BlockHash { private final int channel; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index 16e2a62b9d030..707b19165bb3b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -29,7 +29,7 @@ import java.util.BitSet; $if(BytesRef)$ * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. $endif$ - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayBlock.java.st} instead. 
*/ final class $Type$ArrayBlock extends AbstractArrayBlock implements $Type$Block { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 3bb13674ce477..521e09d909a1c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -36,7 +36,7 @@ $endif$ $if(BytesRef)$ * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. $endif$ - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ArrayVector.java.st} instead. */ final class $Type$ArrayVector extends AbstractVector implements $Type$Vector { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st index 23632bf41349c..387730de84556 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st @@ -20,7 +20,7 @@ import java.util.BitSet; /** * Block implementation that stores values in a {@link $Type$BigArrayVector}. Does not take ownership of the given * {@link $Array$} and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayBlock.java.st} instead. */ public final class $Type$BigArrayBlock extends AbstractArrayBlock implements $Type$Block { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st index 106d0769ebb07..a1145311fabc3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st @@ -20,7 +20,7 @@ import java.io.IOException; /** * Vector implementation that defers to an enclosed {@link $if(boolean)$Bit$else$$Type$$endif$Array}. * Does not take ownership of the array and does not adjust circuit breakers to account for it. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BigArrayVector.java.st} instead. */ public final class $Type$BigArrayVector extends AbstractVector implements $Type$Vector, Releasable { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index 6c1616c370721..670f9fa15842d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -22,7 +22,7 @@ import java.io.IOException; /** * Block that stores $type$ values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Block.java.st} instead. 
*/ $if(BytesRef)$ public sealed interface BytesRefBlock extends Block permits BytesRefArrayBlock, BytesRefVectorBlock, ConstantNullBlock, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index d60e1de179d20..6553011e5b413 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -25,7 +25,7 @@ $endif$ /** * Block build of $Type$Blocks. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-BlockBuilder.java.st} instead. */ final class $Type$BlockBuilder extends AbstractBlockBuilder implements $Type$Block.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index 5d0d4c8a956f3..ebac760031678 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -22,7 +22,7 @@ import org.elasticsearch.core.ReleasableIterator; $endif$ /** * Vector implementation that stores a constant $type$ value. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-ConstantVector.java.st} instead. */ final class Constant$Type$Vector extends AbstractVector implements $Type$Vector { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Lookup.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Lookup.java.st index 668752fe3f59f..ad3d93a76ad40 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Lookup.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Lookup.java.st @@ -17,7 +17,7 @@ import org.elasticsearch.core.Releasables; /** * Generic {@link Block#lookup} implementation {@link $Type$Block}s. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Lookup.java.st} instead. */ final class $Type$Lookup implements ReleasableIterator<$Type$Block> { $if(BytesRef)$ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index c556cba7ef2e4..47a7dc5735fd2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -20,7 +20,7 @@ import java.io.IOException; /** * Vector that stores $type$ values. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-Vector.java.st} instead. 
*/ $if(BytesRef)$ public sealed interface BytesRefVector extends Vector permits ConstantBytesRefVector, BytesRefArrayVector, ConstantNullVector, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index d4c6859e64b2a..5ab410e843eca 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -16,7 +16,7 @@ import org.elasticsearch.core.Releasables; /** * Block view of a {@link $Type$Vector}. Cannot represent multi-values or nulls. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBlock.java.st} instead. */ public final class $Type$VectorBlock extends AbstractVectorBlock implements $Type$Block { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st index da074c75f7c4d..4169728a6a596 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st @@ -19,7 +19,7 @@ $endif$ /** * Builder for {@link $Type$Vector}s that grows as needed. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorBuilder.java.st} instead. */ final class $Type$VectorBuilder extends AbstractVectorBuilder implements $Type$Vector.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st index c08478829c818..8bfc48972f995 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st @@ -13,7 +13,7 @@ import org.apache.lucene.util.RamUsageEstimator; * Builder for {@link $Type$Vector}s that never grows. Prefer this to * {@link $Type$VectorBuilder} if you know the precise size up front because * it's faster. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code X-VectorFixedBuilder.java.st} instead. */ public final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { private final BlockFactory blockFactory; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java index b25f81a79b7ce..255c162df3495 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Not}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java index 27345b384375e..581ae82afbd21 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreatestBooleanEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java index c8b8c3ac501ec..9926644551faf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreatestBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java index e62b0f9877cdc..a0129f7762379 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreatestDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java index 8c05f00fb1e0d..7b2d3e76a027b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreatestIntEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java index 78237c8d389bd..94c6bbd934751 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreatestLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java index 82fc13cffbe7e..8bf830d030f63 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LeastBooleanEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java index b37408e434148..fe06aa9b5f32e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LeastBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java index bf501d53211e6..2c9c45e363d63 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LeastDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java index 803f23d994bbe..e14d83dafb951 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LeastIntEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java index 7408dd1165a01..da2e98b59220f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LeastLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java index 9ba95999f052b..c4a22cc06900d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link FromBase64}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class FromBase64Evaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java index 2ed07e440a301..3fafd237030db 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToBase64}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class ToBase64Evaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java index c4264fb78be92..d5df80376c484 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToBoolean}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToBooleanFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToBooleanFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java index 43ac58d1f0fc4..a4f17f9892b82 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToBoolean}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToBooleanFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToBooleanFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java index c8b2814a3f6da..5f62c70b04972 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToBoolean}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToBooleanFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToBooleanFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java index 8859bfce25ba1..9e12947199948 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToBoolean}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToBooleanFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToBooleanFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java index 2f4037ff3b116..40436df749e04 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToBoolean}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToBooleanFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToBooleanFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java index 7c47e39dfba19..68d755000902d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToCartesianPoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToCartesianPointFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToCartesianPointFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java index 6ae079e153e0b..fcbe066acda25 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToCartesianShape}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToCartesianShapeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToCartesianShapeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java index e00e7e044ae12..2b7262c19128e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDatetimeEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDateNanos}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDateNanosFromDatetimeEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToDateNanosFromDatetimeEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java index 23b30e669241b..49f894998312d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromDoubleEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDateNanos}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToDateNanosFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToDateNanosFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java index cc52208ce5a25..a5084102ffe5a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromLongEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDateNanos}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDateNanosFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToDateNanosFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java index c5a20ac298da7..3bc9ecfcfaf4b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosFromStringEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDateNanos}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDateNanosFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToDateNanosFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromDateNanosEvaluator.java index 92b629657b95b..56b8decbe7e4e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromDateNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromDateNanosEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDatetime}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToDatetimeFromDateNanosEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToDatetimeFromDateNanosEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java index 3e074dba3d456..46bba56031163 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDatetime}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDatetimeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToDatetimeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java index 11bf9ffed0fbd..a5d0e8af694b5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDegrees}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDegreesEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToDegreesEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java index 60433ea5efae7..250147a255ec2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDouble}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToDoubleFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToDoubleFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java index 1e3c48f472ad2..7b2ff9f39d56e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDouble}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDoubleFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToDoubleFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java index 6e959a28459aa..e19f9c4cb4432 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDouble}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDoubleFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToDoubleFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java index 6613fc1dd6b94..4ca9427149250 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDouble}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToDoubleFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToDoubleFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java index 9badb00fc472c..6015e631093a6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToDouble}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToDoubleFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToDoubleFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java index ad33737f3da11..4444256cf6b2c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToGeoPoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToGeoPointFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToGeoPointFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java index db59fd3a16da8..ede98457d21dc 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToGeoShape}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToGeoShapeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToGeoShapeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java index 7a2b2a016d60f..20808c3493285 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToIP}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToIPFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToIPFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java index 9bd1304024ad6..72837d5c7d6aa 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToInteger}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToIntegerFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToIntegerFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java index 5057037993f60..c45f332f0d22e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToInteger}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToIntegerFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToIntegerFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java index b2e891a6e65d5..b829a506dc5a0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToInteger}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToIntegerFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToIntegerFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java index d50c18501e37f..1a0abf5ed6300 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToInteger}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToIntegerFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToIntegerFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java index 31fadc9f28845..433ad4815dc06 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToInteger}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToIntegerFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToIntegerFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java index 668bedfa4440e..d74861f4ef11d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToLongFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToLongFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java index cb1c10558f10e..a63c54184fb88 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToLongFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToLongFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java index 74be177061f7a..95623d0ef2672 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToLongFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToLongFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java index 1d58a05c7d970..283d97606e9a4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToLongFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToLongFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java index af911e5b787ac..134ea9698c47f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLong}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToLongFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToLongFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java index 6aed22da1b015..b1a480878a9c2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java @@ -16,7 +16,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToRadians}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToRadiansEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToRadiansEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java index 47af1b25c88e8..d9d1cecd2af93 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToStringFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToStringFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java index d42c945c0cee6..aeaf940d51ff0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToStringFromCartesianPointEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToStringFromCartesianPointEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java index 93901e1c4486c..2d4c04e6afcdd 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToStringFromCartesianShapeEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToStringFromCartesianShapeEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDateNanosEvaluator.java index 37f13ea340a26..189607a077d74 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDateNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDateNanosEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToStringFromDateNanosEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToStringFromDateNanosEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java index e179f92665a7c..a7f2d537e1801 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToStringFromDatetimeEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToStringFromDatetimeEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java index 7815b33845394..29091a037dada 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToStringFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToStringFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java index 42b3c37fed892..5dfdbf37257e0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToStringFromGeoPointEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToStringFromGeoPointEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java index a8c1b8e241ba4..a207b4dbf1875 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToStringFromGeoShapeEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToStringFromGeoShapeEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java index d51ae78956c21..a9663c2a4fdf5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToStringFromIPEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToStringFromIPEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java index cfff78cf3b550..604061865dcd4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToStringFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToStringFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java index f4e0046f93f4b..c444c5949a4c5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToStringFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToStringFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java index 57275460a1813..6c3789ac182a4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToStringFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToStringFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java index 816963dd73536..183cfaba27e42 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToStringFromVersionEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToStringFromVersionEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java index 3b7dd65b68f2d..74d41a0489987 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToUnsignedLong}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToUnsignedLongFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToUnsignedLongFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java index 1a6b9ee26557d..6f0e1a676920e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToUnsignedLong}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToUnsignedLongFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToUnsignedLongFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java index 56c3c0cecc222..f1c65e0e5f1a7 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java @@ -18,7 +18,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToUnsignedLong}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToUnsignedLongFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToUnsignedLongFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java index 323661261ce56..5b16dc8bc5b98 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToUnsignedLong}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToUnsignedLongFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToUnsignedLongFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java index 0f3096c4824da..b13be58bfcbc8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToUnsignedLong}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class ToUnsignedLongFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToUnsignedLongFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java index fecd2b62e53ab..b9ad6ded04f1c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java @@ -17,7 +17,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToVersion}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class ToVersionFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { public ToVersionFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java deleted file mode 100644 index 0ad09ee55ca1f..0000000000000 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java +++ /dev/null @@ -1,168 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.compute.operator.Warnings; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.core.InvalidArgumentException; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. 
- */ -public final class DateDiffConstantEvaluator implements EvalOperator.ExpressionEvaluator { - private final Source source; - - private final DateDiff.Part datePartFieldUnit; - - private final EvalOperator.ExpressionEvaluator startTimestamp; - - private final EvalOperator.ExpressionEvaluator endTimestamp; - - private final DriverContext driverContext; - - private Warnings warnings; - - public DateDiffConstantEvaluator(Source source, DateDiff.Part datePartFieldUnit, - EvalOperator.ExpressionEvaluator startTimestamp, - EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { - this.source = source; - this.datePartFieldUnit = datePartFieldUnit; - this.startTimestamp = startTimestamp; - this.endTimestamp = endTimestamp; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { - try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { - LongVector startTimestampVector = startTimestampBlock.asVector(); - if (startTimestampVector == null) { - return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); - } - LongVector endTimestampVector = endTimestampBlock.asVector(); - if (endTimestampVector == null) { - return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); - } - return eval(page.getPositionCount(), startTimestampVector, endTimestampVector); - } - } - } - - public IntBlock eval(int positionCount, LongBlock startTimestampBlock, - LongBlock endTimestampBlock) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - if (startTimestampBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (startTimestampBlock.getValueCount(p) != 1) { - if (startTimestampBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (endTimestampBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (endTimestampBlock.getValueCount(p) != 1) { - if (endTimestampBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendInt(DateDiff.process(this.datePartFieldUnit, startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); - } catch (IllegalArgumentException | InvalidArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public IntBlock eval(int positionCount, LongVector startTimestampVector, - LongVector endTimestampVector) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendInt(DateDiff.process(this.datePartFieldUnit, startTimestampVector.getLong(p), endTimestampVector.getLong(p))); - } catch (IllegalArgumentException | InvalidArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "DateDiffConstantEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + 
endTimestamp + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(startTimestamp, endTimestamp); - } - - private Warnings warnings() { - if (warnings == null) { - this.warnings = Warnings.createWarnings( - driverContext.warningsMode(), - source.source().getLineNumber(), - source.source().getColumnNumber(), - source.text() - ); - } - return warnings; - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final DateDiff.Part datePartFieldUnit; - - private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; - - private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; - - public Factory(Source source, DateDiff.Part datePartFieldUnit, - EvalOperator.ExpressionEvaluator.Factory startTimestamp, - EvalOperator.ExpressionEvaluator.Factory endTimestamp) { - this.source = source; - this.datePartFieldUnit = datePartFieldUnit; - this.startTimestamp = startTimestamp; - this.endTimestamp = endTimestamp; - } - - @Override - public DateDiffConstantEvaluator get(DriverContext context) { - return new DateDiffConstantEvaluator(source, datePartFieldUnit, startTimestamp.get(context), endTimestamp.get(context), context); - } - - @Override - public String toString() { - return "DateDiffConstantEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java index 0ff047f9bd819..eee8c756930ab 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateDiffConstantMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java index 880531ca53707..5a0a6051abebb 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateDiffConstantMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java index 99f7d1cb2e247..dc28b97fef9aa 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateDiffConstantNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java index 842930a040ed0..477fb22548d2e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateDiffConstantNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java deleted file mode 100644 index 82fb55e97f1f2..0000000000000 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java +++ /dev/null @@ -1,190 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. 
-package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.compute.operator.Warnings; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.core.InvalidArgumentException; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. - */ -public final class DateDiffEvaluator implements EvalOperator.ExpressionEvaluator { - private final Source source; - - private final EvalOperator.ExpressionEvaluator unit; - - private final EvalOperator.ExpressionEvaluator startTimestamp; - - private final EvalOperator.ExpressionEvaluator endTimestamp; - - private final DriverContext driverContext; - - private Warnings warnings; - - public DateDiffEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, - EvalOperator.ExpressionEvaluator startTimestamp, - EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { - this.source = source; - this.unit = unit; - this.startTimestamp = startTimestamp; - this.endTimestamp = endTimestamp; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - try (BytesRefBlock unitBlock = (BytesRefBlock) unit.eval(page)) { - try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { - try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { - BytesRefVector unitVector = unitBlock.asVector(); - if (unitVector == null) { - return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); - } - LongVector startTimestampVector = startTimestampBlock.asVector(); - if (startTimestampVector == null) { - return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); - } - LongVector endTimestampVector = endTimestampBlock.asVector(); - if (endTimestampVector == null) { - return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); - } - return eval(page.getPositionCount(), unitVector, startTimestampVector, endTimestampVector); - } - } - } - } - - public IntBlock eval(int positionCount, BytesRefBlock unitBlock, LongBlock startTimestampBlock, - LongBlock endTimestampBlock) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - BytesRef unitScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - if (unitBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (unitBlock.getValueCount(p) != 1) { - if (unitBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (startTimestampBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (startTimestampBlock.getValueCount(p) != 1) { - if 
(startTimestampBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (endTimestampBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (endTimestampBlock.getValueCount(p) != 1) { - if (endTimestampBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendInt(DateDiff.process(unitBlock.getBytesRef(unitBlock.getFirstValueIndex(p), unitScratch), startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); - } catch (IllegalArgumentException | InvalidArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public IntBlock eval(int positionCount, BytesRefVector unitVector, - LongVector startTimestampVector, LongVector endTimestampVector) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - BytesRef unitScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendInt(DateDiff.process(unitVector.getBytesRef(p, unitScratch), startTimestampVector.getLong(p), endTimestampVector.getLong(p))); - } catch (IllegalArgumentException | InvalidArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "DateDiffEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(unit, startTimestamp, endTimestamp); - } - - private Warnings warnings() { - if (warnings == null) { - this.warnings = Warnings.createWarnings( - driverContext.warningsMode(), - source.source().getLineNumber(), - source.source().getColumnNumber(), - source.text() - ); - } - return warnings; - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final EvalOperator.ExpressionEvaluator.Factory unit; - - private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; - - private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit, - EvalOperator.ExpressionEvaluator.Factory startTimestamp, - EvalOperator.ExpressionEvaluator.Factory endTimestamp) { - this.source = source; - this.unit = unit; - this.startTimestamp = startTimestamp; - this.endTimestamp = endTimestamp; - } - - @Override - public DateDiffEvaluator get(DriverContext context) { - return new DateDiffEvaluator(source, unit.get(context), startTimestamp.get(context), endTimestamp.get(context), context); - } - - @Override - public String toString() { - return "DateDiffEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java index a464d0c5cafc7..bf938f135b6fa 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateDiffMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java index 4586e2cb720fd..2e4f71d8636b4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateDiffMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java index 95a54c3a24ec5..1fb8e2c744cd3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateDiffNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java index a7694647aec54..f08424a09d1b4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateDiffNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java deleted file mode 100644 index 37e900245a877..0000000000000 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java +++ /dev/null @@ -1,137 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import java.time.ZoneId; -import java.time.temporal.ChronoField; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.compute.operator.Warnings; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. - * This class is generated. Do not edit it. - */ -public final class DateExtractConstantEvaluator implements EvalOperator.ExpressionEvaluator { - private final Source source; - - private final EvalOperator.ExpressionEvaluator value; - - private final ChronoField chronoField; - - private final ZoneId zone; - - private final DriverContext driverContext; - - private Warnings warnings; - - public DateExtractConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator value, - ChronoField chronoField, ZoneId zone, DriverContext driverContext) { - this.source = source; - this.value = value; - this.chronoField = chronoField; - this.zone = zone; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - try (LongBlock valueBlock = (LongBlock) value.eval(page)) { - LongVector valueVector = valueBlock.asVector(); - if (valueVector == null) { - return eval(page.getPositionCount(), valueBlock); - } - return eval(page.getPositionCount(), valueVector).asBlock(); - } - } - - public LongBlock eval(int positionCount, LongBlock valueBlock) { - try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - if (valueBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (valueBlock.getValueCount(p) != 1) { - if (valueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - result.appendLong(DateExtract.process(valueBlock.getLong(valueBlock.getFirstValueIndex(p)), this.chronoField, this.zone)); - } - return result.build(); - } - } - - public LongVector eval(int positionCount, LongVector valueVector) { - try(LongVector.FixedBuilder result = 
driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - result.appendLong(p, DateExtract.process(valueVector.getLong(p), this.chronoField, this.zone)); - } - return result.build(); - } - } - - @Override - public String toString() { - return "DateExtractConstantEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(value); - } - - private Warnings warnings() { - if (warnings == null) { - this.warnings = Warnings.createWarnings( - driverContext.warningsMode(), - source.source().getLineNumber(), - source.source().getColumnNumber(), - source.text() - ); - } - return warnings; - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final EvalOperator.ExpressionEvaluator.Factory value; - - private final ChronoField chronoField; - - private final ZoneId zone; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value, - ChronoField chronoField, ZoneId zone) { - this.source = source; - this.value = value; - this.chronoField = chronoField; - this.zone = zone; - } - - @Override - public DateExtractConstantEvaluator get(DriverContext context) { - return new DateExtractConstantEvaluator(source, value.get(context), chronoField, zone, context); - } - - @Override - public String toString() { - return "DateExtractConstantEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java index 11da518a01ce1..fe283a95f3c2e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateExtractConstantMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java index bbd0a59c87ceb..35bca67388d78 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateExtractConstantNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java deleted file mode 100644 index 6d56fd1c0d6a2..0000000000000 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import java.time.ZoneId; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.compute.operator.Warnings; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. - * This class is generated. Do not edit it. 
- */ -public final class DateExtractEvaluator implements EvalOperator.ExpressionEvaluator { - private final Source source; - - private final EvalOperator.ExpressionEvaluator value; - - private final EvalOperator.ExpressionEvaluator chronoField; - - private final ZoneId zone; - - private final DriverContext driverContext; - - private Warnings warnings; - - public DateExtractEvaluator(Source source, EvalOperator.ExpressionEvaluator value, - EvalOperator.ExpressionEvaluator chronoField, ZoneId zone, DriverContext driverContext) { - this.source = source; - this.value = value; - this.chronoField = chronoField; - this.zone = zone; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - try (LongBlock valueBlock = (LongBlock) value.eval(page)) { - try (BytesRefBlock chronoFieldBlock = (BytesRefBlock) chronoField.eval(page)) { - LongVector valueVector = valueBlock.asVector(); - if (valueVector == null) { - return eval(page.getPositionCount(), valueBlock, chronoFieldBlock); - } - BytesRefVector chronoFieldVector = chronoFieldBlock.asVector(); - if (chronoFieldVector == null) { - return eval(page.getPositionCount(), valueBlock, chronoFieldBlock); - } - return eval(page.getPositionCount(), valueVector, chronoFieldVector); - } - } - } - - public LongBlock eval(int positionCount, LongBlock valueBlock, BytesRefBlock chronoFieldBlock) { - try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { - BytesRef chronoFieldScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - if (valueBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (valueBlock.getValueCount(p) != 1) { - if (valueBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (chronoFieldBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (chronoFieldBlock.getValueCount(p) != 1) { - if (chronoFieldBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendLong(DateExtract.process(valueBlock.getLong(valueBlock.getFirstValueIndex(p)), chronoFieldBlock.getBytesRef(chronoFieldBlock.getFirstValueIndex(p), chronoFieldScratch), this.zone)); - } catch (IllegalArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public LongBlock eval(int positionCount, LongVector valueVector, - BytesRefVector chronoFieldVector) { - try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { - BytesRef chronoFieldScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendLong(DateExtract.process(valueVector.getLong(p), chronoFieldVector.getBytesRef(p, chronoFieldScratch), this.zone)); - } catch (IllegalArgumentException e) { - warnings().registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "DateExtractEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(value, chronoField); - } - - private Warnings warnings() { - if (warnings == null) { - this.warnings = Warnings.createWarnings( - 
driverContext.warningsMode(), - source.source().getLineNumber(), - source.source().getColumnNumber(), - source.text() - ); - } - return warnings; - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final EvalOperator.ExpressionEvaluator.Factory value; - - private final EvalOperator.ExpressionEvaluator.Factory chronoField; - - private final ZoneId zone; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value, - EvalOperator.ExpressionEvaluator.Factory chronoField, ZoneId zone) { - this.source = source; - this.value = value; - this.chronoField = chronoField; - this.zone = zone; - } - - @Override - public DateExtractEvaluator get(DriverContext context) { - return new DateExtractEvaluator(source, value.get(context), chronoField.get(context), zone, context); - } - - @Override - public String toString() { - return "DateExtractEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java index edc0b2cb0f0ce..dcb8a543f5c35 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractMillisEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateExtractMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java index 97a04f0d06a74..6b961447df830 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractNanosEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateExtractNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java index 2f41a7440bb06..77aa06913c565 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisConstantEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateFormatMillisConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java index 29da191dbe781..428b932df3978 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatMillisEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateFormatMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java index 1488833227dcb..e1a5dd3272900 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosConstantEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateFormatNanosConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java index a94d522014813..8d27a1aaeede2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatNanosEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateFormatNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java index 0ddc731827894..4f792b640f560 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateParse}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateParseConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java index 6c432855e38fb..752cc72971fe8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateParse}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DateParseEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java index 2d34fc613bc74..1b21c2fc872d5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateTrunc}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateTruncDateNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java index b9e49dd7e795c..96a6c192f53cf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateTrunc}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DateTruncDatetimeEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java index 3eca39f980347..b45856d6012ca 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java @@ -16,7 +16,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Now}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NowEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java index 0120af54299e3..f65c74af6be09 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link CIDRMatch}. - * This class is generated. 
Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CIDRMatchEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java index 53a367aff7cd6..fcc084a7bf240 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link IpPrefix}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class IpPrefixEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java index 69697d81b8bcd..8cda84da6e192 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Abs}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AbsDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java index 44007fcb9c6f4..eeb7f46d2224d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Abs}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AbsIntEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java index 2bb17f9f2512d..92946a8691ee9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Abs}. - * This class is generated. Do not edit it. 
+ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AbsLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java index 17d8c34a63731..8f5ab21f30b1a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Acos}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AcosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java index 267ba167572ae..0e0ac4b93d11f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Asin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AsinEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java index 2f1fdabc4097f..09d0b1a7fd35c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Atan2}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class Atan2Evaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java index 2e4d4c80e6a9a..0cd8adda340e5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Atan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class AtanEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java index d99d6d918a215..e43daae68ed66 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CastIntToDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java index 585b0f392bee9..f1d850fa935f5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CastIntToLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java index ff5ffcb6710cd..69e2a69a59027 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class CastIntToUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java index 9d6851cde0510..e82ed233839f1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CastLongToDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java index b72ea03cef25d..ba78e37cf5ee3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CastLongToUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java index d7ab56113ebfc..458a74ad704cc 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class CastUnsignedLongToDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java index 66727d2ba0db7..923ab07f61ce0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cbrt}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CbrtDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java index dd811c2ef7c5d..63f41e08a65f1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cbrt}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CbrtIntEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java index d931106c65b0a..a94ea6dab1446 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cbrt}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CbrtLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java index 13e5878ec524a..a959e01f44a1f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cbrt}. - * This class is generated. Do not edit it. 
+ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CbrtUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java index 02617726145dc..32233fbc24d04 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Ceil}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CeilDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java index 8718eb606c209..5c854d91e9aa8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cos}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java index 409432a83da00..8baec8554bb9a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cosh}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class CoshEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java index 4d7362b8bc8ea..6b7fa6df9798e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpDoubleEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Exp}. - * This class is generated. Do not edit it. + * This class is generated. 
Edit {@code EvaluatorImplementer} instead. */ public final class ExpDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java index 482d44b1087ac..9a46ca17081a0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpIntEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Exp}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ExpIntEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java index bbd17fe57c184..38dcd454cfb38 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpLongEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Exp}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ExpLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java index 9fd90dca32f20..57f4b53ce0ba1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/ExpUnsignedLongEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Exp}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ExpUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java index df9e533b94309..41fd18c464367 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Floor}. - * This class is generated. Do not edit it. 
+ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class FloorDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java index 22094f7e623e6..fed6a8becea4b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Hypot}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class HypotEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java index 75a1f57520b39..ca6f38e573cdf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log10}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class Log10DoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java index d702185235cce..bb0963750e4ac 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log10}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class Log10IntEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java index c13c477e4f689..88f518cbe2654 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log10}. - * This class is generated. 
Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class Log10LongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java index 8c955f499c9cf..ab2ebdd4f2ec8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log10}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class Log10UnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java index c6b4009e6a779..2cdbb295126d9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LogConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java index 583cc06ba7dba..50917b21add4d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LogEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java index d1c9f91463922..9bf553632a98e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Pow}. 
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class PowEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java
index 347549bb78cca..cebb54ee59cc5 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class RoundDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java
index bfabc34721c67..d81d10e1519b9 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class RoundDoubleNoDecimalsEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java
index 220dd7a547cc9..034bad3fdc1a3 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class RoundIntEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java
index a2b7b51d42e0f..b72465cd14b8a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class RoundLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java
index 9cc233b8aff0c..70cc9986f2d9d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class RoundUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java
index e80559397464f..9f73c895b6f42 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SignumDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java
index 410c818fcf926..e6a270c3e344d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SignumIntEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java
index b5406bab5ee39..35569ecd80476 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SignumLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java
index 269ea507bfd05..677a8ec1fe6b5 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SignumUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java
index b4a4a1b1a2a41..51514290e8254 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sin}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SinEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java
index ccce05ee8f7cf..99a6afb3b1843 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sinh}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SinhEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java
index db8cb73222062..30fa92073cc29 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sqrt}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SqrtDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java
index fc791b22aae8e..f2e8c3c14bc2b 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sqrt}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SqrtIntEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java
index 1e656dbfd7a3d..040ddae13ce5f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sqrt}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SqrtLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java
index f5dc994c3ea83..21d026cb44c07 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sqrt}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SqrtUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java
index 1dcc611410a60..978d202c7f3ce 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Tan}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class TanEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java
index 860a4f5b0e60d..a7b594d130ba4 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Tanh}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class TanhEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java
index d4fab518a0e5d..c126bd7bef196 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvAppendBooleanEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java
index 959449310ce92..3afd3534b92f6 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvAppendBytesRefEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java
index d2fb9ce2926db..315150a20e354 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvAppendDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java
index dd5a491281c45..0291e8c07d9ff 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvAppendIntEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java
index 6f6050e7f245b..c23d036550fc8 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java
@@ -17,7 +17,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvAppendLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java
index d87444746d2c6..b0a99ab33320d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java
@@ -15,7 +15,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAvg}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvAvgDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvAvgDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java
index 63e6b4eb12106..abc2157d30d03 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java
@@ -16,7 +16,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAvg}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvAvgIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvAvgIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java
index d699070747b49..b323f92b3b02f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java
@@ -16,7 +16,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAvg}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvAvgLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvAvgLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java
index 5c63508fa3560..f12634f721c94 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java
@@ -16,7 +16,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAvg}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvAvgUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvAvgUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java
index bf946aab347d2..72cc92114d9a5 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvFirstBooleanEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java
index e0cb6ca4c289b..d0bcfda7a4209 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java
@@ -15,7 +15,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvFirstBytesRefEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java
index 584319cc1ab82..d38ec51990ac0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvFirstDoubleEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java
index 1e9c50d135559..7cb6e53326b7a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvFirstIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java
index 7e118a1eb9eb8..3a34c55940248 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvFirstLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvFirstLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java
index 75b49f2c3e8ee..08022d6580ebf 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvLastBooleanEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java
index fcca356b38576..29c5e19aee827 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java
@@ -15,7 +15,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvLastBytesRefEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java
index b0cf7bf59900b..b4f5c8d147f03 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvLastDoubleEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java
index 5c2af9218308d..d43e11571102f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvLastIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java
index 37b95378f1f5b..897f7e513aebb 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvLastLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvLastLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java
index 44b4432edbf6f..2b7decc4c5537 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMax}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMaxBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMaxBooleanEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java
index 4e2180f2ec467..ed90337cb1947 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java
@@ -15,7 +15,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMax}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMaxBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMaxBytesRefEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java
index 4a1be1673bb7c..9fcce8e6c6538 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMax}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMaxDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMaxDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java
index b0ed499efd84f..5b03f65e27374 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMax}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMaxIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMaxIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java
index 24397b64c9ccc..2c9e89a5d3c2a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMax}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMaxLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMaxLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationDoubleEvaluator.java
index 7cefde819dedc..a94f92f203e9a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationDoubleEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedianAbsoluteDeviation}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianAbsoluteDeviationDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMedianAbsoluteDeviationDoubleEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationIntEvaluator.java
index 76013ca1115db..4fb12ff939a31 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationIntEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedianAbsoluteDeviation}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianAbsoluteDeviationIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMedianAbsoluteDeviationIntEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationLongEvaluator.java
index e7883d92708b7..92c87dd6df912 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationLongEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedianAbsoluteDeviation}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianAbsoluteDeviationLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMedianAbsoluteDeviationLongEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationUnsignedLongEvaluator.java
index ef8781e1dc048..657e98c0e4d01 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationUnsignedLongEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedianAbsoluteDeviation}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianAbsoluteDeviationUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMedianAbsoluteDeviationUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java
index e3b539d8210aa..14ca0c301159f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedian}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMedianDoubleEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java
index d07dc41e1d04b..0f3aa297ae521 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedian}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMedianIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java
index f1cd87aefd3d0..1af845514baf9 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedian}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMedianLongEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java
index 031280b767b41..edc68673d3f4c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMedian}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMedianUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMedianUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java
index ea8b04cc5c4a5..3a3f87a518f20 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMin}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMinBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMinBooleanEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java
index 31d41ff61e196..a8258c86a3f42 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java
@@ -15,7 +15,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMin}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMinBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMinBytesRefEvaluator(EvalOperator.ExpressionEvaluator field,
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java
index 5390350751ee7..14656910b7c7b 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMin}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMinDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMinDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java
index 918b049780905..36c3682dc3c0a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMin}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMinIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMinIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java
index 37a6709d46d4d..0bcfdf0036e52 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java
@@ -14,7 +14,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvMin}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvMinLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvMinLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumDoubleEvaluator.java
index 7ff79b0a0708b..11864b18a65d4 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumDoubleEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvPSeriesWeightedSum}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvPSeriesWeightedSumDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileDoubleEvaluator.java
index 7f7fdd42a237b..014e9230ce4ed 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileDoubleEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvPercentile}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvPercentileDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileIntegerEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileIntegerEvaluator.java
index ed55fe6f556a2..63f16bc6f7466 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileIntegerEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileIntegerEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvPercentile}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvPercentileIntegerEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileLongEvaluator.java
index f57de7c35d824..c5d2232f52e22 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileLongEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvPercentile}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvPercentileLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBooleanEvaluator.java
index ae04f0916c471..a0d8274a1dead 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBooleanEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSlice}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvSliceBooleanEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java
index a366b4ae765e1..84c97343c7b47 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSlice}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvSliceBytesRefEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java
index 1607bcf078706..f71f2ecf9fdb8 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSlice}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvSliceDoubleEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java
index 69bcc2f8998f4..fa4b6ffa2130b 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSlice}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvSliceIntEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java
index ad5a55a506214..7aa76eb53952e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSlice}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvSliceLongEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java
index b49a92404ecd1..b275415b2dd77 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java
@@ -15,7 +15,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSum}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvSumDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator {
   public MvSumDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java
index 8f27c4f472ba6..f22773bfb1540 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java
@@ -16,7 +16,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSum}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvSumIntEvaluator extends AbstractMultivalueFunction.AbstractNullableEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java
index 8e4d183279e76..ae009b97852cf 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java
@@ -16,7 +16,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSum}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvSumLongEvaluator extends AbstractMultivalueFunction.AbstractNullableEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java
index e17b5934271d6..93566b531e06c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java
@@ -16,7 +16,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvSum}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code MvEvaluatorImplementer} instead.
  */
 public final class MvSumUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractNullableEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java
index 9f204abbe0b43..557ba45f02cbe 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java
@@ -19,7 +19,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvZip}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class MvZipEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java
index 6d22335fb91e1..d99e7086ee895 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialContainsCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java
index 593bfdec01325..956df2deb42f0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialContainsCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java
index 79639505283ab..6375ba99122e0 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java
@@ -21,7 +21,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
  */
 public final class SpatialContainsCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator {
   private final Source source;
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java
index 3d91a4323ba4c..27c1c608faf6f 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java
@@ -20,7 +20,7 @@
 
 /**
  * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}.
- * This class is generated. Do not edit it.
+ * This class is generated. Edit {@code EvaluatorImplementer} instead.
*/ public final class SpatialContainsCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java index 0a4c1e5c69bff..de384995136b8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialContainsGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java index 523546ffa5a38..373d4829f46ed 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialContainsGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java index 328565fd089c1..83a927517b0db 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialContainsGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java index da46839694c49..87e58c8320ea2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialContainsGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java index 5560d1d90b6aa..61302b49d2564 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialDisjointCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java index 4f6197a3cde4b..c09daa0f3f8d3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java index 45b9a1f2251ef..ac8c63a11ebbe 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialDisjointCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java index 0a18ad86cf8bb..77fae3ea04b23 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialDisjointCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java index f379e6502e9f7..acba535905292 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java index 0411ca8e61dfb..a600b69f1ec34 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialDisjointGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java index f24c8991e0ba4..c3f535e9b2dad 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialDisjointGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java index ae9c3d2c4d323..b1d849749af60 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialDisjointGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java index c9d1493c4e10b..f85521e790f93 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java index 330e06a00f481..3575f6a4130ce 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java index 6b5167d136cf5..02ad8b8e95c5f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialIntersectsCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java index c96e912b64924..8d277443653ae 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialIntersectsCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java index 9cdd7e345f8cc..861bc6a7d8f91 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialIntersectsGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java index 50497429eac41..5b428fec29a5b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialIntersectsGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java index 92d2bd55021b2..47b5e68d03bef 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialIntersectsGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java index 06033360dd6cf..fdf6c21d4a05e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialIntersectsGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java index 55e86b398a9a9..f342bbdcdd2ca 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialWithinCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java index 0d9e7d8c460aa..032e20003c788 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialWithinCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java index 011ba3df96dae..3df427c4e03f4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialWithinCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java index e07f5203a45ca..ebbef2086182b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialWithinCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java index 82285639768da..eacce4c73d714 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialWithinGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java index 33bd70b76ae99..06666a4cfe8b0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialWithinGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java index 9335e0d93e0ab..7fdba8dfecf1d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SpatialWithinGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java index 665f4c94722d4..54488302c7487 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpatialWithinGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java index 4917b71464dce..f1ebad92bea1a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndConstantEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class StDistanceCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java index 03c8ba1a04ab6..3af1fa1f990b8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianPointDocValuesAndSourceEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class StDistanceCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java index 1085f71e95b73..08d882b0b2cf2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndConstantEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class StDistanceCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java index c8554a3041c89..b29915ff22c7e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceCartesianSourceAndSourceEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class StDistanceCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java index 8f37d3157fac6..fc042e0f31c13 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndConstantEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class StDistanceGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java index a7664987739e2..7e68261503800 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoPointDocValuesAndSourceEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class StDistanceGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java index 06e44f996daf5..44e7b49ded915 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndConstantEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class StDistanceGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java index 31e20d9f42197..3bf5b1499ebe7 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistanceGeoSourceAndSourceEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StDistance}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class StDistanceGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBEvaluator.java index 3d6dc7277080a..67530172987ca 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StEnvelope}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StEnvelopeFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StEnvelopeFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBGeoEvaluator.java index c61e825c0ee71..63ac0b27bd46d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeFromWKBGeoEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StEnvelope}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StEnvelopeFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StEnvelopeFromWKBGeoEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXFromWKBEvaluator.java index d396529f532ed..9fdffdb4a047c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXFromWKBEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StX}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class StXFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StXFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBEvaluator.java index 0d51ef709c217..3e5e70648704e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StXMax}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StXMaxFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StXMaxFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBGeoEvaluator.java index 3707bf421d550..bf0d72af3e254 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxFromWKBGeoEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StXMax}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StXMaxFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StXMaxFromWKBGeoEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBEvaluator.java index 699402ad68dee..c1126b4826056 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StXMin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class StXMinFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StXMinFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBGeoEvaluator.java index 6a8c041595c1c..feb4610134cea 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinFromWKBGeoEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StXMin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StXMinFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StXMinFromWKBGeoEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYFromWKBEvaluator.java index 4e6e3a2ccd75a..765f2d20f0862 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYFromWKBEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StY}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StYFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StYFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBEvaluator.java index e8b50099f38f6..6953433fa83bb 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StYMax}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class StYMaxFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StYMaxFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBGeoEvaluator.java index 00e75f862a86c..9163932dd5a33 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxFromWKBGeoEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StYMax}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StYMaxFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StYMaxFromWKBGeoEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBEvaluator.java index cab66683261aa..94cd8169a8dc6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StYMin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. */ public final class StYMinFromWKBEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StYMinFromWKBEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBGeoEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBGeoEvaluator.java index 8bae9d369fbb4..751fc84930dfe 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBGeoEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBGeoEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StYMin}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code ConvertEvaluatorImplementer} instead. 
*/ public final class StYMinFromWKBGeoEvaluator extends AbstractConvertFunction.AbstractEvaluator { public StYMinFromWKBGeoEvaluator(EvalOperator.ExpressionEvaluator field, Source source, diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java index 1352e5650bbfe..acf9613b29e7a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link AutomataMatch}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AutomataMatchEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java index 6564a2f3ef167..b7fc1d5f84ad4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link BitLength}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class BitLengthEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ByteLengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ByteLengthEvaluator.java index 1b0bff92d7d04..93c264a2e5b87 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ByteLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ByteLengthEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ByteLength}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class ByteLengthEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChangeCaseEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChangeCaseEvaluator.java index 02d1b1c86ea32..735bddc9918dd 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChangeCaseEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChangeCaseEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ChangeCase}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ChangeCaseEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java index e0cff017c14fd..9d22936ba7d02 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Concat}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ConcatEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java index d3d6e02bd9d73..f9b96479e87a4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link EndsWith}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class EndsWithEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashConstantEvaluator.java index 34cff73018634..6eac2084410c8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashConstantEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Hash}. - * This class is generated. Do not edit it. 
+ * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class HashConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashEvaluator.java index 8b01cc0330142..aee3d333fd517 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Hash}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class HashEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java index 7925787425d6e..ce46bad9159f5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LTrim}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LTrimEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java index 9adcfbbdd9f39..443d41a7c9ff2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Left}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LeftEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java index 799f422414060..cbfcc4f81221c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Length}. - * This class is generated. Do not edit it. + * This class is generated. 
Edit {@code EvaluatorImplementer} instead. */ public final class LengthEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java index 3ca7db9e5685e..afbd759f4bbb8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Locate}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LocateEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java index 378252a4bbea9..5c015d2a04f6f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Locate}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LocateNoStartEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java index 6fdeeddd3ab94..9b4a5a4165b42 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RTrim}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RTrimEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java index e679842ed47a1..c0799f7187322 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Repeat}. 
- * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RepeatConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java index 58e0aeb6af318..169df1f7faaae 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Repeat}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RepeatEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java index 02a495285e6c9..a5aa37a0db56e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Replace}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ReplaceConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java index 8c58a76cc481e..7a7a947453d0a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Replace}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class ReplaceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java index 408c16a9c4f7e..5dbcc7c38d90b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Reverse}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ReverseEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java index a296096a13fb3..18c4087b23cb2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java @@ -24,7 +24,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Right}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class RightEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceEvaluator.java index 14228522d9419..80167de794eec 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Space}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SpaceEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java index 8b80b4ec06189..512222880b630 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Split}. - * This class is generated. Do not edit it. + * This class is generated. 
Edit {@code EvaluatorImplementer} instead. */ public final class SplitSingleByteEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java index 54a1d6863cd84..d5bc8e7dcfd6d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Split}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SplitVariableEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java index b175ea0b0d17e..347bd8c0747f6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StartsWith}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class StartsWithEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java index 2ceb2230fb8f0..4a754daae9453 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Substring}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SubstringEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java index 31268b4557a62..f9d168e21548c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Substring}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SubstringNoLengthEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java index 44215c06c9068..b756fd69302e5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Trim}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class TrimEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java index fe80536ea5d0d..a484dd87d0829 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class AddDateNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java index 3cd91fdeb7ea2..2493924276af1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AddDatetimesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java index 61ef4215b3e29..cd052b38c7791 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AddDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java index 3616711127ce6..680b70089b105 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class AddIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java index fd046049980fe..b309702ccae6a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AddLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java index d89a0c83acd80..9267d93104541 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class AddUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java index fb2fc81b25230..e8d2d440dc475 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Div}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DivDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java index fc3e587596881..c86acaaf8c05b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Div}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DivIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java index 49f15a6c90a4e..40c0df781f9ad 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Div}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class DivLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java index 2307b4984f491..84547b719e3cb 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Div}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class DivUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java index b9a3e1419a124..1d48d6ba595b8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mod}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ModDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java index 38aaab26eccda..ed5fec274e62c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mod}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ModIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java index ec6cdaa93b1b0..f061968a07167 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mod}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class ModLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java index e85291598ba53..b22af43813552 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mod}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class ModUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java index 65ea4fe3e0a1d..7e11f0828b5af 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mul}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MulDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java index cfc30966deae3..dbb4157798e4e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mul}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class MulIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java index 8e5a12b9ea1be..00433c86570c4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mul}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MulLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java index b4babd6b93176..d3b0b3c6d54be 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mul}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class MulUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java index 8f573c9f431b5..ad0bbfd4f9bb7 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Neg}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NegDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java index 7da1c10802933..bd186e723a86f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Neg}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NegIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java index d259edb076e40..2f2ef86cbaa3c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Neg}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NegLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java index 3b6f4c1046d40..7418d000281f1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SubDateNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java index 0ad395b4e5753..11a3a97416ef9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SubDatetimesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java index 30e44ed5b72ed..21776d21cadea 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SubDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java index 83680e58640f4..8a2f431908406 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class SubIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java index fbead2000b585..5ff2efe3f6683 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SubLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java index 6acb5deb06225..856399ba0e4af 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java @@ -20,7 +20,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class SubUnsignedLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java index 9403efd709083..70a3f0bd70aab 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class EqualsBoolsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java index 9b3daa7317677..80e20c27d5485 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class EqualsDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java index 73c0ab28f154b..16f83442f79d5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class EqualsGeometriesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java index 93a776f558e90..b78f45d7fbd1e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class EqualsIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java index 578ec20bfd183..e65359e8ee156 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class EqualsKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java index 606e8d0a39efd..c8bde6b05afd2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class EqualsLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsMillisNanosEvaluator.java index b5013c4080507..91cd4614cc9d5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class EqualsMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsNanosMillisEvaluator.java index 3ed1e922608e6..cdcc4c931fb19 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class EqualsNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java index 0f24db5826999..e3f1649048c9d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java index f7d0e46efd5d3..5014310820b06 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java index 970f42f80bdf2..d99a2ff6bda70 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java index 9a5c2b03b2b98..7d202da760601 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanMillisNanosEvaluator.java index bdd877c7f866e..c8c337f2af085 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanNanosMillisEvaluator.java index d509547eb17ce..54683dd07523a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java index 149e5c62a6975..10ec48cbbb8dd 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanOrEqualDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java index dfcf205342938..9ae1c86788d86 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanOrEqualIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java index fb1d92c45a75a..e4fba4970409b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanOrEqualKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java index b64c8093e5be5..085e41ec5fc20 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanOrEqualLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualMillisNanosEvaluator.java index 7a0da0a55d0dc..ffb411ca82d42 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class GreaterThanOrEqualMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualNanosMillisEvaluator.java index d4386a64aaf8a..1419308f4ba4a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class GreaterThanOrEqualNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java index eedaf97545380..1199a9d5ab7d1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java @@ -23,7 +23,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link InsensitiveEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class InsensitiveEqualsConstantEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java index 432c70dd1ae55..8c7abcfe891d5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link InsensitiveEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class InsensitiveEqualsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java index cc6661a3f02c7..7f0b7e8f66b66 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java index 93be674d49725..cd0997a513c85 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java index 69d73a7f134a1..e88a9ae30d00b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java index 86bb587e1af46..5377441b5e8b5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanMillisNanosEvaluator.java index 21d7d50af5b1e..dd63a3c364cd3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanNanosMillisEvaluator.java index 48593f9d537f3..317a861b16ded 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java index 07d1dad29fcad..bcfe416941b33 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanOrEqualDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java index 819878ff7c0ef..83a8b4abc1c4d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanOrEqualIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java index bfc192443b402..13e6b336286aa 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanOrEqualKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java index 7ebf8695839f6..0e01abc93ce82 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanOrEqualLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualMillisNanosEvaluator.java index 06973e71e834a..38d84fbd7a6d4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class LessThanOrEqualMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualNanosMillisEvaluator.java index 4763629873d02..ee46cb74e10ca 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class LessThanOrEqualNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java index ceb67a59021fa..1d96506ea34cf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java @@ -19,7 +19,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NotEqualsBoolsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java index 0f0a60fc03f23..11e3c7d1021ac 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NotEqualsDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java index a745eaffaf27d..d871a0cad6879 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEqualsGeometriesEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java index a3c803169b98e..15103562050cb 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NotEqualsIntsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java index 9e28b6d1dfe4b..5eb75c0bcf604 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java @@ -22,7 +22,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NotEqualsKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java index 8114c6cb934af..0ba697142944f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. 
*/ public final class NotEqualsLongsEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsMillisNanosEvaluator.java index 9bede03737a5f..8716cb3fee431 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsMillisNanosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsMillisNanosEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NotEqualsMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsNanosMillisEvaluator.java index e8e28eec7ee27..15fd009e7046e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsNanosMillisEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsNanosMillisEvaluator.java @@ -21,7 +21,7 @@ /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. - * This class is generated. Do not edit it. + * This class is generated. Edit {@code EvaluatorImplementer} instead. */ public final class NotEqualsNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; From f498c70dbb80619aa749efe1c84a9c1ebac3432e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mariusz=20J=C3=B3zala?= <377355+jozala@users.noreply.github.com> Date: Wed, 29 Jan 2025 14:08:28 +0100 Subject: [PATCH 195/383] [TEST] Typo corrected in the comment (#121154) --- .../org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index 7ddb2ca354cfc..b4095e133114b 100644 --- a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -286,7 +286,7 @@ protected boolean isWatcherTest() { /** * Compares the results of running two analyzers against many random - * strings. The goal is to figure out if two anlayzers are "the same" by + * strings. The goal is to figure out if two analyzers are "the same" by * comparing their results. 
This is far from perfect but should be fairly * accurate, especially for gross things like missing {@code decimal_digit} * token filters, and should be fairly fast because it compares a fairly From 17216d9161319551400d3f28db1f120f7245c083 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 00:17:53 +1100 Subject: [PATCH 196/383] Mute org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase testRestoreIndex {p0=[9.0.0, 9.0.0, 8.18.0]} #121170 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8335577dfe016..c13f8fbc780bd 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -305,6 +305,9 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testProfileIndexAutoCreation issue: https://github.com/elastic/elasticsearch/issues/120987 +- class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase + method: testRestoreIndex {p0=[9.0.0, 9.0.0, 8.18.0]} + issue: https://github.com/elastic/elasticsearch/issues/121170 # Examples: # From b69eb8a5968ed232a8ed673d7551532279210ed2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 00:18:06 +1100 Subject: [PATCH 197/383] Mute org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase testRestoreIndex {p0=[9.0.0, 8.18.0, 8.18.0]} #121171 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index c13f8fbc780bd..c95a7f88ee6dc 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -308,6 +308,9 @@ tests: - class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase method: testRestoreIndex {p0=[9.0.0, 9.0.0, 8.18.0]} issue: https://github.com/elastic/elasticsearch/issues/121170 +- class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase + method: testRestoreIndex {p0=[9.0.0, 8.18.0, 8.18.0]} + issue: https://github.com/elastic/elasticsearch/issues/121171 # Examples: # From 7d9a8455d6ce82a391209a8aff9be314bc394e36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Fern=C3=A1ndez=20Casta=C3=B1o?= Date: Wed, 29 Jan 2025 14:23:35 +0100 Subject: [PATCH 198/383] Remove unused parameter from IndexRoutingTable#readyForSearch (#121152) --- .../cluster/routing/IndexRoutingTable.java | 3 +-- .../cluster/routing/ShardRouting.java | 3 +-- .../cluster/routing/IndexRoutingTableTests.java | 14 +++++++------- .../ml/MlConfigMigrationEligibilityCheck.java | 2 +- .../ml/datafeed/DatafeedConfigAutoUpdater.java | 4 +--- .../xpack/ml/datafeed/DatafeedNodeSelector.java | 4 +--- .../ml/inference/TrainedModelStatsService.java | 4 +--- .../task/AbstractJobPersistentTasksExecutor.java | 4 +--- .../security/support/SecurityIndexManager.java | 2 +- .../persistence/TransformInternalIndex.java | 2 +- .../TransformPersistentTasksExecutor.java | 4 +--- 11 files changed, 17 insertions(+), 29 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java index bcacf21fcedbf..4504207a24631 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java @@ -9,7 +9,6 @@ package org.elasticsearch.cluster.routing; -import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -236,7 +235,7 @@ public boolean allPrimaryShardsActive() { /** * @return true if an index is available to service search queries. */ - public boolean readyForSearch(ClusterState clusterState) { + public boolean readyForSearch() { for (IndexShardRoutingTable shardRoutingTable : this.shards) { boolean found = false; for (int idx = 0; idx < shardRoutingTable.size(); idx++) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java index 157d28e61057c..7e947da0210ea 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java @@ -11,7 +11,6 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource.ExistingStoreRecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.PeerRecoverySource; @@ -935,7 +934,7 @@ public boolean isPromotableToPrimary() { } /** - * Determine if role searchable. Consumers should prefer {@link IndexRoutingTable#readyForSearch(ClusterState)} to determine if an index + * Determine if role searchable. Consumers should prefer {@link IndexRoutingTable#readyForSearch()} to determine if an index * is ready to be searched. */ public boolean isSearchable() { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java index 912326162e5c4..fd9a2a154d47f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java @@ -44,13 +44,13 @@ public void testReadyForSearch() { List.of(getShard(p2, true, ShardRoutingState.STARTED, ShardRouting.Role.DEFAULT)) ); IndexRoutingTable indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertTrue(indexRoutingTable.readyForSearch(clusterState)); + assertTrue(indexRoutingTable.readyForSearch()); // 2 primaries that are index only shardTable1 = new IndexShardRoutingTable(p1, List.of(getShard(p1, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); shardTable2 = new IndexShardRoutingTable(p2, List.of(getShard(p2, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertFalse(indexRoutingTable.readyForSearch(clusterState)); + assertFalse(indexRoutingTable.readyForSearch()); // 2 unassigned primaries that are index only shardTable1 = new IndexShardRoutingTable( @@ -62,7 +62,7 @@ public void testReadyForSearch() { List.of(getShard(p2, true, ShardRoutingState.UNASSIGNED, ShardRouting.Role.INDEX_ONLY)) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertFalse(indexRoutingTable.readyForSearch(clusterState)); + assertFalse(indexRoutingTable.readyForSearch()); // 2 primaries that are index only with replicas that are not all available shardTable1 = new IndexShardRoutingTable( @@ -82,7 +82,7 @@ public void 
testReadyForSearch() { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertFalse(indexRoutingTable.readyForSearch(clusterState)); + assertFalse(indexRoutingTable.readyForSearch()); // 2 primaries that are index only with some replicas that are all available shardTable1 = new IndexShardRoutingTable( @@ -102,7 +102,7 @@ public void testReadyForSearch() { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertTrue(indexRoutingTable.readyForSearch(clusterState)); + assertTrue(indexRoutingTable.readyForSearch()); // 2 unassigned primaries that are index only with some replicas that are all available shardTable1 = new IndexShardRoutingTable( @@ -122,7 +122,7 @@ public void testReadyForSearch() { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertTrue(indexRoutingTable.readyForSearch(clusterState)); + assertTrue(indexRoutingTable.readyForSearch()); // 2 primaries that are index only with at least 1 replica per primary that is available shardTable1 = new IndexShardRoutingTable( @@ -142,7 +142,7 @@ public void testReadyForSearch() { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - assertTrue(indexRoutingTable.readyForSearch(clusterState)); + assertTrue(indexRoutingTable.readyForSearch()); } private ShardRouting getShard(ShardId shardId, boolean isPrimary, ShardRoutingState state, ShardRouting.Role role) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrationEligibilityCheck.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrationEligibilityCheck.java index d1137069fea41..b2af9bb872705 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrationEligibilityCheck.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrationEligibilityCheck.java @@ -65,6 +65,6 @@ static boolean mlConfigIndexIsAllocated(ClusterState clusterState) { } IndexRoutingTable routingTable = clusterState.getRoutingTable().index(configIndexOrAlias.getWriteIndex()); - return routingTable != null && routingTable.allPrimaryShardsActive() && routingTable.readyForSearch(clusterState); + return routingTable != null && routingTable.allPrimaryShardsActive() && routingTable.readyForSearch(); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java index 9fe9a5226f286..ec459ab81dcfa 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java @@ -62,9 +62,7 @@ public boolean isAbleToRun(ClusterState latestState) { continue; } IndexRoutingTable routingTable = latestState.getRoutingTable().index(index); - if (routingTable == null - || routingTable.allPrimaryShardsActive() == false - || routingTable.readyForSearch(latestState) == false) { + if (routingTable == null || routingTable.allPrimaryShardsActive() == false || routingTable.readyForSearch() == false) { return false; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java index 31add7b37ac5f..787142bac94a7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java @@ -206,9 +206,7 @@ private AssignmentFailure verifyIndicesActive() { for (String concreteIndex : concreteIndices) { IndexRoutingTable routingTable = clusterState.getRoutingTable().index(concreteIndex); - if (routingTable == null - || routingTable.allPrimaryShardsActive() == false - || routingTable.readyForSearch(clusterState) == false) { + if (routingTable == null || routingTable.allPrimaryShardsActive() == false || routingTable.readyForSearch() == false) { return new AssignmentFailure( "cannot start datafeed [" + datafeedId diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java index 67f2ea74464d0..ba9c5cd7267b5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java @@ -240,9 +240,7 @@ static boolean verifyIndicesExistAndPrimaryShardsAreActive(ClusterState clusterS return false; } IndexRoutingTable routingTable = clusterState.getRoutingTable().index(index); - if (routingTable == null - || routingTable.allPrimaryShardsActive() == false - || routingTable.readyForSearch(clusterState) == false) { + if (routingTable == null || routingTable.allPrimaryShardsActive() == false || routingTable.readyForSearch() == false) { return false; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/task/AbstractJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/task/AbstractJobPersistentTasksExecutor.java index 7e0ff4f029bd4..33b5bc7bf9ebb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/task/AbstractJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/task/AbstractJobPersistentTasksExecutor.java @@ -67,9 +67,7 @@ public static List verifyIndicesPrimaryShardsAreActive( continue; } IndexRoutingTable routingTable = clusterState.getRoutingTable().index(index); - if (routingTable == null - || routingTable.allPrimaryShardsActive() == false - || routingTable.readyForSearch(clusterState) == false) { + if (routingTable == null || routingTable.allPrimaryShardsActive() == false || routingTable.readyForSearch() == false) { unavailableIndices.add(index); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java index 39c7a45d51dfd..41bda63292de0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java @@ -513,7 +513,7 @@ private Tuple checkIndexAvailable(ClusterState state) { if (routingTable != null && routingTable.allPrimaryShardsActive()) { allPrimaryShards = true; } - if (routingTable != null && routingTable.readyForSearch(state)) { + if (routingTable != null && routingTable.readyForSearch()) { searchShards = true; } if 
(allPrimaryShards == false || searchShards == false) { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java index 14e9292d10fd1..0cd8558b3dbe4 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java @@ -385,7 +385,7 @@ protected static boolean hasLatestVersionedIndex(ClusterState state) { protected static boolean allPrimaryShardsActiveForLatestVersionedIndex(ClusterState state) { IndexRoutingTable indexRouting = state.routingTable().index(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME); - return indexRouting != null && indexRouting.allPrimaryShardsActive() && indexRouting.readyForSearch(state); + return indexRouting != null && indexRouting.allPrimaryShardsActive() && indexRouting.readyForSearch(); } private static void waitForLatestVersionedIndexShardsActive(Client client, ActionListener listener) { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java index 9cd0f3abcb07d..b7bd434194b80 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java @@ -188,9 +188,7 @@ static List verifyIndicesPrimaryShardsAreActive(ClusterState clusterStat List unavailableIndices = new ArrayList<>(indices.length); for (String index : indices) { IndexRoutingTable routingTable = clusterState.getRoutingTable().index(index); - if (routingTable == null - || routingTable.allPrimaryShardsActive() == false - || routingTable.readyForSearch(clusterState) == false) { + if (routingTable == null || routingTable.allPrimaryShardsActive() == false || routingTable.readyForSearch() == false) { unavailableIndices.add(index); } } From 5ed47bed8b9b05d2eebe84f9b2b63859fef17e10 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Wed, 29 Jan 2025 14:30:18 +0100 Subject: [PATCH 199/383] Better index pattern randomization (#120788) --- .../xpack/esql/IdentifierGenerator.java | 62 +++++++++++++------ .../esql/parser/StatementParserTests.java | 3 +- 2 files changed, 44 insertions(+), 21 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java index a1ae1f43ef877..3392ae9b6e3bc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/IdentifierGenerator.java @@ -7,8 +7,16 @@ package org.elasticsearch.xpack.esql; +import org.elasticsearch.common.Strings; import org.elasticsearch.test.ESTestCase; +import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.ESTestCase.randomFrom; +import static org.elasticsearch.test.ESTestCase.randomInt; +import static org.elasticsearch.test.ESTestCase.randomIntBetween; +import static org.elasticsearch.test.ESTestCase.randomList; 
+import static org.elasticsearch.test.ESTestCase.randomValueOtherThan; + public class IdentifierGenerator { /** @@ -22,7 +30,7 @@ public static String randomIdentifier() { * Generates one or several coma separated index patterns */ public static String randomIndexPatterns(Feature... features) { - return maybeQuote(String.join(",", ESTestCase.randomList(1, 5, () -> randomIndexPattern(features)))); + return maybeQuote(String.join(",", randomList(1, 5, () -> randomIndexPattern(features)))); } /** @@ -40,45 +48,61 @@ public static String randomIndexPattern(Feature... features) { index.append('.'); } index.append(randomCharacterFrom(validFirstCharacters)); - for (int i = 0; i < ESTestCase.randomIntBetween(1, 100); i++) { + for (int i = 0; i < randomIntBetween(1, 100); i++) { index.append(randomCharacterFrom(validCharacters)); } if (canAdd(Features.WILDCARD_PATTERN, features)) { - if (ESTestCase.randomBoolean()) { + if (randomBoolean()) { index.append('*'); } else { - index.insert(ESTestCase.randomIntBetween(0, index.length() - 1), '*'); + for (int i = 0; i < randomIntBetween(1, 3); i++) { + index.insert(randomIntBetween(0, index.length()), '*'); + } } - } else if (canAdd(Features.DATE_MATH, features)) { + } + if (canAdd(Features.DATE_MATH, features)) { // https://www.elastic.co/guide/en/elasticsearch/reference/8.17/api-conventions.html#api-date-math-index-names index.insert(0, "<"); index.append("-{now/"); - index.append(ESTestCase.randomFrom("d", "M", "M-1M")); - if (ESTestCase.randomBoolean()) { - index.append("{").append(ESTestCase.randomFrom("yyyy.MM", "yyyy.MM.dd")).append("}"); + index.append(randomFrom("d", "M", "M-1M")); + if (randomBoolean()) { + index.append("{").append(switch (randomIntBetween(0, 2)) { + case 0 -> "yyyy.MM"; + case 1 -> "yyyy.MM.dd"; + default -> "yyyy.MM.dd|" + Strings.format("%+03d", randomValueOtherThan(0, () -> randomIntBetween(-18, 18))) + ":00"; + }).append("}"); } index.append("}>"); } + if (canAdd(Features.EXCLUDE_PATTERN, features)) { + index.insert(0, "-"); + } var pattern = maybeQuote(index.toString()); if (canAdd(Features.CROSS_CLUSTER, features)) { var cluster = randomIdentifier(); pattern = maybeQuote(cluster + ":" + pattern); } + + if (pattern.contains("|") && pattern.contains("\"") == false) { + pattern = quote(pattern); + } + return pattern; } private static char randomCharacterFrom(String str) { - return str.charAt(ESTestCase.randomInt(str.length() - 1)); + return str.charAt(randomInt(str.length() - 1)); } public interface Feature {} public enum Features implements Feature { CROSS_CLUSTER, + HIDDEN_INDEX, WILDCARD_PATTERN, - DATE_MATH, - HIDDEN_INDEX + EXCLUDE_PATTERN, + DATE_MATH } private record ExcludedFeature(Feature feature) implements Feature {} @@ -96,18 +120,16 @@ private static boolean canAdd(Feature feature, Feature... features) { return false; } } - return ESTestCase.randomBoolean(); + return randomBoolean(); } public static String maybeQuote(String term) { - if (term.contains("\"")) { - return term; - } - return switch (ESTestCase.randomIntBetween(0, 5)) { - case 0 -> "\"" + term + "\""; - case 1 -> "\"\"\"" + term + "\"\"\""; - default -> term;// no quotes are more likely - }; + return randomBoolean() && term.contains("\"") == false ? 
quote(term) : term; + } + + public static String quote(String term) { + var quote = randomFrom("\"", "\"\"\""); + return quote + term + quote; } public static String unquoteIndexPattern(String term) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index af0a9c2f97961..9bbada3cca53b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -489,7 +489,8 @@ public void testStringAsIndexPattern() { clusterAndIndexAsIndexPattern(command, "cluster:index"); clusterAndIndexAsIndexPattern(command, "cluster:.index"); clusterAndIndexAsIndexPattern(command, "cluster*:index*"); - clusterAndIndexAsIndexPattern(command, "cluster*:*"); + clusterAndIndexAsIndexPattern(command, "cluster*:*");// this is not a valid pattern, * should be inside <> + clusterAndIndexAsIndexPattern(command, "cluster*:"); clusterAndIndexAsIndexPattern(command, "cluster*:*"); clusterAndIndexAsIndexPattern(command, "*:index*"); clusterAndIndexAsIndexPattern(command, "*:*"); From 5d1818756a960b596518ea8e05a604ebdbc85753 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 00:32:48 +1100 Subject: [PATCH 200/383] Mute org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT testFileSettingsReprocessedOnRestartWithoutVersionChange #120964 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index c95a7f88ee6dc..f1c41252421a9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -311,6 +311,9 @@ tests: - class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase method: testRestoreIndex {p0=[9.0.0, 8.18.0, 8.18.0]} issue: https://github.com/elastic/elasticsearch/issues/121171 +- class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT + method: testFileSettingsReprocessedOnRestartWithoutVersionChange + issue: https://github.com/elastic/elasticsearch/issues/120964 # Examples: # From f5f0e3bd7fcfe21187a0e54b6a59b15b3a2c6ea9 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Wed, 29 Jan 2025 14:52:11 +0100 Subject: [PATCH 201/383] [DOCS] Update getting-started.asciidoc (#116151) (#121173) Update `new_field` to `language` which is the actual new field added in dynamic mapping Co-authored-by: Ekwinder --- docs/reference/quickstart/getting-started.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/quickstart/getting-started.asciidoc b/docs/reference/quickstart/getting-started.asciidoc index 03bfb62548b25..192b70c11c635 100644 --- a/docs/reference/quickstart/getting-started.asciidoc +++ b/docs/reference/quickstart/getting-started.asciidoc @@ -293,7 +293,7 @@ POST /books/_doc // TEST[continued] <1> The new field. -View the mapping for the `books` index with the <>. The new field `new_field` has been added to the mapping with a `text` data type. +View the mapping for the `books` index with the <>. The new field `language` has been added to the mapping with a `text` data type. 
[source,console] ---- @@ -328,7 +328,7 @@ GET /books/_mapping } } }, - "new_field": { + "language": { "type": "text", "fields": { "keyword": { From 439dcdcfd19acd71c18bcd73811e95b23559d3fb Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 00:56:00 +1100 Subject: [PATCH 202/383] Mute org.elasticsearch.xpack.ml.integration.ClassificationIT testWithOnlyTrainingRowsAndTrainingPercentIsFifty_DependentVariableIsKeyword #120071 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f1c41252421a9..d7b858f86d3ee 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -314,6 +314,9 @@ tests: - class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT method: testFileSettingsReprocessedOnRestartWithoutVersionChange issue: https://github.com/elastic/elasticsearch/issues/120964 +- class: org.elasticsearch.xpack.ml.integration.ClassificationIT + method: testWithOnlyTrainingRowsAndTrainingPercentIsFifty_DependentVariableIsKeyword + issue: https://github.com/elastic/elasticsearch/issues/120071 # Examples: # From 2b669167e11443d2839381cba9eac498e01b200a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 01:02:16 +1100 Subject: [PATCH 203/383] Mute org.elasticsearch.xpack.security.profile.ProfileIntegTests testGetUsersWithProfileUidWhenProfileIndexDoesNotExists #121179 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index d7b858f86d3ee..b3fc32646df00 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -317,6 +317,9 @@ tests: - class: org.elasticsearch.xpack.ml.integration.ClassificationIT method: testWithOnlyTrainingRowsAndTrainingPercentIsFifty_DependentVariableIsKeyword issue: https://github.com/elastic/elasticsearch/issues/120071 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testGetUsersWithProfileUidWhenProfileIndexDoesNotExists + issue: https://github.com/elastic/elasticsearch/issues/121179 # Examples: # From 952bf229fb8dd26a14c0c96d970de4a982a2ee14 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 29 Jan 2025 15:03:28 +0100 Subject: [PATCH 204/383] Conditionally enable logsdb by default (#121049) Enable logsdb by default if logsdb.prior_logs_usage has not been set to true. Meaning that if no data streams were created matching with the logs-- pattern in 8.x, then logsdb will be enabled by default for data streams matching with logs-*-* pattern. Also removes LogsPatternUsageService as with version 9.0 and beyond, this component is no longer necessary. 
Followup from #120708 Closes #106489 --- .../src/main/resources/changelog-schema.json | 1 + docs/changelog/121049.yaml | 19 ++ docs/reference/data-streams/logs.asciidoc | 6 + docs/reference/rest-api/info.asciidoc | 2 +- docs/reference/rest-api/usage.asciidoc | 2 +- .../LogsIndexModeFullClusterRestartIT.java | 4 + .../upgrades/LogsUsageRollingUpgradeIT.java | 71 ++++++ .../LogsdbIndexingRollingUpgradeIT.java | 14 +- .../upgrades/NoLogsUsageRollingUpgradeIT.java | 49 ++++ .../LogsIndexModeDisabledRestTestIT.java | 20 +- .../xpack/logsdb/LogsDBPlugin.java | 26 +-- .../xpack/logsdb/LogsPatternUsageService.java | 166 -------------- ...gsPatternUsageServiceIntegrationTests.java | 139 ------------ .../logsdb/LogsPatternUsageServiceTests.java | 213 ------------------ 14 files changed, 178 insertions(+), 554 deletions(-) create mode 100644 docs/changelog/121049.yaml create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsUsageRollingUpgradeIT.java create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NoLogsUsageRollingUpgradeIT.java delete mode 100644 x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageService.java delete mode 100644 x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceIntegrationTests.java delete mode 100644 x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceTests.java diff --git a/build-tools-internal/src/main/resources/changelog-schema.json b/build-tools-internal/src/main/resources/changelog-schema.json index 9692af7adc5e6..7229571fc8bf4 100644 --- a/build-tools-internal/src/main/resources/changelog-schema.json +++ b/build-tools-internal/src/main/resources/changelog-schema.json @@ -291,6 +291,7 @@ "JVM option", "Java API", "Logging", + "Logs", "Mapping", "Packaging", "Painless", diff --git a/docs/changelog/121049.yaml b/docs/changelog/121049.yaml new file mode 100644 index 0000000000000..760deb62e149b --- /dev/null +++ b/docs/changelog/121049.yaml @@ -0,0 +1,19 @@ +pr: 121049 +summary: Conditionally enable logsdb by default for data streams matching with logs-*-* + pattern. +area: Logs +type: breaking +issues: + - 106489 +breaking: + title: Conditionally enable logsdb by default + area: Logs + details: |- + Logsdb will be enabled by default for data streams matching with logs-*-* pattern. + If upgrading from 8.x to 9.x and data streams matching with log-*-* do exist, + then Logsdb will not be enabled by default. + impact: |- + Logsdb reduce storage footprint in Elasticsearch for logs, but there are side effects + to be taken into account that are described in the Logsdb docs: + https://www.elastic.co/guide/en/elasticsearch/reference/current/logs-data-stream.html#upgrade-to-logsdb-notes + notable: true diff --git a/docs/reference/data-streams/logs.asciidoc b/docs/reference/data-streams/logs.asciidoc index 7058cfe51496f..797efb7bef945 100644 --- a/docs/reference/data-streams/logs.asciidoc +++ b/docs/reference/data-streams/logs.asciidoc @@ -237,3 +237,9 @@ The `logsdb` index mode uses the following settings: * **`index.mapping.ignore_above`**: `8191` * **`index.mapping.total_fields.ignore_dynamic_beyond_limit`**: `true` + +[discrete] +[[upgrade-to-logsdb-notes]] +=== Notes about upgrading to Logsdb + +TODO: add notes. 
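The conditional default described in the changelog and docs above comes down to one check: logsdb becomes the default index mode for data streams matching `logs-*-*` unless the cluster recorded prior `logs-*-*` usage on 8.x through the persistent `logsdb.prior_logs_usage` setting. A minimal sketch of that decision follows, in plain Java rather than the plugin's `Setting` wiring; the class name `LogsdbDefaultSketch`, the method `logsdbEnabledByDefault`, and the map-based view of persistent cluster settings are illustrative assumptions, not part of the patch.

[source,java]
----
import java.util.Map;

/** Illustrative sketch only; the real default is wired through CLUSTER_LOGSDB_ENABLED in LogsDBPlugin. */
public class LogsdbDefaultSketch {

    /**
     * The default resolves to "enabled" unless an 8.x cluster persisted
     * logsdb.prior_logs_usage=true before the upgrade.
     */
    static boolean logsdbEnabledByDefault(Map<String, String> persistentClusterSettings) {
        boolean priorLogsUsage = Boolean.parseBoolean(
            persistentClusterSettings.getOrDefault("logsdb.prior_logs_usage", "false"));
        return priorLogsUsage == false;
    }

    public static void main(String[] args) {
        // Fresh 9.x cluster: setting absent, so logsdb is enabled by default.
        System.out.println(logsdbEnabledByDefault(Map.of()));                                  // true
        // Upgraded cluster that used logs-*-* data streams on 8.x: keep the old default.
        System.out.println(logsdbEnabledByDefault(Map.of("logsdb.prior_logs_usage", "true"))); // false
    }
}
----

On a brand-new 9.x cluster the setting is simply absent, so the default resolves to enabled; that is the behaviour the rolling-upgrade tests further down assert against `cluster.logsdb.enabled` and the backing index's `index.mode`.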
diff --git a/docs/reference/rest-api/info.asciidoc b/docs/reference/rest-api/info.asciidoc index 318170ab089bb..e1a7246342a36 100644 --- a/docs/reference/rest-api/info.asciidoc +++ b/docs/reference/rest-api/info.asciidoc @@ -177,7 +177,7 @@ Example response: }, "logsdb": { "available": true, - "enabled": false + "enabled": true } }, "tagline" : "You know, for X" diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index bb46c41b4bcc9..1c907ec63d8fe 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -514,7 +514,7 @@ GET /_xpack/usage }, "logsdb": { "available": true, - "enabled": false, + "enabled": true, "indices_count": 0, "indices_with_synthetic_source": 0, "num_docs": 0, diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java index 9866d94dccc3c..ebf72b26a2111 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java @@ -120,6 +120,10 @@ protected ElasticsearchCluster getUpgradeCluster() { }"""; public void testLogsIndexing() throws IOException { + assumeTrue( + "otherwise first backing index of logs-apache-production will be in logsdb mode", + getOldClusterTestVersion().before("9.0.0") + ); if (isRunningAgainstOldCluster()) { assertOK(client().performRequest(putTemplate(client(), "logs-template", STANDARD_TEMPLATE))); assertOK(client().performRequest(createDataStream("logs-apache-production"))); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsUsageRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsUsageRollingUpgradeIT.java new file mode 100644 index 0000000000000..ab9855b7398fe --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsUsageRollingUpgradeIT.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; + +import java.io.IOException; +import java.time.Instant; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.upgrades.LogsdbIndexingRollingUpgradeIT.bulkIndex; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; + +public class LogsUsageRollingUpgradeIT extends AbstractRollingUpgradeTestCase { + + public LogsUsageRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testUsage() throws Exception { + assumeTrue("logsdb.prior_logs_usage only gets set in 8.x", getOldClusterTestVersion().before("9.0.0")); + String dataStreamName = "logs-mysql-error"; + if (isOldCluster()) { + bulkIndex(dataStreamName, 4, 256, Instant.now()); + ensureGreen(dataStreamName); + assertBusy(() -> { + var getClusterSettingsResponse = getClusterSettings(); + Map persistentSettings = (Map) getClusterSettingsResponse.get("persistent"); + assertThat(persistentSettings, hasEntry("logsdb.prior_logs_usage", "true")); + }, 2, TimeUnit.MINUTES); + } else { + String newIndex = rolloverDataStream(dataStreamName); + bulkIndex(dataStreamName, 4, 256, Instant.now()); + Map indexResponse = (Map) getIndexSettings(newIndex, true).get(newIndex); + Map settings = (Map) indexResponse.get("settings"); + Map defaults = (Map) indexResponse.get("defaults"); + assertThat(settings, not(hasKey("index.mode"))); + assertThat(defaults, hasEntry("index.mode", "standard")); + } + } + + static Map getClusterSettings() throws IOException { + var request = new Request("GET", "/_cluster/settings"); + request.addParameter("flat_settings", "true"); + request.addParameter("include_defaults", "true"); + var response = client().performRequest(request); + assertOK(response); + return entityAsMap(response); + } + + static String rolloverDataStream(String dataStreamName) throws IOException { + var request = new Request("POST", "/" + dataStreamName + "/_rollover"); + var response = client().performRequest(request); + assertOK(response); + var responseBody = entityAsMap(response); + return (String) responseBody.get("new_index"); + } + +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java index 9cb91438e09c0..6b2a889d3c1ac 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java @@ -22,13 +22,18 @@ import java.io.IOException; import java.io.InputStream; import java.time.Instant; +import java.util.List; import java.util.Locale; import java.util.Map; import static org.elasticsearch.upgrades.LogsIndexModeRollingUpgradeIT.enableLogsdbByDefault; import static org.elasticsearch.upgrades.LogsIndexModeRollingUpgradeIT.getWriteBackingIndex; import static org.elasticsearch.upgrades.TsdbIT.formatInstant; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; public class 
LogsdbIndexingRollingUpgradeIT extends AbstractRollingUpgradeTestCase { @@ -122,7 +127,8 @@ static void createTemplate(String dataStreamName, String id, String template) th assertOK(client().performRequest(putIndexTemplateRequest)); } - static void bulkIndex(String dataStreamName, int numRequest, int numDocs, Instant startTime) throws Exception { + static String bulkIndex(String dataStreamName, int numRequest, int numDocs, Instant startTime) throws Exception { + String firstIndex = null; for (int i = 0; i < numRequest; i++) { var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk"); StringBuilder requestBody = new StringBuilder(); @@ -155,7 +161,11 @@ static void bulkIndex(String dataStreamName, int numRequest, int numDocs, Instan assertOK(response); var responseBody = entityAsMap(response); assertThat("errors in response:\n " + responseBody, responseBody.get("errors"), equalTo(false)); + if (firstIndex == null) { + firstIndex = (String) ((Map) ((Map) ((List) responseBody.get("items")).get(0)).get("create")).get("_index"); + } } + return firstIndex; } void search(String dataStreamName) throws Exception { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NoLogsUsageRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NoLogsUsageRollingUpgradeIT.java new file mode 100644 index 0000000000000..57e5655fda3b6 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NoLogsUsageRollingUpgradeIT.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import java.time.Instant; +import java.util.Map; + +import static org.elasticsearch.upgrades.LogsUsageRollingUpgradeIT.getClusterSettings; +import static org.elasticsearch.upgrades.LogsdbIndexingRollingUpgradeIT.bulkIndex; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; + +public class NoLogsUsageRollingUpgradeIT extends AbstractRollingUpgradeTestCase { + + public NoLogsUsageRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testUsage() throws Exception { + String dataStreamName = "logs-mysql-error"; + if (isOldCluster()) { + dataStreamName = dataStreamName.replace("logs-", "log-"); + bulkIndex(dataStreamName, 4, 256, Instant.now()); + ensureGreen(dataStreamName); + } else if (isUpgradedCluster()) { + String newIndex = bulkIndex(dataStreamName, 4, 256, Instant.now()); + ensureGreen(dataStreamName); + Map indexResponse = (Map) getIndexSettings(newIndex, true).get(newIndex); + Map settings = (Map) indexResponse.get("settings"); + assertThat(settings, hasEntry("index.mode", "logsdb")); + var getClusterSettingsResponse = getClusterSettings(); + Map defaults = (Map) getClusterSettingsResponse.get("defaults"); + Map persistentSettings = (Map) getClusterSettingsResponse.get("persistent"); + assertThat(persistentSettings, not(hasKey("logsdb.prior_logs_usage"))); + assertThat(defaults, hasEntry("cluster.logsdb.enabled", "true")); + } + } + +} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java index 40aab696dc9c4..4ae1e9961a109 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeDisabledRestTestIT.java @@ -63,14 +63,14 @@ public void setup() throws Exception { private RestClient client; - public void testLogsSettingsIndexModeDisabled() throws IOException { + public void testLogsSettingsIndexModeEnabledByDefault() throws IOException { assertOK(createDataStream(client, "logs-custom-dev")); final String indexMode = (String) getSetting( client, getDataStreamBackingIndex(client, "logs-custom-dev", 0), IndexSettings.MODE.getKey() ); - assertThat(indexMode, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); + assertThat(indexMode, equalTo(IndexMode.LOGSDB.getName())); } public void testTogglingLogsdb() throws IOException { @@ -81,29 +81,21 @@ public void testTogglingLogsdb() throws IOException { getDataStreamBackingIndex(client, "logs-custom-dev", 0), IndexSettings.MODE.getKey() ); - assertThat(indexModeBefore, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); - assertOK(putClusterSetting(client, "cluster.logsdb.enabled", "true")); + assertThat(indexModeBefore, equalTo(IndexMode.LOGSDB.getName())); + assertOK(putClusterSetting(client, "cluster.logsdb.enabled", "false")); final String indexModeAfter = (String) getSetting( client, getDataStreamBackingIndex(client, "logs-custom-dev", 0), IndexSettings.MODE.getKey() ); - assertThat(indexModeAfter, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); + assertThat(indexModeAfter, equalTo(IndexMode.LOGSDB.getName())); assertOK(rolloverDataStream(client, 
"logs-custom-dev")); final String indexModeLater = (String) getSetting( client, getDataStreamBackingIndex(client, "logs-custom-dev", 1), IndexSettings.MODE.getKey() ); - assertThat(indexModeLater, equalTo(IndexMode.LOGSDB.getName())); - assertOK(putClusterSetting(client, "cluster.logsdb.enabled", "false")); - assertOK(rolloverDataStream(client, "logs-custom-dev")); - final String indexModeFinal = (String) getSetting( - client, - getDataStreamBackingIndex(client, "logs-custom-dev", 2), - IndexSettings.MODE.getKey() - ); - assertThat(indexModeFinal, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); + assertThat(indexModeLater, Matchers.not(equalTo(IndexMode.LOGSDB.getName()))); } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index 4720ec87cb85c..455e707cc0d2c 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -11,10 +11,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; @@ -42,17 +40,21 @@ import java.util.function.Predicate; import java.util.function.Supplier; -import static org.elasticsearch.xpack.logsdb.LogsPatternUsageService.LOGSDB_PRIOR_LOGS_USAGE; -import static org.elasticsearch.xpack.logsdb.LogsPatternUsageService.USAGE_CHECK_MAX_PERIOD; import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseService.FALLBACK_SETTING; public class LogsDBPlugin extends Plugin implements ActionPlugin { private final Settings settings; private final SyntheticSourceLicenseService licenseService; + private static final Setting LOGSDB_PRIOR_LOGS_USAGE = Setting.boolSetting( + "logsdb.prior_logs_usage", + false, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); public static final Setting CLUSTER_LOGSDB_ENABLED = Setting.boolSetting( "cluster.logsdb.enabled", - false, + settings -> Boolean.toString(LOGSDB_PRIOR_LOGS_USAGE.get(settings) == false), Setting.Property.Dynamic, Setting.Property.NodeScope ); @@ -81,18 +83,6 @@ public Collection createComponents(PluginServices services) { logsdbIndexModeSettingsProvider::updateClusterIndexModeLogsdbEnabled ); - var clusterService = services.clusterService(); - Supplier metadataSupplier = () -> clusterService.state().metadata(); - var historicLogsUsageService = new LogsPatternUsageService(services.client(), settings, services.threadPool(), metadataSupplier); - clusterService.addLocalNodeMasterListener(historicLogsUsageService); - clusterService.addLifecycleListener(new LifecycleListener() { - - @Override - public void beforeStop() { - historicLogsUsageService.offMaster(); - } - }); - // Nothing to share here: return super.createComponents(services); } @@ -112,7 +102,7 @@ public Collection getAdditionalIndexSettingProviders(Index @Override public List> getSettings() { - return List.of(FALLBACK_SETTING, CLUSTER_LOGSDB_ENABLED, USAGE_CHECK_MAX_PERIOD, 
LOGSDB_PRIOR_LOGS_USAGE); + return List.of(FALLBACK_SETTING, CLUSTER_LOGSDB_ENABLED, LOGSDB_PRIOR_LOGS_USAGE); } @Override diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageService.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageService.java deleted file mode 100644 index 929db16a618a0..0000000000000 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageService.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.logsdb; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.LocalNodeMasterListener; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; -import org.elasticsearch.threadpool.Scheduler; -import org.elasticsearch.threadpool.ThreadPool; - -import java.util.concurrent.TimeUnit; -import java.util.function.Supplier; - -import static org.elasticsearch.xpack.logsdb.LogsdbIndexModeSettingsProvider.LOGS_PATTERN; - -/** - * A component that checks in the background whether there are data streams that match log-*-* pattern and if so records this - * as persistent setting in cluster state. If logs-*-* data stream usage has been found then this component will no longer - * run in the background. - *

- * After {@link #onMaster()} is invoked, the first check is scheduled to run after 1 minute. If no logs-*-* data streams are - * found, then the next check runs after 2 minutes. The schedule time will double if no data streams with logs-*-* pattern - * are found up until the maximum configured period in the {@link #USAGE_CHECK_MAX_PERIOD} setting (defaults to 24 hours). - *

- * If during a check one or more logs-*-* data streams are found, then the {@link #LOGSDB_PRIOR_LOGS_USAGE} setting gets set - * as persistent cluster setting and this component will not schedule new checks. The mentioned setting is visible in persistent settings - * of cluster state and a signal that upon upgrading to 9.x logsdb will not be enabled by default for data streams matching the - * logs-*-* pattern. It isn't recommended to manually set the {@link #LOGSDB_PRIOR_LOGS_USAGE} setting. - */ -final class LogsPatternUsageService implements LocalNodeMasterListener { - - private static final Logger LOGGER = LogManager.getLogger(LogsPatternUsageService.class); - private static final TimeValue USAGE_CHECK_MINIMUM = TimeValue.timeValueSeconds(30); - static final Setting USAGE_CHECK_MAX_PERIOD = Setting.timeSetting( - "logsdb.usage_check.max_period", - new TimeValue(24, TimeUnit.HOURS), - Setting.Property.NodeScope - ); - static final Setting LOGSDB_PRIOR_LOGS_USAGE = Setting.boolSetting( - "logsdb.prior_logs_usage", - false, - Setting.Property.Dynamic, - Setting.Property.NodeScope - ); - - private final Client client; - private final Settings nodeSettings; - private final ThreadPool threadPool; - private final Supplier metadataSupplier; - - // Initializing to 30s, so first time will run with a delay of 60s: - volatile TimeValue nextWaitTime = USAGE_CHECK_MINIMUM; - volatile boolean isMaster; - volatile boolean hasPriorLogsUsage; - volatile Scheduler.Cancellable cancellable; - - LogsPatternUsageService(Client client, Settings nodeSettings, ThreadPool threadPool, Supplier metadataSupplier) { - this.client = client; - this.nodeSettings = nodeSettings; - this.threadPool = threadPool; - this.metadataSupplier = metadataSupplier; - } - - @Override - public void onMaster() { - if (cancellable == null || cancellable.isCancelled()) { - isMaster = true; - nextWaitTime = USAGE_CHECK_MINIMUM; - scheduleNext(); - } - } - - @Override - public void offMaster() { - isMaster = false; - if (cancellable != null && cancellable.isCancelled() == false) { - cancellable.cancel(); - cancellable = null; - } - } - - void scheduleNext() { - TimeValue maxWaitTime = USAGE_CHECK_MAX_PERIOD.get(nodeSettings); - nextWaitTime = TimeValue.timeValueMillis(Math.min(nextWaitTime.millis() * 2, maxWaitTime.millis())); - scheduleNext(nextWaitTime); - } - - void scheduleNext(TimeValue waitTime) { - if (isMaster && hasPriorLogsUsage == false) { - try { - cancellable = threadPool.schedule(this::check, waitTime, threadPool.generic()); - } catch (EsRejectedExecutionException e) { - if (e.isExecutorShutdown()) { - LOGGER.debug("Failed to check; Shutting down", e); - } else { - throw e; - } - } - } else { - LOGGER.debug("Skipping check, because [{}]/[{}]", isMaster, hasPriorLogsUsage); - } - } - - void check() { - LOGGER.debug("Starting logs-*-* usage check"); - if (isMaster) { - var metadata = metadataSupplier.get(); - if (LOGSDB_PRIOR_LOGS_USAGE.exists(metadata.persistentSettings())) { - LOGGER.debug("Using persistent logs-*-* usage check"); - hasPriorLogsUsage = true; - return; - } - - if (hasLogsUsage(metadata)) { - updateSetting(); - } else { - LOGGER.debug("No usage found; Skipping check"); - scheduleNext(); - } - } else { - LOGGER.debug("No longer master; Skipping check"); - } - } - - static boolean hasLogsUsage(Metadata metadata) { - for (var dataStream : metadata.dataStreams().values()) { - if (Regex.simpleMatch(LOGS_PATTERN, dataStream.getName())) { - return true; - } - } - return false; - } - - void updateSetting() { - var 
settingsToUpdate = Settings.builder().put(LOGSDB_PRIOR_LOGS_USAGE.getKey(), true).build(); - var request = new ClusterUpdateSettingsRequest(TimeValue.ONE_MINUTE, TimeValue.ONE_MINUTE); - request.persistentSettings(settingsToUpdate); - client.execute(ClusterUpdateSettingsAction.INSTANCE, request, ActionListener.wrap(resp -> { - if (resp.isAcknowledged() && LOGSDB_PRIOR_LOGS_USAGE.exists(resp.getPersistentSettings())) { - hasPriorLogsUsage = true; - cancellable = null; - } else { - LOGGER.debug(() -> "unexpected response [" + LOGSDB_PRIOR_LOGS_USAGE.getKey() + "]"); - scheduleNext(TimeValue.ONE_MINUTE); - } - }, e -> { - LOGGER.debug(() -> "Failed to update [" + LOGSDB_PRIOR_LOGS_USAGE.getKey() + "]", e); - scheduleNext(TimeValue.ONE_MINUTE); - })); - } -} diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceIntegrationTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceIntegrationTests.java deleted file mode 100644 index fcd1d311df802..0000000000000 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceIntegrationTests.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.logsdb; - -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; -import org.elasticsearch.action.datastreams.DeleteDataStreamAction; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; -import org.elasticsearch.cluster.metadata.Template; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.datastreams.DataStreamsPlugin; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.threadpool.ThreadPoolStats; - -import java.util.Collection; -import java.util.List; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.nullValue; - -public class LogsPatternUsageServiceIntegrationTests extends ESSingleNodeTestCase { - - @Override - protected Collection> getPlugins() { - return List.of(LogsDBPlugin.class, DataStreamsPlugin.class); - } - - @Override - protected Settings nodeSettings() { - return Settings.builder().put("logsdb.usage_check.max_period", "1s").build(); - } - - @Override - protected boolean resetNodeAfterTest() { - return true; - } - - public void testLogsPatternUsage() throws Exception { - var template = ComposableIndexTemplate.builder() - .indexPatterns(List.of("logs-*-*")) - .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) - .build(); - assertAcked( - client().execute( - TransportPutComposableIndexTemplateAction.TYPE, - new 
TransportPutComposableIndexTemplateAction.Request("1").indexTemplate(template) - ).actionGet() - ); - - IndexRequest indexRequest = new IndexRequest("my-index").create(true).source("field", "value"); - var indexResponse = client().index(indexRequest).actionGet(); - assertThat(indexResponse.getResult(), equalTo(DocWriteResponse.Result.CREATED)); - - { - var response = client().execute(ClusterGetSettingsAction.INSTANCE, new ClusterGetSettingsAction.Request(TimeValue.ONE_MINUTE)) - .actionGet(); - assertThat(response.persistentSettings().get("logsdb.prior_logs_usage"), nullValue()); - } - - indexRequest = new IndexRequest("logs-myapp-prod").create(true).source("@timestamp", "2000-01-01T00:00"); - indexResponse = client().index(indexRequest).actionGet(); - assertThat(indexResponse.getResult(), equalTo(DocWriteResponse.Result.CREATED)); - - assertBusy(() -> { - var response = client().execute(ClusterGetSettingsAction.INSTANCE, new ClusterGetSettingsAction.Request(TimeValue.ONE_MINUTE)) - .actionGet(); - assertThat(response.persistentSettings().get("logsdb.prior_logs_usage"), equalTo("true")); - }); - } - - public void testLogsPatternUsageNoLogsStarDashStarUsage() throws Exception { - var template = ComposableIndexTemplate.builder() - .indexPatterns(List.of("log-*-*")) - .template(new Template(Settings.builder().put("index.number_of_replicas", 0).build(), null, null)) - .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) - .build(); - assertAcked( - client().execute( - TransportPutComposableIndexTemplateAction.TYPE, - new TransportPutComposableIndexTemplateAction.Request("1").indexTemplate(template) - ).actionGet() - ); - - var indexRequest = new IndexRequest("log-myapp-prod").create(true).source("@timestamp", "2000-01-01T00:00"); - var indexResponse = client().index(indexRequest).actionGet(); - assertThat(indexResponse.getResult(), equalTo(DocWriteResponse.Result.CREATED)); - - ensureGreen("log-myapp-prod"); - // Check that LogsPatternUsageService checked three times by checking generic threadpool stats. - // (the LogsPatternUsageService's check is scheduled via the generic threadpool) - var threadPool = getInstanceFromNode(ThreadPool.class); - var beforeStat = getGenericThreadpoolStat(threadPool); - assertBusy(() -> { - var stat = getGenericThreadpoolStat(threadPool); - assertThat(stat.completed(), greaterThanOrEqualTo(beforeStat.completed() + 3)); - }); - var response = client().execute(ClusterGetSettingsAction.INSTANCE, new ClusterGetSettingsAction.Request(TimeValue.ONE_MINUTE)) - .actionGet(); - assertThat(response.persistentSettings().get("logsdb.prior_logs_usage"), nullValue()); - } - - private static ThreadPoolStats.Stats getGenericThreadpoolStat(ThreadPool threadPool) { - var result = threadPool.stats().stats().stream().filter(stats -> stats.name().equals(ThreadPool.Names.GENERIC)).toList(); - assertThat(result.size(), equalTo(1)); - return result.get(0); - } - - @Override - public void tearDown() throws Exception { - // Need to clean up the data stream and logsdb.prior_logs_usage setting because ESSingleNodeTestCase tests aren't allowed to leave - // persistent cluster settings around. 
- - var deleteDataStreamsRequest = new DeleteDataStreamAction.Request(TEST_REQUEST_TIMEOUT, "*"); - deleteDataStreamsRequest.indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN); - assertAcked(client().execute(DeleteDataStreamAction.INSTANCE, deleteDataStreamsRequest)); - - var settings = Settings.builder().put("logsdb.prior_logs_usage", (String) null).build(); - client().admin() - .cluster() - .updateSettings(new ClusterUpdateSettingsRequest(TimeValue.ONE_MINUTE, TimeValue.ONE_MINUTE).persistentSettings(settings)) - .actionGet(); - - super.tearDown(); - } -} diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceTests.java deleted file mode 100644 index 2cd2f9216aba3..0000000000000 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsPatternUsageServiceTests.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.logsdb; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.DataStreamTestHelper; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.Scheduler; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; - -import java.util.List; -import java.util.function.Supplier; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.same; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.when; - -public class LogsPatternUsageServiceTests extends ESTestCase { - - public void testOnMaster() throws Exception { - var nodeSettings = Settings.builder().put("logsdb.usage_check.max_period", "1s").build(); - var client = mock(Client.class); - doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock - .getArguments()[2]; - var persistentSettings = Settings.builder().put("logsdb.prior_logs_usage", true).build(); - listener.onResponse(new ClusterUpdateSettingsResponse(true, Settings.EMPTY, persistentSettings)); - return null; - }).when(client).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); - - try (var threadPool = new TestThreadPool(getTestName())) { - var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1-prod", 1)), List.of()); - Supplier metadataSupplier = clusterState::metadata; - - var service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); - // pre-check: - 
assertFalse(service.isMaster); - assertFalse(service.hasPriorLogsUsage); - assertNull(service.cancellable); - // Trigger service: - service.onMaster(); - assertBusy(() -> { - assertTrue(service.isMaster); - assertTrue(service.hasPriorLogsUsage); - assertNull(service.cancellable); - }); - } - } - - public void testCheckHasUsage() { - var nodeSettings = Settings.EMPTY; - var client = mock(Client.class); - doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock - .getArguments()[2]; - var persistentSettings = Settings.builder().put("logsdb.prior_logs_usage", true).build(); - listener.onResponse(new ClusterUpdateSettingsResponse(true, Settings.EMPTY, persistentSettings)); - return null; - }).when(client).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); - - var threadPool = mock(ThreadPool.class); - var scheduledCancellable = mock(Scheduler.ScheduledCancellable.class); - when(threadPool.schedule(any(), any(), any())).thenReturn(scheduledCancellable); - var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1-prod", 1)), List.of()); - Supplier metadataSupplier = clusterState::metadata; - - LogsPatternUsageService service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); - service.onMaster(); - assertFalse(service.hasPriorLogsUsage); - assertNotNull(service.cancellable); - assertEquals(service.nextWaitTime, TimeValue.timeValueMinutes(1)); - service.check(); - assertTrue(service.hasPriorLogsUsage); - assertNull(service.cancellable); - assertEquals(service.nextWaitTime, TimeValue.timeValueMinutes(1)); - - verify(threadPool, times(1)).schedule(any(), any(), any()); - verify(client, times(1)).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); - } - - public void testCheckHasUsageNoMatch() { - var nodeSettings = Settings.EMPTY; - var client = mock(Client.class); - - var threadPool = mock(ThreadPool.class); - var scheduledCancellable = mock(Scheduler.ScheduledCancellable.class); - when(threadPool.schedule(any(), any(), any())).thenReturn(scheduledCancellable); - var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("log-app1-prod", 1)), List.of()); - Supplier metadataSupplier = clusterState::metadata; - - LogsPatternUsageService service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); - service.onMaster(); - assertFalse(service.hasPriorLogsUsage); - assertNotNull(service.cancellable); - assertEquals(service.nextWaitTime, TimeValue.timeValueMinutes(1)); - service.check(); - assertFalse(service.hasPriorLogsUsage); - assertNotNull(service.cancellable); - assertEquals(service.nextWaitTime, TimeValue.timeValueMinutes(2)); - - verify(threadPool, times(2)).schedule(any(), any(), any()); - verifyNoInteractions(client); - } - - public void testCheckPriorLogsUsageAlreadySet() { - var nodeSettings = Settings.EMPTY; - var client = mock(Client.class); - - var threadPool = mock(ThreadPool.class); - var scheduledCancellable = mock(Scheduler.ScheduledCancellable.class); - when(threadPool.schedule(any(), any(), any())).thenReturn(scheduledCancellable); - var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("log-app1-prod", 1)), List.of()); - clusterState = ClusterState.builder(clusterState) - .metadata( - Metadata.builder(clusterState.getMetadata()) - .persistentSettings(Settings.builder().put("logsdb.prior_logs_usage", 
true).build()) - .build() - ) - .build(); - Supplier metadataSupplier = clusterState::metadata; - - LogsPatternUsageService service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); - service.isMaster = true; - assertFalse(service.hasPriorLogsUsage); - assertNull(service.cancellable); - service.check(); - assertTrue(service.hasPriorLogsUsage); - assertNull(service.cancellable); - - verifyNoInteractions(client, threadPool); - } - - public void testCheckHasUsageUnexpectedResponse() { - var nodeSettings = Settings.EMPTY; - var client = mock(Client.class); - doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock - .getArguments()[2]; - ClusterUpdateSettingsResponse response; - if (randomBoolean()) { - var persistentSettings = Settings.builder().put("logsdb.prior_logs_usage", true).build(); - response = new ClusterUpdateSettingsResponse(false, Settings.EMPTY, persistentSettings); - } else { - response = new ClusterUpdateSettingsResponse(true, Settings.EMPTY, Settings.EMPTY); - } - listener.onResponse(response); - return null; - }).when(client).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); - - var threadPool = mock(ThreadPool.class); - var scheduledCancellable = mock(Scheduler.ScheduledCancellable.class); - when(threadPool.schedule(any(), any(), any())).thenReturn(scheduledCancellable); - var clusterState = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1-prod", 1)), List.of()); - Supplier metadataSupplier = clusterState::metadata; - - LogsPatternUsageService service = new LogsPatternUsageService(client, nodeSettings, threadPool, metadataSupplier); - service.isMaster = true; - assertFalse(service.hasPriorLogsUsage); - assertNull(service.cancellable); - service.check(); - assertFalse(service.hasPriorLogsUsage); - assertNotNull(service.cancellable); - - verify(threadPool, times(1)).schedule(any(), any(), any()); - verify(client, times(1)).execute(same(ClusterUpdateSettingsAction.INSTANCE), any(), any()); - } - - public void testHasLogsUsage() { - var metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(), List.of()).getMetadata(); - assertFalse(LogsPatternUsageService.hasLogsUsage(metadata)); - metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("log-app1", 1)), List.of()).getMetadata(); - assertFalse(LogsPatternUsageService.hasLogsUsage(metadata)); - metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1", 1)), List.of()).getMetadata(); - assertFalse(LogsPatternUsageService.hasLogsUsage(metadata)); - metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("log-app1-prod", 1)), List.of()).getMetadata(); - assertFalse(LogsPatternUsageService.hasLogsUsage(metadata)); - metadata = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>("logs-app1-prod", 1)), List.of()).getMetadata(); - assertTrue(LogsPatternUsageService.hasLogsUsage(metadata)); - metadata = DataStreamTestHelper.getClusterStateWithDataStreams( - List.of(new Tuple<>("log-app1-prod", 1), new Tuple<>("logs-app2-prod", 1)), - List.of() - ).getMetadata(); - assertTrue(LogsPatternUsageService.hasLogsUsage(metadata)); - metadata = DataStreamTestHelper.getClusterStateWithDataStreams( - List.of(new Tuple<>("log-app1", 1), new Tuple<>("logs-app2-prod", 1)), - List.of() - ).getMetadata(); - assertTrue(LogsPatternUsageService.hasLogsUsage(metadata)); - } - -} 
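The detection that the removed service (and the tests deleted just above) relied on is a wildcard match of data stream names against `logs-*-*`, via `Regex.simpleMatch` over the cluster metadata. A self-contained approximation follows; the class `LogsUsageCheckSketch`, the list-of-names signature, and the glob-to-regex translation are assumptions made so the example runs without Elasticsearch types, not the code that was removed.

[source,java]
----
import java.util.List;
import java.util.regex.Pattern;

/** Stand-alone approximation of the removed hasLogsUsage(Metadata) check. */
public class LogsUsageCheckSketch {

    // Treat '*' as "match anything" and everything else as a literal, roughly what Regex.simpleMatch does.
    static boolean simpleMatch(String pattern, String value) {
        String regex = "\\Q" + pattern.replace("*", "\\E.*\\Q") + "\\E";
        return Pattern.matches(regex, value);
    }

    /** Prior logs usage exists if any data stream name matches logs-*-*. */
    static boolean hasLogsUsage(List<String> dataStreamNames) {
        return dataStreamNames.stream().anyMatch(name -> simpleMatch("logs-*-*", name));
    }

    public static void main(String[] args) {
        System.out.println(hasLogsUsage(List.of("log-app1-prod", "logs-app1")));     // false
        System.out.println(hasLogsUsage(List.of("logs-app1-prod", "metrics-app1"))); // true
    }
}
----

The two `main` cases mirror the expectations of the deleted `testHasLogsUsage`: `log-app1-prod` and `logs-app1` do not count as prior usage, while `logs-app1-prod` does.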
From 59b3f79f885ce4dfd2f427988eeaa5e4ba1d5a71 Mon Sep 17 00:00:00 2001 From: Iraklis Psaroudakis Date: Wed, 29 Jan 2025 16:22:06 +0200 Subject: [PATCH 205/383] Move hollow hook from permits to bulk action (#120945) We do not need the hook for unhollowing shards on all actions taking primary permits, but rather only one the one that can ingest into the engine. So moving the hook to the bulk action. Relates ES-10654 --- .../action/bulk/TransportShardBulkAction.java | 15 +++++++++++ .../index/CompositeIndexEventListener.java | 12 --------- .../index/shard/IndexEventListener.java | 12 --------- .../elasticsearch/index/shard/IndexShard.java | 19 ++++---------- .../shard/IndexShardOperationPermits.java | 2 +- .../shard/IndexingOperationListener.java | 25 +++++++++++++++++++ 6 files changed, 46 insertions(+), 39 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 33c73898c0394..8609dd80d15d7 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.replication.PostWriteRefresh; import org.elasticsearch.action.support.replication.TransportReplicationAction; import org.elasticsearch.action.support.replication.TransportWriteAction; @@ -145,6 +146,20 @@ protected BulkShardResponse newResponseInstance(StreamInput in) throws IOExcepti return new BulkShardResponse(in); } + @Override + protected void shardOperationOnPrimary( + BulkShardRequest request, + IndexShard primary, + ActionListener> listener + ) { + final ActionListener wrappedListener = listener.delegateFailure( + (l, ignored) -> super.shardOperationOnPrimary(request, primary, l) + ); + try (var preBulkProceedListeners = new RefCountingListener(wrappedListener)) { + primary.getIndexingOperationListener().preBulkOnPrimary(primary, () -> preBulkProceedListeners.acquire()); + } + } + @Override protected void dispatchedShardOperationOnPrimary( BulkShardRequest request, diff --git a/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java b/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java index f3d58fe4b051f..56d12b6d563c1 100644 --- a/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java +++ b/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java @@ -28,7 +28,6 @@ import java.util.Iterator; import java.util.List; import java.util.function.Consumer; -import java.util.function.Supplier; import static org.elasticsearch.core.Strings.format; @@ -351,15 +350,4 @@ public void afterFilesRestoredFromRepository(IndexShard indexShard) { } } - @Override - public void onAcquirePrimaryOperationPermit(IndexShard indexShard, Supplier> onPermitAcquiredListenerSupplier) { - for (IndexEventListener listener : listeners) { - try { - listener.onAcquirePrimaryOperationPermit(indexShard, onPermitAcquiredListenerSupplier); - } catch (Exception e) { - logger.warn(() -> "[" + indexShard.shardId() + "] failed to invoke the listener on acquiring a primary permit", e); - throw e; - } - } - } } diff --git 
a/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java b/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java index e5104948cc426..4e55a2e9599d5 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java @@ -17,8 +17,6 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason; -import java.util.function.Supplier; - /** * An index event listener is the primary extension point for plugins and build-in services * to react / listen to per-index and per-shard events. These listeners are registered per-index @@ -192,14 +190,4 @@ default void afterIndexShardRecovery(IndexShard indexShard, ActionListener * @param indexShard the shard that is recovering */ default void afterFilesRestoredFromRepository(IndexShard indexShard) {} - - /** - * Called when a single primary permit is acquired for the given shard (see - * {@link IndexShard#acquirePrimaryOperationPermit(ActionListener, java.util.concurrent.Executor)}). - * - * @param indexShard the shard of which a primary permit is requested - * @param onPermitAcquiredListenerSupplier call this immediately to get a listener when the permit is acquired. The listener must be - * completed in order for the permit to be given to the acquiring operation. - */ - default void onAcquirePrimaryOperationPermit(IndexShard indexShard, Supplier> onPermitAcquiredListenerSupplier) {} } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index bfa286858f8ba..f199e8f202959 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -34,7 +34,6 @@ import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.replication.PendingReplicationActions; import org.elasticsearch.action.support.replication.ReplicationResponse; @@ -452,6 +451,10 @@ public BulkOperationListener getBulkOperationListener() { return this.bulkOperationListener; } + public IndexingOperationListener getIndexingOperationListener() { + return this.indexingOperationListeners; + } + public ShardIndexWarmerService warmerService() { return this.shardWarmerService; } @@ -3585,19 +3588,7 @@ public void acquirePrimaryOperationPermit( ) { verifyNotClosed(); assert shardRouting.primary() : "acquirePrimaryOperationPermit should only be called on primary shard: " + shardRouting; - - ActionListener onPermitAcquiredWrapped = onPermitAcquired.delegateFailureAndWrap((delegate, releasable) -> { - final ActionListener wrappedListener = indexShardOperationPermits.wrapContextPreservingActionListener( - delegate, - executorOnDelay, - forceExecution - ); - try (var listeners = new RefCountingListener(wrappedListener.map(unused -> releasable))) { - indexEventListener.onAcquirePrimaryOperationPermit(this, () -> listeners.acquire()); - } - }); - - indexShardOperationPermits.acquire(wrapPrimaryOperationPermitListener(onPermitAcquiredWrapped), executorOnDelay, forceExecution); + 
indexShardOperationPermits.acquire(wrapPrimaryOperationPermitListener(onPermitAcquired), executorOnDelay, forceExecution); } public boolean isPrimaryMode() { diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java index 79f5d054df30d..0427e9c99ea35 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java @@ -230,7 +230,7 @@ private void innerAcquire( onAcquired.onResponse(releasable); } - public ActionListener wrapContextPreservingActionListener( + private ActionListener wrapContextPreservingActionListener( ActionListener listener, @Nullable final Executor executorOnDelay, final boolean forceExecution diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexingOperationListener.java b/server/src/main/java/org/elasticsearch/index/shard/IndexingOperationListener.java index 0e796d2fb38ce..8cb155aa2e99b 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexingOperationListener.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexingOperationListener.java @@ -9,9 +9,11 @@ package org.elasticsearch.index.shard; import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.index.engine.Engine; import java.util.List; +import java.util.function.Supplier; /** * An indexing listener for indexing, delete, events. @@ -62,6 +64,18 @@ default void postDelete(ShardId shardId, Engine.Delete delete, Engine.DeleteResu */ default void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) {} + /** + * Called when a {@link org.elasticsearch.action.bulk.TransportShardBulkAction} is about to perform index and/or delete operation(s) + * on a primary shard. + * + * This is called from a transport thread and therefore the function should be lightweight and not block the thread. The acquired + * listener(s) can be asynchronously completed on another thread at a later time. + * + * @param indexShard the shard the bulk is about to be performed on + * @param proceedListenerSupplier call this immediately to get a listener which must be completed so that the bulk can proceed. + */ + default void preBulkOnPrimary(IndexShard indexShard, Supplier> proceedListenerSupplier) {} + /** * A Composite listener that multiplexes calls to each of the listeners methods. 
*/ @@ -149,5 +163,16 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) { } } } + + @Override + public void preBulkOnPrimary(IndexShard indexShard, Supplier> proceedListenerSupplier) { + for (IndexingOperationListener listener : listeners) { + try { + listener.preBulkOnPrimary(indexShard, proceedListenerSupplier); + } catch (Exception e) { + logger.warn(() -> "preBulkOnPrimary listener [" + listener + "] failed", e); + } + } + } } } From d108c7aa99467368c84b213105ba94ab1ef1f032 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 01:32:57 +1100 Subject: [PATCH 206/383] Mute org.elasticsearch.xpack.security.profile.ProfileIntegTests testSetEnabled #121183 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b3fc32646df00..2e120746e2673 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -320,6 +320,9 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testGetUsersWithProfileUidWhenProfileIndexDoesNotExists issue: https://github.com/elastic/elasticsearch/issues/121179 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testSetEnabled + issue: https://github.com/elastic/elasticsearch/issues/121183 # Examples: # From 9009606a47f3ed7fca8ac2fbd6a402e12a6c1fae Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Wed, 29 Jan 2025 09:33:16 -0500 Subject: [PATCH 207/383] [Transform] add support for extended_stats (#120340) Building off of `stats` and multi-value aggregations, including the limitation: - all values of extended_stats will be mapped to `double` if mapping deduction is used Relates #51925 --- docs/changelog/120340.yaml | 5 ++ docs/reference/rest-api/common-parms.asciidoc | 1 + .../ExtendedStatsAggregationBuilder.java | 6 ++ .../metrics/InternalExtendedStats.java | 63 ++++++++++++++ .../metrics/ExtendedStatsAggregatorTests.java | 11 ++- .../metrics/InternalExtendedStatsTests.java | 84 +++++++++++++++++++ .../integration/TransformPivotRestIT.java | 78 +++++++++++++++++ .../pivot/AggregationResultUtils.java | 12 +++ .../pivot/TransformAggregations.java | 4 +- .../pivot/TransformAggregationsTests.java | 39 +++++++++ 10 files changed, 300 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/120340.yaml diff --git a/docs/changelog/120340.yaml b/docs/changelog/120340.yaml new file mode 100644 index 0000000000000..3c2200170c0c4 --- /dev/null +++ b/docs/changelog/120340.yaml @@ -0,0 +1,5 @@ +pr: 120340 +summary: Add support for `extended_stats` +area: Transform +type: enhancement +issues: [] diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 37c5528812900..74956fb1f205a 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -808,6 +808,7 @@ currently supported: * <> * <> * <> +* <> * <> * <> * <> diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java index 21bfffa883f50..af47141730e60 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java @@ -28,6 +28,7 @@ import java.io.IOException; import java.util.Map; import 
java.util.Objects; +import java.util.Optional; import java.util.Set; public class ExtendedStatsAggregationBuilder extends ValuesSourceAggregationBuilder.MetricsAggregationBuilder< @@ -87,6 +88,11 @@ public Set metricNames() { return InternalExtendedStats.METRIC_NAMES; } + @Override + public Optional> getOutputFieldNames() { + return Optional.of(InternalExtendedStats.Fields.OUTPUT_FORMAT); + } + @Override protected ValuesSourceType defaultValueSourceType() { return CoreValuesSourceType.NUMERIC; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java index f74206c7af8b7..7965211e24683 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.search.aggregations.metrics; +import org.elasticsearch.common.TriConsumer; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -19,6 +20,7 @@ import java.io.IOException; import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -337,6 +339,67 @@ static class Fields { public static final String LOWER_POPULATION = "lower_population"; public static final String UPPER_SAMPLING = "upper_sampling"; public static final String LOWER_SAMPLING = "lower_sampling"; + + static final Set OUTPUT_FORMAT = Set.of( + Metrics.count.name(), + Metrics.sum.name(), + Metrics.min.name(), + Metrics.max.name(), + Metrics.avg.name(), + SUM_OF_SQRS, + VARIANCE, + VARIANCE_POPULATION, + VARIANCE_SAMPLING, + STD_DEVIATION, + STD_DEVIATION_POPULATION, + STD_DEVIATION_SAMPLING, + STD_DEVIATION_BOUNDS + "." + UPPER, + STD_DEVIATION_BOUNDS + "." + LOWER, + STD_DEVIATION_BOUNDS + "." + UPPER_POPULATION, + STD_DEVIATION_BOUNDS + "." + LOWER_POPULATION, + STD_DEVIATION_BOUNDS + "." + UPPER_SAMPLING, + STD_DEVIATION_BOUNDS + "." 
+ LOWER_SAMPLING + ); + } + + public Map asIndexableMap() { + if (count != 0) { + // NumberFieldMapper will invalidate non-finite doubles + TriConsumer, String, Double> putIfValidDouble = (map, key, value) -> { + if (Double.isFinite(value)) { + map.put(key, value); + } + }; + var extendedStatsMap = new HashMap(13); + extendedStatsMap.put(Metrics.count.name(), getCount()); + putIfValidDouble.apply(extendedStatsMap, Metrics.sum.name(), getSum()); + putIfValidDouble.apply(extendedStatsMap, Metrics.min.name(), getMin()); + putIfValidDouble.apply(extendedStatsMap, Metrics.max.name(), getMax()); + putIfValidDouble.apply(extendedStatsMap, Metrics.avg.name(), getAvg()); + + putIfValidDouble.apply(extendedStatsMap, Fields.SUM_OF_SQRS, sumOfSqrs); + putIfValidDouble.apply(extendedStatsMap, Fields.VARIANCE, getVariance()); + putIfValidDouble.apply(extendedStatsMap, Fields.VARIANCE_POPULATION, getVariancePopulation()); + putIfValidDouble.apply(extendedStatsMap, Fields.VARIANCE_SAMPLING, getVarianceSampling()); + putIfValidDouble.apply(extendedStatsMap, Fields.STD_DEVIATION, getStdDeviation()); + putIfValidDouble.apply(extendedStatsMap, Fields.STD_DEVIATION_POPULATION, getStdDeviationPopulation()); + putIfValidDouble.apply(extendedStatsMap, Fields.STD_DEVIATION_SAMPLING, getStdDeviationSampling()); + + var stdDevBounds = new HashMap(6); + putIfValidDouble.apply(stdDevBounds, Fields.UPPER, getStdDeviationBound(Bounds.UPPER)); + putIfValidDouble.apply(stdDevBounds, Fields.LOWER, getStdDeviationBound(Bounds.LOWER)); + putIfValidDouble.apply(stdDevBounds, Fields.UPPER_POPULATION, getStdDeviationBound(Bounds.UPPER_POPULATION)); + putIfValidDouble.apply(stdDevBounds, Fields.LOWER_POPULATION, getStdDeviationBound(Bounds.LOWER_POPULATION)); + putIfValidDouble.apply(stdDevBounds, Fields.UPPER_SAMPLING, getStdDeviationBound(Bounds.UPPER_SAMPLING)); + putIfValidDouble.apply(stdDevBounds, Fields.LOWER_SAMPLING, getStdDeviationBound(Bounds.LOWER_SAMPLING)); + if (stdDevBounds.isEmpty() == false) { + extendedStatsMap.put(Fields.STD_DEVIATION_BOUNDS, stdDevBounds); + } + + return extendedStatsMap; + } else { + return Map.of(); + } } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java index ae4ed3568683a..4151beda6ba0c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java @@ -19,7 +19,9 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; @@ -27,7 +29,7 @@ import java.util.function.Consumer; import static java.util.Collections.singleton; -import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; +import static org.hamcrest.Matchers.is; public class ExtendedStatsAggregatorTests extends AggregatorTestCase { private static final double TOLERANCE = 1e-5; @@ -304,6 +306,13 @@ public void testCase( testCase(buildIndex, verify, new 
AggTestConfig(aggBuilder, ft)); } + @Override + protected void verifyOutputFieldNames(T aggregationBuilder, V agg) + throws IOException { + assertTrue(aggregationBuilder.getOutputFieldNames().isPresent()); + assertThat(aggregationBuilder.getOutputFieldNames().get(), is(InternalExtendedStats.Fields.OUTPUT_FORMAT)); + } + static class ExtendedSimpleStatsAggregator extends StatsAggregatorTests.SimpleStatsAggregator { double sumOfSqrs = 0; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java index bbb007c9155ba..b919428c00ef9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java @@ -9,16 +9,29 @@ package org.elasticsearch.search.aggregations.metrics; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentFactory; +import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Predicate; + +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isA; +import static org.hamcrest.Matchers.notNullValue; public class InternalExtendedStatsTests extends InternalAggregationTestCase { @@ -209,4 +222,75 @@ private void verifySumOfSqrsOfDoubles(double[] values, double expectedSumOfSqrs, InternalExtendedStats reduced = (InternalExtendedStats) InternalAggregationTestCase.reduce(aggregations, null); assertEquals(expectedSumOfSqrs, reduced.getSumOfSquares(), delta); } + + @SuppressWarnings(value = "unchecked") + public void testAsMapMatchesXContent() throws IOException { + var stats = new InternalExtendedStats( + "testAsMapIsSameAsXContent", + randomLongBetween(1, 50), + randomDoubleBetween(1, 50, true), + randomDoubleBetween(1, 50, true), + randomDoubleBetween(1, 50, true), + randomDoubleBetween(1, 50, true), + sigma, + DocValueFormat.RAW, + Map.of() + ); + + var outputMap = stats.asIndexableMap(); + assertThat(outputMap, notNullValue()); + + Map xContentMap; + try (var builder = XContentFactory.jsonBuilder()) { + builder.startObject(); + stats.doXContentBody(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + xContentMap = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); + } + assertThat(xContentMap, notNullValue()); + + // serializing -> deserializing converts the long to an int, so we convert it back to test + var countMetricName = InternalStats.Metrics.count.name(); + var xContentCount = xContentMap.get(countMetricName); + assertThat(xContentCount, isA(Integer.class)); + assertThat(((Integer) xContentCount).longValue(), equalTo(outputMap.get(countMetricName))); + + // verify the entries in the bounds map are similar + var xContentStdDevBounds = (Map) 
xContentMap.get(InternalExtendedStats.Fields.STD_DEVIATION_BOUNDS); + var outputStdDevBounds = (Map) outputMap.get(InternalExtendedStats.Fields.STD_DEVIATION_BOUNDS); + xContentStdDevBounds.forEach((key, value) -> { + if (value instanceof String == false || Double.isFinite(Double.parseDouble(value.toString()))) { + assertThat(outputStdDevBounds.get(key), equalTo(value)); + } + }); + + // verify all the other entries that are not "std_deviation_bounds" or "count" + Predicate> notCountOrStdDevBounds = Predicate.not( + e -> e.getKey().equals(countMetricName) || e.getKey().equals(InternalExtendedStats.Fields.STD_DEVIATION_BOUNDS) + ); + xContentMap.entrySet().stream().filter(notCountOrStdDevBounds).forEach(e -> { + if (e.getValue() instanceof String == false || Double.isFinite(Double.parseDouble(e.getValue().toString()))) { + assertThat(outputMap.get(e.getKey()), equalTo(e.getValue())); + } + }); + } + + public void testIndexableMapExcludesNaN() { + var stats = new InternalExtendedStats( + "testAsMapIsSameAsXContent", + randomLongBetween(1, 50), + Double.NaN, + Double.NaN, + Double.NaN, + Double.NaN, + sigma, + DocValueFormat.RAW, + Map.of() + ); + + var outputMap = stats.asIndexableMap(); + assertThat(outputMap, is(aMapWithSize(1))); + assertThat(outputMap, hasKey(InternalStats.Metrics.count.name())); + assertThat(outputMap.get(InternalStats.Metrics.count.name()), is(stats.getCount())); + } } diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java index 322ac63a819fc..83f731e298159 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java @@ -2003,6 +2003,84 @@ public void testPivotWithTopMetrics() throws Exception { assertEquals("business_3", actual); } + @SuppressWarnings(value = "unchecked") + public void testPivotWithExtendedStats() throws Exception { + var transformId = "extended_stats_transform"; + var transformIndex = "extended_stats_pivot_reviews"; + setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformIndex); + + var createTransformRequest = createRequestWithAuth( + "PUT", + getTransformEndpoint() + transformId, + BASIC_AUTH_VALUE_TRANSFORM_ADMIN_WITH_SOME_DATA_ACCESS + ); + + var config = Strings.format(""" + { + "source": { + "index": "%s" + }, + "dest": { + "index": "%s" + }, + "pivot": { + "group_by": { + "reviewer": { + "terms": { + "field": "user_id" + } + } + }, + "aggregations": { + "stars": { + "extended_stats": { + "field": "stars" + } + } + } + } + }""", REVIEWS_INDEX_NAME, transformIndex); + + createTransformRequest.setJsonEntity(config); + var createTransformResponse = entityAsMap(client().performRequest(createTransformRequest)); + assertThat(createTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + + startAndWaitForTransform(transformId, transformIndex, BASIC_AUTH_VALUE_TRANSFORM_ADMIN_WITH_SOME_DATA_ACCESS); + assertTrue(indexExists(transformIndex)); + + var searchResult = getAsMap(transformIndex + "/_search?q=reviewer:user_4"); + assertEquals(1, XContentMapValues.extractValue("hits.total.value", searchResult)); + var stdDevMap = (Map) ((List) 
XContentMapValues.extractValue("hits.hits._source.stars", searchResult)).get(0); + assertThat(stdDevMap.get("count"), equalTo(41)); + assertThat( + stdDevMap, + allOf( + hasEntry("sum", 159.0), + hasEntry("min", 1.0), + hasEntry("max", 5.0), + hasEntry("avg", 3.8780487804878048), + hasEntry("sum_of_squares", 711.0), + hasEntry("variance", 2.3022010707911953), + hasEntry("variance_population", 2.3022010707911953), + hasEntry("variance_sampling", 2.3597560975609753), + hasEntry("std_deviation", 1.5173005868288574), + hasEntry("std_deviation_sampling", 1.5361497640402693), + hasEntry("std_deviation_population", 1.5173005868288574) + ) + ); + assertThat( + (Map) stdDevMap.get("std_deviation_bounds"), + allOf( + hasEntry("upper", 6.91264995414552), + hasEntry("lower", 0.84344760683009), + hasEntry("upper_population", 6.91264995414552), + hasEntry("lower_population", 0.84344760683009), + hasEntry("upper_sampling", 6.950348308568343), + hasEntry("lower_sampling", 0.8057492524072662) + ) + ); + } + public void testPivotWithBoxplot() throws Exception { String transformId = "boxplot_transform"; String transformIndex = "boxplot_pivot_reviews"; diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtils.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtils.java index a851e4a47f1cc..663b2acb0a01b 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtils.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtils.java @@ -23,6 +23,7 @@ import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.search.aggregations.metrics.GeoBounds; import org.elasticsearch.search.aggregations.metrics.GeoCentroid; +import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats; import org.elasticsearch.search.aggregations.metrics.MultiValueAggregation; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation.MultiValue; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation.SingleValue; @@ -69,6 +70,7 @@ public final class AggregationResultUtils { tempMap.put(GeoShapeMetricAggregation.class.getName(), new GeoShapeMetricAggExtractor()); tempMap.put(MultiValue.class.getName(), new NumericMultiValueAggExtractor()); tempMap.put(MultiValueAggregation.class.getName(), new MultiValueAggExtractor()); + tempMap.put(InternalExtendedStats.class.getName(), new ExtendedStatsExtractor()); TYPE_VALUE_EXTRACTOR_MAP = Collections.unmodifiableMap(tempMap); } @@ -171,6 +173,9 @@ static AggValueExtractor getExtractor(Aggregation aggregation) { // TODO: can the Range extractor be removed? 
} else if (aggregation instanceof Range) { return TYPE_VALUE_EXTRACTOR_MAP.get(Range.class.getName()); + } else if (aggregation instanceof InternalExtendedStats) { + // note: extended stats is also a multi bucket agg, therefore check range first + return TYPE_VALUE_EXTRACTOR_MAP.get(InternalExtendedStats.class.getName()); } else if (aggregation instanceof MultiValue) { return TYPE_VALUE_EXTRACTOR_MAP.get(MultiValue.class.getName()); } else if (aggregation instanceof MultiValueAggregation) { @@ -281,6 +286,13 @@ public Object value(Aggregation agg, Map fieldTypeMap, String lo } } + static class ExtendedStatsExtractor implements AggValueExtractor { + @Override + public Object value(Aggregation agg, Map fieldTypeMap, String lookupFieldPrefix) { + return ((InternalExtendedStats) agg).asIndexableMap(); + } + } + static class MultiValueAggExtractor implements AggValueExtractor { @Override public Object value(Aggregation agg, Map fieldTypeMap, String lookupFieldPrefix) { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregations.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregations.java index 95e05d93ff03a..16ad1eb8fcd51 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregations.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregations.java @@ -60,7 +60,6 @@ public final class TransformAggregations { "date_histogram", "date_range", "diversified_sampler", - "extended_stats", // https://github.com/elastic/elasticsearch/issues/51925 "filters", "geo_distance", "geohash_grid", @@ -120,7 +119,8 @@ enum AggregationType { MISSING("missing", LONG), TOP_METRICS("top_metrics", SOURCE), STATS("stats", DOUBLE), - BOXPLOT("boxplot", DOUBLE); + BOXPLOT("boxplot", DOUBLE), + EXTENDED_STATS("extended_stats", DOUBLE); private final String aggregationType; private final String targetMapping; diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregationsTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregationsTests.java index 4564ec5cc67ea..41a913ec4c2b6 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregationsTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregationsTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.PercentilesAggregationBuilder; @@ -31,7 +32,9 @@ import java.util.Map; import java.util.stream.Collectors; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.is; public class TransformAggregationsTests extends ESTestCase { @@ -137,6 +140,9 @@ public void testResolveTargetMapping() 
{ assertEquals("double", TransformAggregations.resolveTargetMapping("stats", null)); assertEquals("double", TransformAggregations.resolveTargetMapping("stats", "int")); + // extended stats + assertEquals("double", TransformAggregations.resolveTargetMapping("extended_stats", "double")); + // boxplot assertEquals("double", TransformAggregations.resolveTargetMapping("boxplot", "double")); @@ -220,6 +226,39 @@ public void testGetAggregationOutputTypesStats() { assertEquals("stats", outputTypes.get("stats.sum")); } + public void testGetAggregationOutputTypesExtendedStats() { + var extendedStatsAggregationBuilder = new ExtendedStatsAggregationBuilder("extended_stats"); + + var inputAndOutputTypes = TransformAggregations.getAggregationInputAndOutputTypes(extendedStatsAggregationBuilder); + var outputTypes = inputAndOutputTypes.v2(); + assertEquals(18, outputTypes.size()); + assertThat( + outputTypes, + allOf( + hasEntry("extended_stats.count", "extended_stats"), + hasEntry("extended_stats.sum", "extended_stats"), + hasEntry("extended_stats.avg", "extended_stats"), + hasEntry("extended_stats.min", "extended_stats"), + hasEntry("extended_stats.max", "extended_stats"), + + hasEntry("extended_stats.sum_of_squares", "extended_stats"), + hasEntry("extended_stats.variance", "extended_stats"), + hasEntry("extended_stats.variance_population", "extended_stats"), + hasEntry("extended_stats.variance_sampling", "extended_stats"), + hasEntry("extended_stats.std_deviation", "extended_stats"), + hasEntry("extended_stats.std_deviation_population", "extended_stats"), + hasEntry("extended_stats.std_deviation_sampling", "extended_stats"), + + hasEntry("extended_stats.std_deviation_bounds.upper", "extended_stats"), + hasEntry("extended_stats.std_deviation_bounds.lower", "extended_stats"), + hasEntry("extended_stats.std_deviation_bounds.upper_population", "extended_stats"), + hasEntry("extended_stats.std_deviation_bounds.lower_population", "extended_stats"), + hasEntry("extended_stats.std_deviation_bounds.upper_sampling", "extended_stats"), + hasEntry("extended_stats.std_deviation_bounds.lower_sampling", "extended_stats") + ) + ); + } + public void testGetAggregationOutputTypesRange() { { AggregationBuilder rangeAggregationBuilder = new RangeAggregationBuilder("range_agg_name").addUnboundedTo(100) From 7e24308062aaa3100fdfa2ba79f21eddfa5f9a11 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 01:35:56 +1100 Subject: [PATCH 208/383] Mute org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests #121185 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2e120746e2673..801830a678c88 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -323,6 +323,8 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testSetEnabled issue: https://github.com/elastic/elasticsearch/issues/121183 +- class: org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests + issue: https://github.com/elastic/elasticsearch/issues/121185 # Examples: # From 49a20c149cc665b1ea77f89b9f3654f2892d4694 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 01:40:58 +1100 Subject: [PATCH 209/383] Mute org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT test {yaml=cat.aliases/10_basic/Simple alias} #121186 --- 
muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 801830a678c88..8d4189273c01e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -325,6 +325,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/121183 - class: org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests issue: https://github.com/elastic/elasticsearch/issues/121185 +- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT + method: test {yaml=cat.aliases/10_basic/Simple alias} + issue: https://github.com/elastic/elasticsearch/issues/121186 # Examples: # From 7b7cd1f87c6460e58f9ce6dce19163942ca15918 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Wed, 29 Jan 2025 15:43:26 +0100 Subject: [PATCH 210/383] Support recovery for closed shard in N-2 version (#121141) Allow shard recovery for shards in version N-2 that have been verified before being closed, but not verified as read-only, in 7.x or 8.x. Reopening such closed indices automatically adds an index.blocks.write. Requires #120595 for the 8.x changes. Closes ES-10320 Closes #121170 Closes #121171 --- muted-tests.yml | 6 -- ...sterRestartLuceneIndexCompatibilityIT.java | 98 +++++++++++++---- ...earchableSnapshotIndexCompatibilityIT.java | 13 +-- ...gradeLuceneIndexCompatibilityTestCase.java | 100 +++++++++++++++--- ...earchableSnapshotIndexCompatibilityIT.java | 13 +-- .../metadata/IndexMetadataVerifier.java | 9 +- .../metadata/MetadataIndexStateService.java | 13 +++ 7 files changed, 184 insertions(+), 68 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 8d4189273c01e..499ea074fa697 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -305,12 +305,6 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testProfileIndexAutoCreation issue: https://github.com/elastic/elasticsearch/issues/120987 -- class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase - method: testRestoreIndex {p0=[9.0.0, 9.0.0, 8.18.0]} - issue: https://github.com/elastic/elasticsearch/issues/121170 -- class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase - method: testRestoreIndex {p0=[9.0.0, 8.18.0, 8.18.0]} - issue: https://github.com/elastic/elasticsearch/issues/121171 - class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT method: testFileSettingsReprocessedOnRestartWithoutVersionChange issue: https://github.com/elastic/elasticsearch/issues/120964 diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java index a442bc2f53736..f37fca16a4b78 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java @@ -9,10 +9,10 @@ package org.elasticsearch.lucene; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.test.cluster.util.Version; @@ -22,10 +22,10 @@ import static 
org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_BEFORE_CLOSE_SETTING; import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING; import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; public class FullClusterRestartLuceneIndexCompatibilityIT extends FullClusterRestartIndexCompatibilityTestCase { @@ -51,7 +51,6 @@ public void testIndexUpgrade() throws Exception { Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2)) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) .build() ); indexDocs(index, numDocs); @@ -111,12 +110,10 @@ public void testIndexUpgrade() throws Exception { .putNull(IndexMetadata.APIBlock.WRITE.settingName()) .putNull(IndexMetadata.APIBlock.READ_ONLY.settingName()) ); - logger.debug("--> but attempts to re-opening [{}] should fail due to the missing block", index); - var ex = expectThrows(ResponseException.class, () -> openIndex(index)); - assertThat(ex.getMessage(), containsString("must be marked as read-only")); - // TODO this could be randomized once we support recovering verified-before-close closed indices with no write/ro block - addIndexBlock(index, IndexMetadata.APIBlock.WRITE); + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); } var block = indexBlocks(index).stream().filter(c -> c.equals(INDEX_WRITE_BLOCK) || c.equals(INDEX_READ_ONLY_BLOCK)).findFirst(); @@ -128,11 +125,11 @@ public void testIndexUpgrade() throws Exception { .putNull(IndexMetadata.APIBlock.READ_ONLY.settingName()) .put(IndexMetadata.APIBlock.WRITE.settingName(), true) ); - } - assertThat(indexBlocks(index), isClosed ? contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK) : contains(INDEX_WRITE_BLOCK)); - assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(isClosed)); - assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + assertThat(indexBlocks(index), isClosed ? contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK) : contains(INDEX_WRITE_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(isClosed)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + } var numberOfReplicas = getNumberOfReplicas(index); if (0 < numberOfReplicas) { @@ -173,6 +170,71 @@ public void testIndexUpgrade() throws Exception { } } + /** + * Creates an index on N-2, closes it on N-1 (without marking it as read-only), then upgrades to N. 
+ */ + public void testClosedIndexUpgrade() throws Exception { + final String index = suffix("index"); + final int numDocs = 2437; + + if (isFullyUpgradedTo(VERSION_MINUS_2)) { + createIndex( + client(), + index, + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2)) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), randomFrom(Translog.Durability.values())) + .build() + ); + indexDocs(index, numDocs); + return; + } + + assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + ensureGreen(index); + + if (isIndexClosed(index) == false) { + assertDocCount(client(), index, numDocs); + } + + if (isFullyUpgradedTo(VERSION_MINUS_1)) { + logger.debug("--> [{}] closing index before upgrade without adding a read_only/write block", index); + closeIndex(index); + + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK)); + assertThat(indexBlocks(index), not(contains(INDEX_WRITE_BLOCK))); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(false)); + return; + } + + if (isFullyUpgradedTo(VERSION_CURRENT)) { + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(false)); + + logger.debug("--> re-opening index [{}] will add a write block", index); + openIndex(index); + ensureGreen(index); + + assertThat(indexBlocks(index), contains(INDEX_WRITE_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(false)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + assertDocCount(client(), index, numDocs); + + logger.debug("--> closing index [{}]", index); + closeIndex(index); + ensureGreen(index); + + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + + deleteIndex(index); + } + } + /** * Creates an index on N-2, marks as read-only on N-1 and creates a snapshot, then restores the snapshot on N. 
*/ @@ -190,11 +252,7 @@ public void testRestoreIndex() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); @@ -272,11 +330,7 @@ public void testRestoreIndexOverClosedIndex() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSearchableSnapshotIndexCompatibilityIT.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSearchableSnapshotIndexCompatibilityIT.java index 477f2099477cc..8566c568e9f47 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSearchableSnapshotIndexCompatibilityIT.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartSearchableSnapshotIndexCompatibilityIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.test.cluster.util.Version; @@ -46,11 +45,7 @@ public void testSearchableSnapshot() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); @@ -125,11 +120,7 @@ public void testSearchableSnapshotUpgrade() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java index 98054cb4b4f39..12374cf623a8c 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java @@ -14,6 +14,7 @@ import 
org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.test.cluster.util.Version; @@ -54,11 +55,7 @@ public void testIndexUpgrade() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); indexDocs(index, numDocs); return; @@ -181,6 +178,75 @@ public void testIndexUpgrade() throws Exception { } } + /** + * Creates an index on N-2, closes it on N-1 and then upgrades the cluster. + */ + public void testClosedIndexUpgrade() throws Exception { + final String index = suffix("closed-rolling-upgraded"); + final int numDocs = 1543; + + if (isFullyUpgradedTo(VERSION_MINUS_2)) { + createIndex( + client(), + index, + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), randomFrom(Translog.Durability.values())) + .build() + ); + indexDocs(index, numDocs); + return; + } + + assertThat(indexVersion(index), equalTo(VERSION_MINUS_2)); + ensureGreen(index); + + if (isIndexClosed(index) == false) { + assertDocCount(client(), index, numDocs); + } + + if (isFullyUpgradedTo(VERSION_MINUS_1)) { + logger.debug("--> closing index [{}]", index); + closeIndex(index); + + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(false)); + return; + } + + if (nodesVersions().values().stream().anyMatch(v -> v.onOrAfter(VERSION_CURRENT))) { + long upgradedNodes = nodesVersions().values().stream().filter(v -> v.onOrAfter(VERSION_CURRENT)).count(); + if (upgradedNodes == 1) { + // Mixed cluster with 1 of the 3 nodes upgraded: the index hasn't been reopened yet + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(false)); + + } else { + // Index has been reopened at least once, it should have an additional write block and the verified-read-only setting + assertThat(indexBlocks(index), contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + } + + openIndex(index); + ensureGreen(index); + + assertThat(indexBlocks(index), contains(INDEX_WRITE_BLOCK)); + assertIndexSetting(index, VERIFIED_BEFORE_CLOSE_SETTING, is(false)); + assertIndexSetting(index, VERIFIED_READ_ONLY_SETTING, is(true)); + assertDocCount(client(), index, numDocs); + + updateRandomIndexSettings(index); + updateRandomMappings(index); + + closeIndex(index); + ensureGreen(index); + } + } + /** * Creates an index on N-2, marks as read-only on N-1 and creates a snapshot, then restores the snapshot during rolling upgrades to N. 
*/ @@ -198,11 +264,7 @@ public void testRestoreIndex() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); @@ -253,19 +315,29 @@ public void testRestoreIndex() throws Exception { closeIndex(restoredIndex); ensureGreen(restoredIndex); + assertThat(indexBlocks(restoredIndex), contains(INDEX_CLOSED_BLOCK, INDEX_WRITE_BLOCK)); + assertIndexSetting(restoredIndex, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(restoredIndex, VERIFIED_READ_ONLY_SETTING, is(true)); + logger.debug("--> write API block can be removed on a closed index: INDEX_CLOSED_BLOCK already blocks writes"); updateIndexSettings(restoredIndex, Settings.builder().putNull(IndexMetadata.APIBlock.WRITE.settingName())); - logger.debug("--> but attempts to re-opening [{}] should fail due to the missing block", restoredIndex); - ex = expectThrows(ResponseException.class, () -> openIndex(restoredIndex)); - assertThat(ex.getMessage(), containsString("must be marked as read-only")); + assertThat(indexBlocks(restoredIndex), contains(INDEX_CLOSED_BLOCK)); + assertIndexSetting(restoredIndex, VERIFIED_BEFORE_CLOSE_SETTING, is(true)); + assertIndexSetting(restoredIndex, VERIFIED_READ_ONLY_SETTING, is(true)); - addIndexBlock(restoredIndex, IndexMetadata.APIBlock.WRITE); + if (randomBoolean()) { + addIndexBlock(restoredIndex, IndexMetadata.APIBlock.WRITE); + } logger.debug("--> re-opening restored index [{}]", restoredIndex); openIndex(restoredIndex); ensureGreen(restoredIndex); + assertThat(indexBlocks(restoredIndex), contains(INDEX_WRITE_BLOCK)); + assertIndexSetting(restoredIndex, VERIFIED_BEFORE_CLOSE_SETTING, is(false)); + assertIndexSetting(restoredIndex, VERIFIED_READ_ONLY_SETTING, is(true)); + assertDocCount(client(), restoredIndex, numDocs); logger.debug("--> deleting restored index [{}]", restoredIndex); diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeSearchableSnapshotIndexCompatibilityIT.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeSearchableSnapshotIndexCompatibilityIT.java index 1117d36024bf0..f1b27d5c34c1b 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeSearchableSnapshotIndexCompatibilityIT.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeSearchableSnapshotIndexCompatibilityIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.test.cluster.util.Version; @@ -51,11 +50,7 @@ public void testMountSearchableSnapshot() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 
0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); @@ -122,11 +117,7 @@ public void testSearchableSnapshotUpgrade() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); logger.debug("--> indexing [{}] docs in [{}]", numDocs, index); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java index 9ae78d35527f0..b09bc563f4c50 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java @@ -39,6 +39,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.isIndexVerifiedBeforeClosed; import static org.elasticsearch.core.Strings.format; /** @@ -164,7 +165,7 @@ public static boolean isReadOnlySupportedVersion( ) { if (isReadOnlyCompatible(indexMetadata, minimumCompatible, minimumReadOnlyCompatible)) { assert isFullySupportedVersion(indexMetadata, minimumCompatible) == false : indexMetadata; - final boolean isReadOnly = hasIndexWritesBlock(indexMetadata); + final boolean isReadOnly = hasReadOnlyBlocks(indexMetadata) || isIndexVerifiedBeforeClosed(indexMetadata); if (isReadOnly == false) { throw new IllegalStateException( "The index " @@ -185,7 +186,7 @@ public static boolean isReadOnlySupportedVersion( return false; } - private static boolean isReadOnlyCompatible( + public static boolean isReadOnlyCompatible( IndexMetadata indexMetadata, IndexVersion minimumCompatible, IndexVersion minimumReadOnlyCompatible @@ -208,7 +209,7 @@ private static boolean isReadOnlyCompatible( return false; } - private static boolean hasIndexWritesBlock(IndexMetadata indexMetadata) { + static boolean hasReadOnlyBlocks(IndexMetadata indexMetadata) { var indexSettings = indexMetadata.getSettings(); if (IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.get(indexSettings) || IndexMetadata.INDEX_READ_ONLY_SETTING.get(indexSettings)) { return indexMetadata.isSearchableSnapshot() @@ -220,7 +221,7 @@ private static boolean hasIndexWritesBlock(IndexMetadata indexMetadata) { public static boolean isReadOnlyVerified(IndexMetadata indexMetadata) { if (isReadOnlyCompatible(indexMetadata, IndexVersions.MINIMUM_COMPATIBLE, IndexVersions.MINIMUM_READONLY_COMPATIBLE)) { - return hasIndexWritesBlock(indexMetadata); + return hasReadOnlyBlocks(indexMetadata); } return false; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java index e84fc8d27ba59..6144265e4a83a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java @@ -90,6 +90,7 @@ import java.util.stream.Collectors; import static java.util.stream.Collectors.joining; +import static org.elasticsearch.cluster.metadata.IndexMetadataVerifier.hasReadOnlyBlocks; import static org.elasticsearch.core.Strings.format; /** @@ -1185,6 +1186,18 @@ private ClusterState openIndices(final 
Index[] indices, final ClusterState curre final Settings.Builder updatedSettings = Settings.builder().put(indexMetadata.getSettings()); updatedSettings.remove(VERIFIED_BEFORE_CLOSE_SETTING.getKey()); + // Reopening a read-only compatible index that has not been marked as read-only is possible if the index was + // verified-before-close in the first place. + var compatibilityVersion = indexMetadata.getCompatibilityVersion(); + if (compatibilityVersion.before(minIndexCompatibilityVersion) && hasReadOnlyBlocks(indexMetadata) == false) { + if (isIndexVerifiedBeforeClosed(indexMetadata)) { + updatedSettings.put(VERIFIED_READ_ONLY_SETTING.getKey(), true); + // at least set a write block if the index was verified-before-close at the time the cluster was upgraded + blocks.addIndexBlock(index.getName(), APIBlock.WRITE.block); + updatedSettings.put(APIBlock.WRITE.settingName(), true); + } // or else, the following indexMetadataVerifier.verifyIndexMetadata() should throw. + } + IndexMetadata newIndexMetadata = IndexMetadata.builder(indexMetadata) .state(IndexMetadata.State.OPEN) .settingsVersion(indexMetadata.getSettingsVersion() + 1) From 038aab864e4cdfa3fbe1736813dea0a9c37607ee Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 29 Jan 2025 09:58:08 -0500 Subject: [PATCH 211/383] Mark bbq indices as GA and add rolling upgrade integration tests (#121105) With the introduction of our new backing algorithm and making rescoring easier with the `rescore_vector` API, let's mark bbq as GA. Additionally, this commit adds rolling upgrade tests to ensure stability. --- docs/changelog/121105.yaml | 5 + .../mapping/types/dense-vector.asciidoc | 10 +- .../upgrades/VectorSearchIT.java | 179 ++++++++++++++++++ 3 files changed, 189 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/121105.yaml diff --git a/docs/changelog/121105.yaml b/docs/changelog/121105.yaml new file mode 100644 index 0000000000000..925d3a036e5c2 --- /dev/null +++ b/docs/changelog/121105.yaml @@ -0,0 +1,5 @@ +pr: 121105 +summary: Mark bbq indices as GA and add rolling upgrade integration tests +area: Vector Search +type: feature +issues: [] diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index 75fbaea59c6bd..7cb0bc671264e 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -118,7 +118,7 @@ The three following quantization strategies are supported: * `int8` - Quantizes each dimension of the vector to 1-byte integers. This reduces the memory footprint by 75% (or 4x) at the cost of some accuracy. * `int4` - Quantizes each dimension of the vector to half-byte integers. This reduces the memory footprint by 87% (or 8x) at the cost of accuracy. -* `bbq` - experimental:[] Better binary quantization which reduces each dimension to a single bit precision. This reduces the memory footprint by 96% (or 32x) at a larger cost of accuracy. Generally, oversampling during query time and reranking can help mitigate the accuracy loss. +* `bbq` - Better binary quantization which reduces each dimension to a single bit precision. This reduces the memory footprint by 96% (or 32x) at a larger cost of accuracy. Generally, oversampling during query time and reranking can help mitigate the accuracy loss. When using a quantized format, you may want to oversample and rescore the results to improve accuracy. See <> for more information. 
@@ -133,7 +133,7 @@ This means disk usage will increase by ~25% for `int8`, ~12.5% for `int4`, and ~ NOTE: `int4` quantization requires an even number of vector dimensions. -NOTE: experimental:[] `bbq` quantization only supports vector dimensions that are greater than 64. +NOTE: `bbq` quantization only supports vector dimensions that are greater than 64. Here is an example of how to create a byte-quantized index: @@ -177,7 +177,7 @@ PUT my-byte-quantized-index } -------------------------------------------------- -experimental:[] Here is an example of how to create a binary quantized index: +Here is an example of how to create a binary quantized index: [source,console] -------------------------------------------------- @@ -325,7 +325,7 @@ by 4x at the cost of some accuracy. See <>. -* experimental:[] `bbq_hnsw` - This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] in addition to automatically binary +* `bbq_hnsw` - This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] in addition to automatically binary quantization for scalable approximate kNN search with `element_type` of `float`. This can reduce the memory footprint by 32x at the cost of accuracy. See <>. * `flat` - This utilizes a brute-force search algorithm for exact kNN search. This supports all `element_type` values. @@ -333,7 +333,7 @@ by 32x at the cost of accuracy. See < response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + + // search with knn + searchRequest = new Request("POST", "/" + BBQ_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "knn": { + "field": "vector", + "query_vector": [4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6], + "k": 2, + "num_candidates": 5 + } + } + """); + response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(2)); + hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(0.9934857, 0.005)); + } + + public void testFlatBBQVectorSearch() throws Exception { + assumeTrue( + "Quantized vector search is not supported on this version", + getOldClusterTestVersion().onOrAfter(BBQ_VECTOR_SEARCH_VERSION) + ); + if (isOldCluster()) { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 64, + "index": true, + "similarity": "cosine", + "index_options": { + "type": "bbq_flat" + } + } + } + } + """; + // create index and index 10 random floating point vectors + createIndex(FLAT_BBQ_INDEX_NAME, Settings.EMPTY, mapping); + index64DimVectors(FLAT_BBQ_INDEX_NAME); + // force merge the index + client().performRequest(new Request("POST", "/" + FLAT_BBQ_INDEX_NAME + "/_forcemerge?max_num_segments=1")); + } + Request searchRequest = new Request("POST", "/" + FLAT_BBQ_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 
5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + + // search with knn + searchRequest = new Request("POST", "/" + FLAT_BBQ_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "knn": { + "field": "vector", + "query_vector": [4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6], + "k": 2, + "num_candidates": 5 + } + } + """); + response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(2)); + hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(0.9934857, 0.005)); + } + + private void index64DimVectors(String indexName) throws Exception { + String[] vectors = new String[] { + "{\"vector\":[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, " + + "1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]}", + "{\"vector\":[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, " + + "1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]}", + "{\"vector\":[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, " + + "1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]}", + "{\"vector\":[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, " + + "2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]}", + "{\"vector\":[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, " + + "3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]}", + "{\"vector\":[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, " + + "1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]}", + "{\"vector\":[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, " + + "1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]}", + "{}" }; + for (int i = 0; i < vectors.length; i++) { + Request indexRequest = new Request("PUT", "/" + indexName + "/_doc/" + i); + indexRequest.setJsonEntity(vectors[i]); + assertOK(client().performRequest(indexRequest)); + } + // always refresh to ensure the data is visible + refresh(indexName); + } + private void indexVectors(String indexName) throws Exception { String[] vectors = new String[] { "{\"vector\":[1, 1, 1]}", From 4b4c59de7f043bebbb6ff5a2baccdafb2b96b861 Mon Sep 17 00:00:00 2001 From: Kathleen DeRusso Date: Wed, 29 Jan 2025 10:05:05 -0500 Subject: [PATCH 212/383] Fix error in docs code snippet (#121187) --- docs/reference/mapping/types/semantic-text.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc index b252a0058258f..62e5075b9287d 100644 --- a/docs/reference/mapping/types/semantic-text.asciidoc +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -130,7 +130,7 @@ You can extract the most relevant fragments from a semantic text field by using [source,console] ------------------------------------------------------------ -PUT test-index +POST test-index/_search { "query": { "semantic": { @@ -240,4 +240,4 @@ PUT test-index `semantic_text` field types have the following limitations: * `semantic_text` fields are not currently supported as elements of <>. -* `semantic_text` fields can't currently be set as part of <>. \ No newline at end of file +* `semantic_text` fields can't currently be set as part of <>. From e9b877e58bb229f96d94f4211070095e875f00eb Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Wed, 29 Jan 2025 10:08:25 -0500 Subject: [PATCH 213/383] Clarify the behavior of remote/info and resolve/cluster for connected status of remotes (#118993) --- docs/reference/cluster/remote-info.asciidoc | 17 +++++- .../indices/resolve-cluster.asciidoc | 56 +++++++------------ 2 files changed, 35 insertions(+), 38 deletions(-) diff --git a/docs/reference/cluster/remote-info.asciidoc b/docs/reference/cluster/remote-info.asciidoc index 691acafd8ddbe..e91ccc4d8f4a1 100644 --- a/docs/reference/cluster/remote-info.asciidoc +++ b/docs/reference/cluster/remote-info.asciidoc @@ -26,10 +26,18 @@ Returns configured remote cluster information. [[cluster-remote-info-api-desc]] ==== {api-description-title} -The cluster remote info API allows you to retrieve all of the configured -remote cluster information. It returns connection and endpoint information keyed +The cluster remote info API allows you to retrieve information about configured +remote clusters. It returns connection and endpoint information keyed by the configured remote cluster alias. +TIP: This API returns information that reflects current state on the local cluster. +The `connected` field does not necessarily reflect whether a remote cluster is +down or unavailable, only whether there is currently an open connection to it. +Elasticsearch does not spontaneously try to reconnect to a disconnected remote +cluster. To trigger a reconnection, attempt a <>, +<>, or try the +<> endpoint. + [[cluster-remote-info-api-response-body]] ==== {api-response-body-title} @@ -39,7 +47,10 @@ by the configured remote cluster alias. `proxy`. `connected`:: - True if there is at least one connection to the remote cluster. + True if there is at least one open connection to the remote cluster. When + false, it means that the cluster no longer has an open connection to the + remote cluster. It does not necessarily mean that the remote cluster is + down or unavailable, just that at some point a connection was lost. `initial_connect_timeout`:: The initial connect timeout for remote cluster connections. diff --git a/docs/reference/indices/resolve-cluster.asciidoc b/docs/reference/indices/resolve-cluster.asciidoc index b1d379e50557c..f7d21e8c0b8ea 100644 --- a/docs/reference/indices/resolve-cluster.asciidoc +++ b/docs/reference/indices/resolve-cluster.asciidoc @@ -11,9 +11,7 @@ For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[In -- Resolves the specified index expressions to return information about -each cluster, including the local "querying" cluster, if included. 
If no index expression -is provided, this endpoint will return information about all the remote -clusters that are configured on the querying cluster. +each cluster, including the local "querying" cluster, if included. This endpoint is useful before doing a <> in order to determine which remote clusters should be included in a search. @@ -24,10 +22,12 @@ with this endpoint. For each cluster in scope, information is returned about: -1. whether the querying ("local") cluster is currently connected to it +1. whether the querying ("local") cluster was able to connect to each remote cluster + specified in the index expression. Note that this endpoint actively attempts to + contact the remote clusters, unlike the <> endpoint. 2. whether each remote cluster is configured with `skip_unavailable` as `true` or `false` 3. whether there are any indices, aliases or data streams on that cluster that match - the index expression (if one provided) + the index expression 4. whether the search is likely to have errors returned when you do a {ccs} (including any authorization errors if your user does not have permission to query a remote cluster or the indices on that cluster) @@ -42,12 +42,6 @@ Once the proper security permissions are obtained, then you can rely on the `con in the response to determine whether the remote cluster is available and ready for querying. ==== -NOTE: When querying older clusters that do not support the _resolve/cluster endpoint -without an index expression, the local cluster will send the index expression `dummy*` -to those remote clusters, so if an errors occur, you may see a reference to that index -expression even though you didn't request it. If it causes a problem, you can instead -include an index expression like `*:*` to this endpoint to bypass the issue. - //// [source,console] -------------------------------- @@ -77,14 +71,6 @@ PUT _cluster/settings // TEST[s/35.238.149.\d+:930\d+/\${transport_host}/] //// -[source,console] ----- -GET /_resolve/cluster ----- -// TEST[continued] - -Returns information about all remote clusters configured on the local cluster. - [source,console] ---- GET /_resolve/cluster/my-index-*,cluster*:my-index-* @@ -140,21 +126,28 @@ ignored when frozen. Defaults to `false`. + deprecated:[7.16.0] -[TIP] -==== -The index options above are only allowed when specifying an index expression. -You will get an error if you specify index options to the _resolve/cluster API -that takes no index expression. -==== - [discrete] [[usecases-for-resolve-cluster]] +=== Test availability of remote clusters + +The <> endpoint is commonly used to test whether the "local" +cluster (the cluster being queried) is connected to its remote clusters, but it does not +necessarily reflect whether the remote cluster is available or not. The remote cluster may +be available, while the local cluster is not currently connected to it. + +You can use the resolve-cluster API to attempt to reconnect to remote clusters +(for example with `GET _resolve/cluster/*:*`) and +the `connected` field in the response will indicate whether it was successful or not. +If a connection was (re-)established, this will also cause the +<> endpoint to now indicate a connected status. + + === Advantages of using this endpoint before a {ccs} You may want to exclude a cluster or index from a search when: -1. A remote cluster is not currently connected and is configured with `skip_unavailable`=`false`. +1. A remote cluster could not be connected to and is configured with `skip_unavailable`=`false`. 
Executing a {ccs} under those conditions will cause <>. @@ -268,14 +261,7 @@ GET /_resolve/cluster/not-present,clust*:my-index*,oldcluster:*?ignore_unavailab }, "cluster_two": { "connected": false, <3> - "skip_unavailable": false, - "matching_indices": true, - "version": { - "number": "8.13.0", - "build_flavor": "default", - "minimum_wire_compatibility_version": "7.17.0", - "minimum_index_compatibility_version": "7.0.0" - } + "skip_unavailable": false }, "oldcluster": { <4> "connected": true, From 6b76457a238ecd5c1e16906b664a44c22e4eb3e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Peter=20Stra=C3=9Fer?= Date: Wed, 29 Jan 2025 16:10:59 +0100 Subject: [PATCH 214/383] Fix syntax errors in the rescore retriever example (#121024) --- docs/reference/search/retriever.asciidoc | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index fe959c4e8cbee..5be2dd241d9dc 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -456,10 +456,13 @@ GET movies/_search "retriever": { "rescorer": { <2> "rescore": { - "query": { <3> - "window_size": 50, <4> + "window_size": 50, <3> + "query": { <4> "rescore_query": { "script_score": { + "query": { + "match_all": {} + }, "script": { "source": "cosineSimilarity(params.queryVector, 'product-vector_final_stage') + 1.0", "params": { @@ -516,8 +519,8 @@ GET movies/_search // TEST[skip:uses ELSER] <1> Specifies the number of top documents to return in the final response. <2> A `rescorer` retriever applied as the final step. -<3> The definition of the `query` rescorer. -<4> Defines the number of documents to rescore from the child retriever. +<3> Defines the number of documents to rescore from the child retriever. +<4> The definition of the `query` rescorer. <5> Specifies the child retriever definition. <6> Defines the number of documents returned by the `rrf` retriever, which limits the available documents to From 1b6a080473f1a7a182f08e2ef322d6b9ef30cee8 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Wed, 29 Jan 2025 16:31:36 +0100 Subject: [PATCH 215/383] Sparse doc values index for LogsDB `host.name` field (#120741) Here we introduce a new field type in `KeywordFieldMapper` that enables a sparse doc values index for `host.name` when: - Index mode is `LOGSDB` - The field is a keyword and part of the primary sort - Doc values are enabled and indexing is not explicitly disabled When these conditions are met: - `DocValuesSkipIndexType.RANGE` is applied - The inverted index is removed in favor of sparse doc values This reduces storage footprint and improves indexing throughput but may slow down some queries. The change is gated by a feature flag, with future plans to extend it to `@timestamp`. 
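For illustration, a minimal sketch of an index that meets these conditions, assuming the `doc_values_sparse_index` feature flag is enabled (the index name `logs-example` and the explicit sort settings are hypothetical; logsdb already sorts on `host.name` and `@timestamp` by default). With this configuration the `host.name` keyword field keeps doc values, is not explicitly indexed, and leads the index sort, so it would be stored with a sparse doc values index instead of an inverted index:

[source,console]
----
PUT logs-example
{
  "settings": {
    "index": {
      "mode": "logsdb",
      "sort.field": [ "host.name", "@timestamp" ],
      "sort.order": [ "asc", "desc" ]
    }
  },
  "mappings": {
    "properties": {
      "host.name": { "type": "keyword" },
      "@timestamp": { "type": "date" }
    }
  }
}
----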
--- .../elasticsearch/index/IndexVersions.java | 1 + .../index/mapper/FieldMapper.java | 6 + .../index/mapper/KeywordFieldMapper.java | 143 +++++++++++++++- .../index/mapper/KeywordFieldMapperTests.java | 156 ++++++++++++++++++ 4 files changed, 297 insertions(+), 9 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index e801a07f11670..309b8a4f143d3 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -143,6 +143,7 @@ private static Version parseUnchecked(String version) { public static final IndexVersion INFERENCE_METADATA_FIELDS = def(9_005_00_0, Version.LUCENE_10_0_0); public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME = def(9_006_00_0, Version.LUCENE_10_0_0); public static final IndexVersion SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP = def(9_007_00_0, Version.LUCENE_10_0_0); + public static final IndexVersion HOSTNAME_DOC_VALUES_SPARSE_INDEX = def(9_008_00_0, Version.LUCENE_10_0_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 7c1f3678a5dc9..2dfbf871ea183 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexMode; @@ -63,6 +64,7 @@ public abstract class FieldMapper extends Mapper { private static final Logger logger = LogManager.getLogger(FieldMapper.class); + public static final FeatureFlag DOC_VALUES_SPARSE_INDEX = new FeatureFlag("doc_values_sparse_index"); public static final Setting IGNORE_MALFORMED_SETTING = Setting.boolSetting("index.mapping.ignore_malformed", settings -> { if (IndexSettings.MODE.get(settings) == IndexMode.LOGSDB && IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(settings).onOrAfter(IndexVersions.ENABLE_IGNORE_MALFORMED_LOGSDB)) { @@ -851,6 +853,10 @@ public boolean isConfigured() { return isSet && Objects.equals(value, getDefaultValue()) == false; } + public boolean isSet() { + return isSet; + } + /** * Allows the parameter to accept a {@code null} value */ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 7b14739d36246..b7528bd3729ee 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -18,6 +18,7 @@ import org.apache.lucene.document.InvertableType; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.StoredField; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; @@ -38,7 +39,10 @@ import 
org.elasticsearch.common.lucene.search.AutomatonQueries; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSortConfig; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldData; @@ -87,9 +91,11 @@ public final class KeywordFieldMapper extends FieldMapper { private static final Logger logger = LogManager.getLogger(KeywordFieldMapper.class); public static final String CONTENT_TYPE = "keyword"; + private static final String HOST_NAME = "host.name"; public static class Defaults { public static final FieldType FIELD_TYPE; + public static final FieldType FIELD_TYPE_WITH_SKIP_DOC_VALUES; static { FieldType ft = new FieldType(); @@ -100,6 +106,16 @@ public static class Defaults { FIELD_TYPE = freezeAndDeduplicateFieldType(ft); } + static { + FieldType ft = new FieldType(); + ft.setTokenized(false); + ft.setOmitNorms(true); + ft.setIndexOptions(IndexOptions.NONE); + ft.setDocValuesType(DocValuesType.SORTED_SET); + ft.setDocValuesSkipIndexType(DocValuesSkipIndexType.RANGE); + FIELD_TYPE_WITH_SKIP_DOC_VALUES = freezeAndDeduplicateFieldType(ft); + } + public static final TextSearchInfo TEXT_SEARCH_INFO = new TextSearchInfo( FIELD_TYPE, null, @@ -154,7 +170,8 @@ public static final class Builder extends FieldMapper.DimensionBuilder { ); private final Parameter ignoreAbove; private final int ignoreAboveDefault; - + private final IndexSortConfig indexSortConfig; + private final IndexMode indexMode; private final Parameter indexOptions = TextParams.keywordIndexOptions(m -> toType(m).indexOptions); private final Parameter hasNorms = TextParams.norms(false, m -> toType(m).fieldType.omitNorms() == false); private final Parameter similarity = TextParams.similarity( @@ -189,7 +206,9 @@ public Builder(final String name, final MappingParserContext mappingParserContex mappingParserContext.getIndexAnalyzers(), mappingParserContext.scriptCompiler(), IGNORE_ABOVE_SETTING.get(mappingParserContext.getSettings()), - mappingParserContext.getIndexSettings().getIndexVersionCreated() + mappingParserContext.getIndexSettings().getIndexVersionCreated(), + mappingParserContext.getIndexSettings().getMode(), + mappingParserContext.getIndexSettings().getIndexSortConfig() ); } @@ -199,6 +218,18 @@ public Builder(final String name, final MappingParserContext mappingParserContex ScriptCompiler scriptCompiler, int ignoreAboveDefault, IndexVersion indexCreatedVersion + ) { + this(name, indexAnalyzers, scriptCompiler, ignoreAboveDefault, indexCreatedVersion, IndexMode.STANDARD, null); + } + + private Builder( + String name, + IndexAnalyzers indexAnalyzers, + ScriptCompiler scriptCompiler, + int ignoreAboveDefault, + IndexVersion indexCreatedVersion, + IndexMode indexMode, + IndexSortConfig indexSortConfig ) { super(name); this.indexAnalyzers = indexAnalyzers; @@ -233,6 +264,8 @@ public Builder(final String name, final MappingParserContext mappingParserContex throw new IllegalArgumentException("[ignore_above] must be positive, got [" + v + "]"); } }); + this.indexSortConfig = indexSortConfig; + this.indexMode = indexMode; } public Builder(String name, IndexVersion indexCreatedVersion) { @@ -359,15 +392,21 @@ private KeywordFieldType buildFieldType(MapperBuilderContext context, FieldType @Override public KeywordFieldMapper 
build(MapperBuilderContext context) { - FieldType fieldtype = new FieldType(Defaults.FIELD_TYPE); + FieldType fieldtype = resolveFieldType(indexCreatedVersion, indexSortConfig, indexMode, context.buildFullName(leafName())); fieldtype.setOmitNorms(this.hasNorms.getValue() == false); - fieldtype.setIndexOptions(TextParams.toIndexOptions(this.indexed.getValue(), this.indexOptions.getValue())); fieldtype.setStored(this.stored.getValue()); fieldtype.setDocValuesType(this.hasDocValues.getValue() ? DocValuesType.SORTED_SET : DocValuesType.NONE); + if (fieldtype.equals(Defaults.FIELD_TYPE_WITH_SKIP_DOC_VALUES) == false) { + // NOTE: override index options only if we are not using a sparse doc values index (and we use an inverted index) + fieldtype.setIndexOptions(TextParams.toIndexOptions(this.indexed.getValue(), this.indexOptions.getValue())); + } if (fieldtype.equals(Defaults.FIELD_TYPE)) { // deduplicate in the common default case to save some memory fieldtype = Defaults.FIELD_TYPE; } + if (fieldtype.equals(Defaults.FIELD_TYPE_WITH_SKIP_DOC_VALUES)) { + fieldtype = Defaults.FIELD_TYPE_WITH_SKIP_DOC_VALUES; + } super.hasScript = script.get() != null; super.onScriptError = onScriptError.getValue(); return new KeywordFieldMapper( @@ -379,6 +418,63 @@ public KeywordFieldMapper build(MapperBuilderContext context) { this ); } + + private FieldType resolveFieldType( + final IndexVersion indexCreatedVersion, + final IndexSortConfig indexSortConfig, + final IndexMode indexMode, + final String fullFieldName + ) { + if (FieldMapper.DOC_VALUES_SPARSE_INDEX.isEnabled() + && indexCreatedVersion.onOrAfter(IndexVersions.HOSTNAME_DOC_VALUES_SPARSE_INDEX) + && shouldUseDocValuesSparseIndex(indexSortConfig, indexMode, fullFieldName)) { + return new FieldType(Defaults.FIELD_TYPE_WITH_SKIP_DOC_VALUES); + } + return new FieldType(Defaults.FIELD_TYPE); + } + + /** + * Determines whether to use a sparse index representation for doc values. + * + *

<p>If the field is explicitly indexed by setting {@code index: true}, we do not use
+ * a sparse doc values index but instead rely on the inverted index, as is typically
+ * the case for keyword fields.</p>
+ *
+ * <p>This method checks several conditions to decide if the sparse index format
+ * should be applied:</p>
+ *
+ * <ul>
+ *     <li>Returns {@code false} immediately if the field is explicitly indexed.</li>
+ *     <li>Ensures the field is not explicitly configured as indexed (i.e., {@code index} has its default value).</li>
+ *     <li>Requires doc values to be enabled.</li>
+ *     <li>Index mode must be {@link IndexMode#LOGSDB}.</li>
+ *     <li>Field name must be {@code host.name}.</li>
+ *     <li>The {@code host.name} field must be a primary sort field.</li>
+ * </ul>
+ *
+ * <p>Returns {@code true} if all conditions are met, indicating that sparse doc values
+ * should be used. Otherwise, returns {@code false}.</p>
+ * + * @param indexSortConfig The index sort configuration, used to check primary sorting. + * @param indexMode The mode of the index, which must be {@link IndexMode#LOGSDB}. + * @param fullFieldName The name of the field being checked, which must be {@code host.name}. + * @return {@code true} if sparse doc values should be used, otherwise {@code false}. + */ + + private boolean shouldUseDocValuesSparseIndex( + final IndexSortConfig indexSortConfig, + final IndexMode indexMode, + final String fullFieldName + ) { + if (indexed.isSet() && indexed.getValue()) { + return false; + } + return indexed.isConfigured() == false + && hasDocValues.getValue() + && IndexMode.LOGSDB.equals(indexMode) + && HOST_NAME.equals(fullFieldName) + && (indexSortConfig != null && indexSortConfig.hasPrimarySortOnField(HOST_NAME)); + } } public static final TypeParser PARSER = createTypeParserWithLegacySupport(Builder::new); @@ -392,6 +488,9 @@ public static final class KeywordFieldType extends StringFieldType { private final FieldValues scriptValues; private final boolean isDimension; private final boolean isSyntheticSource; + private final IndexMode indexMode; + private final IndexSortConfig indexSortConfig; + private final boolean hasDocValuesSparseIndex; public KeywordFieldType( String name, @@ -417,6 +516,9 @@ public KeywordFieldType( this.scriptValues = builder.scriptValues(); this.isDimension = builder.dimension.getValue(); this.isSyntheticSource = isSyntheticSource; + this.indexMode = builder.indexMode; + this.indexSortConfig = builder.indexSortConfig; + this.hasDocValuesSparseIndex = DocValuesSkipIndexType.NONE.equals(fieldType.docValuesSkipIndexType()) == false; } public KeywordFieldType(String name, boolean isIndexed, boolean hasDocValues, Map meta) { @@ -428,6 +530,9 @@ public KeywordFieldType(String name, boolean isIndexed, boolean hasDocValues, Ma this.scriptValues = null; this.isDimension = false; this.isSyntheticSource = false; + this.indexMode = IndexMode.STANDARD; + this.indexSortConfig = null; + this.hasDocValuesSparseIndex = false; } public KeywordFieldType(String name) { @@ -450,6 +555,9 @@ public KeywordFieldType(String name, FieldType fieldType) { this.scriptValues = null; this.isDimension = false; this.isSyntheticSource = false; + this.indexMode = IndexMode.STANDARD; + this.indexSortConfig = null; + this.hasDocValuesSparseIndex = DocValuesSkipIndexType.NONE.equals(fieldType.docValuesSkipIndexType()) == false; } public KeywordFieldType(String name, NamedAnalyzer analyzer) { @@ -461,6 +569,9 @@ public KeywordFieldType(String name, NamedAnalyzer analyzer) { this.scriptValues = null; this.isDimension = false; this.isSyntheticSource = false; + this.indexMode = IndexMode.STANDARD; + this.indexSortConfig = null; + this.hasDocValuesSparseIndex = false; } @Override @@ -851,6 +962,18 @@ public boolean hasScriptValues() { public boolean hasNormalizer() { return normalizer != Lucene.KEYWORD_ANALYZER; } + + public IndexMode getIndexMode() { + return indexMode; + } + + public IndexSortConfig getIndexSortConfig() { + return indexSortConfig; + } + + public boolean hasDocValuesSparseIndex() { + return hasDocValuesSparseIndex; + } } private final boolean indexed; @@ -866,7 +989,8 @@ public boolean hasNormalizer() { private final IndexAnalyzers indexAnalyzers; private final int ignoreAboveDefault; - private final int ignoreAbove; + private final IndexMode indexMode; + private final IndexSortConfig indexSortConfig; private KeywordFieldMapper( String simpleName, @@ -890,7 +1014,8 @@ private KeywordFieldMapper( 
this.indexCreatedVersion = builder.indexCreatedVersion; this.isSyntheticSource = isSyntheticSource; this.ignoreAboveDefault = builder.ignoreAboveDefault; - this.ignoreAbove = builder.ignoreAbove.getValue(); + this.indexMode = builder.indexMode; + this.indexSortConfig = builder.indexSortConfig; } @Override @@ -1008,9 +1133,9 @@ public Map indexAnalyzers() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(leafName(), indexAnalyzers, scriptCompiler, ignoreAboveDefault, indexCreatedVersion).dimension( - fieldType().isDimension() - ).init(this); + return new Builder(leafName(), indexAnalyzers, scriptCompiler, ignoreAboveDefault, indexCreatedVersion, indexMode, indexSortConfig) + .dimension(fieldType().isDimension()) + .init(this); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index 052bf995bdd48..d78f2110daa67 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexSortConfig; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.CharFilterFactory; @@ -770,4 +771,159 @@ public void testDocValuesLoadedFromStoredSynthetic() throws IOException { ); assertScriptDocValues(mapper, "foo", equalTo(List.of("foo"))); } + + public void testFieldTypeWithSkipDocValues_LogsDbMode() throws IOException { + final MapperService mapperService = createMapperService( + Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + .build(), + mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.endObject(); + }) + ); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertFalse(mapper.fieldType().isIndexed()); + assertTrue(mapper.fieldType().hasDocValuesSparseIndex()); + } + + public void testFieldTypeDefault_StandardMode() throws IOException { + final MapperService mapperService = createMapperService( + Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + .build(), + mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.endObject(); + }) + ); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertTrue(mapper.fieldType().isIndexed()); + assertFalse(mapper.fieldType().hasDocValuesSparseIndex()); + } + + public void testFieldTypeDefault_NonMatchingFieldName() throws IOException { + final MapperService mapperService = createMapperService( + Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "hostname") + .build(), + mapping(b -> { + b.startObject("hostname"); + b.field("type", "keyword"); + b.endObject(); + }) + ); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) 
mapperService.documentMapper().mappers().getMapper("hostname"); + assertTrue(mapper.fieldType().hasDocValues()); + assertTrue(mapper.fieldType().isIndexed()); + assertFalse(mapper.fieldType().hasDocValuesSparseIndex()); + } + + public void testFieldTypeDefault_ConfiguredIndexed() throws IOException { + final MapperService mapperService = createMapperService( + Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + .build(), + mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.field("index", true); + b.endObject(); + }) + ); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertTrue(mapper.fieldType().isIndexed()); + assertFalse(mapper.fieldType().hasDocValuesSparseIndex()); + } + + public void testFieldTypeDefault_ConfiguredDocValues() throws IOException { + final MapperService mapperService = createMapperService( + Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + .build(), + mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.field("doc_values", true); + b.endObject(); + }) + ); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertFalse(mapper.fieldType().isIndexed()); + assertTrue(mapper.fieldType().hasDocValuesSparseIndex()); + } + + public void testFieldTypeDefault_LogsDbMode_NonSortField() throws IOException { + final MapperService mapperService = createMapperService( + Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()).build(), + mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.endObject(); + }) + ); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertTrue(mapper.fieldType().isIndexed()); + assertFalse(mapper.fieldType().hasDocValuesSparseIndex()); + } + + public void testFieldTypeWithSkipDocValues_IndexedFalseDocValuesTrue() throws IOException { + final MapperService mapperService = createMapperService( + Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + .build(), + mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.field("index", false); + b.field("doc_values", true); + b.endObject(); + }) + ); + + final KeywordFieldMapper mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertTrue(mapper.fieldType().hasDocValues()); + assertFalse(mapper.fieldType().isIndexed()); + assertFalse(mapper.fieldType().hasDocValuesSparseIndex()); + } + + public void testFieldTypeDefault_IndexedFalseDocValuesFalse() throws IOException { + final MapperService mapperService = createMapperService( + Settings.builder() + .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) + .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "host.name") + .build(), + mapping(b -> { + b.startObject("host.name"); + b.field("type", "keyword"); + b.field("index", false); + b.field("doc_values", false); + b.endObject(); + }) + ); + + final KeywordFieldMapper 
mapper = (KeywordFieldMapper) mapperService.documentMapper().mappers().getMapper("host.name"); + assertFalse(mapper.fieldType().hasDocValues()); + assertFalse(mapper.fieldType().isIndexed()); + assertFalse(mapper.fieldType().hasDocValuesSparseIndex()); + } } From ceb0dc76683778c2cc29a876ed967c37f785e92f Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Wed, 29 Jan 2025 17:12:29 +0100 Subject: [PATCH 216/383] Update docker.elastic.co/wolfi/chainguard-base:latest Docker digest to bd40170 (#121030) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .../main/java/org/elasticsearch/gradle/internal/DockerBase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index 24ba0740cfe26..59723823e9258 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -22,7 +22,7 @@ public enum DockerBase { // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off - WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:dd66beec64a7f9b19c6c35a1195153b2b630a55e16ec71949ed5187c5947eea1", + WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:bd401704a162a7937cd1015f755ca9da9aba0fdf967fc6bf90bf8d3f6b2eb557", "-wolfi", "apk" ), From a52c26a6b491c28b820981b01f333f3f89e0f4ce Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 29 Jan 2025 16:20:50 +0000 Subject: [PATCH 217/383] [ML] Change the auditor to write via an alias (#120064) Changes the ml and transform auditor classes to write through an alias. 
The alias is required to rollover the index which required for upgrades --- docs/changelog/120064.yaml | 5 + muted-tests.yml | 2 + .../common/notifications/AbstractAuditor.java | 150 ++--- .../ml/notifications/NotificationsIndex.java | 6 +- .../xpack/core/ml/utils/MlIndexAndAlias.java | 42 +- .../TransformInternalIndexConstants.java | 1 + .../notifications/AbstractAuditorTests.java | 234 +++++-- .../core/ml/utils/MlIndexAndAliasTests.java | 7 +- .../ml/notifications_index_template.json | 2 +- .../ml/integration/AnnotationIndexIT.java | 8 +- .../AutodetectResultProcessorIT.java | 7 +- .../ChunkedTrainedModelPersisterIT.java | 15 +- .../integration/DataFrameAnalyticsCRUDIT.java | 8 +- .../DataFrameAnalyticsConfigProviderIT.java | 8 +- .../ml/integration/JobResultsProviderIT.java | 2 +- .../ml/integration/NotificationsIndexIT.java | 82 +++ .../xpack/ml/MachineLearning.java | 89 ++- .../xpack/ml/MlIndexTemplateRegistry.java | 3 +- .../TransportDeleteExpiredDataAction.java | 5 +- .../inference/ingest/InferenceProcessor.java | 11 +- .../upgrader/SnapshotUpgradeTaskExecutor.java | 4 +- .../task/OpenJobPersistentTasksExecutor.java | 4 +- .../ml/notifications/AbstractMlAuditor.java | 46 +- .../AnomalyDetectionAuditor.java | 10 +- .../DataFrameAnalyticsAuditor.java | 10 +- .../ml/notifications/InferenceAuditor.java | 10 +- .../xpack/ml/notifications/SystemAuditor.java | 6 +- ...sportGetTrainedModelsStatsActionTests.java | 9 +- .../InferenceProcessorFactoryTests.java | 630 +++++++++--------- .../OpenJobPersistentTasksExecutorTests.java | 3 +- .../auditor/NotificationsIndexIT.java | 72 ++ .../xpack/transform/Transform.java | 14 +- .../notifications/TransformAuditor.java | 57 +- .../action/TransformUpdaterTests.java | 4 +- .../notifications/MockTransformAuditor.java | 7 +- .../upgrades/MlMappingsUpgradeIT.java | 23 + 36 files changed, 1015 insertions(+), 581 deletions(-) create mode 100644 docs/changelog/120064.yaml create mode 100644 x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/NotificationsIndexIT.java create mode 100644 x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/auditor/NotificationsIndexIT.java diff --git a/docs/changelog/120064.yaml b/docs/changelog/120064.yaml new file mode 100644 index 0000000000000..8874b2ffe9588 --- /dev/null +++ b/docs/changelog/120064.yaml @@ -0,0 +1,5 @@ +pr: 120064 +summary: Change the auditor to write via an alias +area: Machine Learning +type: upgrade +issues: [] diff --git a/muted-tests.yml b/muted-tests.yml index 499ea074fa697..b835dd5bc53fd 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -314,6 +314,8 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testGetUsersWithProfileUidWhenProfileIndexDoesNotExists issue: https://github.com/elastic/elasticsearch/issues/121179 +- class: org.elasticsearch.xpack.ml.integration.PyTorchModelIT + issue: https://github.com/elastic/elasticsearch/issues/121165 - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testSetEnabled issue: https://github.com/elastic/elasticsearch/issues/121183 diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java index d02fb85f46b1e..f5c9f48454073 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java @@ -8,23 +8,20 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.internal.OriginSettingClient; -import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.ml.utils.MlIndexAndAlias; -import org.elasticsearch.xpack.core.template.IndexTemplateConfig; import java.io.IOException; import java.util.Date; @@ -32,7 +29,6 @@ import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Supplier; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -43,59 +39,36 @@ public abstract class AbstractAuditor { private static final Logger logger = LogManager.getLogger(AbstractAuditor.class); static final int MAX_BUFFER_SIZE = 1000; - static final TimeValue MASTER_TIMEOUT = TimeValue.timeValueMinutes(1); + protected static final TimeValue MASTER_TIMEOUT = TimeValue.timeValueMinutes(1); private final OriginSettingClient client; private final String nodeName; - private final String auditIndex; - private final String templateName; - private final Supplier templateSupplier; + private final String auditIndexWriteAlias; private final AbstractAuditMessageFactory messageFactory; - private final AtomicBoolean hasLatestTemplate; - - private Queue backlog; private final ClusterService clusterService; - private final AtomicBoolean putTemplateInProgress; - - protected AbstractAuditor( - OriginSettingClient client, - String auditIndex, - IndexTemplateConfig templateConfig, - String nodeName, - AbstractAuditMessageFactory messageFactory, - ClusterService clusterService - ) { + private final IndexNameExpressionResolver indexNameExpressionResolver; + private final AtomicBoolean indexAndAliasCreated; - this(client, auditIndex, templateConfig.getTemplateName(), () -> { - try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, templateConfig.loadBytes())) { - return new TransportPutComposableIndexTemplateAction.Request(templateConfig.getTemplateName()).indexTemplate( - ComposableIndexTemplate.parse(parser) - ).masterNodeTimeout(MASTER_TIMEOUT); - } catch (IOException e) { - throw new ElasticsearchParseException("unable to parse composable template " + templateConfig.getTemplateName(), e); - } - }, nodeName, messageFactory, clusterService); - } + private Queue backlog; + private final AtomicBoolean indexAndAliasCreationInProgress; protected AbstractAuditor( OriginSettingClient client, 
- String auditIndex, - String templateName, - Supplier templateSupplier, + String auditIndexWriteAlias, String nodeName, AbstractAuditMessageFactory messageFactory, - ClusterService clusterService + ClusterService clusterService, + IndexNameExpressionResolver indexNameExpressionResolver ) { this.client = Objects.requireNonNull(client); - this.auditIndex = Objects.requireNonNull(auditIndex); - this.templateName = Objects.requireNonNull(templateName); - this.templateSupplier = Objects.requireNonNull(templateSupplier); + this.auditIndexWriteAlias = Objects.requireNonNull(auditIndexWriteAlias); this.messageFactory = Objects.requireNonNull(messageFactory); - this.clusterService = Objects.requireNonNull(clusterService); this.nodeName = Objects.requireNonNull(nodeName); + this.clusterService = Objects.requireNonNull(clusterService); + this.indexNameExpressionResolver = Objects.requireNonNull(indexNameExpressionResolver); this.backlog = new ConcurrentLinkedQueue<>(); - this.hasLatestTemplate = new AtomicBoolean(); - this.putTemplateInProgress = new AtomicBoolean(); + this.indexAndAliasCreated = new AtomicBoolean(); + this.indexAndAliasCreationInProgress = new AtomicBoolean(); } public void audit(Level level, String resourceId, String message) { @@ -114,6 +87,19 @@ public void error(String resourceId, String message) { audit(Level.ERROR, resourceId, message); } + /** + * Calling reset will cause the auditor to check the required + * index and alias exist and recreate if necessary + */ + public void reset() { + indexAndAliasCreated.set(false); + if (backlog == null) { + // create a new backlog in case documents need + // to be temporarily stored when the new index/alias is created + backlog = new ConcurrentLinkedQueue<>(); + } + } + private static void onIndexResponse(DocWriteResponse response) { logger.trace("Successfully wrote audit message"); } @@ -123,35 +109,24 @@ private static void onIndexFailure(Exception exception) { } protected void indexDoc(ToXContent toXContent) { - if (hasLatestTemplate.get()) { + if (indexAndAliasCreated.get()) { writeDoc(toXContent); return; } - if (MlIndexAndAlias.hasIndexTemplate(clusterService.state(), templateName)) { + // install template & create index with alias + var createListener = ActionListener.wrap(success -> { + indexAndAliasCreationInProgress.set(false); synchronized (this) { - // synchronized so nothing can be added to backlog while this value changes - hasLatestTemplate.set(true); + // synchronized so nothing can be added to backlog while writing it + indexAndAliasCreated.set(true); + writeBacklog(); } - writeDoc(toXContent); - return; - } - ActionListener putTemplateListener = ActionListener.wrap(r -> { - synchronized (this) { - // synchronized so nothing can be added to backlog while this value changes - hasLatestTemplate.set(true); - } - logger.info("Auditor template [{}] successfully installed", templateName); - putTemplateInProgress.set(false); - writeBacklog(); - }, e -> { - logger.warn(Strings.format("Error putting latest template [%s]", templateName), e); - putTemplateInProgress.set(false); - }); + }, e -> { indexAndAliasCreationInProgress.set(false); }); synchronized (this) { - if (hasLatestTemplate.get() == false) { + if (indexAndAliasCreated.get() == false) { // synchronized so that hasLatestTemplate does not change value // between the read and adding to the backlog assert backlog != null; @@ -165,19 +140,11 @@ protected void indexDoc(ToXContent toXContent) { } // stop multiple invocations - if (putTemplateInProgress.compareAndSet(false, 
true)) { - MlIndexAndAlias.installIndexTemplateIfRequired( - clusterService.state(), - client, - templateSupplier.get(), - putTemplateListener - ); + if (indexAndAliasCreationInProgress.compareAndSet(false, true)) { + installTemplateAndCreateIndex(createListener); } - return; } } - - indexDoc(toXContent); } private void writeDoc(ToXContent toXContent) { @@ -185,9 +152,10 @@ private void writeDoc(ToXContent toXContent) { } private IndexRequest indexRequest(ToXContent toXContent) { - IndexRequest indexRequest = new IndexRequest(auditIndex); + IndexRequest indexRequest = new IndexRequest(auditIndexWriteAlias); indexRequest.source(toXContentBuilder(toXContent)); indexRequest.timeout(TimeValue.timeValueSeconds(5)); + indexRequest.setRequireAlias(true); return indexRequest; } @@ -206,7 +174,7 @@ protected void clearBacklog() { protected void writeBacklog() { assert backlog != null; if (backlog == null) { - logger.error("Message back log has already been written"); + logger.debug("Message back log has already been written"); return; } @@ -221,7 +189,7 @@ protected void writeBacklog() { if (bulkItemResponses.hasFailures()) { logger.warn("Failures bulk indexing the message back log: {}", bulkItemResponses.buildFailureMessage()); } else { - logger.trace("Successfully wrote audit message backlog after upgrading template"); + logger.trace("Successfully wrote audit message backlog"); } backlog = null; }, AbstractAuditor::onIndexFailure)); @@ -231,4 +199,32 @@ protected void writeBacklog() { int backLogSize() { return backlog.size(); } + + private void installTemplateAndCreateIndex(ActionListener listener) { + SubscribableListener.newForked(l -> { + MlIndexAndAlias.installIndexTemplateIfRequired(clusterService.state(), client, templateVersion(), putTemplateRequest(), l); + }).andThen((l, success) -> { + var indexDetails = indexDetails(); + MlIndexAndAlias.createIndexAndAliasIfNecessary( + client, + clusterService.state(), + indexNameExpressionResolver, + indexDetails.indexPrefix(), + indexDetails.indexVersion(), + auditIndexWriteAlias, + MASTER_TIMEOUT, + ActiveShardCount.DEFAULT, + l + ); + + }).addListener(listener); + } + + protected abstract TransportPutComposableIndexTemplateAction.Request putTemplateRequest(); + + protected abstract int templateVersion(); + + protected abstract IndexDetails indexDetails(); + + public record IndexDetails(String indexPrefix, String indexVersion) {}; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java index 1f5c15a46fc4e..059c217cbf1fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java @@ -13,11 +13,15 @@ public final class NotificationsIndex { - public static final String NOTIFICATIONS_INDEX = ".ml-notifications-000002"; + public static final String NOTIFICATIONS_INDEX_PREFIX = ".ml-notifications-"; + public static final String NOTIFICATIONS_INDEX_VERSION = "000002"; + public static final String NOTIFICATIONS_INDEX = NOTIFICATIONS_INDEX_PREFIX + NOTIFICATIONS_INDEX_VERSION; + public static final String NOTIFICATIONS_INDEX_WRITE_ALIAS = ".ml-notifications-write"; private static final String RESOURCE_PATH = "/ml/"; private static final String MAPPINGS_VERSION_VARIABLE = "xpack.ml.version"; public static final int 
NOTIFICATIONS_INDEX_MAPPINGS_VERSION = 1; + public static final int NOTIFICATIONS_INDEX_TEMPLATE_VERSION = 1; private NotificationsIndex() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java index 06b2cfbad0105..22f17428ac141 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java @@ -107,6 +107,34 @@ public static void createIndexAndAliasIfNecessary( ActiveShardCount waitForShardCount, ActionListener finalListener ) { + createIndexAndAliasIfNecessary( + client, + clusterState, + resolver, + indexPatternPrefix, + FIRST_INDEX_SIX_DIGIT_SUFFIX, + alias, + masterNodeTimeout, + waitForShardCount, + finalListener + ); + } + + /** + * Same as createIndexAndAliasIfNecessary but with the first concrete + * index number specified. + */ + public static void createIndexAndAliasIfNecessary( + Client client, + ClusterState clusterState, + IndexNameExpressionResolver resolver, + String indexPatternPrefix, + String indexNumber, + String alias, + TimeValue masterNodeTimeout, + ActiveShardCount waitForShardCount, + ActionListener finalListener + ) { final ActionListener loggingListener = ActionListener.wrap(finalListener::onResponse, e -> { logger.error(() -> format("Failed to create alias and index with pattern [%s] and alias [%s]", indexPatternPrefix, alias), e); @@ -125,7 +153,7 @@ public static void createIndexAndAliasIfNecessary( String legacyIndexWithoutSuffix = indexPatternPrefix; String indexPattern = indexPatternPrefix + "*"; // The initial index name must be suitable for rollover functionality. - String firstConcreteIndex = indexPatternPrefix + FIRST_INDEX_SIX_DIGIT_SUFFIX; + String firstConcreteIndex = indexPatternPrefix + indexNumber; String[] concreteIndexNames = resolver.concreteIndexNames(clusterState, IndicesOptions.lenientExpandHidden(), indexPattern); Optional indexPointedByCurrentWriteAlias = clusterState.getMetadata().hasAlias(alias) ? 
clusterState.getMetadata().getIndicesLookup().get(alias).getIndices().stream().map(Index::getName).findFirst() @@ -330,7 +358,7 @@ public static void installIndexTemplateIfRequired( String templateName = templateConfig.getTemplateName(); // The check for existence of the template is against the cluster state, so very cheap - if (hasIndexTemplate(clusterState, templateName)) { + if (hasIndexTemplate(clusterState, templateName, templateConfig.getVersion())) { listener.onResponse(true); return; } @@ -344,7 +372,7 @@ public static void installIndexTemplateIfRequired( throw new ElasticsearchParseException("unable to parse composable template " + templateConfig.getTemplateName(), e); } - installIndexTemplateIfRequired(clusterState, client, request, listener); + installIndexTemplateIfRequired(clusterState, client, templateConfig.getVersion(), request, listener); } /** @@ -360,11 +388,12 @@ public static void installIndexTemplateIfRequired( public static void installIndexTemplateIfRequired( ClusterState clusterState, Client client, + int templateVersion, TransportPutComposableIndexTemplateAction.Request templateRequest, ActionListener listener ) { // The check for existence of the template is against the cluster state, so very cheap - if (hasIndexTemplate(clusterState, templateRequest.name())) { + if (hasIndexTemplate(clusterState, templateRequest.name(), templateVersion)) { listener.onResponse(true); return; } @@ -379,8 +408,9 @@ public static void installIndexTemplateIfRequired( executeAsyncWithOrigin(client, ML_ORIGIN, TransportPutComposableIndexTemplateAction.TYPE, templateRequest, innerListener); } - public static boolean hasIndexTemplate(ClusterState state, String templateName) { - return state.getMetadata().templatesV2().containsKey(templateName); + public static boolean hasIndexTemplate(ClusterState state, String templateName, long version) { + var template = state.getMetadata().templatesV2().get(templateName); + return template != null && Long.valueOf(version).equals(template.version()); } public static boolean has6DigitSuffix(String indexName) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java index 8439c9cd76fad..652ae7f2d0593 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java @@ -48,6 +48,7 @@ public final class TransformInternalIndexConstants { public static final String AUDIT_INDEX_PATTERN_DEPRECATED = TRANSFORM_PREFIX_DEPRECATED + "notifications-*"; public static final String AUDIT_INDEX_READ_ALIAS = TRANSFORM_PREFIX + "notifications-read"; + public static final String AUDIT_INDEX_WRITE_ALIAS = TRANSFORM_PREFIX + "notifications-write"; public static final String AUDIT_INDEX = AUDIT_INDEX_PREFIX + AUDIT_TEMPLATE_VERSION; private TransformInternalIndexConstants() {} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java index 1f28afbbc75b7..bcf777906bb7c 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java @@ -6,7 +6,14 @@ */ package org.elasticsearch.xpack.core.common.notifications; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; +import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.TransportBulkAction; @@ -17,8 +24,11 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.IndicesAdminClient; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; @@ -26,6 +36,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -33,7 +45,9 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.elasticsearch.xpack.core.template.IndexTemplateConfig; import org.junit.After; @@ -51,6 +65,8 @@ import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -65,12 +81,16 @@ public class AbstractAuditorTests extends ESTestCase { private static final String TEST_NODE_NAME = "node_1"; private static final String TEST_ORIGIN = "test_origin"; - private static final String TEST_INDEX = "test_index"; + private static final String TEST_INDEX_PREFIX = "test_index"; + private static final String TEST_INDEX_VERSION = "-000001"; + private static final String TEST_INDEX = TEST_INDEX_PREFIX + TEST_INDEX_VERSION; + private 
    static final String TEST_INDEX_ALIAS = "test_index_write";
    private static final int TEST_TEMPLATE_VERSION = 23456789;
    private Client client;
    private ArgumentCaptor indexRequestCaptor;
+   private ArgumentCaptor bulkRequestCaptor;
    private long startMillis;
    private ThreadPool threadPool;
@@ -83,6 +103,7 @@ public void setUpMocks() {
        when(mockPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
        indexRequestCaptor = ArgumentCaptor.forClass(IndexRequest.class);
+       bulkRequestCaptor = ArgumentCaptor.forClass(BulkRequest.class);
        startMillis = System.currentTimeMillis();
        threadPool = new TestThreadPool(getClass().getName());
@@ -97,53 +118,58 @@ public void shutdownThreadPool() {
    public void testInfo() throws IOException {
        AbstractAuditor auditor = createTestAuditorWithTemplateInstalled();
        auditor.info("foo", "Here is my info");
-
-       verify(client).execute(eq(TransportIndexAction.TYPE), indexRequestCaptor.capture(), any());
-       IndexRequest indexRequest = indexRequestCaptor.getValue();
-       assertThat(indexRequest.indices(), arrayContaining(TEST_INDEX));
-       assertThat(indexRequest.timeout(), equalTo(TimeValue.timeValueSeconds(5)));
-       AbstractAuditMessageTests.TestAuditMessage auditMessage = parseAuditMessage(indexRequest.source());
-       assertThat(auditMessage.getResourceId(), equalTo("foo"));
-       assertThat(auditMessage.getMessage(), equalTo("Here is my info"));
-       assertThat(auditMessage.getLevel(), equalTo(Level.INFO));
-       assertThat(
-           auditMessage.getTimestamp().getTime(),
-           allOf(greaterThanOrEqualTo(startMillis), lessThanOrEqualTo(System.currentTimeMillis()))
-       );
-       assertThat(auditMessage.getNodeName(), equalTo(TEST_NODE_NAME));
+       // The first audit is written as a bulk request from the backlog
+       // once the template & alias checks have passed
+       verifyBulkIndexed("foo", "Here is my info", Level.INFO);
+       // Subsequent messages are indexed directly
+       auditor.info("foo", "This message is indexed directly because the write alias exists");
+       verifyMessageIndexed("foo", "This message is indexed directly because the write alias exists", Level.INFO);
    }

    public void testWarning() throws IOException {
        AbstractAuditor auditor = createTestAuditorWithTemplateInstalled();
        auditor.warning("bar", "Here is my warning");
-
-       verify(client).execute(eq(TransportIndexAction.TYPE), indexRequestCaptor.capture(), any());
-       IndexRequest indexRequest = indexRequestCaptor.getValue();
-       assertThat(indexRequest.indices(), arrayContaining(TEST_INDEX));
-       assertThat(indexRequest.timeout(), equalTo(TimeValue.timeValueSeconds(5)));
-       AbstractAuditMessageTests.TestAuditMessage auditMessage = parseAuditMessage(indexRequest.source());
-       assertThat(auditMessage.getResourceId(), equalTo("bar"));
-       assertThat(auditMessage.getMessage(), equalTo("Here is my warning"));
-       assertThat(auditMessage.getLevel(), equalTo(Level.WARNING));
-       assertThat(
-           auditMessage.getTimestamp().getTime(),
-           allOf(greaterThanOrEqualTo(startMillis), lessThanOrEqualTo(System.currentTimeMillis()))
-       );
-       assertThat(auditMessage.getNodeName(), equalTo(TEST_NODE_NAME));
+       // The first audit is written as a bulk request from the backlog
+       // once the template & alias checks have passed
+       verifyBulkIndexed("bar", "Here is my warning", Level.WARNING);
+       // Subsequent messages are indexed directly
+       auditor.warning("bar", "This message is indexed directly because the write alias exists");
+       verifyMessageIndexed("bar", "This message is indexed directly because the write alias exists", Level.WARNING);
    }

    public void testError() throws IOException {
        AbstractAuditor auditor = createTestAuditorWithTemplateInstalled();
        auditor.error("foobar", "Here is my error");
+       // The first audit is written as a bulk request from the backlog
+       // once the template & alias checks have passed
+       verifyBulkIndexed("foobar", "Here is my error", Level.ERROR);
+       // Subsequent messages are indexed directly
+       auditor.error("foobar", "This message is indexed directly because the write alias exists");
+       verifyMessageIndexed("foobar", "This message is indexed directly because the write alias exists", Level.ERROR);
+   }
+   public void testAudit() throws IOException {
+       Level level = randomFrom(Level.ERROR, Level.INFO, Level.WARNING);
+
+       AbstractAuditor auditor = createTestAuditorWithTemplateInstalled();
+       auditor.audit(level, "r_id", "Here is my audit");
+       // The first audit is written as a bulk request from the backlog
+       // once the template & alias checks have passed
+       verifyBulkIndexed("r_id", "Here is my audit", level);
+       // Subsequent messages are indexed directly
+       auditor.audit(level, "r_id", "This message is indexed directly because the write alias exists");
+       verifyMessageIndexed("r_id", "This message is indexed directly because the write alias exists", level);
+   }
+
+   private void verifyMessageIndexed(String resourceId, String message, Level level) throws IOException {
        verify(client).execute(eq(TransportIndexAction.TYPE), indexRequestCaptor.capture(), any());
        IndexRequest indexRequest = indexRequestCaptor.getValue();
-       assertThat(indexRequest.indices(), arrayContaining(TEST_INDEX));
+       assertThat(indexRequest.indices(), arrayContaining(TEST_INDEX_ALIAS));
        assertThat(indexRequest.timeout(), equalTo(TimeValue.timeValueSeconds(5)));
        AbstractAuditMessageTests.TestAuditMessage auditMessage = parseAuditMessage(indexRequest.source());
-       assertThat(auditMessage.getResourceId(), equalTo("foobar"));
-       assertThat(auditMessage.getMessage(), equalTo("Here is my error"));
-       assertThat(auditMessage.getLevel(), equalTo(Level.ERROR));
+       assertThat(auditMessage.getResourceId(), equalTo(resourceId));
+       assertThat(auditMessage.getMessage(), equalTo(message));
+       assertThat(auditMessage.getLevel(), equalTo(level));
        assertThat(
            auditMessage.getTimestamp().getTime(),
            allOf(greaterThanOrEqualTo(startMillis), lessThanOrEqualTo(System.currentTimeMillis()))
@@ -151,19 +177,18 @@ public void testError() throws IOException {
        assertThat(auditMessage.getNodeName(), equalTo(TEST_NODE_NAME));
    }

-   public void testAudit() throws IOException {
-       Level level = randomFrom(Level.ERROR, Level.INFO, Level.WARNING);
-
-       AbstractAuditor auditor = createTestAuditorWithTemplateInstalled();
-       auditor.audit(level, "r_id", "Here is my audit");
-
-       verify(client).execute(eq(TransportIndexAction.TYPE), indexRequestCaptor.capture(), any());
-       IndexRequest indexRequest = indexRequestCaptor.getValue();
-       assertThat(indexRequest.indices(), arrayContaining(TEST_INDEX));
-       assertThat(indexRequest.timeout(), equalTo(TimeValue.timeValueSeconds(5)));
+   private void verifyBulkIndexed(String resourceId, String message, Level level) throws IOException {
+       verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any());
+       BulkRequest bulkRequest = bulkRequestCaptor.getValue();
+       assertThat(bulkRequest.numberOfActions(), is(1));
+       assertThat(bulkRequest.timeout(), equalTo(TimeValue.timeValueSeconds(60)));
+       var firstBulk = bulkRequest.requests().get(0);
+       assertThat(firstBulk.index(), is(TEST_INDEX_ALIAS));
+       assertThat(firstBulk, instanceOf(IndexRequest.class));
+       var indexRequest = (IndexRequest) firstBulk;
        AbstractAuditMessageTests.TestAuditMessage auditMessage = parseAuditMessage(indexRequest.source());
-       assertThat(auditMessage.getResourceId(), equalTo("r_id"));
-       assertThat(auditMessage.getMessage(), equalTo("Here is my audit"));
+       assertThat(auditMessage.getResourceId(), equalTo(resourceId));
+       assertThat(auditMessage.getMessage(), equalTo(message));
        assertThat(auditMessage.getLevel(), equalTo(level));
        assertThat(
            auditMessage.getTimestamp().getTime(),
@@ -172,6 +197,14 @@ public void testAudit() throws IOException {
        assertThat(auditMessage.getNodeName(), equalTo(TEST_NODE_NAME));
    }

+   public void testAuditWithMissingAlias() throws IOException {
+       AbstractAuditor auditor = createTestAuditorWithTemplateAndIndexButNoAlias();
+       auditor.info("foobar", "Add the alias first");
+       verify(client).execute(eq(TransportIndicesAliasesAction.TYPE), any(), any());
+
+       verifyBulkIndexed("foobar", "Add the alias first", Level.INFO);
+   }
+
    public void testAuditingBeforeTemplateInstalled() throws Exception {
        CountDownLatch writeSomeDocsBeforeTemplateLatch = new CountDownLatch(1);
        AbstractAuditor auditor = createTestAuditorWithoutTemplate(
@@ -186,6 +219,9 @@ public void testAuditingBeforeTemplateInstalled() throws Exception {
        // fire the put template response
        writeSomeDocsBeforeTemplateLatch.countDown();
+       assertBusy(() -> verify(client, times(1)).execute(eq(TransportPutComposableIndexTemplateAction.TYPE), any(), any()));
+       assertBusy(() -> verify(client, times(1)).execute(eq(TransportCreateIndexAction.TYPE), any(), any()));
+
        // the back log will be written some point later
        ArgumentCaptor bulkCaptor = ArgumentCaptor.forClass(BulkRequest.class);
        assertBusy(() -> verify(client, times(1)).execute(eq(TransportBulkAction.TYPE), bulkCaptor.capture(), any()));
@@ -226,17 +262,42 @@ private static AbstractAuditMessageTests.TestAuditMessage parseAuditMessage(Byte
    }

    private TestAuditor createTestAuditorWithTemplateInstalled() {
-       Map templates = Map.of(TEST_INDEX, mock(IndexTemplateMetadata.class));
-       Map templatesV2 = Collections.singletonMap(TEST_INDEX, mock(ComposableIndexTemplate.class));
-       Metadata metadata = mock(Metadata.class);
-       when(metadata.getTemplates()).thenReturn(templates);
-       when(metadata.templatesV2()).thenReturn(templatesV2);
-       ClusterState state = mock(ClusterState.class);
-       when(state.getMetadata()).thenReturn(metadata);
+       return new TestAuditor(client, TEST_NODE_NAME, mockClusterServiceAndIndexState(true));
+   }
+
+   @SuppressWarnings("unchecked")
+   private TestAuditor createTestAuditorWithTemplateAndIndexButNoAlias() {
+       doAnswer(invocationOnMock -> {
+           ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2];
+           listener.onResponse(mock(IndicesAliasesResponse.class));
+           return null;
+       }).when(client).execute(eq(TransportIndicesAliasesAction.TYPE), any(), any());
+
+       return new TestAuditor(client, TEST_NODE_NAME, mockClusterServiceAndIndexState(false));
+   }
+
+   private ClusterService mockClusterServiceAndIndexState(boolean includeAlias) {
+       Map templates = Map.of(TEST_INDEX_PREFIX, mock(IndexTemplateMetadata.class));
+       var template = mock(ComposableIndexTemplate.class);
+       when(template.version()).thenReturn((long) TEST_TEMPLATE_VERSION);
+       Map templatesV2 = Collections.singletonMap(TEST_INDEX_PREFIX, template);
+
+       var indexMeta = Map.of(TEST_INDEX, createIndexMetadata(TEST_INDEX, includeAlias));
+       Metadata metadata = Metadata.builder().indices(indexMeta).templates(templates).indexTemplates(templatesV2).build();
+
+       ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build();
+
        ClusterService clusterService = mock(ClusterService.class);
        when(clusterService.state()).thenReturn(state);
+       return clusterService;
+   }
-       return new TestAuditor(client, TEST_NODE_NAME, clusterService);
+   private static IndexMetadata createIndexMetadata(String indexName, boolean withAlias) {
+       IndexMetadata.Builder builder = IndexMetadata.builder(indexName).settings(indexSettings(IndexVersion.current(), 1, 0));
+       if (withAlias) {
+           builder.putAlias(AliasMetadata.builder(TEST_INDEX_ALIAS).build());
+       }
+       return builder.build();
    }

    @SuppressWarnings("unchecked")
@@ -264,15 +325,24 @@ private TestAuditor createTestAuditorWithoutTemplate(CountDownLatch latch) {
            return null;
        }).when(client).execute(eq(TransportPutComposableIndexTemplateAction.TYPE), any(), any());

+       doAnswer(invocationOnMock -> {
+           ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2];
+           listener.onResponse(new CreateIndexResponse(true, true, "foo"));
+           return null;
+       }).when(client).execute(eq(TransportCreateIndexAction.TYPE), any(), any());
+
+       doAnswer(invocationOnMock -> {
+           ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2];
+           listener.onResponse(new ClusterHealthResponse());
+           return null;
+       }).when(client).execute(eq(TransportClusterHealthAction.TYPE), any(), any());
+
        IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class);
        AdminClient adminClient = mock(AdminClient.class);
        when(adminClient.indices()).thenReturn(indicesAdminClient);
        when(client.admin()).thenReturn(adminClient);
-       Metadata metadata = mock(Metadata.class);
-       when(metadata.getTemplates()).thenReturn(Map.of());
-       ClusterState state = mock(ClusterState.class);
-       when(state.getMetadata()).thenReturn(metadata);
+       ClusterState state = ClusterState.builder(ClusterName.DEFAULT).build();
        ClusterService clusterService = mock(ClusterService.class);
        when(clusterService.state()).thenReturn(state);
@@ -284,23 +354,45 @@ public static class TestAuditor extends AbstractAuditor patterns) {
-       return ComposableIndexTemplate.builder().indexPatterns(patterns).build();
+   private static ComposableIndexTemplate createComposableIndexTemplateMetaData(String templateName, List patterns, long version) {
+       return ComposableIndexTemplate.builder().indexPatterns(patterns).version(version).build();
    }

    private static IndexMetadata createIndexMetadata(String indexName, boolean withAlias) {
diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ml/notifications_index_template.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/notifications_index_template.json
index bf3cf302f0170..38ab2621b9316 100644
--- a/x-pack/plugin/core/template-resources/src/main/resources/ml/notifications_index_template.json
+++ b/x-pack/plugin/core/template-resources/src/main/resources/ml/notifications_index_template.json
@@ -2,7 +2,7 @@
   "priority" : 2147483647,
   "version" : ${xpack.ml.version.id},
   "index_patterns" : [
-    ".ml-notifications-000002"
+    ".ml-notifications-*"
   ],
   "template" : {
     "settings" : {
diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java
index b163036e94760..f7cfdc7502e5e 100644
--- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java
+++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java
@@ -20,6 +20,7 @@
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.indices.TestIndexNameExpressionResolver;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xpack.core.XPackSettings;
 import org.elasticsearch.xpack.core.action.SetResetModeActionRequest;
@@ -363,7 +364,12 @@ private void createReindexedIndex(String reindexedIndexName) {
    }

    private void createNotification(boolean includeNodeInfo) {
-       AnomalyDetectionAuditor auditor = new AnomalyDetectionAuditor(client(), getInstanceFromNode(ClusterService.class), includeNodeInfo);
+       AnomalyDetectionAuditor auditor = new AnomalyDetectionAuditor(
+           client(),
+           getInstanceFromNode(ClusterService.class),
+           TestIndexNameExpressionResolver.newInstance(),
+           includeNodeInfo
+       );
        auditor.info("whatever", "blah");
    }
 }
diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java
index 68e17ae15c549..5d06cfe0cd951 100644
--- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java
+++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java
@@ -148,7 +148,12 @@ public void createComponents() throws Exception {
            .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueSeconds(1));
        // We can't change the signature of createComponents to e.g. pass differing values of includeNodeInfo to pass to the
        // AnomalyDetectionAuditor constructor. Instead we generate a random boolean value for that purpose.
-       AnomalyDetectionAuditor auditor = new AnomalyDetectionAuditor(client(), getInstanceFromNode(ClusterService.class), randomBoolean());
+       AnomalyDetectionAuditor auditor = new AnomalyDetectionAuditor(
+           client(),
+           getInstanceFromNode(ClusterService.class),
+           TestIndexNameExpressionResolver.newInstance(),
+           randomBoolean()
+       );
        jobResultsProvider = new JobResultsProvider(client(), builder.build(), TestIndexNameExpressionResolver.newInstance());
        renormalizer = mock(Renormalizer.class);
        process = mock(AutodetectProcess.class);
diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java
index 8c9c527382106..35bc424f67aff 100644
--- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java
+++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.core.Tuple;
+import org.elasticsearch.indices.TestIndexNameExpressionResolver;
 import org.elasticsearch.license.License;
 import org.elasticsearch.xpack.core.action.util.PageParams;
 import org.elasticsearch.xpack.core.ml.MlConfigVersion;
@@ -95,7 +96,12 @@ public void testStoreModelViaChunkedPersisterWithNodeInfo() throws IOException {
        ChunkedTrainedModelPersister persister = new ChunkedTrainedModelPersister(
            trainedModelProvider,
            analyticsConfig,
-           new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class), true),
+           new DataFrameAnalyticsAuditor(
+               client(),
+               getInstanceFromNode(ClusterService.class),
+               TestIndexNameExpressionResolver.newInstance(),
+               true
+           ),
            (ex) -> { throw new ElasticsearchException(ex); },
@@ -167,7 +173,12 @@ public void testStoreModelViaChunkedPersisterWithoutNodeInfo() throws IOExceptio
        ChunkedTrainedModelPersister persister = new ChunkedTrainedModelPersister(
            trainedModelProvider,
            analyticsConfig,
-           new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class), false),
+           new DataFrameAnalyticsAuditor(
+               client(),
+               getInstanceFromNode(ClusterService.class),
+               TestIndexNameExpressionResolver.newInstance(),
+               false
+           ),
            (ex) -> { throw new ElasticsearchException(ex); },
diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java
index a8e97263647ea..f194d77d3836d 100644
--- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java
+++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.indices.TestIndexNameExpressionResolver;
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.core.ClientHelper;
 import org.elasticsearch.xpack.core.ml.action.DeleteDataFrameAnalyticsAction;
@@ -41,7 +42,12 @@ public void createComponents() throws Exception {
        configProvider = new DataFrameAnalyticsConfigProvider(
            client(),
            xContentRegistry(),
-           new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class), randomBoolean()),
+           new DataFrameAnalyticsAuditor(
+               client(),
+               getInstanceFromNode(ClusterService.class),
+               TestIndexNameExpressionResolver.newInstance(),
+               randomBoolean()
+           ),
            getInstanceFromNode(ClusterService.class)
        );
        waitForMlTemplates();
diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java
index e29cd4545846c..ff92e06385252 100644
--- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java
+++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.indices.TestIndexNameExpressionResolver;
 import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xpack.core.ml.MlConfigVersion;
@@ -58,7 +59,12 @@ public void createComponents() throws Exception {
            xContentRegistry(),
            // We can't change the signature of createComponents to e.g. pass differing values of includeNodeInfo to pass to the
            // DataFrameAnalyticsAuditor constructor. Instead we generate a random boolean value for that purpose.
-           new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class), randomBoolean()),
+           new DataFrameAnalyticsAuditor(
+               client(),
+               getInstanceFromNode(ClusterService.class),
+               TestIndexNameExpressionResolver.newInstance(),
+               randomBoolean()
+           ),
            getInstanceFromNode(ClusterService.class)
        );
        dummyAuthenticationHeader = Authentication.newRealmAuthentication(
diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java
index 7ce0fbe760644..a650556b0501e 100644
--- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java
+++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java
@@ -140,7 +140,7 @@ public void createComponents() throws Exception {
        // We can't change the signature of createComponents to e.g. pass differing values of includeNodeInfo to pass to the
        // AnomalyDetectionAuditor constructor. Instead we generate a random boolean value for that purpose.
        boolean includeNodeInfo = randomBoolean();
-       auditor = new AnomalyDetectionAuditor(client(), clusterService, includeNodeInfo);
+       auditor = new AnomalyDetectionAuditor(client(), clusterService, TestIndexNameExpressionResolver.newInstance(), includeNodeInfo);
        waitForMlTemplates();
    }
diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/NotificationsIndexIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/NotificationsIndexIT.java
new file mode 100644
index 0000000000000..9887e69dafde1
--- /dev/null
+++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/NotificationsIndexIT.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.ml.integration;
+
+import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.cluster.metadata.AliasMetadata;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.indices.TestIndexNameExpressionResolver;
+import org.elasticsearch.xpack.core.XPackSettings;
+import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex;
+import org.elasticsearch.xpack.ml.MlSingleNodeTestCase;
+import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.is;
+
+public class NotificationsIndexIT extends MlSingleNodeTestCase {
+
+    @Override
+    protected Settings nodeSettings() {
+        Settings.Builder newSettings = Settings.builder();
+        newSettings.put(super.nodeSettings());
+        newSettings.put(XPackSettings.SECURITY_ENABLED.getKey(), false);
+        newSettings.put(XPackSettings.WATCHER_ENABLED.getKey(), false);
+        return newSettings.build();
+    }
+
+    public void testAliasCreated() throws Exception {
+        // Auditing a notification should create the .ml-notifications-000002 index
+        // and write alias
+        createNotification(true);
+
+        assertBusy(() -> {
+            assertNotificationsIndexExists();
+            assertNotificationsWriteAliasCreated();
+        });
+    }
+
+    private void assertNotificationsIndexExists() {
+        GetIndexResponse getIndexResponse = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT)
+            .setIndices(NotificationsIndex.NOTIFICATIONS_INDEX)
+            .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN)
+            .get();
+        assertThat(Arrays.asList(getIndexResponse.getIndices()), contains(NotificationsIndex.NOTIFICATIONS_INDEX));
+    }
+
+    private void assertNotificationsWriteAliasCreated() {
+        Map> aliases = indicesAdmin().prepareGetAliases(
+            TimeValue.timeValueSeconds(10L),
+            NotificationsIndex.NOTIFICATIONS_INDEX_WRITE_ALIAS
+        ).setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN).get().getAliases();
+        assertThat(aliases.size(), is(1));
+        List indexAliases = aliases.get(NotificationsIndex.NOTIFICATIONS_INDEX);
+        assertNotNull(aliases.toString(), indexAliases);
+        assertThat(indexAliases.size(), is(1));
+        var writeAlias = indexAliases.get(0);
+        assertThat(writeAlias.alias(), is(NotificationsIndex.NOTIFICATIONS_INDEX_WRITE_ALIAS));
+        assertThat("notification write alias should be hidden but is not: " + aliases, writeAlias.isHidden(), is(true));
+    }
+
+    private void createNotification(boolean includeNodeInfo) {
+        AnomalyDetectionAuditor auditor = new AnomalyDetectionAuditor(
+            client(),
+            getInstanceFromNode(ClusterService.class),
+            TestIndexNameExpressionResolver.newInstance(),
+            includeNodeInfo
+        );
+        auditor.info("whatever", "blah");
+    }
+}
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java
index 01127c97ba90c..4220c52b374f2 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java
@@ -568,7 +568,7 @@ public Map getProcessors(Processor.Parameters paramet
                parameters.client,
                parameters.ingestService.getClusterService(),
                this.settings,
-               machineLearningExtension.get().includeNodeInfo()
+               inferenceAuditor
            );
            parameters.ingestService.addIngestClusterStateListener(inferenceFactory);
            return Map.of(InferenceProcessor.TYPE, inferenceFactory);
@@ -765,6 +765,8 @@ public void loadExtensions(ExtensionLoader loader) {
    private final SetOnce datafeedRunner = new SetOnce<>();
    private final SetOnce dataFrameAnalyticsManager = new SetOnce<>();
    private final SetOnce dataFrameAnalyticsAuditor = new SetOnce<>();
+   private final SetOnce anomalyDetectionAuditor = new SetOnce<>();
+   private final SetOnce inferenceAuditor = new SetOnce<>();
    private final SetOnce memoryTracker = new SetOnce<>();
    private final SetOnce mlUpgradeModeActionFilter = new SetOnce<>();
    private final SetOnce mlLifeCycleService = new SetOnce<>();
@@ -944,15 +946,24 @@ public Collection createComponents(PluginServices services) {
        AnomalyDetectionAuditor anomalyDetectionAuditor = new AnomalyDetectionAuditor(
            client,
            clusterService,
+           indexNameExpressionResolver,
            machineLearningExtension.get().includeNodeInfo()
        );
+       this.anomalyDetectionAuditor.set(anomalyDetectionAuditor);
        DataFrameAnalyticsAuditor dataFrameAnalyticsAuditor = new DataFrameAnalyticsAuditor(
            client,
            clusterService,
+           indexNameExpressionResolver,
+           machineLearningExtension.get().includeNodeInfo()
+       );
+       InferenceAuditor inferenceAuditor = new InferenceAuditor(
+           client,
+           clusterService,
+           indexNameExpressionResolver,
            machineLearningExtension.get().includeNodeInfo()
        );
-       InferenceAuditor inferenceAuditor = new InferenceAuditor(client, clusterService, machineLearningExtension.get().includeNodeInfo());
-       SystemAuditor systemAuditor = new SystemAuditor(client, clusterService);
+       this.inferenceAuditor.set(inferenceAuditor);
+       SystemAuditor systemAuditor = new SystemAuditor(client, clusterService, indexNameExpressionResolver);
        this.dataFrameAnalyticsAuditor.set(dataFrameAnalyticsAuditor);
        OriginSettingClient originSettingClient = new OriginSettingClient(client, ML_ORIGIN);
@@ -1233,10 +1244,6 @@ public Collection createComponents(PluginServices services) {
                ),
                new MlIndexRollover.IndexPatternAndAlias(MlStatsIndex.indexPattern(), MlStatsIndex.writeAlias()),
                new MlIndexRollover.IndexPatternAndAlias(AnnotationIndex.INDEX_PATTERN, AnnotationIndex.WRITE_ALIAS_NAME)
-               // TODO notifications = https://github.com/elastic/elasticsearch/pull/120064
-               // TODO anomaly results
-               // TODO .ml-inference-XXXXXX - requires alias
-               // TODO .ml-inference-native-XXXXXX - requires alias (index added in 8.0)
            ),
            indexNameExpressionResolver,
            client
@@ -1373,7 +1380,7 @@ public List> 
getPersistentTasksExecutor( client, expressionResolver, getLicenseState(), - machineLearningExtension.get().includeNodeInfo() + anomalyDetectionAuditor.get() ), new TransportStartDatafeedAction.StartDatafeedPersistentTasksExecutor(datafeedRunner.get(), expressionResolver, threadPool), new TransportStartDataFrameAnalyticsAction.TaskExecutor( @@ -1394,7 +1401,7 @@ public List> getPersistentTasksExecutor( expressionResolver, client, getLicenseState(), - machineLearningExtension.get().includeNodeInfo() + anomalyDetectionAuditor.get() ) ); } @@ -2105,35 +2112,33 @@ public void cleanUpFeature( final Map results = new ConcurrentHashMap<>(); - ActionListener unsetResetModeListener = ActionListener.wrap( - success -> client.execute( + ActionListener unsetResetModeListener = ActionListener.wrap(success -> { + // reset the auditors as aliases used may be removed + resetAuditors(); + + client.execute(SetResetModeAction.INSTANCE, SetResetModeActionRequest.disabled(true), ActionListener.wrap(resetSuccess -> { + finalListener.onResponse(success); + logger.info("Finished machine learning feature reset"); + }, resetFailure -> { + logger.error("failed to disable reset mode after state otherwise successful machine learning reset", resetFailure); + finalListener.onFailure( + ExceptionsHelper.serverError( + "failed to disable reset mode after state otherwise successful machine learning reset", + resetFailure + ) + ); + })); + }, failure -> { + logger.error("failed to reset machine learning", failure); + client.execute( SetResetModeAction.INSTANCE, - SetResetModeActionRequest.disabled(true), - ActionListener.wrap(resetSuccess -> { - finalListener.onResponse(success); - logger.info("Finished machine learning feature reset"); - }, resetFailure -> { - logger.error("failed to disable reset mode after state otherwise successful machine learning reset", resetFailure); - finalListener.onFailure( - ExceptionsHelper.serverError( - "failed to disable reset mode after state otherwise successful machine learning reset", - resetFailure - ) - ); + SetResetModeActionRequest.disabled(false), + ActionListener.wrap(resetSuccess -> finalListener.onFailure(failure), resetFailure -> { + logger.error("failed to disable reset mode after state clean up failure", resetFailure); + finalListener.onFailure(failure); }) - ), - failure -> { - logger.error("failed to reset machine learning", failure); - client.execute( - SetResetModeAction.INSTANCE, - SetResetModeActionRequest.disabled(false), - ActionListener.wrap(resetSuccess -> finalListener.onFailure(failure), resetFailure -> { - logger.error("failed to disable reset mode after state clean up failure", resetFailure); - finalListener.onFailure(failure); - }) - ); - } - ); + ); + }); // Stop all model deployments ActionListener pipelineValidation = unsetResetModeListener.delegateFailureAndWrap( @@ -2286,6 +2291,18 @@ public void cleanUpFeature( client.execute(SetResetModeAction.INSTANCE, SetResetModeActionRequest.enabled(), afterResetModeSet); } + private void resetAuditors() { + if (anomalyDetectionAuditor.get() != null) { + anomalyDetectionAuditor.get().reset(); + } + if (dataFrameAnalyticsAuditor.get() != null) { + dataFrameAnalyticsAuditor.get().reset(); + } + if (inferenceAuditor.get() != null) { + inferenceAuditor.get().reset(); + } + } + @Override public BreakerSettings getCircuitBreaker(Settings settingsToUse) { return BreakerSettings.updateFromSettings( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java index 91e738bf2183b..02fcc2b4465f3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java @@ -43,7 +43,8 @@ public class MlIndexTemplateRegistry extends IndexTemplateRegistry { * 10000001: TODO - reason */ public static final int ML_INDEX_TEMPLATE_VERSION = 10000000 + AnomalyDetectorsIndex.RESULTS_INDEX_MAPPINGS_VERSION - + NotificationsIndex.NOTIFICATIONS_INDEX_MAPPINGS_VERSION + MlStatsIndex.STATS_INDEX_MAPPINGS_VERSION; + + NotificationsIndex.NOTIFICATIONS_INDEX_MAPPINGS_VERSION + MlStatsIndex.STATS_INDEX_MAPPINGS_VERSION + + NotificationsIndex.NOTIFICATIONS_INDEX_TEMPLATE_VERSION; private static final String ROOT_RESOURCE_PATH = "/ml/"; private static final String ANOMALY_DETECTION_PATH = ROOT_RESOURCE_PATH + "anomalydetection/"; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java index 919f0a526b8ae..e49901ea9976b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java @@ -134,10 +134,9 @@ protected void doExecute( TaskId taskId = new TaskId(clusterService.localNode().getId(), task.getId()); BooleanSupplier isTimedOutSupplier = () -> Instant.now(clock).isAfter(timeoutTime); - AnomalyDetectionAuditor anomalyDetectionAuditor = new AnomalyDetectionAuditor(client, clusterService, auditor.includeNodeInfo()); if (Strings.isNullOrEmpty(request.getJobId()) || Strings.isAllOrWildcard(request.getJobId())) { - List dataRemovers = createDataRemovers(client, taskId, anomalyDetectionAuditor); + List dataRemovers = createDataRemovers(client, taskId, auditor); threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) .execute(ActionRunnable.wrap(listener, l -> deleteExpiredData(request, dataRemovers, l, isTimedOutSupplier))); } else { @@ -152,7 +151,7 @@ protected void doExecute( List jobs = jobBuilders.stream().map(Job.Builder::build).collect(Collectors.toList()); String[] jobIds = jobs.stream().map(Job::getId).toArray(String[]::new); request.setExpandedJobIds(jobIds); - List dataRemovers = createDataRemovers(jobs, taskId, anomalyDetectionAuditor); + List dataRemovers = createDataRemovers(jobs, taskId, auditor); deleteExpiredData(request, dataRemovers, l, isTimedOutSupplier); })) ) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java index 6b14e60c00247..e50d67e068756 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java @@ -8,6 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; @@ -386,15 +387,15 @@ public static final class Factory implements Processor.Factory, Consumer auditor; private 
volatile ClusterState clusterState = ClusterState.EMPTY_STATE; private volatile int maxIngestProcessors; private volatile MlConfigVersion minNodeVersion = MlConfigVersion.CURRENT; - public Factory(Client client, ClusterService clusterService, Settings settings, boolean includeNodeInfo) { + public Factory(Client client, ClusterService clusterService, Settings settings, SetOnce auditor) { this.client = client; this.maxIngestProcessors = MAX_INFERENCE_PROCESSORS.get(settings); - this.auditor = new InferenceAuditor(client, clusterService, includeNodeInfo); + this.auditor = auditor; clusterService.getClusterSettings().addSettingsUpdateConsumer(MAX_INFERENCE_PROCESSORS, this::setMaxIngestProcessors); } @@ -481,7 +482,7 @@ public InferenceProcessor create( return fromInputFieldConfiguration( client, - auditor, + auditor.get(), tag, description, modelId, @@ -509,7 +510,7 @@ public InferenceProcessor create( } return fromTargetFieldConfiguration( client, - auditor, + auditor.get(), tag, description, targetField, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java index cc3f8f0dd1e67..42f722e330a19 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java @@ -69,7 +69,7 @@ public SnapshotUpgradeTaskExecutor( IndexNameExpressionResolver expressionResolver, Client client, XPackLicenseState licenseState, - boolean includeNodeInfo + AnomalyDetectionAuditor auditor ) { super( MlTasks.JOB_SNAPSHOT_UPGRADE_TASK_NAME, @@ -80,7 +80,7 @@ public SnapshotUpgradeTaskExecutor( expressionResolver ); this.autodetectProcessManager = autodetectProcessManager; - this.auditor = new AnomalyDetectionAuditor(client, clusterService, includeNodeInfo); + this.auditor = auditor; this.jobResultsProvider = new JobResultsProvider(client, settings, expressionResolver); this.client = client; this.licenseState = licenseState; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java index 9c37ebc0abfd8..b2acff5d1b199 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java @@ -115,14 +115,14 @@ public OpenJobPersistentTasksExecutor( Client client, IndexNameExpressionResolver expressionResolver, XPackLicenseState licenseState, - boolean includeNodeInfo + AnomalyDetectionAuditor auditor ) { super(MlTasks.JOB_TASK_NAME, MachineLearning.UTILITY_THREAD_POOL_NAME, settings, clusterService, memoryTracker, expressionResolver); this.autodetectProcessManager = Objects.requireNonNull(autodetectProcessManager); this.datafeedConfigProvider = Objects.requireNonNull(datafeedConfigProvider); this.client = Objects.requireNonNull(client); this.jobResultsProvider = new JobResultsProvider(client, settings, expressionResolver); - this.auditor = new AnomalyDetectionAuditor(client, clusterService, includeNodeInfo); + this.auditor = auditor; this.licenseState = licenseState; clusterService.addListener(event -> clusterState = event.state()); } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AbstractMlAuditor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AbstractMlAuditor.java index 31e33bcd3f62e..213d3851b3b98 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AbstractMlAuditor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AbstractMlAuditor.java @@ -9,10 +9,16 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditMessage; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditMessageFactory; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditor; @@ -20,6 +26,8 @@ import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.elasticsearch.xpack.ml.MlIndexTemplateRegistry; +import java.io.IOException; + import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; abstract class AbstractMlAuditor extends AbstractAuditor { @@ -27,14 +35,19 @@ abstract class AbstractMlAuditor extends Abstrac private static final Logger logger = LogManager.getLogger(AbstractMlAuditor.class); private volatile boolean isResetMode; - protected AbstractMlAuditor(Client client, AbstractAuditMessageFactory messageFactory, ClusterService clusterService) { + protected AbstractMlAuditor( + Client client, + AbstractAuditMessageFactory messageFactory, + ClusterService clusterService, + IndexNameExpressionResolver indexNameExpressionResolver + ) { super( new OriginSettingClient(client, ML_ORIGIN), - NotificationsIndex.NOTIFICATIONS_INDEX, - MlIndexTemplateRegistry.NOTIFICATIONS_TEMPLATE, + NotificationsIndex.NOTIFICATIONS_INDEX_WRITE_ALIAS, clusterService.getNodeName(), messageFactory, - clusterService + clusterService, + indexNameExpressionResolver ); clusterService.addListener(event -> { if (event.metadataChanged()) { @@ -65,4 +78,29 @@ protected void writeBacklog() { super.writeBacklog(); } } + + @Override + protected TransportPutComposableIndexTemplateAction.Request putTemplateRequest() { + var templateConfig = MlIndexTemplateRegistry.NOTIFICATIONS_TEMPLATE; + try ( + var parser = JsonXContent.jsonXContent.createParser( + XContentParserConfiguration.EMPTY, + MlIndexTemplateRegistry.NOTIFICATIONS_TEMPLATE.loadBytes() + ) + ) { + return new TransportPutComposableIndexTemplateAction.Request(templateConfig.getTemplateName()).indexTemplate( + ComposableIndexTemplate.parse(parser) + ).masterNodeTimeout(MASTER_TIMEOUT); + } catch (IOException e) { + throw new ElasticsearchParseException("unable to parse composable template " + templateConfig.getTemplateName(), e); + } + } + + protected int templateVersion() { + return MlIndexTemplateRegistry.NOTIFICATIONS_TEMPLATE.getVersion(); + } + + protected IndexDetails indexDetails() { + return new 
IndexDetails(NotificationsIndex.NOTIFICATIONS_INDEX_PREFIX, NotificationsIndex.NOTIFICATIONS_INDEX_VERSION); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AnomalyDetectionAuditor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AnomalyDetectionAuditor.java index 5c47f92c3df11..87b6ee9444e05 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AnomalyDetectionAuditor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/AnomalyDetectionAuditor.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.notifications; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.xpack.core.ml.notifications.AnomalyDetectionAuditMessage; @@ -14,8 +15,13 @@ public class AnomalyDetectionAuditor extends AbstractMlAuditor { private final boolean includeNodeInfo; - public InferenceAuditor(Client client, ClusterService clusterService, boolean includeNodeInfo) { - super(client, InferenceAuditMessage::new, clusterService); + public InferenceAuditor( + Client client, + ClusterService clusterService, + IndexNameExpressionResolver indexNameExpressionResolver, + boolean includeNodeInfo + ) { + super(client, InferenceAuditMessage::new, clusterService, indexNameExpressionResolver); this.includeNodeInfo = includeNodeInfo; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/SystemAuditor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/SystemAuditor.java index 8cfc445e592a0..4618a03af26c2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/SystemAuditor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/SystemAuditor.java @@ -8,16 +8,18 @@ package org.elasticsearch.xpack.ml.notifications; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.xpack.core.ml.notifications.SystemAuditMessage; public class SystemAuditor extends AbstractMlAuditor { - public SystemAuditor(Client client, ClusterService clusterService) { + public SystemAuditor(Client client, ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver) { super( client, (resourceId, message, level, timestamp, nodeName) -> new SystemAuditMessage(message, level, timestamp, nodeName), - clusterService + clusterService, + indexNameExpressionResolver ); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java index bb973bf4359e8..105911d8e4bfd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.ml.action; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.bulk.FailureStoreMetrics; @@ -31,6 +32,7 @@ import 
org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; +import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; import org.junit.Before; import java.time.Instant; @@ -95,7 +97,12 @@ public Map getProcessors(Processor.Parameters paramet when(licenseState.isAllowed(MachineLearningField.ML_API_FEATURE)).thenReturn(true); factoryMap.put( InferenceProcessor.TYPE, - new InferenceProcessor.Factory(parameters.client, parameters.ingestService.getClusterService(), Settings.EMPTY, true) + new InferenceProcessor.Factory( + parameters.client, + parameters.ingestService.getClusterService(), + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ) ); factoryMap.put("not_inference", new NotInferenceProcessor.Factory()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java index 637a9f73cbcbb..7ffddc9721bdf 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.ml.inference.ingest; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchStatusException; @@ -59,6 +60,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ZeroShotClassificationConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ZeroShotClassificationConfigUpdate; import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; import org.junit.Before; import java.io.IOException; @@ -109,279 +111,263 @@ public void setUpVariables() { } public void testCreateProcessorWithTooManyExisting() { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.builder().put(InferenceProcessor.MAX_INFERENCE_PROCESSORS.getKey(), 1).build(), - includeNodeInfo - ); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.builder().put(InferenceProcessor.MAX_INFERENCE_PROCESSORS.getKey(), 1).build(), + new SetOnce<>(mock(InferenceAuditor.class)) + ); - try { - processorFactory.accept(buildClusterStateWithModelReferences("model1")); - } catch (IOException ioe) { - throw new AssertionError(ioe.getMessage()); - } + try { + processorFactory.accept(buildClusterStateWithModelReferences("model1")); + } catch (IOException ioe) { + throw new AssertionError(ioe.getMessage()); + } - ElasticsearchStatusException ex = expectThrows( - ElasticsearchStatusException.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, Collections.emptyMap()) - ); + ElasticsearchStatusException ex = expectThrows( + ElasticsearchStatusException.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, Collections.emptyMap()) + ); - assertThat( - ex.getMessage(), - equalTo( - "Max number of inference 
processors reached, total inference processors [1]. " - + "Adjust the setting [xpack.ml.max_inference_processors]: [1] if a greater number is desired." - ) - ); - }); + assertThat( + ex.getMessage(), + equalTo( + "Max number of inference processors reached, total inference processors [1]. " + + "Adjust the setting [xpack.ml.max_inference_processors]: [1] if a greater number is desired." + ) + ); } public void testCreateProcessorWithInvalidInferenceConfig() { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo - ); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); - Map config = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put(InferenceProcessor.INFERENCE_CONFIG, Collections.singletonMap("unknown_type", Collections.emptyMap())); - } - }; + Map config = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + put(InferenceProcessor.INFERENCE_CONFIG, Collections.singletonMap("unknown_type", Collections.emptyMap())); + } + }; - ElasticsearchStatusException ex = expectThrows( - ElasticsearchStatusException.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, config) - ); - assertThat( - ex.getMessage(), - equalTo( - "unrecognized inference configuration type [unknown_type]." - + " Supported types [classification, regression, fill_mask, ner, pass_through, " - + "question_answering, text_classification, text_embedding, text_expansion, " - + "text_similarity, zero_shot_classification]" - ) - ); + ElasticsearchStatusException ex = expectThrows( + ElasticsearchStatusException.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, config) + ); + assertThat( + ex.getMessage(), + equalTo( + "unrecognized inference configuration type [unknown_type]." 
+ + " Supported types [classification, regression, fill_mask, ner, pass_through, " + + "question_answering, text_classification, text_embedding, text_expansion, " + + "text_similarity, zero_shot_classification]" + ) + ); - Map config2 = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put(InferenceProcessor.INFERENCE_CONFIG, Collections.singletonMap("regression", "boom")); - } - }; - ex = expectThrows( - ElasticsearchStatusException.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, config2) - ); - assertThat(ex.getMessage(), equalTo("inference_config must be an object with one inference type mapped to an object.")); + Map config2 = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + put(InferenceProcessor.INFERENCE_CONFIG, Collections.singletonMap("regression", "boom")); + } + }; + ex = expectThrows( + ElasticsearchStatusException.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, config2) + ); + assertThat(ex.getMessage(), equalTo("inference_config must be an object with one inference type mapped to an object.")); - Map config3 = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put(InferenceProcessor.INFERENCE_CONFIG, Collections.emptyMap()); - } - }; - ex = expectThrows( - ElasticsearchStatusException.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, config3) - ); - assertThat(ex.getMessage(), equalTo("inference_config must be an object with one inference type mapped to an object.")); - }); + Map config3 = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + put(InferenceProcessor.INFERENCE_CONFIG, Collections.emptyMap()); + } + }; + ex = expectThrows( + ElasticsearchStatusException.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, config3) + ); + assertThat(ex.getMessage(), equalTo("inference_config must be an object with one inference type mapped to an object.")); } public void testCreateProcessorWithTooOldMinNodeVersion() { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo - ); - try { - processorFactory.accept(builderClusterStateWithModelReferences(MlConfigVersion.V_7_5_0, "model1")); - } catch (IOException ioe) { - throw new AssertionError(ioe.getMessage()); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); + try { + processorFactory.accept(builderClusterStateWithModelReferences(MlConfigVersion.V_7_5_0, "model1")); + } catch (IOException ioe) { + throw new AssertionError(ioe.getMessage()); + } + Map regression = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + 
put(InferenceProcessor.TARGET_FIELD, "result"); + put( + InferenceProcessor.INFERENCE_CONFIG, + Collections.singletonMap(RegressionConfig.NAME.getPreferredName(), Collections.emptyMap()) + ); } - Map regression = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put( - InferenceProcessor.INFERENCE_CONFIG, - Collections.singletonMap(RegressionConfig.NAME.getPreferredName(), Collections.emptyMap()) - ); - } - }; + }; + + ElasticsearchException ex = expectThrows( + ElasticsearchException.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression) + ); + assertThat( + ex.getMessage(), + equalTo("Configuration [regression] requires minimum node version [7.6.0] (current minimum node version [7.5.0]") + ); + + Map classification = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + put( + InferenceProcessor.INFERENCE_CONFIG, + Collections.singletonMap( + ClassificationConfig.NAME.getPreferredName(), + Collections.singletonMap(ClassificationConfig.NUM_TOP_CLASSES.getPreferredName(), 1) + ) + ); + } + }; + + ex = expectThrows( + ElasticsearchException.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, classification) + ); + assertThat( + ex.getMessage(), + equalTo("Configuration [classification] requires minimum node version [7.6.0] (current minimum node version [7.5.0]") + ); + } + public void testCreateProcessorWithTooOldMinNodeVersionNlp() throws IOException { + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); + try { + processorFactory.accept(builderClusterStateWithModelReferences(MlConfigVersion.V_7_5_0, "model1")); + } catch (IOException ioe) { + throw new AssertionError(ioe.getMessage()); + } + + for (String name : List.of( + FillMaskConfig.NAME, + NerConfig.NAME, + PassThroughConfig.NAME, + QuestionAnsweringConfig.NAME, + TextClassificationConfig.NAME, + TextEmbeddingConfig.NAME, + TextExpansionConfigUpdate.NAME, + TextSimilarityConfig.NAME, + ZeroShotClassificationConfig.NAME + )) { ElasticsearchException ex = expectThrows( ElasticsearchException.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression) + () -> processorFactory.inferenceConfigUpdateFromMap(Map.of(name, Map.of())) ); assertThat( ex.getMessage(), - equalTo("Configuration [regression] requires minimum node version [7.6.0] (current minimum node version [7.5.0]") + equalTo("Configuration [" + name + "] requires minimum node version [8.0.0] (current minimum node version [7.5.0]") ); + } - Map classification = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put( - InferenceProcessor.INFERENCE_CONFIG, - Collections.singletonMap( - ClassificationConfig.NAME.getPreferredName(), - Collections.singletonMap(ClassificationConfig.NUM_TOP_CLASSES.getPreferredName(), 1) - ) - ); - } - }; - - ex = expectThrows( + for (String name : List.of(ClassificationConfig.NAME.getPreferredName(), RegressionConfig.NAME.getPreferredName())) { + ElasticsearchException ex = expectThrows( 
ElasticsearchException.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, classification) + () -> processorFactory.inferenceConfigUpdateFromMap(Map.of(name, Map.of())) ); assertThat( ex.getMessage(), - equalTo("Configuration [classification] requires minimum node version [7.6.0] (current minimum node version [7.5.0]") + equalTo("Configuration [" + name + "] requires minimum node version [7.6.0] (current minimum node version [7.5.0]") ); - }); + } } - public void testCreateProcessorWithTooOldMinNodeVersionNlp() throws IOException { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo - ); - try { - processorFactory.accept(builderClusterStateWithModelReferences(MlConfigVersion.V_7_5_0, "model1")); - } catch (IOException ioe) { - throw new AssertionError(ioe.getMessage()); - } + public void testCreateProcessor() { + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); - for (String name : List.of( - FillMaskConfig.NAME, - NerConfig.NAME, - PassThroughConfig.NAME, - QuestionAnsweringConfig.NAME, - TextClassificationConfig.NAME, - TextEmbeddingConfig.NAME, - TextExpansionConfigUpdate.NAME, - TextSimilarityConfig.NAME, - ZeroShotClassificationConfig.NAME - )) { - ElasticsearchException ex = expectThrows( - ElasticsearchException.class, - () -> processorFactory.inferenceConfigUpdateFromMap(Map.of(name, Map.of())) - ); - assertThat( - ex.getMessage(), - equalTo("Configuration [" + name + "] requires minimum node version [8.0.0] (current minimum node version [7.5.0]") + Map regression = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + put( + InferenceProcessor.INFERENCE_CONFIG, + Collections.singletonMap(RegressionConfig.NAME.getPreferredName(), Collections.emptyMap()) ); } + }; - for (String name : List.of(ClassificationConfig.NAME.getPreferredName(), RegressionConfig.NAME.getPreferredName())) { - ElasticsearchException ex = expectThrows( - ElasticsearchException.class, - () -> processorFactory.inferenceConfigUpdateFromMap(Map.of(name, Map.of())) - ); - assertThat( - ex.getMessage(), - equalTo("Configuration [" + name + "] requires minimum node version [7.6.0] (current minimum node version [7.5.0]") + var processor = processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression); + assertFalse(processor.isConfiguredWithInputsFields()); + assertEquals("my_model", processor.getModelId()); + assertEquals("result", processor.getTargetField()); + assertThat(processor.getFieldMap().entrySet(), empty()); + assertNull(processor.getInputs()); + + Map classification = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + put( + InferenceProcessor.INFERENCE_CONFIG, + Collections.singletonMap( + ClassificationConfig.NAME.getPreferredName(), + Collections.singletonMap(ClassificationConfig.NUM_TOP_CLASSES.getPreferredName(), 1) + ) ); } - }); - } - - public void testCreateProcessor() { - Set includeNodeInfoValues = new 
HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo - ); - - Map regression = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put( - InferenceProcessor.INFERENCE_CONFIG, - Collections.singletonMap(RegressionConfig.NAME.getPreferredName(), Collections.emptyMap()) - ); - } - }; - - var processor = processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression); - assertEquals(includeNodeInfo, processor.getAuditor().includeNodeInfo()); - assertFalse(processor.isConfiguredWithInputsFields()); - assertEquals("my_model", processor.getModelId()); - assertEquals("result", processor.getTargetField()); - assertThat(processor.getFieldMap().entrySet(), empty()); - assertNull(processor.getInputs()); - - Map classification = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - put( - InferenceProcessor.INFERENCE_CONFIG, - Collections.singletonMap( - ClassificationConfig.NAME.getPreferredName(), - Collections.singletonMap(ClassificationConfig.NUM_TOP_CLASSES.getPreferredName(), 1) - ) - ); - } - }; + }; - processor = processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, classification); - assertFalse(processor.isConfiguredWithInputsFields()); + processor = processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, classification); + assertFalse(processor.isConfiguredWithInputsFields()); - Map mininmal = new HashMap<>() { - { - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - } - }; + Map mininmal = new HashMap<>() { + { + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "result"); + } + }; - processor = processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, mininmal); - assertFalse(processor.isConfiguredWithInputsFields()); - assertEquals("my_model", processor.getModelId()); - assertEquals("result", processor.getTargetField()); - assertNull(processor.getInputs()); - }); + processor = processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, mininmal); + assertFalse(processor.isConfiguredWithInputsFields()); + assertEquals("my_model", processor.getModelId()); + assertEquals("result", processor.getTargetField()); + assertNull(processor.getInputs()); } public void testCreateProcessorWithFieldMap() { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory(client, clusterService, Settings.EMPTY, false); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); Map config = new HashMap<>() { { @@ -406,7 +392,12 @@ public void testCreateProcessorWithFieldMap() { } public void testCreateProcessorWithInputOutputs() { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory(client, clusterService, Settings.EMPTY, false); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); Map config = 
new HashMap<>(); config.put(InferenceProcessor.MODEL_ID, "my_model"); @@ -436,101 +427,90 @@ public void testCreateProcessorWithInputOutputs() { } public void testCreateProcessorWithDuplicateFields() { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo - ); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); - Map regression = new HashMap<>() { - { - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "ml"); - put( - InferenceProcessor.INFERENCE_CONFIG, - Collections.singletonMap( - RegressionConfig.NAME.getPreferredName(), - Collections.singletonMap(RegressionConfig.RESULTS_FIELD.getPreferredName(), "warning") - ) - ); - } - }; + Map regression = new HashMap<>() { + { + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.TARGET_FIELD, "ml"); + put( + InferenceProcessor.INFERENCE_CONFIG, + Collections.singletonMap( + RegressionConfig.NAME.getPreferredName(), + Collections.singletonMap(RegressionConfig.RESULTS_FIELD.getPreferredName(), "warning") + ) + ); + } + }; - Exception ex = expectThrows( - Exception.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression) - ); - assertThat(ex.getMessage(), equalTo("Invalid inference config. " + "More than one field is configured as [warning]")); - }); + Exception ex = expectThrows( + Exception.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression) + ); + assertThat(ex.getMessage(), equalTo("Invalid inference config. 
" + "More than one field is configured as [warning]")); } public void testCreateProcessorWithIgnoreMissing() { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo - ); + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); - Map regression = new HashMap<>() { - { - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); - put("ignore_missing", Boolean.TRUE); - put( - InferenceProcessor.INFERENCE_CONFIG, - Collections.singletonMap( - RegressionConfig.NAME.getPreferredName(), - Collections.singletonMap(RegressionConfig.RESULTS_FIELD.getPreferredName(), "warning") - ) - ); - } - }; + Map regression = new HashMap<>() { + { + put(InferenceProcessor.MODEL_ID, "my_model"); + put(InferenceProcessor.FIELD_MAP, Collections.emptyMap()); + put("ignore_missing", Boolean.TRUE); + put( + InferenceProcessor.INFERENCE_CONFIG, + Collections.singletonMap( + RegressionConfig.NAME.getPreferredName(), + Collections.singletonMap(RegressionConfig.RESULTS_FIELD.getPreferredName(), "warning") + ) + ); + } + }; - Exception ex = expectThrows( - Exception.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression) - ); - assertThat(ex.getMessage(), equalTo("Invalid inference config. " + "More than one field is configured as [warning]")); - }); + Exception ex = expectThrows( + Exception.class, + () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, regression) + ); + assertThat(ex.getMessage(), equalTo("Invalid inference config. 
" + "More than one field is configured as [warning]")); } public void testParseInferenceConfigFromMap() { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo + + InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( + client, + clusterService, + Settings.EMPTY, + new SetOnce<>(mock(InferenceAuditor.class)) + ); + for (var nameAndMap : List.of( + Tuple.tuple(ClassificationConfig.NAME.getPreferredName(), Map.of()), + Tuple.tuple(RegressionConfig.NAME.getPreferredName(), Map.of()), + Tuple.tuple(FillMaskConfig.NAME, Map.of()), + Tuple.tuple(NerConfig.NAME, Map.of()), + Tuple.tuple(PassThroughConfig.NAME, Map.of()), + Tuple.tuple(TextClassificationConfig.NAME, Map.of()), + Tuple.tuple(TextEmbeddingConfig.NAME, Map.of()), + Tuple.tuple(TextExpansionConfig.NAME, Map.of()), + Tuple.tuple(ZeroShotClassificationConfig.NAME, Map.of()), + Tuple.tuple(QuestionAnsweringConfig.NAME, Map.of("question", "What is the answer to life, the universe and everything?")) + )) { + assertThat( + processorFactory.inferenceConfigUpdateFromMap(Map.of(nameAndMap.v1(), nameAndMap.v2())).getName(), + equalTo(nameAndMap.v1()) ); - for (var nameAndMap : List.of( - Tuple.tuple(ClassificationConfig.NAME.getPreferredName(), Map.of()), - Tuple.tuple(RegressionConfig.NAME.getPreferredName(), Map.of()), - Tuple.tuple(FillMaskConfig.NAME, Map.of()), - Tuple.tuple(NerConfig.NAME, Map.of()), - Tuple.tuple(PassThroughConfig.NAME, Map.of()), - Tuple.tuple(TextClassificationConfig.NAME, Map.of()), - Tuple.tuple(TextEmbeddingConfig.NAME, Map.of()), - Tuple.tuple(TextExpansionConfig.NAME, Map.of()), - Tuple.tuple(ZeroShotClassificationConfig.NAME, Map.of()), - Tuple.tuple(QuestionAnsweringConfig.NAME, Map.of("question", "What is the answer to life, the universe and everything?")) - )) { - assertThat( - processorFactory.inferenceConfigUpdateFromMap(Map.of(nameAndMap.v1(), nameAndMap.v2())).getName(), - equalTo(nameAndMap.v1()) - ); - } - }); + } } public void testCreateProcessorWithIncompatibleTargetFieldSetting() { @@ -538,7 +518,7 @@ public void testCreateProcessorWithIncompatibleTargetFieldSetting() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); Map input = new HashMap<>() { @@ -574,7 +554,7 @@ public void testCreateProcessorWithIncompatibleResultFieldSetting() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); Map input = new HashMap<>() { @@ -616,7 +596,7 @@ public void testCreateProcessorWithInputFields() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); Map inputMap1 = new HashMap<>() { @@ -683,7 +663,7 @@ public void testCreateProcessorWithInputFieldSingleOrList() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); for (var isList : new boolean[] { true, false }) { @@ -727,7 +707,7 @@ public void testCreateProcessorWithInputFieldWrongType() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); { @@ -784,7 +764,7 @@ public void testParsingInputFields() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); int numInputs = randomIntBetween(1, 3); @@ 
-808,7 +788,7 @@ public void testParsingInputFieldsDuplicateFieldNames() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); int numInputs = 2; @@ -860,7 +840,7 @@ public void testParsingInputFieldsGivenNoInputs() { client, clusterService, Settings.EMPTY, - randomBoolean() + new SetOnce<>(mock(InferenceAuditor.class)) ); var e = expectThrows(ElasticsearchParseException.class, () -> processorFactory.parseInputFields("my_processor", List.of())); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java index 64251c05af7c8..d88e1235241d8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java @@ -62,6 +62,7 @@ import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.job.JobNodeSelector; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; +import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; import org.junit.Before; @@ -309,7 +310,7 @@ private OpenJobPersistentTasksExecutor createExecutor(Settings settings) { client, TestIndexNameExpressionResolver.newInstance(), licenseState, - true + mock(AnomalyDetectionAuditor.class) ); } } diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/auditor/NotificationsIndexIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/auditor/NotificationsIndexIT.java new file mode 100644 index 0000000000000..94fd24fbdfd5b --- /dev/null +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/auditor/NotificationsIndexIT.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.transform.auditor; + +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.metadata.AliasMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.indices.TestIndexNameExpressionResolver; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; +import org.elasticsearch.xpack.transform.TransformSingleNodeTestCase; +import org.elasticsearch.xpack.transform.notifications.TransformAuditor; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.is; + +public class NotificationsIndexIT extends TransformSingleNodeTestCase { + public void testAliasCreated() throws Exception { + // Auditing a notification should create the .transform-notifications-000002 + // index and the write alias + createNotification(true); + + assertBusy(() -> { + assertNotificationsIndexExists(); + assertNotificationsWriteAliasCreated(); + }); + } + + private void assertNotificationsIndexExists() { + GetIndexResponse getIndexResponse = indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT) + .setIndices(TransformInternalIndexConstants.AUDIT_INDEX) + .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) + .get(); + assertThat(Arrays.asList(getIndexResponse.getIndices()), contains(TransformInternalIndexConstants.AUDIT_INDEX)); + } + + private void assertNotificationsWriteAliasCreated() { + Map> aliases = indicesAdmin().prepareGetAliases( + TimeValue.timeValueSeconds(10L), + TransformInternalIndexConstants.AUDIT_INDEX_WRITE_ALIAS + ).setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN).get().getAliases(); + assertThat(aliases.size(), is(1)); + List indexAliases = aliases.get(TransformInternalIndexConstants.AUDIT_INDEX); + assertNotNull(aliases.toString(), indexAliases); + assertThat(indexAliases.size(), is(1)); + var writeAlias = indexAliases.get(0); + assertThat(writeAlias.alias(), is(TransformInternalIndexConstants.AUDIT_INDEX_WRITE_ALIAS)); + assertThat("notification write alias should be hidden but is not: " + aliases, writeAlias.isHidden(), is(true)); + } + + private void createNotification(boolean includeNodeInfo) { + var clusterService = getInstanceFromNode(ClusterService.class); + TransformAuditor auditor = new TransformAuditor( + client(), + clusterService.getNodeName(), + clusterService, + TestIndexNameExpressionResolver.newInstance(), + includeNodeInfo + ); + auditor.info("whatever", "blah"); + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java index d67abd45b3092..7f4b9543698b4 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java @@ -140,6 +140,7 @@ public class Transform extends Plugin implements SystemIndexPlugin, PersistentTa private final Settings settings; private final SetOnce transformServices = new SetOnce<>(); private final SetOnce transformConfigAutoMigration = new SetOnce<>(); + private final SetOnce transformAuditor = new SetOnce<>(); private final TransformExtension transformExtension = new DefaultTransformExtension(); public static final 
Integer DEFAULT_INITIAL_MAX_PAGE_SEARCH_SIZE = Integer.valueOf(500); @@ -299,8 +300,10 @@ public Collection createComponents(PluginServices services) { client, clusterService.getNodeName(), clusterService, + services.indexNameExpressionResolver(), getTransformExtension().includeNodeInfo() ); + this.transformAuditor.set(auditor); Clock clock = Clock.systemUTC(); TransformCheckpointService checkpointService = new TransformCheckpointService( clock, @@ -443,8 +446,12 @@ public void cleanUpFeature( ActionListener finalListener ) { OriginSettingClient client = new OriginSettingClient(unwrappedClient, TRANSFORM_ORIGIN); - ActionListener unsetResetModeListener = ActionListener.wrap( - success -> client.execute( + ActionListener unsetResetModeListener = ActionListener.wrap(success -> { + // + if (transformAuditor.get() != null) { + transformAuditor.get().reset(); + } + client.execute( SetResetModeAction.INSTANCE, SetResetModeActionRequest.disabled(true), ActionListener.wrap(resetSuccess -> finalListener.onResponse(success), resetFailure -> { @@ -457,7 +464,8 @@ public void cleanUpFeature( ) ); }) - ), + ); + }, failure -> client.execute( SetResetModeAction.INSTANCE, SetResetModeActionRequest.disabled(false), diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java index 7a1f874da66a7..51e679ff9fe6c 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditor; @@ -23,7 +24,7 @@ import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; import java.io.IOException; -import java.util.Collections; +import java.util.List; import static org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; @@ -38,28 +39,20 @@ public class TransformAuditor extends AbstractAuditor { private final boolean includeNodeInfo; - public TransformAuditor(Client client, String nodeName, ClusterService clusterService, boolean includeNodeInfo) { + public TransformAuditor( + Client client, + String nodeName, + ClusterService clusterService, + IndexNameExpressionResolver indexNameExpressionResolver, + boolean includeNodeInfo + ) { super( new OriginSettingClient(client, TRANSFORM_ORIGIN), - TransformInternalIndexConstants.AUDIT_INDEX, - TransformInternalIndexConstants.AUDIT_INDEX, - () -> { - try { - return new TransportPutComposableIndexTemplateAction.Request(TransformInternalIndexConstants.AUDIT_INDEX).indexTemplate( - ComposableIndexTemplate.builder() - .template(TransformInternalIndex.getAuditIndexTemplate()) - .version((long) TransformConfigVersion.CURRENT.id()) - .indexPatterns(Collections.singletonList(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN)) - .priority(Long.MAX_VALUE) - .build() - ); - } catch (IOException e) { - throw new ElasticsearchException("Failure creating transform notification index template request", e); - 
} - }, + TransformInternalIndexConstants.AUDIT_INDEX_WRITE_ALIAS, nodeName, TransformAuditMessage::new, - clusterService + clusterService, + indexNameExpressionResolver ); clusterService.addListener(event -> { if (event.metadataChanged()) { @@ -93,4 +86,30 @@ protected void writeBacklog() { super.writeBacklog(); } } + + @Override + protected TransportPutComposableIndexTemplateAction.Request putTemplateRequest() { + try { + return new TransportPutComposableIndexTemplateAction.Request(TransformInternalIndexConstants.AUDIT_INDEX).indexTemplate( + ComposableIndexTemplate.builder() + .template(TransformInternalIndex.getAuditIndexTemplate()) + .version((long) TransformConfigVersion.CURRENT.id()) + .indexPatterns(List.of(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN)) + .priority(Long.MAX_VALUE) + .build() + ); + } catch (IOException e) { + throw new ElasticsearchException("Failure creating transform notification index template request", e); + } + } + + @Override + protected int templateVersion() { + return TransformConfigVersion.CURRENT.id(); + } + + @Override + protected IndexDetails indexDetails() { + return new IndexDetails(TransformInternalIndexConstants.AUDIT_INDEX_PREFIX, TransformInternalIndexConstants.AUDIT_TEMPLATE_VERSION); + } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransformUpdaterTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransformUpdaterTests.java index fa957a2ac89cf..b9d91287ce45f 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransformUpdaterTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransformUpdaterTests.java @@ -78,7 +78,7 @@ public class TransformUpdaterTests extends ESTestCase { private TestThreadPool threadPool; private Client client; private ClusterService clusterService = mock(ClusterService.class); - private TransformAuditor auditor = new MockTransformAuditor(clusterService); + private TransformAuditor auditor = new MockTransformAuditor(clusterService, mock(IndexNameExpressionResolver.class)); private final Settings settings = Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build(); private final Settings destIndexSettings = new DefaultTransformExtension().getTransformDestinationIndexSettings(); @@ -125,7 +125,7 @@ public void setupClient() { threadPool = createThreadPool(); client = new MyMockClient(threadPool); clusterService = mock(ClusterService.class); - auditor = new MockTransformAuditor(clusterService); + auditor = new MockTransformAuditor(clusterService, mock(IndexNameExpressionResolver.class)); } @After diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/notifications/MockTransformAuditor.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/notifications/MockTransformAuditor.java index 7ae1795875db5..1dffd8c20abbf 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/notifications/MockTransformAuditor.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/notifications/MockTransformAuditor.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import 
org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; @@ -51,13 +52,13 @@ public static MockTransformAuditor createMockAuditor() { ClusterService clusterService = mock(ClusterService.class); when(clusterService.state()).thenReturn(state); - return new MockTransformAuditor(clusterService); + return new MockTransformAuditor(clusterService, mock(IndexNameExpressionResolver.class)); } private final List expectations; - public MockTransformAuditor(ClusterService clusterService) { - super(mock(Client.class), MOCK_NODE_NAME, clusterService, true); + public MockTransformAuditor(ClusterService clusterService, IndexNameExpressionResolver indexNameResolver) { + super(mock(Client.class), MOCK_NODE_NAME, clusterService, indexNameResolver, true); expectations = new CopyOnWriteArrayList<>(); } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java index c8eaa9e78f2ef..cde88089d4330 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java @@ -24,8 +24,12 @@ import java.util.stream.Stream; import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; public class MlMappingsUpgradeIT extends AbstractUpgradeTestCase { @@ -66,6 +70,7 @@ public void testMappingsUpgrade() throws Exception { assertUpgradedConfigMappings(); assertMlLegacyTemplatesDeleted(); IndexMappingTemplateAsserter.assertMlMappingsMatchTemplates(client()); + assertNotificationsIndexAliasCreated(); break; default: throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]"); @@ -237,4 +242,22 @@ private void assertUpgradedConfigMappings() throws Exception { ); }); } + + @SuppressWarnings("unchecked") + private void assertNotificationsIndexAliasCreated() throws Exception { + assertBusy(() -> { + Request getMappings = new Request("GET", "_alias/.ml-notifications-write"); + Response response = client().performRequest(getMappings); + Map responseMap = entityAsMap(response); + assertThat(responseMap.entrySet(), hasSize(1)); + var aliases = (Map) responseMap.get(".ml-notifications-000002"); + assertThat(aliases.entrySet(), hasSize(1)); + var allAliases = (Map) aliases.get("aliases"); + var writeAlias = (Map) allAliases.get(".ml-notifications-write"); + + assertThat(writeAlias, hasEntry("is_hidden", Boolean.TRUE)); + var isWriteIndex = (Boolean) writeAlias.get("is_write_index"); + assertThat(isWriteIndex, anyOf(is(Boolean.TRUE), nullValue())); + }); + } } From 3669e061d42d378a97653c1e884d9cd04efca96f Mon Sep 17 00:00:00 2001 From: Stanislav Malyshev Date: Wed, 29 Jan 2025 09:44:42 -0700 Subject: [PATCH 218/383] Fix typo in docs example (#121206) --- docs/reference/esql/esql-async-query-stop-api.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/esql/esql-async-query-stop-api.asciidoc b/docs/reference/esql/esql-async-query-stop-api.asciidoc index dba5282d224ed..1eb6711ae5840 100644 --- a/docs/reference/esql/esql-async-query-stop-api.asciidoc +++ 
b/docs/reference/esql/esql-async-query-stop-api.asciidoc @@ -23,7 +23,7 @@ field set to `true`. [source,console] ---- -POST /query/async/FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=/stop +POST /_query/async/FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=/stop ---- // TEST[skip: no access to query ID] From 656d36aacfe65d00453ee6485fef1f52416804c2 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 29 Jan 2025 11:51:14 -0500 Subject: [PATCH 219/383] ESQL: Bump heap attack suite timeout (#121198) The heap attack test is growing so it is taking longer. We'll work to speed it up, but for now, let's bump the timeout. Closes #121112 --- .../elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 2e68c094492fe..f732f7cbbf00d 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -7,9 +7,12 @@ package org.elasticsearch.xpack.esql.heap_attack; +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; + import org.apache.http.HttpHost; import org.apache.http.client.config.RequestConfig; import org.apache.http.util.EntityUtils; +import org.apache.lucene.tests.util.TimeUnits; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -64,6 +67,7 @@ * Tests that run ESQL queries that use a ton of memory. We want to make * sure they don't consume the entire heap and crash Elasticsearch. */ +@TimeoutSuite(millis = 40 * TimeUnits.MINUTE) public class HeapAttackIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = Clusters.buildCluster(); From 1736062ffacb71f6aa692e23030ec06f534c4cb5 Mon Sep 17 00:00:00 2001 From: Fernando Briano Date: Wed, 29 Jan 2025 17:21:41 +0000 Subject: [PATCH 220/383] Adds SearchFlip to community clients (#110814) --- docs/community-clients/index.asciidoc | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/docs/community-clients/index.asciidoc b/docs/community-clients/index.asciidoc index ebde04b02f18a..cb8d7f58c612f 100644 --- a/docs/community-clients/index.asciidoc +++ b/docs/community-clients/index.asciidoc @@ -205,10 +205,6 @@ client]. Also see the {client}/ruby-api/current/index.html[official Elasticsearch Ruby client]. -* https://github.com/printercu/elastics-rb[elastics]: - Tiny client with built-in zero-downtime migrations and ActiveRecord integration. - **- Last commit more than a year ago** - * https://github.com/toptal/chewy[chewy]: An ODM and wrapper for the official Elasticsearch client. @@ -218,6 +214,13 @@ Also see the {client}/ruby-api/current/index.html[official Elasticsearch Ruby cl * https://github.com/artsy/estella[Estella]: Make your Ruby models searchable. +* https://github.com/mrkamel/search_flip[SearchFlip]: + Full-Featured Elasticsearch Ruby Client with a Chainable DSL. + +* https://github.com/printercu/elastics-rb[elastics]: + Tiny client with built-in zero-downtime migrations and ActiveRecord integration. 
+ **- Last commit more than a year ago** + [[rust]] == Rust @@ -267,4 +270,4 @@ client]. * https://github.com/reactiverse/elasticsearch-client[elasticsearch-client]: An Elasticsearch client for Eclipse Vert.x - **- Last commit more than a year ago** \ No newline at end of file + **- Last commit more than a year ago** From d3f20e5b4bab713bd5700aaa7aee5c10cde5dccd Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Wed, 29 Jan 2025 12:22:40 -0500 Subject: [PATCH 221/383] Updated resolve/cluster end user docs with information about the timeout flag and no index expression endpoint (#121199) --- .../indices/resolve-cluster.asciidoc | 58 ++++++++++++++++--- 1 file changed, 50 insertions(+), 8 deletions(-) diff --git a/docs/reference/indices/resolve-cluster.asciidoc b/docs/reference/indices/resolve-cluster.asciidoc index f7d21e8c0b8ea..195cbb997adb1 100644 --- a/docs/reference/indices/resolve-cluster.asciidoc +++ b/docs/reference/indices/resolve-cluster.asciidoc @@ -11,7 +11,9 @@ For the most up-to-date API details, refer to {api-es}/group/endpoint-indices[In -- Resolves the specified index expressions to return information about -each cluster, including the local "querying" cluster, if included. +each cluster, including the local "querying" cluster, if included. If no index expression +is provided, this endpoint will return information about all the remote +clusters that are configured on the querying cluster. This endpoint is useful before doing a <> in order to determine which remote clusters should be included in a search. @@ -27,7 +29,7 @@ For each cluster in scope, information is returned about: contact the remote clusters, unlike the <> endpoint. 2. whether each remote cluster is configured with `skip_unavailable` as `true` or `false` 3. whether there are any indices, aliases or data streams on that cluster that match - the index expression + the index expression (if one provided) 4. whether the search is likely to have errors returned when you do a {ccs} (including any authorization errors if your user does not have permission to query a remote cluster or the indices on that cluster) @@ -42,6 +44,12 @@ Once the proper security permissions are obtained, then you can rely on the `con in the response to determine whether the remote cluster is available and ready for querying. ==== +NOTE: The ability to query without an index expression was added in 8.18, so when +querying remote clusters older than that, the local cluster will send the index +expression `dummy*` to those remote clusters. Thus, if an errors occur, you may see a reference +to that index expression even though you didn't request it. If it causes a problem, you can +instead include an index expression like `*:*` to bypass the issue. + //// [source,console] -------------------------------- @@ -71,6 +79,15 @@ PUT _cluster/settings // TEST[s/35.238.149.\d+:930\d+/\${transport_host}/] //// +[source,console] +---- +GET /_resolve/cluster +---- +// TEST[continued] + +Returns information about all remote clusters configured on the local cluster +without doing any index matching. + [source,console] ---- GET /_resolve/cluster/my-index-*,cluster*:my-index-* @@ -108,6 +125,15 @@ Resources on <> can be specified using the [[resolve-cluster-api-query-params]] ==== {api-query-parms-title} +`timeout`:: +(Optional, TimeValue) Specify a max wait time for remote clusters to respond. 
+If a remote cluster does not respond within this timeout period, the API response +will show the cluster as not connected and include an error message that the +request timed out. The default timeout is unset and the query can take +as long as the networking layer is configured to wait for remote clusters that are +not responding (typically 30 seconds). ++ + include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. @@ -126,6 +152,13 @@ ignored when frozen. Defaults to `false`. + deprecated:[7.16.0] +[TIP] +==== +The index options above are only allowed when specifying an index expression. +You will get an error if you specify index options to the _resolve/cluster API +that takes no index expression. +==== + [discrete] [[usecases-for-resolve-cluster]] @@ -137,8 +170,8 @@ necessarily reflect whether the remote cluster is available or not. The remote c be available, while the local cluster is not currently connected to it. You can use the resolve-cluster API to attempt to reconnect to remote clusters -(for example with `GET _resolve/cluster/*:*`) and -the `connected` field in the response will indicate whether it was successful or not. +(for example with `GET _resolve/cluster` or `GET _resolve/cluster/*:*` ). +The `connected` field in the response will indicate whether it was successful. If a connection was (re-)established, this will also cause the <> endpoint to now indicate a connected status. @@ -231,11 +264,12 @@ The API returns the following response: ==== Identifying potential problems with your {ccs} The following request shows several examples of how modifying your query can -prevent search failures. +prevent search failures. Note also that a `timeout` of 5 seconds is sent, which +sets the maximum time the query will wait for remote clusters to respond. [source,console] -------------------------------------------------- -GET /_resolve/cluster/not-present,clust*:my-index*,oldcluster:*?ignore_unavailable=false +GET /_resolve/cluster/not-present,clust*:my-index*,oldcluster:*?ignore_unavailable=false&timeout=5s -------------------------------------------------- // TEST[continued] // TEST[s/,oldcluster:*//] @@ -263,7 +297,12 @@ GET /_resolve/cluster/not-present,clust*:my-index*,oldcluster:*?ignore_unavailab "connected": false, <3> "skip_unavailable": false }, - "oldcluster": { <4> + "cluster_three": { + "connected": false, + "skip_unavailable": false, + "error": "Request timed out before receiving a response from the remote cluster" <4> + }, + "oldcluster": { <5> "connected": true, "skip_unavailable": false, "matching_indices": true @@ -285,7 +324,10 @@ could be closed. (You can check this by using the failed). Since this cluster is marked as `skip_unavailable=false`, you should probably exclude this cluster from the search by adding `-cluster_two:*` to the search index expression. -<4> The `oldcluster` remote cluster shows that it has matching indices, but no +<4> For `cluster_three`, the error message indicates that this remote cluster did +not respond within the 5-second timeout window specified, so it is also marked as +not connected. +<5> The `oldcluster` remote cluster shows that it has matching indices, but no version information is included. This indicates that the cluster version predates the introduction of the `_resolve/cluster` API in 8.13.0., so you may want to exclude it from your {ccs}. 
(Note: the endpoint was able to tell there were From ae0f1a64b571c319e33a24bc8a05a1fa1d1668b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Fern=C3=A1ndez=20Casta=C3=B1o?= Date: Wed, 29 Jan 2025 18:49:48 +0100 Subject: [PATCH 222/383] Remove INDEX_REFRESH_BLOCK after index becomes searchable (#120807) This commit enhances the ShardStartedClusterStateTaskExecutor by introducing functionality to automatically remove the INDEX_REFRESH_BLOCK once an index becomes searchable. The change ensures search availability by checking that at least one copy of each searchable shard is available whenever an unpromotable shard is started. Once this condition is met, the INDEX_REFRESH_BLOCK is removed. Closes ES-10278 --- docs/changelog/120807.yaml | 5 + .../action/shard/ShardStateAction.java | 55 ++++++++ ...dStartedClusterStateTaskExecutorTests.java | 117 ++++++++++++++++++ .../ClusterStateCreationUtils.java | 48 +++++-- 4 files changed, 217 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/120807.yaml diff --git a/docs/changelog/120807.yaml b/docs/changelog/120807.yaml new file mode 100644 index 0000000000000..02083be207846 --- /dev/null +++ b/docs/changelog/120807.yaml @@ -0,0 +1,5 @@ +pr: 120807 +summary: Remove INDEX_REFRESH_BLOCK after index becomes searchable +area: CRUD +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index ed6ca57d67b25..388baca6c1048 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -23,6 +23,8 @@ import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.NotMasterException; +import org.elasticsearch.cluster.block.ClusterBlock; +import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.coordination.FailedToCommitClusterStateException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; @@ -70,6 +72,7 @@ import static org.apache.logging.log4j.Level.DEBUG; import static org.apache.logging.log4j.Level.ERROR; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_REFRESH_BLOCK; import static org.elasticsearch.cluster.service.MasterService.isPublishFailureException; import static org.elasticsearch.core.Strings.format; @@ -619,6 +622,7 @@ public ClusterState execute(BatchExecutionContext batchE List> tasksToBeApplied = new ArrayList<>(); List shardRoutingsToBeApplied = new ArrayList<>(batchExecutionContext.taskContexts().size()); Set seenShardRoutings = new HashSet<>(); // to prevent duplicates + Set indicesWithUnpromotableShardsStarted = null; final Map updatedTimestampRanges = new HashMap<>(); final ClusterState initialState = batchExecutionContext.initialState(); for (var taskContext : batchExecutionContext.taskContexts()) { @@ -737,6 +741,14 @@ public ClusterState execute(BatchExecutionContext batchE new ClusterStateTimeRanges(newTimestampMillisRange, newEventIngestedMillisRange) ); } + + if (matched.isPromotableToPrimary() == false + && initialState.blocks().hasIndexBlock(index.getName(), INDEX_REFRESH_BLOCK)) { + if (indicesWithUnpromotableShardsStarted == null) { + indicesWithUnpromotableShardsStarted = new HashSet<>(); + } + indicesWithUnpromotableShardsStarted.add(index); + 
} } } } @@ -760,7 +772,10 @@ public ClusterState execute(BatchExecutionContext batchE maybeUpdatedState = ClusterState.builder(maybeUpdatedState).metadata(metadataBuilder).build(); } + maybeUpdatedState = maybeRemoveIndexRefreshBlocks(maybeUpdatedState, indicesWithUnpromotableShardsStarted); + assert assertStartedIndicesHaveCompleteTimestampRanges(maybeUpdatedState); + assert assertRefreshBlockIsNotPresentWhenTheIndexIsSearchable(maybeUpdatedState); for (final var taskContext : tasksToBeApplied) { final var task = taskContext.getTask(); @@ -776,6 +791,36 @@ public ClusterState execute(BatchExecutionContext batchE return maybeUpdatedState; } + private static ClusterState maybeRemoveIndexRefreshBlocks( + ClusterState clusterState, + @Nullable Set indicesWithUnpromotableShardsStarted + ) { + // The provided cluster state must include the newly STARTED unpromotable shards + if (indicesWithUnpromotableShardsStarted == null) { + return clusterState; + } + + ClusterBlocks.Builder clusterBlocksBuilder = null; + for (Index indexWithUnpromotableShardsStarted : indicesWithUnpromotableShardsStarted) { + String indexName = indexWithUnpromotableShardsStarted.getName(); + assert clusterState.blocks().hasIndexBlock(indexName, INDEX_REFRESH_BLOCK) : indexWithUnpromotableShardsStarted; + + var indexRoutingTable = clusterState.routingTable().index(indexWithUnpromotableShardsStarted); + if (indexRoutingTable.readyForSearch()) { + if (clusterBlocksBuilder == null) { + clusterBlocksBuilder = ClusterBlocks.builder(clusterState.blocks()); + } + clusterBlocksBuilder.removeIndexBlock(indexName, INDEX_REFRESH_BLOCK); + } + } + + if (clusterBlocksBuilder == null) { + return clusterState; + } + + return ClusterState.builder(clusterState).blocks(clusterBlocksBuilder).build(); + } + private static boolean assertStartedIndicesHaveCompleteTimestampRanges(ClusterState clusterState) { for (Map.Entry cursor : clusterState.getRoutingTable().getIndicesRouting().entrySet()) { assert cursor.getValue().allPrimaryShardsActive() == false @@ -799,6 +844,16 @@ private static boolean assertStartedIndicesHaveCompleteTimestampRanges(ClusterSt return true; } + private static boolean assertRefreshBlockIsNotPresentWhenTheIndexIsSearchable(ClusterState clusterState) { + for (Map.Entry> indexBlock : clusterState.blocks().indices().entrySet()) { + if (indexBlock.getValue().contains(INDEX_REFRESH_BLOCK)) { + assert clusterState.routingTable().index(indexBlock.getKey()).readyForSearch() == false + : "Index [" + indexBlock.getKey() + "] is searchable but has an INDEX_REFRESH_BLOCK"; + } + } + return true; + } + @Override public void clusterStatePublished(ClusterState newClusterState) { rerouteService.reroute( diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java index ca7376a43d718..6f67898bbbdb0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java @@ -12,24 +12,31 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionTestUtils; +import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; 
import org.elasticsearch.cluster.action.shard.ShardStateAction.StartedShardEntry; import org.elasticsearch.cluster.action.shard.ShardStateAction.StartedShardUpdateTask; +import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.routing.AllocationId; +import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardLongFieldRange; import java.util.List; +import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -37,9 +44,11 @@ import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithActivePrimary; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithAssignedPrimariesAndReplicas; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithNoShard; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_REFRESH_BLOCK; import static org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; public class ShardStartedClusterStateTaskExecutorTests extends ESAllocationTestCase { @@ -479,6 +488,114 @@ public void testExpandsTimestampRangeForReplica() throws Exception { assertThat(latestIndexMetadata.getEventIngestedRange(), sameInstance(IndexLongFieldRange.UNKNOWN)); } + public void testIndexRefreshBlockIsClearedOnceTheIndexIsReadyToBeSearched() throws Exception { + final var indexName = "test"; + final var numberOfShards = randomIntBetween(1, 4); + final var numberOfReplicas = randomIntBetween(1, 4); + var clusterState = ClusterStateCreationUtils.stateWithAssignedPrimariesAndReplicasWithState( + new String[] { indexName }, + numberOfShards, + ShardRouting.Role.INDEX_ONLY, + IntStream.range(0, numberOfReplicas) + .mapToObj(unused -> Tuple.tuple(ShardRoutingState.UNASSIGNED, ShardRouting.Role.SEARCH_ONLY)) + .toList() + ); + + clusterState = ClusterState.builder(clusterState) + .metadata(Metadata.builder(clusterState.metadata()).put(withActiveShardsInSyncAllocationIds(clusterState, indexName))) + .blocks(ClusterBlocks.builder(clusterState.blocks()).addIndexBlock(indexName, INDEX_REFRESH_BLOCK)) + .build(); + + while (clusterState.blocks().hasIndexBlock(indexName, INDEX_REFRESH_BLOCK)) { + clusterState = maybeInitializeUnassignedReplicaShard(clusterState); + + final IndexMetadata indexMetadata = clusterState.metadata().index(indexName); + + final var initializingReplicaShardOpt = clusterState.routingTable() + .allShards() + .filter(shardRouting -> shardRouting.isPromotableToPrimary() == 
false) + .filter(shardRouting -> shardRouting.state().equals(ShardRoutingState.INITIALIZING)) + .findFirst(); + + assertThat(clusterState.routingTable().allShards().toList().toString(), initializingReplicaShardOpt.isPresent(), is(true)); + + var initializingReplicaShard = initializingReplicaShardOpt.get(); + + final var shardId = initializingReplicaShard.shardId(); + final var primaryTerm = indexMetadata.primaryTerm(shardId.id()); + final var replicaAllocationId = initializingReplicaShard.allocationId().getId(); + final var task = new StartedShardUpdateTask( + new StartedShardEntry( + shardId, + replicaAllocationId, + primaryTerm, + "test", + ShardLongFieldRange.UNKNOWN, + ShardLongFieldRange.UNKNOWN + ), + createTestListener() + ); + + final var resultingState = executeTasks(clusterState, List.of(task)); + assertNotSame(clusterState, resultingState); + + clusterState = resultingState; + } + + var indexRoutingTable = clusterState.routingTable().index(indexName); + assertThat(indexRoutingTable.readyForSearch(), is(true)); + for (int i = 0; i < numberOfShards; i++) { + var shardRoutingTable = indexRoutingTable.shard(i); + assertThat(shardRoutingTable, is(notNullValue())); + // Ensure that at least one unpromotable shard is either STARTED or RELOCATING + assertThat(shardRoutingTable.unpromotableShards().isEmpty(), is(false)); + } + assertThat(clusterState.blocks().hasIndexBlock(indexName, INDEX_REFRESH_BLOCK), is(false)); + } + + private static ClusterState maybeInitializeUnassignedReplicaShard(ClusterState clusterState) { + var unassignedShardRoutingOpt = clusterState.routingTable() + .allShards() + .filter(shardRouting -> shardRouting.state().equals(ShardRoutingState.UNASSIGNED)) + .findFirst(); + + if (unassignedShardRoutingOpt.isEmpty()) { + return clusterState; + } + + var unassignedShardRouting = unassignedShardRoutingOpt.get(); + var initializedShard = unassignedShardRouting.initialize(randomUUID(), null, 1); + + RoutingTable routingTable = clusterState.routingTable(); + IndexRoutingTable indexRoutingTable = routingTable.index(unassignedShardRouting.getIndexName()); + IndexRoutingTable.Builder newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex()); + for (int shardId = 0; shardId < indexRoutingTable.size(); shardId++) { + IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(shardId); + for (int copy = 0; copy < shardRoutingTable.size(); copy++) { + ShardRouting shardRouting = shardRoutingTable.shard(copy); + newIndexRoutingTable.addShard(shardRouting == unassignedShardRouting ? 
initializedShard : shardRouting); + } + } + routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build(); + return ClusterState.builder(clusterState).routingTable(routingTable).build(); + } + + private static IndexMetadata.Builder withActiveShardsInSyncAllocationIds(ClusterState clusterState, String indexName) { + IndexMetadata.Builder indexMetadataBuilder = new IndexMetadata.Builder(clusterState.metadata().index(indexName)); + var indexRoutingTable = clusterState.routingTable().index(indexName); + for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable.allShards().toList()) { + indexMetadataBuilder.putInSyncAllocationIds( + indexShardRoutingTable.shardId().getId(), + indexShardRoutingTable.activeShards() + .stream() + .map(ShardRouting::allocationId) + .map(AllocationId::getId) + .collect(Collectors.toSet()) + ); + } + return indexMetadataBuilder; + } + private ClusterState executeTasks(final ClusterState state, final List tasks) throws Exception { return ClusterStateTaskExecutorUtils.executeAndAssertSuccessful(state, executor, tasks); } diff --git a/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java b/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java index 950c54ddb1d22..fa2247ddabea0 100644 --- a/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java @@ -363,7 +363,34 @@ public static ClusterState stateWithAssignedPrimariesAndReplicas( int numberOfShards, List replicaRoles ) { - int numberOfDataNodes = replicaRoles.size() + 1; + return stateWithAssignedPrimariesAndReplicasWithState( + indices, + numberOfShards, + replicaRoles.stream().map(role -> Tuple.tuple(ShardRoutingState.STARTED, role)).toList() + ); + } + + /** + * Creates cluster state with several indexes, shards and replicas (with given roles and state) and all primary shards STARTED. + */ + public static ClusterState stateWithAssignedPrimariesAndReplicasWithState( + String[] indices, + int numberOfShards, + List> replicaRoleAndStates + ) { + return stateWithAssignedPrimariesAndReplicasWithState(indices, numberOfShards, ShardRouting.Role.DEFAULT, replicaRoleAndStates); + } + + /** + * Creates cluster state with several indexes, shards and replicas (with given roles and state) and all primary shards STARTED. 
+ */ + public static ClusterState stateWithAssignedPrimariesAndReplicasWithState( + String[] indices, + int numberOfShards, + ShardRouting.Role primaryRole, + List> replicasStateAndRoles + ) { + int numberOfDataNodes = replicasStateAndRoles.size() + 1; DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); for (int i = 0; i < numberOfDataNodes + 1; i++) { final DiscoveryNode node = newNode(i); @@ -383,7 +410,7 @@ public static ClusterState stateWithAssignedPrimariesAndReplicas( for (String index : indices) { IndexMetadata indexMetadata = IndexMetadata.builder(index) .settings( - indexSettings(IndexVersion.current(), numberOfShards, replicaRoles.size()).put( + indexSettings(IndexVersion.current(), numberOfShards, replicasStateAndRoles.size()).put( SETTING_CREATION_DATE, System.currentTimeMillis() ) @@ -397,14 +424,19 @@ public static ClusterState stateWithAssignedPrimariesAndReplicas( final ShardId shardId = new ShardId(index, "_na_", i); IndexShardRoutingTable.Builder indexShardRoutingBuilder = IndexShardRoutingTable.builder(shardId); indexShardRoutingBuilder.addShard( - TestShardRouting.newShardRouting(index, i, newNode(0).getId(), null, true, ShardRoutingState.STARTED) + shardRoutingBuilder(index, i, newNode(0).getId(), true, ShardRoutingState.STARTED).withRole(primaryRole).build() ); - for (int replica = 0; replica < replicaRoles.size(); replica++) { - indexShardRoutingBuilder.addShard( - shardRoutingBuilder(index, i, newNode(replica + 1).getId(), false, ShardRoutingState.STARTED).withRole( - replicaRoles.get(replica) - ).build() + for (int replica = 0; replica < replicasStateAndRoles.size(); replica++) { + var replicaStateAndRole = replicasStateAndRoles.get(replica); + ShardRoutingState shardRoutingState = replicaStateAndRole.v1(); + String currentNodeId = shardRoutingState.equals(ShardRoutingState.UNASSIGNED) ? null : newNode(replica + 1).getId(); + var shardRoutingBuilder = shardRoutingBuilder(index, i, currentNodeId, false, shardRoutingState).withRole( + replicaStateAndRole.v2() ); + if (shardRoutingState.equals(ShardRoutingState.RELOCATING)) { + shardRoutingBuilder.withRelocatingNodeId(DiscoveryNodeUtils.create("relocating_" + replica).getId()); + } + indexShardRoutingBuilder.addShard(shardRoutingBuilder.build()); } indexRoutingTableBuilder.addIndexShard(indexShardRoutingBuilder); } From f38c64e0dc3d929c31402cf443695ed13820d33d Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Wed, 29 Jan 2025 17:58:39 +0000 Subject: [PATCH 223/383] Isolate Real-Time Get from Translog Assertion Logic (#121161) This commit refactors the logic introduced in #121092, ensuring that translog assertion logic related to inference fields resides exclusively within `TranslogOperationAsserter`. For mappings that contain inference fields, we regenerate the metadata field's content during peer recovery. This can create discrepancies between the original and regenerated sources, which is expected since indexed values may differ from the original. This change ensures that we compare the "synthetic" version of index operations in the same way we do for mappings with synthetic source enabled. In contrast, real-time get (`realtime-get`) simply filters inference fields from the original source, eliminating the need to synthesize them as we do for synthetic sources. 
--- .../index/engine/InternalEngine.java | 3 +- .../index/engine/TranslogDirectoryReader.java | 5 +- .../engine/TranslogOperationAsserter.java | 2 +- .../ShardBulkInferenceActionFilterIT.java | 63 +++++++++++-------- 4 files changed, 42 insertions(+), 31 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index d3d7dcd8e930f..7f6fe40dbaaf0 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -825,7 +825,8 @@ private GetResult getFromTranslog( mappingLookup, documentParser, config(), - translogInMemorySegmentsCount::incrementAndGet + translogInMemorySegmentsCount::incrementAndGet, + false ); final Searcher searcher = new Searcher( "realtime_get", diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index 9537fd0703149..598fb076ba222 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -88,7 +88,8 @@ static DirectoryReader create( MappingLookup mappingLookup, DocumentParser documentParser, EngineConfig engineConfig, - Runnable onSegmentCreated + Runnable onSegmentCreated, + boolean forceSynthetic ) throws IOException { final Directory directory = new ByteBuffersDirectory(); boolean success = false; @@ -97,7 +98,7 @@ static DirectoryReader create( // When using synthetic source, the translog operation must always be reindexed into an in-memory Lucene to ensure consistent // output for realtime-get operations. However, this can degrade the performance of realtime-get and update operations. // If slight inconsistencies in realtime-get operations are acceptable, the translog operation can be reindexed lazily. 
- if (mappingLookup.isSourceSynthetic() || mappingLookup.inferenceFields().isEmpty() == false) { + if (mappingLookup.isSourceSynthetic() || forceSynthetic) { onSegmentCreated.run(); leafReader = createInMemoryReader(shardId, engineConfig, directory, documentParser, mappingLookup, false, operation); } else { diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java index 90eaea78b3893..d8b51648cb586 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogOperationAsserter.java @@ -52,7 +52,7 @@ static Translog.Index synthesizeSource(EngineConfig engineConfig, Translog.Index final ShardId shardId = engineConfig.getShardId(); final MappingLookup mappingLookup = engineConfig.getMapperService().mappingLookup(); final DocumentParser documentParser = engineConfig.getMapperService().documentParser(); - try (var reader = TranslogDirectoryReader.create(shardId, op, mappingLookup, documentParser, engineConfig, () -> {})) { + try (var reader = TranslogDirectoryReader.create(shardId, op, mappingLookup, documentParser, engineConfig, () -> {}, true)) { final Engine.Searcher searcher = new Engine.Searcher( "assert_translog", reader, diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 9da6b52555498..303f957c7ab20 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -20,8 +20,9 @@ import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.plugins.Plugin; @@ -49,14 +50,21 @@ public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { public static final String INDEX_NAME = "test-index"; private final boolean useLegacyFormat; + private final boolean useSyntheticSource; - public ShardBulkInferenceActionFilterIT(boolean useLegacyFormat) { + public ShardBulkInferenceActionFilterIT(boolean useLegacyFormat, boolean useSyntheticSource) { this.useLegacyFormat = useLegacyFormat; + this.useSyntheticSource = useSyntheticSource; } @ParametersFactory public static Iterable parameters() throws Exception { - return List.of(new Object[] { true }, new Object[] { false }); + return List.of( + new Object[] { true, false }, + new Object[] { true, true }, + new Object[] { false, false }, + new Object[] { false, true } + ); } @Before @@ -79,37 +87,38 @@ protected Collection> nodePlugins() { @Override public Settings indexSettings() { - return Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + var 
builder = Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 10)) - .put(InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT.getKey(), useLegacyFormat) - .build(); + .put(InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT.getKey(), useLegacyFormat); + if (useSyntheticSource) { + builder.put(IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE_SETTING.getKey(), true); + builder.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC.name()); + } + return builder.build(); } public void testBulkOperations() throws Exception { - indicesAdmin().prepareCreate(INDEX_NAME) - .setMapping( - String.format( - Locale.ROOT, - """ - { - "properties": { - "sparse_field": { - "type": "semantic_text", - "inference_id": "%s" - }, - "dense_field": { - "type": "semantic_text", - "inference_id": "%s" - } + prepareCreate(INDEX_NAME).setMapping( + String.format( + Locale.ROOT, + """ + { + "properties": { + "sparse_field": { + "type": "semantic_text", + "inference_id": "%s" + }, + "dense_field": { + "type": "semantic_text", + "inference_id": "%s" + } } } - """, - TestSparseInferenceServiceExtension.TestInferenceService.NAME, - TestDenseInferenceServiceExtension.TestInferenceService.NAME - ) + } + """, + TestSparseInferenceServiceExtension.TestInferenceService.NAME, + TestDenseInferenceServiceExtension.TestInferenceService.NAME ) - .get(); + ).get(); int totalBulkReqs = randomIntBetween(2, 100); long totalDocs = 0; From 51b4fffb5e7f5d14aefeb370c853adae449e6f65 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Wed, 29 Jan 2025 19:14:15 +0100 Subject: [PATCH 224/383] Default to `SSHA-256` as API key stored credential hasher (#120997) API keys are high-entropy secure random strings. This means that the additional work factor of functions like PBKDF or bcrypt is not necessary, and a faster hash function like salted SHA-256 provides adequate security against offline attacks (hash collision, brute force, etc.). This PR adds `SSHA-256` to the list of supported stored hash algorithms for API key secrets, and makes it the default algorithm. Additionally, this PR changes the format of API key secrets, moving from an encoded UUID to a random string, which increases the entropy of API keys from 122 bits to 128 bits, without changing overall secret length.
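For illustration, a minimal sketch of the secret format described above: 16 securely random bytes encoded as unpadded URL-safe Base64 yield a 22-character secret carrying 128 bits of entropy. This is not the production code (the patch itself adds a `SecureRandomUtils.getBase64SecureRandomString` helper that returns a `SecureString` and zeroes its intermediate buffers); the class and method names below are hypothetical.

import java.security.SecureRandom;
import java.util.Base64;

// Hypothetical sketch only: generate a 128-bit secret as an unpadded,
// URL-safe Base64 string. 16 random bytes encode to 22 Base64 characters.
public final class ApiKeySecretSketch {
    private static final SecureRandom RANDOM = new SecureRandom();

    static String newSecret() {
        byte[] bytes = new byte[16]; // 128 bits of entropy
        RANDOM.nextBytes(bytes);
        return Base64.getUrlEncoder().withoutPadding().encodeToString(bytes);
    }

    public static void main(String[] args) {
        System.out.println(newSecret()); // prints a 22-character secret
    }
}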
Relates: ES-9504 --- docs/changelog/120997.yaml | 5 ++ .../settings/security-hash-settings.asciidoc | 64 +++++++++++++++ .../settings/security-settings.asciidoc | 8 +- .../common/SecureRandomUtils.java | 41 ++++++++++ .../xpack/core/XPackSettings.java | 47 +++++++++-- .../action/user/PutUserRequestBuilder.java | 4 +- .../core/security/authc/support/Hasher.java | 16 +++- .../xpack/security/Security.java | 37 ++++++--- .../user/ChangePasswordRequestBuilder.java | 4 +- .../user/TransportChangePasswordAction.java | 4 +- .../xpack/security/authc/ApiKeyService.java | 18 ++--- .../xpack/security/SecurityTests.java | 81 +++++++++++++++---- .../ChangePasswordRequestBuilderTests.java | 2 +- .../user/PutUserRequestBuilderTests.java | 2 +- 14 files changed, 279 insertions(+), 54 deletions(-) create mode 100644 docs/changelog/120997.yaml create mode 100644 server/src/main/java/org/elasticsearch/common/SecureRandomUtils.java diff --git a/docs/changelog/120997.yaml b/docs/changelog/120997.yaml new file mode 100644 index 0000000000000..6b56578404371 --- /dev/null +++ b/docs/changelog/120997.yaml @@ -0,0 +1,5 @@ +pr: 120997 +summary: Allow `SSHA-256` for API key credential hash +area: Authentication +type: enhancement +issues: [] diff --git a/docs/reference/settings/security-hash-settings.asciidoc b/docs/reference/settings/security-hash-settings.asciidoc index 93350a7749405..79819e4a389aa 100644 --- a/docs/reference/settings/security-hash-settings.asciidoc +++ b/docs/reference/settings/security-hash-settings.asciidoc @@ -124,4 +124,68 @@ following: initial input with SHA512 first. |======================= +Furthermore, {es} supports authentication via securely-generated high entropy tokens, +for instance <>. +Analogous to passwords, only the tokens' hashes are stored. Since the tokens are guaranteed +to have sufficiently high entropy to resist offline attacks, secure salted hash functions are supported +in addition to the password-hashing algorithms mentioned above. +You can configure the algorithm for API key stored credential hashing +by setting the <> +`xpack.security.authc.api_key.hashing.algorithm` setting to one of the +following + +[[secure-token-hashing-algorithms]] +.Secure token hashing algorithms +|======================= +| Algorithm | | | Description + +| `ssha256` | | | Uses a salted `sha-256` algorithm. (default) +| `bcrypt` | | | Uses `bcrypt` algorithm with salt generated in 1024 rounds. +| `bcrypt4` | | | Uses `bcrypt` algorithm with salt generated in 16 rounds. +| `bcrypt5` | | | Uses `bcrypt` algorithm with salt generated in 32 rounds. +| `bcrypt6` | | | Uses `bcrypt` algorithm with salt generated in 64 rounds. +| `bcrypt7` | | | Uses `bcrypt` algorithm with salt generated in 128 rounds. +| `bcrypt8` | | | Uses `bcrypt` algorithm with salt generated in 256 rounds. +| `bcrypt9` | | | Uses `bcrypt` algorithm with salt generated in 512 rounds. +| `bcrypt10` | | | Uses `bcrypt` algorithm with salt generated in 1024 rounds. +| `bcrypt11` | | | Uses `bcrypt` algorithm with salt generated in 2048 rounds. +| `bcrypt12` | | | Uses `bcrypt` algorithm with salt generated in 4096 rounds. +| `bcrypt13` | | | Uses `bcrypt` algorithm with salt generated in 8192 rounds. +| `bcrypt14` | | | Uses `bcrypt` algorithm with salt generated in 16384 rounds. +| `pbkdf2` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 10000 iterations. 
+| `pbkdf2_1000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 1000 iterations. +| `pbkdf2_10000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 10000 iterations. +| `pbkdf2_50000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 50000 iterations. +| `pbkdf2_100000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 100000 iterations. +| `pbkdf2_500000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 500000 iterations. +| `pbkdf2_1000000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 1000000 iterations. +| `pbkdf2_stretch` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 10000 iterations, after hashing the + initial input with SHA512 first. +| `pbkdf2_stretch_1000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 1000 iterations, after hashing the + initial input with SHA512 first. +| `pbkdf2_stretch_10000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 10000 iterations, after hashing the + initial input with SHA512 first. +| `pbkdf2_stretch_50000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 50000 iterations, after hashing the + initial input with SHA512 first. +| `pbkdf2_stretch_100000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 100000 iterations, after hashing the + initial input with SHA512 first. +| `pbkdf2_stretch_500000` | | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 500000 iterations, after hashing the + initial input with SHA512 first. +| `pbkdf2_stretch_1000000`| | | Uses `PBKDF2` key derivation function with `HMAC-SHA512` as a + pseudorandom function using 1000000 iterations, after hashing the + initial input with SHA512 first. +|======================= diff --git a/docs/reference/settings/security-settings.asciidoc b/docs/reference/settings/security-settings.asciidoc index 0fc4d59e72350..db95ac48f5be8 100644 --- a/docs/reference/settings/security-settings.asciidoc +++ b/docs/reference/settings/security-settings.asciidoc @@ -23,8 +23,8 @@ For more information about creating and updating the {es} keystore, see ==== General security settings `xpack.security.enabled`:: (<>) -Defaults to `true`, which enables {es} {security-features} on the node. -This setting must be enabled to use Elasticsearch's authentication, +Defaults to `true`, which enables {es} {security-features} on the node. +This setting must be enabled to use Elasticsearch's authentication, authorization and audit features. + + -- @@ -229,7 +229,7 @@ Defaults to `7d`. -- NOTE: Large real-time clock inconsistency across cluster nodes can cause problems -with evaluating the API key retention period. That is, if the clock on the node +with evaluating the API key retention period. That is, if the clock on the node invalidating the API key is significantly different than the one performing the deletion, the key may be retained for longer or shorter than the configured retention period. @@ -252,7 +252,7 @@ Sets the timeout of the internal search and delete call. 
`xpack.security.authc.api_key.hashing.algorithm`:: (<>) Specifies the hashing algorithm that is used for securing API key credentials. -See <>. Defaults to `pbkdf2`. +See <>. Defaults to `ssha256`. [discrete] [[security-domain-settings]] diff --git a/server/src/main/java/org/elasticsearch/common/SecureRandomUtils.java b/server/src/main/java/org/elasticsearch/common/SecureRandomUtils.java new file mode 100644 index 0000000000000..bdde158b95db7 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/SecureRandomUtils.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.common; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.CharArrays; + +import java.util.Arrays; +import java.util.Base64; + +public final class SecureRandomUtils { + private SecureRandomUtils() {} + + /** + * Returns a cryptographically secure Base64 encoded {@link SecureString} of {@code numBytes} random bytes. + */ + public static SecureString getBase64SecureRandomString(int numBytes) { + byte[] randomBytes = null; + byte[] encodedBytes = null; + try { + randomBytes = new byte[numBytes]; + SecureRandomHolder.INSTANCE.nextBytes(randomBytes); + encodedBytes = Base64.getUrlEncoder().withoutPadding().encode(randomBytes); + return new SecureString(CharArrays.utf8BytesToChars(encodedBytes)); + } finally { + if (randomBytes != null) { + Arrays.fill(randomBytes, (byte) 0); + } + if (encodedBytes != null) { + Arrays.fill(encodedBytes, (byte) 0); + } + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index 6aef618288fd2..3b4d4aec776d1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -240,7 +240,7 @@ public Iterator> settings() { public static final List DEFAULT_CIPHERS = JDK12_CIPHERS; - public static final Setting PASSWORD_HASHING_ALGORITHM = defaultStoredHashAlgorithmSetting( + public static final Setting PASSWORD_HASHING_ALGORITHM = defaultStoredPasswordHashAlgorithmSetting( "xpack.security.authc.password_hashing.algorithm", (s) -> { if (XPackSettings.FIPS_MODE_ENABLED.get(s)) { @@ -251,7 +251,7 @@ public Iterator> settings() { } ); - public static final Setting SERVICE_TOKEN_HASHING_ALGORITHM = defaultStoredHashAlgorithmSetting( + public static final Setting SERVICE_TOKEN_HASHING_ALGORITHM = defaultStoredPasswordHashAlgorithmSetting( "xpack.security.authc.service_token_hashing.algorithm", (s) -> Hasher.PBKDF2_STRETCH.name() ); @@ -259,11 +259,48 @@ public Iterator> settings() { /* * Do not allow insecure hashing algorithms to be used for password hashing */ - public static Setting defaultStoredHashAlgorithmSetting(String key, Function defaultHashingAlgorithm) { + public static Setting defaultStoredPasswordHashAlgorithmSetting( + String key, + Function defaultHashingAlgorithm + ) { return new Setting<>(key, defaultHashingAlgorithm, 
Function.identity(), v -> { - if (Hasher.getAvailableAlgoStoredHash().contains(v.toLowerCase(Locale.ROOT)) == false) { + if (Hasher.getAvailableAlgoStoredPasswordHash().contains(v.toLowerCase(Locale.ROOT)) == false) { throw new IllegalArgumentException( - "Invalid algorithm: " + v + ". Valid values for password hashing are " + Hasher.getAvailableAlgoStoredHash().toString() + "Invalid algorithm: " + + v + + ". Valid values for password hashing are " + + Hasher.getAvailableAlgoStoredPasswordHash().toString() + ); + } else if (v.regionMatches(true, 0, "pbkdf2", 0, "pbkdf2".length())) { + try { + SecretKeyFactory.getInstance("PBKDF2withHMACSHA512"); + } catch (NoSuchAlgorithmException e) { + throw new IllegalArgumentException( + "Support for PBKDF2WithHMACSHA512 must be available in order to use any of the PBKDF2 algorithms for the [" + + key + + "] setting.", + e + ); + } + } + }, Property.NodeScope); + } + + /** + * Similar to {@link #defaultStoredPasswordHashAlgorithmSetting(String, Function)} but for secure, high-entropy tokens so salted secure + * hashing algorithms are allowed, in addition to algorithms that are suitable for password hashing. + */ + public static Setting defaultStoredSecureTokenHashAlgorithmSetting( + String key, + Function defaultHashingAlgorithm + ) { + return new Setting<>(key, defaultHashingAlgorithm, Function.identity(), v -> { + if (Hasher.getAvailableAlgoStoredSecureTokenHash().contains(v.toLowerCase(Locale.ROOT)) == false) { + throw new IllegalArgumentException( + "Invalid algorithm: " + + v + + ". Valid values for secure token hashing are " + + Hasher.getAvailableAlgoStoredSecureTokenHash().toString() ); } else if (v.regionMatches(true, 0, "pbkdf2", 0, "pbkdf2".length())) { try { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequestBuilder.java index 7ae915d2db791..81f6b7489d8c8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequestBuilder.java @@ -89,11 +89,11 @@ public PutUserRequestBuilder email(String email) { public PutUserRequestBuilder passwordHash(char[] passwordHash, Hasher configuredHasher) { final Hasher resolvedHasher = Hasher.resolveFromHash(passwordHash); if (resolvedHasher.equals(configuredHasher) == false - && Hasher.getAvailableAlgoStoredHash().contains(resolvedHasher.name().toLowerCase(Locale.ROOT)) == false) { + && Hasher.getAvailableAlgoStoredPasswordHash().contains(resolvedHasher.name().toLowerCase(Locale.ROOT)) == false) { throw new IllegalArgumentException( "The provided password hash is not a hash or it could not be resolved to a supported hash algorithm. 
" + "The supported password hash algorithms are " - + Hasher.getAvailableAlgoStoredHash().toString() + + Hasher.getAvailableAlgoStoredPasswordHash().toString() ); } if (request.passwordHash() != null) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java index bf24919a39495..7e4780bf4f5b3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java @@ -735,7 +735,7 @@ private static boolean verifyBcryptHash(SecureString text, char[] hash) { * an instance of the appropriate {@link Hasher} by using {@link #resolve(String) resolve()} */ @SuppressForbidden(reason = "This is the only allowed way to get available values") - public static List getAvailableAlgoStoredHash() { + public static List getAvailableAlgoStoredPasswordHash() { return Arrays.stream(Hasher.values()) .map(Hasher::name) .map(name -> name.toLowerCase(Locale.ROOT)) @@ -743,6 +743,20 @@ public static List getAvailableAlgoStoredHash() { .collect(Collectors.toList()); } + /** + * Returns a list of lower case String identifiers for the Hashing algorithm and parameter + * combinations that can be used for secure token hashing. The identifiers can be used to get + * an instance of the appropriate {@link Hasher} by using {@link #resolve(String) resolve()} + */ + @SuppressForbidden(reason = "This is the only allowed way to get available values") + public static List getAvailableAlgoStoredSecureTokenHash() { + return Arrays.stream(Hasher.values()) + .map(Hasher::name) + .map(name -> name.toLowerCase(Locale.ROOT)) + .filter(name -> (name.startsWith("pbkdf2") || name.startsWith("bcrypt") || name.equals("ssha256"))) + .collect(Collectors.toList()); + } + /** * Returns a list of lower case String identifiers for the Hashing algorithm and parameter * combinations that can be used for password hashing in the cache. The identifiers can be used to get diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 6004f8ebf95c4..804610f8dd341 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -1472,7 +1472,7 @@ public static List> getSettings(List securityExten settingsList.add(TokenService.DELETE_INTERVAL); settingsList.add(TokenService.DELETE_TIMEOUT); settingsList.addAll(SSLConfigurationSettings.getProfileSettings()); - settingsList.add(ApiKeyService.PASSWORD_HASHING_ALGORITHM); + settingsList.add(ApiKeyService.STORED_HASH_ALGO_SETTING); settingsList.add(ApiKeyService.DELETE_TIMEOUT); settingsList.add(ApiKeyService.DELETE_INTERVAL); settingsList.add(ApiKeyService.DELETE_RETENTION_PERIOD); @@ -1818,17 +1818,30 @@ static void validateForFips(Settings settings) { + " ] setting." 
); } - Stream.of(ApiKeyService.PASSWORD_HASHING_ALGORITHM, XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM).forEach((setting) -> { - final var storedHashAlgo = setting.get(settings); - if (storedHashAlgo.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) { - // log instead of validation error for backwards compatibility - logger.warn( - "Only PBKDF2 is allowed for stored credential hashing in a FIPS 140 JVM. " - + "Please set the appropriate value for [{}] setting.", - setting.getKey() - ); - } - }); + + final var serviceTokenStoredHashSettings = XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM; + final var serviceTokenStoredHashAlgo = serviceTokenStoredHashSettings.get(settings); + if (serviceTokenStoredHashAlgo.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) { + // log instead of validation error for backwards compatibility + logger.warn( + "Only PBKDF2 is allowed for stored credential hashing in a FIPS 140 JVM. " + + "Please set the appropriate value for [{}] setting.", + serviceTokenStoredHashSettings.getKey() + ); + } + + final var apiKeyStoredHashSettings = ApiKeyService.STORED_HASH_ALGO_SETTING; + final var apiKeyStoredHashAlgo = apiKeyStoredHashSettings.get(settings); + if (apiKeyStoredHashAlgo.toLowerCase(Locale.ROOT).startsWith("ssha256") == false + && apiKeyStoredHashAlgo.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) { + // log instead of validation error for backwards compatibility + logger.warn( + "[{}] is not recommended for stored API key hashing in a FIPS 140 JVM. The recommended hasher for [{}] is SSHA256.", + apiKeyStoredHashSettings, + apiKeyStoredHashSettings.getKey() + ); + } + final var cacheHashAlgoSettings = settings.filter(k -> k.endsWith(".cache.hash_algo")); cacheHashAlgoSettings.keySet().forEach((key) -> { final var setting = cacheHashAlgoSettings.get(key); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java index c792fa364a74a..fc09681ac26ed 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java @@ -72,11 +72,11 @@ public ChangePasswordRequestBuilder password(char[] password, Hasher hasher) { public ChangePasswordRequestBuilder passwordHash(char[] passwordHashChars, Hasher configuredHasher) { final Hasher resolvedHasher = Hasher.resolveFromHash(passwordHashChars); if (resolvedHasher.equals(configuredHasher) == false - && Hasher.getAvailableAlgoStoredHash().contains(resolvedHasher.name().toLowerCase(Locale.ROOT)) == false) { + && Hasher.getAvailableAlgoStoredPasswordHash().contains(resolvedHasher.name().toLowerCase(Locale.ROOT)) == false) { throw new IllegalArgumentException( "The provided password hash is not a hash or it could not be resolved to a supported hash algorithm. 
" + "The supported password hash algorithms are " - + Hasher.getAvailableAlgoStoredHash().toString() + + Hasher.getAvailableAlgoStoredPasswordHash().toString() ); } if (request.passwordHash() != null) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java index 96323836aa005..541bbdddd657e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java @@ -52,12 +52,12 @@ protected void doExecute(Task task, ChangePasswordRequest request, ActionListene final Hasher requestPwdHashAlgo = Hasher.resolveFromHash(request.passwordHash()); final Hasher configPwdHashAlgo = Hasher.resolve(XPackSettings.PASSWORD_HASHING_ALGORITHM.get(settings)); if (requestPwdHashAlgo.equals(configPwdHashAlgo) == false - && Hasher.getAvailableAlgoStoredHash().contains(requestPwdHashAlgo.name().toLowerCase(Locale.ROOT)) == false) { + && Hasher.getAvailableAlgoStoredPasswordHash().contains(requestPwdHashAlgo.name().toLowerCase(Locale.ROOT)) == false) { listener.onFailure( new IllegalArgumentException( "The provided password hash is not a hash or it could not be resolved to a supported hash algorithm. " + "The supported password hash algorithms are " - + Hasher.getAvailableAlgoStoredHash().toString() + + Hasher.getAvailableAlgoStoredPasswordHash().toString() ) ); return; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index c2d1370c2cbf3..5fee747a3f73f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -38,7 +38,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.cache.Cache; @@ -139,6 +138,7 @@ import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.elasticsearch.common.SecureRandomUtils.getBase64SecureRandomString; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; @@ -158,9 +158,9 @@ public class ApiKeyService implements Closeable { private static final Logger logger = LogManager.getLogger(ApiKeyService.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ApiKeyService.class); - public static final Setting PASSWORD_HASHING_ALGORITHM = XPackSettings.defaultStoredHashAlgorithmSetting( + public static final Setting STORED_HASH_ALGO_SETTING = XPackSettings.defaultStoredSecureTokenHashAlgorithmSetting( "xpack.security.authc.api_key.hashing.algorithm", - (s) -> Hasher.PBKDF2.name() + (s) -> Hasher.SSHA256.name() ); public static final Setting DELETE_TIMEOUT = Setting.timeSetting( 
"xpack.security.authc.api_key.delete.timeout", @@ -181,7 +181,7 @@ public class ApiKeyService implements Closeable { ); public static final Setting CACHE_HASH_ALGO_SETTING = Setting.simpleString( "xpack.security.authc.api_key.cache.hash_algo", - "ssha256", + Hasher.SSHA256.name(), Setting.Property.NodeScope ); public static final Setting CACHE_TTL_SETTING = Setting.timeSetting( @@ -217,9 +217,9 @@ public class ApiKeyService implements Closeable { private final ThreadPool threadPool; private final ApiKeyDocCache apiKeyDocCache; - // The API key secret is a Base64 encoded v4 UUID without padding. The UUID is 128 bits, i.e. 16 byte, - // which requires 22 digits of Base64 characters for encoding without padding. - // See also UUIDs.randomBase64UUIDSecureString + private static final int API_KEY_SECRET_NUM_BYTES = 16; + // The API key secret is a Base64 encoded string of 128 random bits. + // See getBase64SecureRandomString() private static final int API_KEY_SECRET_LENGTH = 22; private static final long EVICTION_MONITOR_INTERVAL_SECONDS = 300L; // 5 minutes private static final long EVICTION_MONITOR_INTERVAL_NANOS = EVICTION_MONITOR_INTERVAL_SECONDS * 1_000_000_000L; @@ -245,7 +245,7 @@ public ApiKeyService( this.securityIndex = securityIndex; this.clusterService = clusterService; this.enabled = XPackSettings.API_KEY_SERVICE_ENABLED_SETTING.get(settings); - this.hasher = Hasher.resolve(PASSWORD_HASHING_ALGORITHM.get(settings)); + this.hasher = Hasher.resolve(STORED_HASH_ALGO_SETTING.get(settings)); this.settings = settings; this.inactiveApiKeysRemover = new InactiveApiKeysRemover(settings, client, clusterService); this.threadPool = threadPool; @@ -545,7 +545,7 @@ private void createApiKeyAndIndexIt( ) { final Instant created = clock.instant(); final Instant expiration = getApiKeyExpiration(created, request.getExpiration()); - final SecureString apiKey = UUIDs.randomBase64UUIDSecureString(); + final SecureString apiKey = getBase64SecureRandomString(API_KEY_SECRET_NUM_BYTES); assert ApiKey.Type.CROSS_CLUSTER != request.getType() || API_KEY_SECRET_LENGTH == apiKey.length() : "Invalid API key (name=[" + request.getName() + "], type=[" + request.getType() + "], length=[" + apiKey.length() + "])"; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 5c6c3e8c7933c..3ff8f16165547 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -548,7 +548,7 @@ public void testValidateForFipsKeystoreWithImplicitJksType() { .put( XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), randomFrom( - Hasher.getAvailableAlgoStoredHash() + Hasher.getAvailableAlgoStoredPasswordHash() .stream() .filter(alg -> alg.startsWith("pbkdf2") == false) .collect(Collectors.toList()) @@ -567,7 +567,10 @@ public void testValidateForFipsKeystoreWithExplicitJksType() { .put( XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), randomFrom( - Hasher.getAvailableAlgoStoredHash().stream().filter(alg -> alg.startsWith("pbkdf2")).collect(Collectors.toList()) + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2")) + .collect(Collectors.toList()) ) ) .build(); @@ -581,7 +584,7 @@ public void testValidateForFipsInvalidPasswordHashingAlgorithm() { .put( XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), randomFrom( - 
Hasher.getAvailableAlgoStoredHash() + Hasher.getAvailableAlgoStoredPasswordHash() .stream() .filter(alg -> alg.startsWith("pbkdf2") == false) .collect(Collectors.toList()) @@ -626,7 +629,7 @@ public void testValidateForFipsMultipleValidationErrors() { .put( XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), randomFrom( - Hasher.getAvailableAlgoStoredHash() + Hasher.getAvailableAlgoStoredPasswordHash() .stream() .filter(alg -> alg.startsWith("pbkdf2") == false) .collect(Collectors.toList()) @@ -646,19 +649,28 @@ public void testValidateForFipsNoErrorsOrLogs() throws IllegalAccessException { .put( XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), randomFrom( - Hasher.getAvailableAlgoStoredHash().stream().filter(alg -> alg.startsWith("pbkdf2")).collect(Collectors.toList()) + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2")) + .collect(Collectors.toList()) ) ) .put( XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM.getKey(), randomFrom( - Hasher.getAvailableAlgoStoredHash().stream().filter(alg -> alg.startsWith("pbkdf2")).collect(Collectors.toList()) + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2")) + .collect(Collectors.toList()) ) ) .put( - ApiKeyService.PASSWORD_HASHING_ALGORITHM.getKey(), + ApiKeyService.STORED_HASH_ALGO_SETTING.getKey(), randomFrom( - Hasher.getAvailableAlgoStoredHash().stream().filter(alg -> alg.startsWith("pbkdf2")).collect(Collectors.toList()) + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2")) + .collect(Collectors.toList()) ) ) .put( @@ -683,13 +695,37 @@ public void testValidateForFipsNonFipsCompliantCacheHashAlgoWarningLog() throws assertThatLogger(() -> Security.validateForFips(settings), Security.class, logEventForNonCompliantCacheHash(key)); } - public void testValidateForFipsNonFipsCompliantStoredHashAlgoWarningLog() throws IllegalAccessException { - String key = randomFrom(ApiKeyService.PASSWORD_HASHING_ALGORITHM, XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM).getKey(); + public void testValidateForFipsNonFipsCompliantStoredHashAlgoWarningLog() { + String key = XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM.getKey(); final Settings settings = Settings.builder() .put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true) - .put(key, randomNonFipsCompliantStoredHash()) + .put(key, randomNonFipsCompliantStoredPasswordHash()) .build(); - assertThatLogger(() -> Security.validateForFips(settings), Security.class, logEventForNonCompliantStoredHash(key)); + assertThatLogger(() -> Security.validateForFips(settings), Security.class, logEventForNonCompliantStoredPasswordHash(key)); + } + + public void testValidateForFipsNonFipsCompliantApiKeyStoredHashAlgoWarningLog() { + var nonCompliant = randomFrom( + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2") == false && alg.startsWith("ssha256") == false) + .collect(Collectors.toList()) + ); + String key = ApiKeyService.STORED_HASH_ALGO_SETTING.getKey(); + final Settings settings = Settings.builder().put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true).put(key, nonCompliant).build(); + assertThatLogger(() -> Security.validateForFips(settings), Security.class, logEventForNonCompliantStoredApiKeyHash(key)); + } + + public void testValidateForFipsFipsCompliantApiKeyStoredHashAlgoWarningLog() { + var compliant = randomFrom( + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2") || 
alg.startsWith("ssha256")) + .collect(Collectors.toList()) + ); + String key = ApiKeyService.STORED_HASH_ALGO_SETTING.getKey(); + final Settings settings = Settings.builder().put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true).put(key, compliant).build(); + assertThatLogger(() -> Security.validateForFips(settings), Security.class); } public void testValidateForMultipleNonFipsCompliantCacheHashAlgoWarningLogs() throws IllegalAccessException { @@ -1135,9 +1171,12 @@ private String randomNonFipsCompliantCacheHash() { ); } - private String randomNonFipsCompliantStoredHash() { + private String randomNonFipsCompliantStoredPasswordHash() { return randomFrom( - Hasher.getAvailableAlgoStoredHash().stream().filter(alg -> alg.startsWith("pbkdf2") == false).collect(Collectors.toList()) + Hasher.getAvailableAlgoStoredPasswordHash() + .stream() + .filter(alg -> alg.startsWith("pbkdf2") == false) + .collect(Collectors.toList()) ); } @@ -1153,7 +1192,19 @@ private MockLog.SeenEventExpectation logEventForNonCompliantCacheHash(String set ); } - private MockLog.SeenEventExpectation logEventForNonCompliantStoredHash(String settingKey) { + private MockLog.SeenEventExpectation logEventForNonCompliantStoredApiKeyHash(String settingKey) { + return new MockLog.SeenEventExpectation( + "cache hash not fips compliant", + Security.class.getName(), + Level.WARN, + "[*] is not recommended for stored API key hashing in a FIPS 140 JVM. " + + "The recommended hasher for [" + + settingKey + + "] is SSHA256." + ); + } + + private MockLog.SeenEventExpectation logEventForNonCompliantStoredPasswordHash(String settingKey) { return new MockLog.SeenEventExpectation( "stored hash not fips compliant", Security.class.getName(), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilderTests.java index df5cebdf735ac..af2a5c11e6e73 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilderTests.java @@ -76,7 +76,7 @@ public void testWithHashedPasswordWithDifferentAlgo() throws IOException { } public void testWithHashedPasswordNotHash() { - final Hasher systemHasher = Hasher.valueOf(randomFrom(Hasher.getAvailableAlgoStoredHash()).toUpperCase(Locale.ROOT)); + final Hasher systemHasher = Hasher.valueOf(randomFrom(Hasher.getAvailableAlgoStoredPasswordHash()).toUpperCase(Locale.ROOT)); final char[] hash = randomAlphaOfLength(20).toCharArray(); final String json = Strings.format(""" { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/PutUserRequestBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/PutUserRequestBuilderTests.java index cb30c8f117f22..018ffa7b09651 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/PutUserRequestBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/PutUserRequestBuilderTests.java @@ -205,7 +205,7 @@ public void testWithDifferentPasswordHashingAlgorithm() throws IOException { } public void testWithPasswordHashThatsNotReallyAHash() throws IOException { - final Hasher systemHasher = 
Hasher.valueOf(randomFrom(Hasher.getAvailableAlgoStoredHash()).toUpperCase(Locale.ROOT)); + final Hasher systemHasher = Hasher.valueOf(randomFrom(Hasher.getAvailableAlgoStoredPasswordHash()).toUpperCase(Locale.ROOT)); final char[] hash = randomAlphaOfLengthBetween(14, 20).toCharArray(); final String json = Strings.format(""" { From 9edd64e608e3586f25f36b26a8a527f8a0ebfe0b Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Wed, 29 Jan 2025 19:21:58 +0100 Subject: [PATCH 225/383] [DOCS] Fix failing docs test (at least try) (#118934) Fix failing docs test: * Unmute test * Replace hardcoded values with regex in snippet test --- docs/reference/indices/shard-stores.asciidoc | 6 ++---- muted-tests.yml | 3 --- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/docs/reference/indices/shard-stores.asciidoc b/docs/reference/indices/shard-stores.asciidoc index 35f6a0915caa0..941c1ce379078 100644 --- a/docs/reference/indices/shard-stores.asciidoc +++ b/docs/reference/indices/shard-stores.asciidoc @@ -198,10 +198,8 @@ The API returns the following response: // TESTRESPONSE[s/"attributes": \{[^}]*\}/"attributes": $body.$_path/] // TESTRESPONSE[s/"roles": \[[^]]*\]/"roles": $body.$_path/] // TESTRESPONSE[s/"8.10.0"/\$node_version/] -// TESTRESPONSE[s/"min_index_version": 7000099/"min_index_version": $body.$_path/] -// TESTRESPONSE[s/"max_index_version": 8100099/"max_index_version": $body.$_path/] - - +// TESTRESPONSE[s/"min_index_version": [0-9]+/"min_index_version": $body.$_path/] +// TESTRESPONSE[s/"max_index_version": [0-9]+/"max_index_version": $body.$_path/] <1> The key is the corresponding shard id for the store information <2> A list of store information for all copies of the shard diff --git a/muted-tests.yml b/muted-tests.yml index b835dd5bc53fd..c7c6d5113600a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -154,9 +154,6 @@ tests: - class: org.elasticsearch.xpack.ccr.rest.ShardChangesRestIT method: testShardChangesNoOperation issue: https://github.com/elastic/elasticsearch/issues/118800 -- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT - method: test {yaml=reference/indices/shard-stores/line_150} - issue: https://github.com/elastic/elasticsearch/issues/118896 - class: org.elasticsearch.cluster.service.MasterServiceTests method: testThreadContext issue: https://github.com/elastic/elasticsearch/issues/118914 From c0f3024c3f5f832cd79c66e50bd64d1e326197e7 Mon Sep 17 00:00:00 2001 From: Luiz Santos Date: Wed, 29 Jan 2025 15:28:43 -0300 Subject: [PATCH 226/383] Make it clear that previous enrich indices are deleted every 15 minutes (#109085) Before this change, one could interpret that enrich policies are executed every 15 minutes, which is not true. --- .../ingest/apis/enrich/execute-enrich-policy.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc index 8e72a51514a59..a76f857ebf85e 100644 --- a/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc +++ b/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc @@ -96,8 +96,8 @@ or index documents to an enrich index. Instead, update your source indices and <> the enrich policy again. This creates a new enrich index from your updated source indices. -The previous enrich index will deleted with a delayed maintenance job. -By default this is done every 15 minutes. 
+The previous enrich index will be deleted with a delayed maintenance +job that executes by default every 15 minutes. // end::update-enrich-index[] By default, this API is synchronous: It returns when a policy has been executed. From a0f1856a40a8613abfbb57dfd214d64f3b74e7c7 Mon Sep 17 00:00:00 2001 From: Kuni Sen <30574753+kunisen@users.noreply.github.com> Date: Thu, 30 Jan 2025 03:31:50 +0900 Subject: [PATCH 227/383] (Doc+) Expand watermark resolution (#119174) * (Doc+) Expand watermark resolution Relaunch https://github.com/elastic/elasticsearch/pull/116892 since the original one seems to be outdated and its branch is hard to update. * Apply suggestions from code review Co-authored-by: shainaraskas <58563081+shainaraskas@users.noreply.github.com> --------- Co-authored-by: shainaraskas <58563081+shainaraskas@users.noreply.github.com> --- .../disk-usage-exceeded.asciidoc | 36 +++++++++++++------ 1 file changed, 25 insertions(+), 11 deletions(-) diff --git a/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc b/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc index a2342c449c88c..def89b37eb316 100644 --- a/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc +++ b/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc @@ -57,7 +57,7 @@ GET _cluster/allocation/explain [[fix-watermark-errors-temporary]] ==== Temporary Relief -To immediately restore write operations, you can temporarily increase the +To immediately restore write operations, you can temporarily increase <> and remove the <>. @@ -106,19 +106,33 @@ PUT _cluster/settings [[fix-watermark-errors-resolve]] ==== Resolve -As a long-term solution, we recommend you do one of the following best suited -to your use case: +To resolve watermark errors permanently, perform one of the following actions: -* add nodes to the affected <> -+ -TIP: You should enable <> for clusters deployed using our {ess}, {ece}, and {eck} platforms. +* Horizontally scale nodes of the affected <>. -* upgrade existing nodes to increase disk space -+ -TIP: On {ess}, https://support.elastic.co[Elastic Support] intervention may -become necessary if <> reaches `status:red`. +* Vertically scale existing nodes to increase disk space. -* delete unneeded indices using the <> +* Delete indices using the <>, either +permanently if the index isn't needed, or temporarily to later +<>. * update related <> to push indices through to later <> + +TIP: On {ess} and {ece}, indices may need to be temporarily deleted via +its {cloud}/ec-api-console.html[Elasticsearch API Console] to later +<> in order to resolve +<> `status:red` which will block +{cloud}/ec-activity-page.html[attempted changes]. If you experience issues +with this resolution flow on {ess}, kindly reach out to +https://support.elastic.co[Elastic Support] for assistance. + +== Prevent watermark errors + +To avoid watermark errors in the future, perform one of the following actions: + +* If you're using {ess}, {ece}, or {eck}: Enable <>. + +* Set up {kibana-ref}/kibana-alerts.html[stack monitoring alerts] on top of +<> to be notified before +the flood-stage watermark is reached.
\ No newline at end of file From c4bb9b34bf234e4c19679fa5e0ebff3d888f2622 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Wed, 29 Jan 2025 13:37:08 -0500 Subject: [PATCH 228/383] Optimize some per-document hot paths in the geoip processor (#120824) --- docs/changelog/120824.yaml | 5 ++++ .../ingest/geoip/GeoIpDownloaderIT.java | 14 +++++++--- .../geoip/DatabaseReaderLazyLoader.java | 9 ++++++- .../ingest/geoip/GeoIpTaskState.java | 26 ++++++++++++++++--- 4 files changed, 46 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/120824.yaml diff --git a/docs/changelog/120824.yaml b/docs/changelog/120824.yaml new file mode 100644 index 0000000000000..603b49338ff69 --- /dev/null +++ b/docs/changelog/120824.yaml @@ -0,0 +1,5 @@ +pr: 120824 +summary: Optimize some per-document hot paths in the geoip processor +area: Ingest Node +type: enhancement +issues: [] diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index dd177fed5732a..4d70a83c6752a 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -70,6 +70,7 @@ import static org.elasticsearch.ingest.geoip.GeoIpTestUtils.copyDefaultDatabases; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -172,10 +173,15 @@ public void testInvalidTimestamp() throws Exception { for (Path geoIpTmpDir : geoIpTmpDirs) { try (Stream files = Files.list(geoIpTmpDir)) { Set names = files.map(f -> f.getFileName().toString()).collect(Collectors.toSet()); - assertThat(names, not(hasItem("GeoLite2-ASN.mmdb"))); - assertThat(names, not(hasItem("GeoLite2-City.mmdb"))); - assertThat(names, not(hasItem("GeoLite2-Country.mmdb"))); - assertThat(names, not(hasItem("MyCustomGeoLite2-City.mmdb"))); + assertThat( + names, + allOf( + not(hasItem("GeoLite2-ASN.mmdb")), + not(hasItem("GeoLite2-City.mmdb")), + not(hasItem("GeoLite2-Country.mmdb")), + not(hasItem("MyCustomGeoLite2-City.mmdb")) + ) + ); } } }); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java index 120afe0e9e815..fb4fadf043b05 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java @@ -53,6 +53,10 @@ public class DatabaseReaderLazyLoader implements IpDatabase { private volatile boolean deleteDatabaseFileOnShutdown; private final AtomicInteger currentUsages = new AtomicInteger(0); + // it seems insane, especially if you read the code for UnixPath, but calling toString on a path in advance here is faster enough + // than calling it on every call to cache.putIfAbsent that it makes the slight additional internal complication worth it + private final String cachedDatabasePathToString; + DatabaseReaderLazyLoader(GeoIpCache cache, Path databasePath, String md5) 
{ this.cache = cache; this.databasePath = Objects.requireNonNull(databasePath); @@ -61,6 +65,9 @@ public class DatabaseReaderLazyLoader implements IpDatabase { this.databaseReader = new SetOnce<>(); this.databaseType = new SetOnce<>(); this.buildDate = new SetOnce<>(); + + // cache the toString on construction + this.cachedDatabasePathToString = databasePath.toString(); } /** @@ -99,7 +106,7 @@ int current() { @Override @Nullable public RESPONSE getResponse(String ipAddress, CheckedBiFunction responseProvider) { - return cache.putIfAbsent(ipAddress, databasePath.toString(), ip -> { + return cache.putIfAbsent(ipAddress, cachedDatabasePathToString, ip -> { try { return responseProvider.apply(get(), ipAddress); } catch (Exception e) { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java index 96525d427d3e8..91b040e8699bb 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java @@ -206,12 +206,32 @@ public static Metadata fromXContent(XContentParser parser) { } public boolean isCloseToExpiration() { - return Instant.ofEpochMilli(lastCheck).isBefore(Instant.now().minus(25, ChronoUnit.DAYS)); + final Instant now = Instant.ofEpochMilli(System.currentTimeMillis()); // millisecond precision is sufficient (and faster) + return Instant.ofEpochMilli(lastCheck).isBefore(now.minus(25, ChronoUnit.DAYS)); } + // these constants support the micro optimization below, see that note + private static final TimeValue THIRTY_DAYS = TimeValue.timeValueDays(30); + private static final long THIRTY_DAYS_MILLIS = THIRTY_DAYS.millis(); + public boolean isNewEnough(Settings settings) { - TimeValue valid = settings.getAsTime("ingest.geoip.database_validity", TimeValue.timeValueDays(30)); - return Instant.ofEpochMilli(lastCheck).isAfter(Instant.now().minus(valid.getMillis(), ChronoUnit.MILLIS)); + // micro optimization: this looks a little silly, but the expected case is that database_validity is only used in tests. + // we run this code on every document, though, so the argument checking and other bits that getAsTime does is enough + // to show up in a flame graph. 
+ + // if you grep for "ingest.geoip.database_validity" and you'll see that it's not a 'real' setting -- it's only defined in + // AbstractGeoIpIT, that's why it's an inline string constant here and no some static final, and also why it cannot + // be the case that this setting exists in a real running cluster + + final long valid; + if (settings.hasValue("ingest.geoip.database_validity")) { + valid = settings.getAsTime("ingest.geoip.database_validity", THIRTY_DAYS).millis(); + } else { + valid = THIRTY_DAYS_MILLIS; + } + + final Instant now = Instant.ofEpochMilli(System.currentTimeMillis()); // millisecond precision is sufficient (and faster) + return Instant.ofEpochMilli(lastCheck).isAfter(now.minus(valid, ChronoUnit.MILLIS)); } @Override From d76380514187c1619b3b6382bed106404fa52f93 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Wed, 29 Jan 2025 13:38:43 -0500 Subject: [PATCH 229/383] Optimize IngestDocument FieldPath allocation (#120573) --- docs/changelog/120573.yaml | 5 ++ .../elasticsearch/ingest/IngestDocument.java | 56 ++++++++++++++----- 2 files changed, 47 insertions(+), 14 deletions(-) create mode 100644 docs/changelog/120573.yaml diff --git a/docs/changelog/120573.yaml b/docs/changelog/120573.yaml new file mode 100644 index 0000000000000..33ced06ddf996 --- /dev/null +++ b/docs/changelog/120573.yaml @@ -0,0 +1,5 @@ +pr: 120573 +summary: Optimize `IngestDocument` `FieldPath` allocation +area: Ingest Node +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 7982024911beb..3dfeb21dd6d9f 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.index.VersionType; @@ -190,8 +191,8 @@ public T getFieldValue(String path, Class clazz) { * or if the field that is found at the provided path is not of the expected type. */ public T getFieldValue(String path, Class clazz, boolean ignoreMissing) { - FieldPath fieldPath = new FieldPath(path); - Object context = fieldPath.initialContext; + final FieldPath fieldPath = FieldPath.of(path); + Object context = fieldPath.initialContext(this); for (String pathElement : fieldPath.pathElements) { ResolveResult result = resolve(pathElement, path, context); if (result.wasSuccessful) { @@ -261,8 +262,8 @@ public boolean hasField(String path) { * @throws IllegalArgumentException if the path is null, empty or invalid. */ public boolean hasField(String path, boolean failOutOfRange) { - FieldPath fieldPath = new FieldPath(path); - Object context = fieldPath.initialContext; + final FieldPath fieldPath = FieldPath.of(path); + Object context = fieldPath.initialContext(this); for (int i = 0; i < fieldPath.pathElements.length - 1; i++) { String pathElement = fieldPath.pathElements[i]; if (context == null) { @@ -329,8 +330,8 @@ public boolean hasField(String path, boolean failOutOfRange) { * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist. 
*/ public void removeField(String path) { - FieldPath fieldPath = new FieldPath(path); - Object context = fieldPath.initialContext; + final FieldPath fieldPath = FieldPath.of(path); + Object context = fieldPath.initialContext(this); for (int i = 0; i < fieldPath.pathElements.length - 1; i++) { ResolveResult result = resolve(fieldPath.pathElements[i], path, context); if (result.wasSuccessful) { @@ -544,8 +545,8 @@ public void setFieldValue(String path, Object value, boolean ignoreEmptyValue) { } private void setFieldValue(String path, Object value, boolean append, boolean allowDuplicates) { - FieldPath fieldPath = new FieldPath(path); - Object context = fieldPath.initialContext; + final FieldPath fieldPath = FieldPath.of(path); + Object context = fieldPath.initialContext(this); for (int i = 0; i < fieldPath.pathElements.length - 1; i++) { String pathElement = fieldPath.pathElements[i]; if (context == null) { @@ -998,21 +999,45 @@ public String getFieldName() { } } - private class FieldPath { + private static final class FieldPath { - private final String[] pathElements; - private final Object initialContext; + private static final int MAX_SIZE = 512; + private static final Map CACHE = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency(); - private FieldPath(String path) { + // constructing a new FieldPath requires that we parse a String (e.g. "foo.bar.baz") into an array + // of path elements (e.g. ["foo", "bar", "baz"]). Calling String#split results in the allocation + // of an ArrayList to hold the results, then a new String is created for each path element, and + // then finally a String[] is allocated to hold the actual result -- in addition to all that, we + // do some processing ourselves on the path and path elements to validate and prepare them. + // the above CACHE and the below 'FieldPath.of' method allow us to almost always avoid this work. + + static FieldPath of(String path) { if (Strings.isEmpty(path)) { throw new IllegalArgumentException("path cannot be null nor empty"); } + FieldPath res = CACHE.get(path); + if (res != null) { + return res; + } + res = new FieldPath(path); + if (CACHE.size() > MAX_SIZE) { + CACHE.clear(); + } + CACHE.put(path, res); + return res; + } + + private final String[] pathElements; + private final boolean useIngestContext; + + // you shouldn't call this directly, use the FieldPath.of method above instead! + private FieldPath(String path) { String newPath; if (path.startsWith(INGEST_KEY_PREFIX)) { - initialContext = ingestMetadata; + useIngestContext = true; newPath = path.substring(INGEST_KEY_PREFIX.length()); } else { - initialContext = ctxMap; + useIngestContext = false; if (path.startsWith(SOURCE_PREFIX)) { newPath = path.substring(SOURCE_PREFIX.length()); } else { @@ -1025,6 +1050,9 @@ private FieldPath(String path) { } } + public Object initialContext(IngestDocument document) { + return useIngestContext ? document.getIngestMetadata() : document.getCtxMap(); + } } private static class ResolveResult { From c5ab17c3aa9214f99c2cbc15803863ccf973350a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Wed, 29 Jan 2025 19:43:04 +0100 Subject: [PATCH 230/383] Deprecate certificate-based remote cluster security model (#120806) Today, Elasticsearch supports two models to establish secure connections and trust between two Elasticsearch clusters: - API key based security model - Certificate based security model This PR deprecates the _Certificate based security model_ in favour of *API key based security model*. 
The _API key based security model_ is the preferred way to configure remote clusters, as it allows you to follow security best practices when setting up remote cluster connections and defining fine-grained access control. Users are encouraged to migrate remote clusters from certificate to API key authentication. --- docs/changelog/120806.yaml | 20 +++ .../esql/esql-across-clusters.asciidoc | 2 + .../cluster/remote-clusters-cert.asciidoc | 4 +- .../modules/remote-clusters.asciidoc | 2 + .../transport/RemoteConnectionManager.java | 32 +++- .../RemoteConnectionManagerTests.java | 21 ++- .../elasticsearch/test/cluster/LogType.java | 3 +- ...emoteClusterSecurityRCS1DeprecationIT.java | 138 ++++++++++++++++++ .../RemoteClusterSecurityRestIT.java | 23 +++ 9 files changed, 239 insertions(+), 6 deletions(-) create mode 100644 docs/changelog/120806.yaml create mode 100644 x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRCS1DeprecationIT.java diff --git a/docs/changelog/120806.yaml b/docs/changelog/120806.yaml new file mode 100644 index 0000000000000..7605e2ba534d9 --- /dev/null +++ b/docs/changelog/120806.yaml @@ -0,0 +1,20 @@ +pr: 120806 +summary: Deprecate certificate based remote cluster security model +area: Security +type: deprecation +issues: [] +deprecation: + title: Deprecate certificate based remote cluster security model + area: Authorization + details: |- + <> is deprecated and will be removed + in a future major version. + Users are encouraged to <>. + The <> is the preferred way to configure remote clusters, + as it allows you to follow security best practices when setting up remote cluster connections + and defining fine-grained access control. + impact: |- + If you have configured remote clusters with the certificate-based security model, you should + <>. + Configuring a remote cluster using <> + generates a warning in the deprecation logs. diff --git a/docs/reference/esql/esql-across-clusters.asciidoc b/docs/reference/esql/esql-across-clusters.asciidoc index c12865bad6162..91e1ff5255034 100644 --- a/docs/reference/esql/esql-across-clusters.asciidoc +++ b/docs/reference/esql/esql-across-clusters.asciidoc @@ -39,6 +39,8 @@ If you're using the API key authentication method, you'll see the `"cluster_cred [[esql-ccs-security-model-certificate]] ===== TLS certificate authentication +deprecated::[9.0.0, "Use <> instead."] + TLS certificate authentication secures remote clusters with mutual TLS. This could be the preferred model when a single administrator has full control over both clusters. We generally recommend that roles and their privileges be identical in both clusters. diff --git a/docs/reference/modules/cluster/remote-clusters-cert.asciidoc b/docs/reference/modules/cluster/remote-clusters-cert.asciidoc index 6602c807f5b64..1540b05a9bcfa 100644 --- a/docs/reference/modules/cluster/remote-clusters-cert.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-cert.asciidoc @@ -1,6 +1,8 @@ [[remote-clusters-cert]] === Add remote clusters using TLS certificate authentication +deprecated::[9.0.0,"Certificate based authentication is deprecated. Configure <> instead or follow a guide on how to <>."] + To add a remote cluster using TLS certificate authentication: . <> @@ -80,4 +82,4 @@ generate certificates for all nodes simplifies this task.
include::remote-clusters-connect.asciidoc[] :!trust-mechanism: -include::{es-ref-dir}/security/authentication/remote-clusters-privileges-cert.asciidoc[leveloffset=+1] \ No newline at end of file +include::{es-ref-dir}/security/authentication/remote-clusters-privileges-cert.asciidoc[leveloffset=+1] diff --git a/docs/reference/modules/remote-clusters.asciidoc b/docs/reference/modules/remote-clusters.asciidoc index 87078c0f1956f..11d4400254407 100644 --- a/docs/reference/modules/remote-clusters.asciidoc +++ b/docs/reference/modules/remote-clusters.asciidoc @@ -56,6 +56,8 @@ is performed on the local cluster and a user's role names are passed to the remote cluster. In this model, a superuser on the local cluster gains total read access to the remote cluster, so it is only suitable for clusters that are in the same security domain. <>. ++ +deprecated::[9.0.0, "Use <> instead."] [[sniff-proxy-modes]] [discrete] diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java index 97520e8b939a6..8bc5771485f6d 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java @@ -13,6 +13,8 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.Nullable; @@ -27,10 +29,15 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicLong; +import static org.elasticsearch.transport.RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE; import static org.elasticsearch.transport.RemoteClusterService.REMOTE_CLUSTER_HANDSHAKE_ACTION_NAME; public class RemoteConnectionManager implements ConnectionManager { + private static final Logger logger = LogManager.getLogger(RemoteConnectionManager.class); + + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RemoteConnectionManager.class); + private final String clusterAlias; private final RemoteClusterCredentialsManager credentialsManager; private final ConnectionManager delegate; @@ -45,6 +52,12 @@ public class RemoteConnectionManager implements ConnectionManager { @Override public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { addConnectedNode(node); + try { + // called when a node is successfully connected through a proxy connection + maybeLogDeprecationWarning(wrapConnectionWithRemoteClusterInfo(connection, clusterAlias, credentialsManager)); + } catch (Exception e) { + logger.warn("Failed to log deprecation warning.", e); + } } @Override @@ -102,11 +115,28 @@ public void openConnection(DiscoveryNode node, @Nullable ConnectionProfile profi node, profile, listener.delegateFailureAndWrap( - (l, connection) -> l.onResponse(wrapConnectionWithRemoteClusterInfo(connection, clusterAlias, credentialsManager)) + (l, connection) -> l.onResponse( + maybeLogDeprecationWarning(wrapConnectionWithRemoteClusterInfo(connection, clusterAlias, credentialsManager)) + ) ) ); } + private InternalRemoteConnection maybeLogDeprecationWarning(InternalRemoteConnection connection) { + if (connection.getClusterCredentials() == null + && (false == 
REMOTE_CLUSTER_PROFILE.equals(this.getConnectionProfile().getTransportProfile()))) { + deprecationLogger.warn( + DeprecationCategory.SECURITY, + "remote_cluster_certificate_access-" + connection.getClusterAlias(), + "The remote cluster connection to [{}] is using the certificate-based security model. " + + "The certificate-based security model is deprecated and will be removed in a future major version. " + + "Migrate the remote cluster from the certificate-based to the API key-based security model.", + connection.getClusterAlias() + ); + } + return connection; + } + @Override public Transport.Connection getConnection(DiscoveryNode node) { try { diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java index cb97d3dd6f8da..3f498df0a88de 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java @@ -100,6 +100,12 @@ public void testGetConnection() { proxyNodes.add(((ProxyConnection) remoteConnectionManager.getConnection(node4)).getConnection().getNode().getId()); assertThat(proxyNodes, containsInAnyOrder("node-2")); + + assertWarnings( + "The remote cluster connection to [remote-cluster] is using the certificate-based security model. " + + "The certificate-based security model is deprecated and will be removed in a future major version. " + + "Migrate the remote cluster from the certificate-based to the API key-based security model." + ); } public void testDisconnectedException() { @@ -124,7 +130,8 @@ public void testResolveRemoteClusterAlias() throws ExecutionException, Interrupt assertTrue(future.isDone()); Transport.Connection remoteConnection = remoteConnectionManager.getConnection(remoteNode1); - assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(remoteConnection).get(), equalTo("remote-cluster")); + final String remoteClusterAlias = "remote-cluster"; + assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(remoteConnection).get(), equalTo(remoteClusterAlias)); Transport.Connection localConnection = mock(Transport.Connection.class); assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(localConnection).isPresent(), equalTo(false)); @@ -132,11 +139,19 @@ public void testResolveRemoteClusterAlias() throws ExecutionException, Interrupt DiscoveryNode remoteNode2 = DiscoveryNodeUtils.create("remote-node-2", address); Transport.Connection proxyConnection = remoteConnectionManager.getConnection(remoteNode2); assertThat(proxyConnection, instanceOf(ProxyConnection.class)); - assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(proxyConnection).get(), equalTo("remote-cluster")); + assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(proxyConnection).get(), equalTo(remoteClusterAlias)); PlainActionFuture future2 = new PlainActionFuture<>(); remoteConnectionManager.openConnection(remoteNode1, null, future2); - assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(future2.get()).get(), equalTo("remote-cluster")); + assertThat(RemoteConnectionManager.resolveRemoteClusterAlias(future2.get()).get(), equalTo(remoteClusterAlias)); + + assertWarnings( + "The remote cluster connection to [" + + remoteClusterAlias + + "] is using the certificate-based security model. " + + "The certificate-based security model is deprecated and will be removed in a future major version. 
" + + "Migrate the remote cluster from the certificate-based to the API key-based security model." + ); } public void testRewriteHandshakeAction() throws IOException { diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/LogType.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/LogType.java index 63dbd98da3730..548a372964887 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/LogType.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/LogType.java @@ -14,7 +14,8 @@ public enum LogType { SERVER_JSON("%s_server.json"), AUDIT("%s_audit.json"), SEARCH_SLOW("%s_index_search_slowlog.json"), - INDEXING_SLOW("%s_index_indexing_slowlog.json"); + INDEXING_SLOW("%s_index_indexing_slowlog.json"), + DEPRECATION("%s_deprecation.json"); private final String filenameFormat; diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRCS1DeprecationIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRCS1DeprecationIT.java new file mode 100644 index 0000000000000..adf637b831fe5 --- /dev/null +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRCS1DeprecationIT.java @@ -0,0 +1,138 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.remotecluster; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.core.Strings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.LogType; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Locale; + +import static org.hamcrest.Matchers.containsString; + +/** + * Tests the deprecation of RCS1.0 (certificate-based) security model. 
+ */ +public class RemoteClusterSecurityRCS1DeprecationIT extends AbstractRemoteClusterSecurityTestCase { + + public static final String REMOTE_CLUSTER_ALIAS = "my_remote_cluster"; + + static { + fulfillingCluster = ElasticsearchCluster.local().name("fulfilling-cluster").nodes(1).apply(commonClusterConfig).build(); + queryCluster = ElasticsearchCluster.local().nodes(1).name("query-cluster").apply(commonClusterConfig).build(); + } + + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(fulfillingCluster).around(queryCluster); + + public void testUsingRCS1GeneratesDeprecationWarning() throws Exception { + final boolean rcs1 = true; + final boolean useProxyMode = randomBoolean(); + configureRemoteCluster(REMOTE_CLUSTER_ALIAS, fulfillingCluster, rcs1, useProxyMode, randomBoolean()); + + { + // Query cluster -> add role for test user + var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); + putRoleRequest.setJsonEntity(""" + { + "indices": [ + { + "names": ["local_index"], + "privileges": ["read"] + } + ] + }"""); + assertOK(adminClient().performRequest(putRoleRequest)); + + // Query cluster -> create user and assign role + var putUserRequest = new Request("PUT", "/_security/user/" + REMOTE_SEARCH_USER); + putUserRequest.setJsonEntity(""" + { + "password": "x-pack-test-password", + "roles" : ["remote_search"] + }"""); + assertOK(adminClient().performRequest(putUserRequest)); + + // Query cluster -> create test index + var indexDocRequest = new Request("POST", "/local_index/_doc?refresh=true"); + indexDocRequest.setJsonEntity("{\"local_foo\": \"local_bar\"}"); + assertOK(client().performRequest(indexDocRequest)); + + // Fulfilling cluster -> create test indices + Request bulkRequest = new Request("POST", "/_bulk?refresh=true"); + bulkRequest.setJsonEntity(Strings.format(""" + { "index": { "_index": "index1" } } + { "foo": "bar" } + { "index": { "_index": "secretindex" } } + { "bar": "foo" } + """)); + assertOK(performRequestAgainstFulfillingCluster(bulkRequest)); + + // Fulfilling cluster -> add role for remote search user + var putRoleOnRemoteClusterRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); + putRoleOnRemoteClusterRequest.setJsonEntity(""" + { + "indices": [ + { + "names": ["index*"], + "privileges": ["read", "read_cross_cluster"] + } + ] + }"""); + assertOK(performRequestAgainstFulfillingCluster(putRoleOnRemoteClusterRequest)); + } + { + // perform a simple search request, so we can ensure the remote cluster is connected + final Request searchRequest = new Request( + "GET", + String.format( + Locale.ROOT, + "/%s:index1/_search?ccs_minimize_roundtrips=%s", + randomFrom(REMOTE_CLUSTER_ALIAS, "*", "my_remote_*"), + randomBoolean() + ) + ); + assertOK(performRequestWithRemoteSearchUser(searchRequest)); + } + { + // verify that the deprecation warning is logged + try (InputStream log = queryCluster.getNodeLog(0, LogType.DEPRECATION)) { + Streams.readAllLines( + log, + line -> assertThat( + line, + containsString( + "The remote cluster connection to [" + + REMOTE_CLUSTER_ALIAS + + "] is using the certificate-based security model. " + + "The certificate-based security model is deprecated and will be removed in a future major version. " + + "Migrate the remote cluster from the certificate-based to the API key-based security model." 
+ ) + ) + ); + } + } + } + + private Response performRequestWithRemoteSearchUser(final Request request) throws IOException { + request.setOptions( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", headerFromRandomAuthMethod(REMOTE_SEARCH_USER, PASS)) + ); + return client().performRequest(request); + } + +} diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java index 4cbd1cab21af9..307f59859c75a 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java @@ -15,12 +15,14 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Strings; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.LogType; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.junit.RunnableTestRuleAdapter; @@ -31,6 +33,7 @@ import org.junit.rules.TestRule; import java.io.IOException; +import java.io.InputStream; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.util.Arrays; @@ -607,6 +610,7 @@ public void testCrossClusterSearch() throws Exception { assertThat(exception6.getMessage(), containsString("invalid cross-cluster API key value")); } } + assertNoRcs1DeprecationWarnings(); } @SuppressWarnings("unchecked") @@ -681,4 +685,23 @@ private static void selectTasksWithOpaqueId( } } } + + private void assertNoRcs1DeprecationWarnings() throws IOException { + for (int i = 0; i < queryCluster.getNumNodes(); i++) { + try (InputStream log = queryCluster.getNodeLog(i, LogType.DEPRECATION)) { + Streams.readAllLines( + log, + line -> assertThat( + line, + not( + containsString( + "The certificate-based security model is deprecated and will be removed in a future major version. " + + "Migrate the remote cluster from the certificate-based to the API key-based security model." + ) + ) + ) + ); + } + } + } } From cfe25077546b303c4f3ef4a9de597b8e971d9500 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 29 Jan 2025 13:48:37 -0500 Subject: [PATCH 231/383] ESQL: Speed up field name resolution (#121221) Speeds up field name resolution when there are many many field names, like in `HeapAttackIT`. 
Relates to #121112 --- .../java/org/elasticsearch/xpack/esql/session/EsqlSession.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 8c95992cf9f5a..94bf414da1b9d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -623,10 +623,11 @@ static PreAnalysisResult fieldNames(LogicalPlan parsed, Set enrichPolicy // for example "from test | eval x = salary | stats max = max(x) by gender" // remove the UnresolvedAttribute "x", since that is an Alias defined in "eval" AttributeSet planRefs = p.references(); + Set fieldNames = planRefs.names(); p.forEachExpressionDown(Alias.class, alias -> { // do not remove the UnresolvedAttribute that has the same name as its alias, ie "rename id = id" // or the UnresolvedAttributes that are used in Functions that have aliases "STATS id = MAX(id)" - if (planRefs.names().contains(alias.name())) { + if (fieldNames.contains(alias.name())) { return; } references.removeIf(attr -> matchByName(attr, alias.name(), keepCommandReferences.contains(attr))); From 74f91780e0869f443ce2777fdaf9172971b82024 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Wed, 29 Jan 2025 12:03:47 -0800 Subject: [PATCH 232/383] Fix check for sort fields being in nested objects (#121084) --- .../rest-api-spec/test/logsdb/10_settings.yml | 41 +++++++++++++++++++ .../index/mapper/DocumentMapper.java | 12 +++--- .../index/mapper/MapperFeatures.java | 2 + 3 files changed, 49 insertions(+), 6 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml index 2a31b3bd387c4..13107b39d6e1f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml @@ -201,6 +201,47 @@ non-default sort settings: - match: { test-sort.settings.index.sort.mode.0: "max" } - match: { test-sort.settings.index.sort.mode.1: "max" } +--- +non-default sort settings with presence of nested: + - requires: + cluster_features: [ "mapper.nested.sorting_fields_check_fix" ] + reason: "Fixed behavior" + + - do: + indices.create: + index: test-sort + body: + settings: + index: + mode: logsdb + number_of_shards: 2 + number_of_replicas: 0 + sort: + field: [ "agent_id", "@timestamp" ] + order: [ "asc", "desc" ] + mappings: + properties: + "@timestamp": + type: date + agent_id: + type: keyword + agent: + type: nested + properties: + id: + type: keyword + + - do: + indices.get_settings: + index: test-sort + + - is_true: test-sort + - match: { test-sort.settings.index.mode: "logsdb" } + - match: { test-sort.settings.index.sort.field.0: "agent_id" } + - match: { test-sort.settings.index.sort.field.1: "@timestamp" } + - match: { test-sort.settings.index.sort.order.0: "asc" } + - match: { test-sort.settings.index.sort.order.1: "desc" } + --- override sort order settings: - requires: diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index cf0c355a22e65..0d488e47c2e4f 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -164,12 +164,12 @@ public void validate(IndexSettings settings, boolean checkLimits) { throw new IllegalArgumentException("cannot have nested fields when index sort is activated"); } for (String field : settings.getValue(IndexSortConfig.INDEX_SORT_FIELD_SETTING)) { - for (NestedObjectMapper nestedObjectMapper : mappers().nestedLookup().getNestedMappers().values()) { - if (field.startsWith(nestedObjectMapper.fullPath())) { - throw new IllegalArgumentException( - "cannot apply index sort to field [" + field + "] under nested object [" + nestedObjectMapper.fullPath() + "]" - ); - } + NestedObjectMapper nestedMapper = mappers().nestedLookup().getNestedMappers().get(field); + String nestedParent = nestedMapper != null ? nestedMapper.fullPath() : mappers().nestedLookup().getNestedParent(field); + if (nestedParent != null) { + throw new IllegalArgumentException( + "cannot apply index sort to field [" + field + "] under nested object [" + nestedParent + "]" + ); } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 9b1abb1a4d533..0935e219fb5c0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -29,6 +29,7 @@ public class MapperFeatures implements FeatureSpecification { public static final NodeFeature META_FETCH_FIELDS_ERROR_CODE_CHANGED = new NodeFeature("meta_fetch_fields_error_code_changed"); public static final NodeFeature SPARSE_VECTOR_STORE_SUPPORT = new NodeFeature("mapper.sparse_vector.store_support"); + public static final NodeFeature SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX = new NodeFeature("mapper.nested.sorting_fields_check_fix"); @Override public Set getTestFeatures() { @@ -44,6 +45,7 @@ public Set getTestFeatures() { CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX, META_FETCH_FIELDS_ERROR_CODE_CHANGED, SPARSE_VECTOR_STORE_SUPPORT, + SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX, COUNTED_KEYWORD_SYNTHETIC_SOURCE_NATIVE_SUPPORT, SourceFieldMapper.SYNTHETIC_RECOVERY_SOURCE, ObjectMapper.SUBOBJECTS_FALSE_MAPPING_UPDATE_FIX From 1901c71e518c1732bdfab9a72f1423a27c1dc113 Mon Sep 17 00:00:00 2001 From: Mikhail Berezovskiy Date: Wed, 29 Jan 2025 12:07:29 -0800 Subject: [PATCH 233/383] Re-add http stream content size handler (#121095) --- .../Netty4IncrementalRequestHandlingIT.java | 94 +++---- .../http/netty4/Netty4HttpAggregator.java | 41 +-- .../netty4/Netty4HttpContentSizeHandler.java | 164 ++++++++++++ .../netty4/Netty4HttpServerTransport.java | 3 +- .../Netty4HttpContentSizeHandlerTests.java | 240 ++++++++++++++++++ 5 files changed, 461 insertions(+), 81 deletions(-) create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java index d825ec0a83f53..0158384b47aa4 100644 --- 
a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java @@ -54,7 +54,6 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.http.HttpBodyTracer; -import org.elasticsearch.http.HttpHandlingSettings; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.plugins.ActionPlugin; @@ -93,10 +92,15 @@ @ESIntegTestCase.ClusterScope(numDataNodes = 1) public class Netty4IncrementalRequestHandlingIT extends ESNetty4IntegTestCase { + private static final int MAX_CONTENT_LENGTH = ByteSizeUnit.MB.toIntBytes(50); + @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); - builder.put(HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH.getKey(), ByteSizeValue.of(50, ByteSizeUnit.MB)); + builder.put( + HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH.getKey(), + ByteSizeValue.of(MAX_CONTENT_LENGTH, ByteSizeUnit.BYTES) + ); return builder.build(); } @@ -135,7 +139,7 @@ public void testReceiveAllChunks() throws Exception { var opaqueId = opaqueId(reqNo); // this dataset will be compared with one on server side - var dataSize = randomIntBetween(1024, maxContentLength()); + var dataSize = randomIntBetween(1024, MAX_CONTENT_LENGTH); var sendData = Unpooled.wrappedBuffer(randomByteArrayOfLength(dataSize)); sendData.retain(); ctx.clientChannel.writeAndFlush(fullHttpRequest(opaqueId, sendData)); @@ -243,7 +247,7 @@ public void testServerExceptionMidStream() throws Exception { public void testClientBackpressure() throws Exception { try (var ctx = setupClientCtx()) { var opaqueId = opaqueId(0); - var payloadSize = maxContentLength(); + var payloadSize = MAX_CONTENT_LENGTH; var totalParts = 10; var partSize = payloadSize / totalParts; ctx.clientChannel.writeAndFlush(httpRequest(opaqueId, payloadSize)); @@ -285,7 +289,7 @@ public void test100Continue() throws Exception { try (var ctx = setupClientCtx()) { for (int reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var acceptableContentLength = randomIntBetween(0, maxContentLength()); + var acceptableContentLength = randomIntBetween(0, MAX_CONTENT_LENGTH); // send request header and await 100-continue var req = httpRequest(id, acceptableContentLength); @@ -317,7 +321,7 @@ public void test413TooLargeOnExpect100Continue() throws Exception { try (var ctx = setupClientCtx()) { for (int reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var oversized = maxContentLength() + 1; + var oversized = MAX_CONTENT_LENGTH + 1; // send request header and await 413 too large var req = httpRequest(id, oversized); @@ -333,32 +337,28 @@ public void test413TooLargeOnExpect100Continue() throws Exception { } } - // ensures that oversized chunked encoded request has no limits at http layer - // rest handler is responsible for oversized requests - public void testOversizedChunkedEncodingNoLimits() throws Exception { + // ensures that oversized chunked encoded request has maxContentLength limit and returns 413 + public void testOversizedChunkedEncoding() throws Exception { try (var ctx = setupClientCtx()) { - for (var reqNo = 0; reqNo < randomIntBetween(2, 
10); reqNo++) { - var id = opaqueId(reqNo); - var contentSize = maxContentLength() + 1; - var content = randomByteArrayOfLength(contentSize); - var is = new ByteBufInputStream(Unpooled.wrappedBuffer(content)); - var chunkedIs = new ChunkedStream(is); - var httpChunkedIs = new HttpChunkedInput(chunkedIs, LastHttpContent.EMPTY_LAST_CONTENT); - var req = httpRequest(id, 0); - HttpUtil.setTransferEncodingChunked(req, true); - - ctx.clientChannel.pipeline().addLast(new ChunkedWriteHandler()); - ctx.clientChannel.writeAndFlush(req); - ctx.clientChannel.writeAndFlush(httpChunkedIs); - var handler = ctx.awaitRestChannelAccepted(id); - var consumed = handler.readAllBytes(); - assertEquals(contentSize, consumed); - handler.sendResponse(new RestResponse(RestStatus.OK, "")); - - var resp = (FullHttpResponse) safePoll(ctx.clientRespQueue); - assertEquals(HttpResponseStatus.OK, resp.status()); - resp.release(); - } + var id = opaqueId(0); + var contentSize = MAX_CONTENT_LENGTH + 1; + var content = randomByteArrayOfLength(contentSize); + var is = new ByteBufInputStream(Unpooled.wrappedBuffer(content)); + var chunkedIs = new ChunkedStream(is); + var httpChunkedIs = new HttpChunkedInput(chunkedIs, LastHttpContent.EMPTY_LAST_CONTENT); + var req = httpRequest(id, 0); + HttpUtil.setTransferEncodingChunked(req, true); + + ctx.clientChannel.pipeline().addLast(new ChunkedWriteHandler()); + ctx.clientChannel.writeAndFlush(req); + ctx.clientChannel.writeAndFlush(httpChunkedIs); + var handler = ctx.awaitRestChannelAccepted(id); + var consumed = handler.readAllBytes(); + assertTrue(consumed <= MAX_CONTENT_LENGTH); + + var resp = (FullHttpResponse) safePoll(ctx.clientRespQueue); + assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + resp.release(); } } @@ -369,7 +369,7 @@ public void testBadRequestReleaseQueuedChunks() throws Exception { try (var ctx = setupClientCtx()) { for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var contentSize = randomIntBetween(0, maxContentLength()); + var contentSize = randomIntBetween(0, MAX_CONTENT_LENGTH); var req = httpRequest(id, contentSize); var content = randomContent(contentSize, true); @@ -405,7 +405,7 @@ public void testHttpClientStats() throws Exception { for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var contentSize = randomIntBetween(0, maxContentLength()); + var contentSize = randomIntBetween(0, MAX_CONTENT_LENGTH); totalBytesSent += contentSize; ctx.clientChannel.writeAndFlush(httpRequest(id, contentSize)); ctx.clientChannel.writeAndFlush(randomContent(contentSize, true)); @@ -485,10 +485,6 @@ private void assertHttpBodyLogging(Function test) throws Exceptio } } - private int maxContentLength() { - return HttpHandlingSettings.fromSettings(internalCluster().getInstance(Settings.class)).maxContentLength(); - } - private String opaqueId(int reqNo) { return getTestName() + "-" + reqNo; } @@ -658,14 +654,22 @@ void sendResponse(RestResponse response) { int readBytes(int bytes) { var consumed = 0; if (recvLast == false) { - while (consumed < bytes) { - stream.next(); - var recvChunk = safePoll(recvChunks); - consumed += recvChunk.chunk.length(); - recvChunk.chunk.close(); - if (recvChunk.isLast) { - recvLast = true; - break; + stream.next(); + while (consumed < bytes && streamClosed == false) { + try { + var recvChunk = recvChunks.poll(10, TimeUnit.MILLISECONDS); + if (recvChunk != null) { + consumed += recvChunk.chunk.length(); + recvChunk.chunk.close(); + if 
(recvChunk.isLast) { + recvLast = true; + break; + } + stream.next(); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new AssertionError(e); } } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java index 021ce09e0ed8e..0294b4626496c 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java @@ -11,13 +11,10 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http.FullHttpRequest; -import io.netty.handler.codec.http.FullHttpResponse; -import io.netty.handler.codec.http.HttpContent; import io.netty.handler.codec.http.HttpObject; import io.netty.handler.codec.http.HttpObjectAggregator; import io.netty.handler.codec.http.HttpRequest; -import io.netty.handler.codec.http.HttpResponseStatus; -import io.netty.handler.codec.http.HttpUtil; +import io.netty.handler.codec.http.HttpRequestDecoder; import org.elasticsearch.http.HttpPreRequest; import org.elasticsearch.http.netty4.internal.HttpHeadersAuthenticatorUtils; @@ -27,18 +24,19 @@ /** * A wrapper around {@link HttpObjectAggregator}. Provides optional content aggregation based on * predicate. {@link HttpObjectAggregator} also handles Expect: 100-continue and oversized content. - * Unfortunately, Netty does not provide handlers for oversized messages beyond HttpObjectAggregator. + * Provides content size handling for non-aggregated requests too. */ public class Netty4HttpAggregator extends HttpObjectAggregator { private static final Predicate IGNORE_TEST = (req) -> req.uri().startsWith("/_test/request-stream") == false; private final Predicate decider; + private final Netty4HttpContentSizeHandler streamContentSizeHandler; private boolean aggregating = true; - private boolean ignoreContentAfterContinueResponse = false; - public Netty4HttpAggregator(int maxContentLength, Predicate decider) { + public Netty4HttpAggregator(int maxContentLength, Predicate decider, HttpRequestDecoder decoder) { super(maxContentLength); this.decider = decider; + this.streamContentSizeHandler = new Netty4HttpContentSizeHandler(decoder, maxContentLength); } @Override @@ -51,34 +49,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception if (aggregating || msg instanceof FullHttpRequest) { super.channelRead(ctx, msg); } else { - handle(ctx, (HttpObject) msg); - } - } - - private void handle(ChannelHandlerContext ctx, HttpObject msg) { - if (msg instanceof HttpRequest request) { - var continueResponse = newContinueResponse(request, maxContentLength(), ctx.pipeline()); - if (continueResponse != null) { - // there are 3 responses expected: 100, 413, 417 - // on 100 we pass request further and reply to client to continue - // on 413/417 we ignore following content - ctx.writeAndFlush(continueResponse); - var resp = (FullHttpResponse) continueResponse; - if (resp.status() != HttpResponseStatus.CONTINUE) { - ignoreContentAfterContinueResponse = true; - return; - } - HttpUtil.set100ContinueExpected(request, false); - } - ignoreContentAfterContinueResponse = false; - ctx.fireChannelRead(msg); - } else { - var httpContent = (HttpContent) msg; - if (ignoreContentAfterContinueResponse) { - httpContent.release(); - } else { - ctx.fireChannelRead(msg); - } + 
streamContentSizeHandler.channelRead(ctx, msg); } } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java new file mode 100644 index 0000000000000..fee9d227d8310 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java @@ -0,0 +1,164 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.http.netty4; + +import io.netty.buffer.Unpooled; +import io.netty.channel.ChannelFutureListener; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import io.netty.handler.codec.http.DefaultFullHttpResponse; +import io.netty.handler.codec.http.DefaultHttpHeaders; +import io.netty.handler.codec.http.EmptyHttpHeaders; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpContent; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpHeaderValues; +import io.netty.handler.codec.http.HttpObject; +import io.netty.handler.codec.http.HttpRequest; +import io.netty.handler.codec.http.HttpRequestDecoder; +import io.netty.handler.codec.http.HttpResponseStatus; +import io.netty.handler.codec.http.HttpUtil; +import io.netty.handler.codec.http.HttpVersion; + +import org.elasticsearch.core.SuppressForbidden; + +import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION; +import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH; + +/** + * Provides handling for 'Expect' header and content size. Implements HTTP1.1 spec. + * Allows {@code Expect: 100-continue} header only. Other 'Expect' headers will be rejected with + * {@code 417 Expectation Failed} reason. + *
+ * Replies {@code 100 Continue} to requests with allowed maxContentLength. + *
+ * Replies {@code 413 Request Entity Too Large} when content size exceeds maxContentLength. + * + * Channel can be reused for requests with "Expect:100-Continue" header that exceed allowed content length, + * as long as request does not include content. If oversized request already contains content then + * we cannot safely proceed and connection will be closed. + *

+ * TODO: move to RestController to allow content limits per RestHandler.
+ * Ideally we should be able to handle Continue and oversized request in the RestController.
+ * <ul>
+ *     <li>100 Continue is interim response, means RestChannel will send 2 responses for a single request. See
+ *         rfc9110.html#status.100</li>
+ *     <li>RestChannel should be able to close underlying HTTP channel connection.</li>
+ * </ul>
+ */ +@SuppressForbidden(reason = "use of default ChannelFutureListener's CLOSE and CLOSE_ON_FAILURE") +public class Netty4HttpContentSizeHandler extends ChannelInboundHandlerAdapter { + + // copied from netty's HttpObjectAggregator + static final FullHttpResponse CONTINUE = new DefaultFullHttpResponse( + HttpVersion.HTTP_1_1, + HttpResponseStatus.CONTINUE, + Unpooled.EMPTY_BUFFER + ); + static final FullHttpResponse EXPECTATION_FAILED_CLOSE = new DefaultFullHttpResponse( + HttpVersion.HTTP_1_1, + HttpResponseStatus.EXPECTATION_FAILED, + Unpooled.EMPTY_BUFFER, + new DefaultHttpHeaders().add(CONTENT_LENGTH, 0).add(CONNECTION, HttpHeaderValues.CLOSE), + EmptyHttpHeaders.INSTANCE + ); + static final FullHttpResponse TOO_LARGE_CLOSE = new DefaultFullHttpResponse( + HttpVersion.HTTP_1_1, + HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, + Unpooled.EMPTY_BUFFER, + new DefaultHttpHeaders().add(CONTENT_LENGTH, 0).add(CONNECTION, HttpHeaderValues.CLOSE), + EmptyHttpHeaders.INSTANCE + ); + static final FullHttpResponse TOO_LARGE = new DefaultFullHttpResponse( + HttpVersion.HTTP_1_1, + HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, + Unpooled.EMPTY_BUFFER, + new DefaultHttpHeaders().add(CONTENT_LENGTH, 0), + EmptyHttpHeaders.INSTANCE + ); + + private final int maxContentLength; + private final HttpRequestDecoder decoder; // need to reset decoder after sending 413 + private int currentContentLength; // chunked encoding does not provide content length, need to track actual length + private boolean ignoreContent; + + public Netty4HttpContentSizeHandler(HttpRequestDecoder decoder, int maxContentLength) { + this.maxContentLength = maxContentLength; + this.decoder = decoder; + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) { + assert msg instanceof HttpObject; + if (msg instanceof HttpRequest request) { + handleRequest(ctx, request); + } else { + handleContent(ctx, (HttpContent) msg); + } + } + + private void handleRequest(ChannelHandlerContext ctx, HttpRequest request) { + ignoreContent = true; + if (request.decoderResult().isFailure()) { + ctx.fireChannelRead(request); + return; + } + + final var expectValue = request.headers().get(HttpHeaderNames.EXPECT); + boolean isContinueExpected = false; + // Only "Expect: 100-Continue" header is supported + if (expectValue != null) { + if (HttpHeaderValues.CONTINUE.toString().equalsIgnoreCase(expectValue)) { + isContinueExpected = true; + } else { + ctx.writeAndFlush(EXPECTATION_FAILED_CLOSE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE); + return; + } + } + + boolean isOversized = HttpUtil.getContentLength(request, -1) > maxContentLength; + if (isOversized) { + if (isContinueExpected) { + // Client is allowed to send content without waiting for Continue. 
+ // See https://www.rfc-editor.org/rfc/rfc9110.html#section-10.1.1-11.3 + // this content will result in HttpRequestDecoder failure and send downstream + decoder.reset(); + } + ctx.writeAndFlush(TOO_LARGE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE_ON_FAILURE); + } else { + ignoreContent = false; + currentContentLength = 0; + if (isContinueExpected) { + ctx.writeAndFlush(CONTINUE.retainedDuplicate()); + HttpUtil.set100ContinueExpected(request, false); + } + ctx.fireChannelRead(request); + } + } + + private void handleContent(ChannelHandlerContext ctx, HttpContent msg) { + if (ignoreContent) { + msg.release(); + } else { + currentContentLength += msg.content().readableBytes(); + if (currentContentLength > maxContentLength) { + msg.release(); + ctx.writeAndFlush(TOO_LARGE_CLOSE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE); + } else { + ctx.fireChannelRead(msg); + } + } + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 36c860f1fb90b..9ffa4b479be17 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -381,7 +381,8 @@ protected HttpMessage createMessage(String[] initialLine) throws Exception { handlingSettings.maxContentLength(), httpPreRequest -> enabled.get() == false || ((httpPreRequest.rawPath().endsWith("/_bulk") == false) - || httpPreRequest.rawPath().startsWith("/_xpack/monitoring/_bulk")) + || httpPreRequest.rawPath().startsWith("/_xpack/monitoring/_bulk")), + decoder ); aggregator.setMaxCumulationBufferComponents(transport.maxCompositeBufferComponents); ch.pipeline() diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java new file mode 100644 index 0000000000000..36399c8d6d7a5 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java @@ -0,0 +1,240 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.http.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.embedded.EmbeddedChannel; +import io.netty.handler.codec.http.DefaultHttpContent; +import io.netty.handler.codec.http.DefaultHttpRequest; +import io.netty.handler.codec.http.DefaultLastHttpContent; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpContent; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpHeaderValues; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpObject; +import io.netty.handler.codec.http.HttpRequest; +import io.netty.handler.codec.http.HttpRequestDecoder; +import io.netty.handler.codec.http.HttpRequestEncoder; +import io.netty.handler.codec.http.HttpResponseStatus; +import io.netty.handler.codec.http.HttpUtil; +import io.netty.handler.codec.http.HttpVersion; +import io.netty.handler.codec.http.LastHttpContent; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; + +public class Netty4HttpContentSizeHandlerTests extends ESTestCase { + + private static final int MAX_CONTENT_LENGTH = 1024; + private static final int OVERSIZED_LENGTH = MAX_CONTENT_LENGTH + 1; + private static final int REPS = 1000; + private EmbeddedChannel channel; + private EmbeddedChannel encoder; // channel to encode HTTP objects into bytes + + private static HttpContent httpContent(int size) { + return new DefaultHttpContent(Unpooled.wrappedBuffer(randomByteArrayOfLength(size))); + } + + private static LastHttpContent lastHttpContent(int size) { + return new DefaultLastHttpContent(Unpooled.wrappedBuffer(randomByteArrayOfLength(size))); + } + + private HttpRequest httpRequest() { + return new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/"); + } + + // encodes multiple HTTP objects into single ByteBuf + private ByteBuf encode(HttpObject... objs) { + var out = Unpooled.compositeBuffer(); + Arrays.stream(objs).forEach(encoder::writeOutbound); + while (encoder.outboundMessages().isEmpty() == false) { + out.addComponent(true, encoder.readOutbound()); + } + return out; + } + + @Override + public void setUp() throws Exception { + super.setUp(); + var decoder = new HttpRequestDecoder(); + encoder = new EmbeddedChannel(new HttpRequestEncoder()); + channel = new EmbeddedChannel(decoder, new Netty4HttpContentSizeHandler(decoder, MAX_CONTENT_LENGTH)); + } + + /** + * Assert that handler replies 100-continue for acceptable request and pass request further. + */ + public void testContinue() { + for (var i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + HttpUtil.set100ContinueExpected(sendRequest, true); + channel.writeInbound(encode(sendRequest)); + assertEquals("should send back 100-continue", Netty4HttpContentSizeHandler.CONTINUE, channel.readOutbound()); + var recvRequest = (HttpRequest) channel.readInbound(); + assertNotNull(recvRequest); + assertFalse(HttpUtil.is100ContinueExpected(recvRequest)); + channel.writeInbound(encode(LastHttpContent.EMPTY_LAST_CONTENT)); + assertEquals(LastHttpContent.EMPTY_LAST_CONTENT, channel.readInbound()); + } + } + + /** + * Assert that handler pass through acceptable request. 
+ */ + public void testWithoutContinue() { + for (var i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + channel.writeInbound(encode(sendRequest)); + assertNull("should not receive response", channel.readOutbound()); + assertNotNull("request should pass", channel.readInbound()); + channel.writeInbound(encode(LastHttpContent.EMPTY_LAST_CONTENT)); + assertEquals(LastHttpContent.EMPTY_LAST_CONTENT, channel.readInbound()); + } + } + + /** + * Assert that handler pass through request and content for acceptable request. + */ + public void testContinueWithContent() { + for (var i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + HttpUtil.set100ContinueExpected(sendRequest, true); + HttpUtil.setContentLength(sendRequest, MAX_CONTENT_LENGTH); + var sendContent = lastHttpContent(MAX_CONTENT_LENGTH); + channel.writeInbound(encode(sendRequest, sendContent)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals("should send back 100-continue", Netty4HttpContentSizeHandler.CONTINUE, resp); + resp.release(); + var recvRequest = (HttpRequest) channel.readInbound(); + assertNotNull(recvRequest); + var recvContent = (HttpContent) channel.readInbound(); + assertNotNull(recvContent); + assertEquals(MAX_CONTENT_LENGTH, recvContent.content().readableBytes()); + recvContent.release(); + } + } + + /** + * Assert that handler return 417 Expectation Failed and closes channel on request + * with "Expect" header other than "100-Continue". + */ + public void testExpectationFailed() { + var sendRequest = httpRequest(); + sendRequest.headers().set(HttpHeaderNames.EXPECT, randomValueOtherThan(HttpHeaderValues.CONTINUE, ESTestCase::randomIdentifier)); + channel.writeInbound(encode(sendRequest)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.EXPECTATION_FAILED, resp.status()); + assertFalse(channel.isOpen()); + resp.release(); + } + + /** + * Assert that handler returns 413 Request Entity Too Large for oversized request + * and does not close channel if following content is not present. + */ + public void testEntityTooLarge() { + for (var i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + HttpUtil.set100ContinueExpected(sendRequest, true); + HttpUtil.setContentLength(sendRequest, OVERSIZED_LENGTH); + channel.writeInbound(encode(sendRequest, LastHttpContent.EMPTY_LAST_CONTENT)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + assertNull("request should not pass", channel.readInbound()); + assertTrue("should not close channel", channel.isOpen()); + resp.release(); + } + } + + /** + * Mixed load of oversized and normal requests with Exepct:100-Continue. 
+ */ + public void testMixedContent() { + for (int i = 0; i < REPS; i++) { + var isOversized = randomBoolean(); + var sendRequest = httpRequest(); + HttpUtil.set100ContinueExpected(sendRequest, true); + if (isOversized) { + HttpUtil.setContentLength(sendRequest, OVERSIZED_LENGTH); + channel.writeInbound(encode(sendRequest)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + channel.writeInbound(encode(LastHttpContent.EMPTY_LAST_CONTENT)); // terminate + assertNull(channel.readInbound()); + resp.release(); + } else { + var normalSize = between(1, MAX_CONTENT_LENGTH); + HttpUtil.setContentLength(sendRequest, normalSize); + channel.writeInbound(encode(sendRequest)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.CONTINUE, resp.status()); + resp.release(); + var sendContent = lastHttpContent(normalSize); + channel.writeInbound(encode(sendContent)); + var recvRequest = (HttpRequest) channel.readInbound(); + var recvContent = (LastHttpContent) channel.readInbound(); + assertEquals("content length header should match", normalSize, HttpUtil.getContentLength(recvRequest)); + assertFalse("should remove expect header", HttpUtil.is100ContinueExpected(recvRequest)); + assertEquals("actual content size should match", normalSize, recvContent.content().readableBytes()); + recvContent.release(); + } + } + } + + /** + * Assert that handler returns 413 Request Entity Too Large and skip following content. + */ + public void testEntityTooLargeWithContentWithoutExpect() { + for (int i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + HttpUtil.setContentLength(sendRequest, OVERSIZED_LENGTH); + var unexpectedContent = lastHttpContent(OVERSIZED_LENGTH); + channel.writeInbound(encode(sendRequest, unexpectedContent)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + resp.release(); + assertNull("request and content should not pass", channel.readInbound()); + assertTrue("should not close channel", channel.isOpen()); + } + } + + /** + * Assert that handler return 413 Request Entity Too Large and closes channel for oversized + * requests with chunked content. 
+ */ + public void testEntityTooLargeWithChunkedContent() { + var sendRequest = httpRequest(); + HttpUtil.setTransferEncodingChunked(sendRequest, true); + channel.writeInbound(encode(sendRequest)); + assertTrue("request should pass", channel.readInbound() instanceof HttpRequest); + + int contentBytesSent = 0; + do { + var thisPartSize = between(1, MAX_CONTENT_LENGTH * 2); + channel.writeInbound(encode(httpContent(thisPartSize))); + contentBytesSent += thisPartSize; + + if (contentBytesSent <= MAX_CONTENT_LENGTH) { + ((HttpContent) channel.readInbound()).release(); + } else { + break; + } + } while (true); + + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals("should respond with 413", HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + assertFalse("should close channel", channel.isOpen()); + resp.release(); + } + +} From 3128d6682506282d725851d69d1c83b7e1078be7 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Wed, 29 Jan 2025 16:16:19 -0500 Subject: [PATCH 234/383] Unmuting test (#121209) --- muted-tests.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index c7c6d5113600a..5947cf733326c 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -258,8 +258,6 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test171AdditionalCliOptionsAreForwarded issue: https://github.com/elastic/elasticsearch/issues/120925 -- class: org.elasticsearch.xpack.inference.InferenceGetServicesIT - issue: https://github.com/elastic/elasticsearch/issues/120986 - class: org.elasticsearch.action.search.SearchProgressActionListenerIT method: testSearchProgressWithQuery issue: https://github.com/elastic/elasticsearch/issues/120994 From 61dc93107f5b23055b890ffda5a81d9fe119cfb0 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 29 Jan 2025 13:20:25 -0800 Subject: [PATCH 235/383] Add lucene version compatibility tests (#121104) This commit adds compatibility tests that target ES revisions that align with specific Lucene versions. In this case, we are intending to upgrade from Lucene 10.0 to 10.1. Since no on-prem Elasticsearch release exists with 10.0, we need another method to ensure compatibility with Lucene 10.0 indices. The work here is a bit hacky since all our compatibility testing infrastructure is centered around versions and we're now effectively doing compatibility tests between two different revisions of Elasticsearch that both report the same version. Ideally this specific testing would be replaced by unit tests, rather than reusing our full cluster restart tests for this purpose. We'll also want to bump the commit referenced in the CI pipelines here to align with the last commit using Lucene 10.0.
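(As a rough sketch of what the CI steps added below boil down to: the `luceneBwcTest` task name and the `tests.bwc.*` system properties come from the pipeline definitions in this change; `./gradlew` is assumed here as the local stand-in for the `.ci/scripts/run-gradle.sh` wrapper, and the version and pinned commit are the placeholder values that must track the last main commit still on Lucene 10.0.)

    ./gradlew -Dbwc.checkout.align=true \
      -Dtests.bwc.main.version=9.0.0 \
      -Dtests.bwc.refspec.main=b2cc9d9b8f00ee621f93ddca07ea9c671aab1578 \
      luceneBwcTest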
--- .buildkite/pipelines/intake.template.yml | 21 +++++++++++++++++ .buildkite/pipelines/intake.yml | 21 +++++++++++++++++ .buildkite/pipelines/periodic.template.yml | 21 +++++++++++++++++ .buildkite/pipelines/periodic.yml | 21 +++++++++++++++++ .../InternalDistributionBwcSetupPlugin.java | 18 +++++++++++++++ .../InternalDistributionDownloadPlugin.java | 23 +++++++++++++++++++ .../test/rest/RestTestBasePlugin.java | 2 +- .../gradle/DistributionDownloadPlugin.java | 5 +++- distribution/bwc/main/build.gradle | 10 ++++++++ .../ingest/geoip/FullClusterRestartIT.java | 3 ++- qa/full-cluster-restart/build.gradle | 8 +++++++ .../FullClusterRestartArchivedSettingsIT.java | 3 ++- .../FullClusterRestartDownsampleIT.java | 3 ++- .../upgrades/FullClusterRestartIT.java | 3 ++- .../LogsIndexModeFullClusterRestartIT.java | 3 ++- ...rameterizedFullClusterRestartTestCase.java | 10 ++++---- .../upgrades/QueryBuilderBWCIT.java | 2 +- settings.gradle | 1 + .../SnapshotDistributionResolver.java | 3 ++- .../application/FullClusterRestartIT.java | 2 +- .../xpack/restart/FullClusterRestartIT.java | 3 ++- ...stractXpackFullClusterRestartTestCase.java | 3 ++- 22 files changed, 173 insertions(+), 16 deletions(-) create mode 100644 distribution/bwc/main/build.gradle diff --git a/.buildkite/pipelines/intake.template.yml b/.buildkite/pipelines/intake.template.yml index 9d7cf3c7e0083..d1400bdb83da0 100644 --- a/.buildkite/pipelines/intake.template.yml +++ b/.buildkite/pipelines/intake.template.yml @@ -63,6 +63,27 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: "{{matrix.BWC_VERSION}}" + - group: lucene-compat + steps: + - label: "{{matrix.LUCENE_VERSION}} / lucene-compat" + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints -Dtests.bwc.main.version=$$ES_VERSION -Dtests.bwc.refspec.main=$$ES_COMMIT luceneBwcTest + timeout_in_minutes: 300 + matrix: + setup: + LUCENE_VERSION: + - "10.0.0" + ES_VERSION: + - "9.0.0" + ES_COMMIT: + - "b2cc9d9b8f00ee621f93ddca07ea9c671aab1578" # update to match last commit before lucene bump + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + env: + ES_VERSION: "{{matrix.ES_VERSION}}" + ES_COMMIT: "{{matrix.ES_COMMIT}}" - label: rest-compat command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkRestCompat timeout_in_minutes: 300 diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 54be022ce236b..ea04a0340076d 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -64,6 +64,27 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: "{{matrix.BWC_VERSION}}" + - group: lucene-compat + steps: + - label: "{{matrix.LUCENE_VERSION}} / lucene-compat" + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints -Dtests.bwc.main.version=$$ES_VERSION -Dtests.bwc.refspec.main=$$ES_COMMIT luceneBwcTest + timeout_in_minutes: 300 + matrix: + setup: + LUCENE_VERSION: + - "10.0.0" + ES_VERSION: + - "9.0.0" + ES_COMMIT: + - "b2cc9d9b8f00ee621f93ddca07ea9c671aab1578" # update to match last commit before lucene bump + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + env: + ES_VERSION: 
"{{matrix.ES_VERSION}}" + ES_COMMIT: "{{matrix.ES_COMMIT}}" - label: rest-compat command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkRestCompat timeout_in_minutes: 300 diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index 201c34058a409..afde6bdf8e65d 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -201,6 +201,27 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk + - group: lucene-compat + steps: + - label: "{{matrix.LUCENE_VERSION}} / lucene-compat" + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints -Dtests.bwc.main.version=$$ES_VERSION -Dtests.bwc.refspec.main=$$ES_COMMIT luceneBwcTest + timeout_in_minutes: 300 + matrix: + setup: + LUCENE_VERSION: + - "10.0.0" + ES_VERSION: + - "9.0.0" + ES_COMMIT: + - "b2cc9d9b8f00ee621f93ddca07ea9c671aab1578" # update to match last commit before lucene bump + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + env: + ES_VERSION: "{{matrix.ES_VERSION}}" + ES_COMMIT: "{{matrix.ES_COMMIT}}" - label: Upload Snyk Dependency Graph command: .ci/scripts/run-gradle.sh uploadSnykDependencyGraph -PsnykTargetReference=$BUILDKITE_BRANCH env: diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 64c4d59fd7fbe..3472e7edce0da 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -582,6 +582,27 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk + - group: lucene-compat + steps: + - label: "{{matrix.LUCENE_VERSION}} / lucene-compat" + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints -Dtests.bwc.main.version=$$ES_VERSION -Dtests.bwc.refspec.main=$$ES_COMMIT luceneBwcTest + timeout_in_minutes: 300 + matrix: + setup: + LUCENE_VERSION: + - "10.0.0" + ES_VERSION: + - "9.0.0" + ES_COMMIT: + - "b2cc9d9b8f00ee621f93ddca07ea9c671aab1578" # update to match last commit before lucene bump + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + env: + ES_VERSION: "{{matrix.ES_VERSION}}" + ES_COMMIT: "{{matrix.ES_COMMIT}}" - label: Upload Snyk Dependency Graph command: .ci/scripts/run-gradle.sh uploadSnykDependencyGraph -PsnykTargetReference=$BUILDKITE_BRANCH env: diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java index c38ea5b4f0850..b6c36285ca3a7 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java @@ -86,6 +86,24 @@ public void apply(Project project) { fileSystemOperations ); }); + + // Also set up the "main" project which is just used for arbitrary overrides. See InternalDistributionDownloadPlugin. 
+ if (System.getProperty("tests.bwc.main.version") != null) { + configureBwcProject( + project.project(":distribution:bwc:main"), + buildParams, + new BwcVersions.UnreleasedVersionInfo( + Version.fromString(System.getProperty("tests.bwc.main.version")), + "main", + ":distribution:bwc:main" + ), + providerFactory, + objectFactory, + toolChainService, + isCi, + fileSystemOperations + ); + } } private static void configureBwcProject( diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java index ba587aa4bd979..4c0c224aff3f3 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java @@ -102,6 +102,29 @@ private void registerInternalDistributionResolutions(List { + String versionProperty = System.getProperty("tests.bwc.main.version"); + // We use this phony version as a placeholder for the real version + if (distribution.getVersion().equals("0.0.0")) { + BwcVersions.UnreleasedVersionInfo unreleasedVersionInfo = new BwcVersions.UnreleasedVersionInfo( + Version.fromString(versionProperty), + "main", + ":distribution:bwc:main" + ); + String projectConfig = getProjectConfig(distribution, unreleasedVersionInfo); + return new ProjectBasedDistributionDependency( + (config) -> projectDependency(project.getDependencies(), unreleasedVersionInfo.gradleProjectPath(), projectConfig) + ); + } + return null; + })); } private boolean isCurrentVersion(ElasticsearchDistribution distribution) { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index 559c0f60abc08..a9f7267cb501c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -223,7 +223,7 @@ public Void call(Object... 
args) { } Version version = (Version) args[0]; - boolean isReleased = bwcVersions.unreleasedInfo(version) == null; + boolean isReleased = bwcVersions.unreleasedInfo(version) == null && version.toString().equals("0.0.0") == false; String versionString = version.toString(); ElasticsearchDistribution bwcDistro = createDistribution(project, "bwc_" + versionString, versionString); diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java index 4c7290457e7df..35748459ecac3 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java @@ -25,7 +25,9 @@ import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.List; +import java.util.Objects; import javax.inject.Inject; @@ -141,8 +143,9 @@ private void registerDistributionDependencies(Project project, ElasticsearchDist private DistributionDependency resolveDependencyNotation(Project project, ElasticsearchDistribution distro) { return distributionsResolutionStrategies.stream() + .sorted(Comparator.comparing(DistributionResolution::getPriority).reversed()) .map(r -> r.getResolver().resolve(project, distro)) - .filter(d -> d != null) + .filter(Objects::nonNull) .findFirst() .orElseGet(() -> DistributionDependency.of(dependencyNotation(distro))); } diff --git a/distribution/bwc/main/build.gradle b/distribution/bwc/main/build.gradle new file mode 100644 index 0000000000000..305e32a58a6ad --- /dev/null +++ b/distribution/bwc/main/build.gradle @@ -0,0 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +// This project is used only for overriding bwc distributions. See InternalDistributionDownloadPlugin for details. 
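For reference, the lucene-compat steps above drive this machinery end to end: the placeholder `0.0.0` distribution is resolved against the new `:distribution:bwc:main` project, whose real version comes from `tests.bwc.main.version`, and the `luceneBwcTest` task wired up in qa/full-cluster-restart only runs when that property is present. A minimal local invocation might look like the sketch below; the flags and values are taken from the Buildkite steps in this patch, but running it outside CI this way (and omitting the build-cache and scan flags) is an assumption, not something the patch documents:

    # Sketch: exercise the Lucene-compat BWC suite locally, assuming the same
    # properties the CI steps pass (version and commit values copied from the pipeline).
    .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true \
      -Dtests.bwc.main.version=9.0.0 \
      -Dtests.bwc.refspec.main=b2cc9d9b8f00ee621f93ddca07ea9c671aab1578 \
      luceneBwcTest

Without `tests.bwc.main.version` the task is skipped by its `onlyIf` guard, so regular intake and periodic runs are unaffected.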
diff --git a/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java b/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java index 49c4aaea0a728..6a28a09dc7c6b 100644 --- a/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java +++ b/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.upgrades.ParameterizedFullClusterRestartTestCase; @@ -54,7 +55,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) .setting("ingest.geoip.downloader.endpoint", () -> fixture.getAddress(), s -> useFixture) .setting("xpack.security.enabled", useSecurity ? "true" : "false") diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle index 5e68c4d1ad26b..d23267bb352fc 100644 --- a/qa/full-cluster-restart/build.gradle +++ b/qa/full-cluster-restart/build.gradle @@ -7,6 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' @@ -20,6 +21,13 @@ buildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> } } +tasks.register("luceneBwcTest", StandaloneRestIntegTestTask) { + // We use a phony version here as the real version is provided via `tests.bwc.main.version` system property + usesBwcDistribution(Version.fromString("0.0.0")) + systemProperty("tests.old_cluster_version", "0.0.0") + onlyIf("tests.bwc.main.version system property exists") { System.getProperty("tests.bwc.main.version") != null } +} + tasks.withType(Test).configureEach { // CI doesn't like it when there's multiple clusters running at once maxParallelForks = 1 diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartArchivedSettingsIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartArchivedSettingsIT.java index caa57f1e605a2..6a2fe9ec84528 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartArchivedSettingsIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartArchivedSettingsIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ObjectPath; import org.junit.ClassRule; import org.junit.rules.RuleChain; @@ -44,7 +45,7 @@ public class FullClusterRestartArchivedSettingsIT extends ParameterizedFullClust private static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) .setting("path.repo", () -> repoDirectory.getRoot().getPath()) .setting("xpack.security.enabled", "false") diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java index f907870fc8254..1d6254aed7045 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.junit.Before; import org.junit.ClassRule; import org.junit.rules.RuleChain; @@ -45,7 +46,7 @@ public class FullClusterRestartDownsampleIT extends ParameterizedFullClusterRest private static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) .setting("xpack.security.enabled", "false") .setting("indices.lifecycle.poll_interval", "5s") diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java 
b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 0f41712abe927..a5e1041dab279 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -39,6 +39,7 @@ import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.xcontent.ToXContent; @@ -103,7 +104,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas private static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) .setting("path.repo", () -> repoDirectory.getRoot().getPath()) .setting("xpack.security.enabled", "false") diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java index ebf72b26a2111..26006bc70b866 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.time.FormatNames; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.junit.ClassRule; @@ -34,7 +35,7 @@ public class LogsIndexModeFullClusterRestartIT extends ParameterizedFullClusterR @ClassRule public static final ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) .module("constant-keyword") .module("data-streams") .module("mapper-extras") diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java index 6de960a0fd7ed..7518a799540b8 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java @@ -38,8 +38,10 @@ @TestCaseOrdering(FullClusterRestartTestOrdering.class) public abstract class ParameterizedFullClusterRestartTestCase extends ESRestTestCase { - private static final Version MINIMUM_WIRE_COMPATIBLE_VERSION = Version.fromString(System.getProperty("tests.minimum.wire.compatible")); - private static final String OLD_CLUSTER_VERSION = System.getProperty("tests.old_cluster_version"); + protected static final Version MINIMUM_WIRE_COMPATIBLE_VERSION = Version.fromString( + System.getProperty("tests.minimum.wire.compatible") + ); + protected static 
final String OLD_CLUSTER_VERSION = System.getProperty("tests.old_cluster_version"); private static IndexVersion oldIndexVersion; private static boolean upgradeFailed = false; private static boolean upgraded = false; @@ -133,7 +135,7 @@ public boolean isRunningAgainstOldCluster() { } public static String getOldClusterVersion() { - return OLD_CLUSTER_VERSION; + return System.getProperty("tests.bwc.main.version", OLD_CLUSTER_VERSION); } protected static boolean oldClusterHasFeature(String featureId) { @@ -152,7 +154,7 @@ public static IndexVersion getOldClusterIndexVersion() { } public static Version getOldClusterTestVersion() { - return Version.fromString(OLD_CLUSTER_VERSION); + return Version.fromString(System.getProperty("tests.bwc.main.version", OLD_CLUSTER_VERSION)); } protected abstract ElasticsearchCluster getUpgradeCluster(); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java index 8b74657becb24..02bea9a35f5f4 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java @@ -75,7 +75,7 @@ public class QueryBuilderBWCIT extends ParameterizedFullClusterRestartTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(org.elasticsearch.test.cluster.util.Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) .setting("xpack.security.enabled", "false") .feature(FeatureFlag.FAILURE_STORE_ENABLED) diff --git a/settings.gradle b/settings.gradle index 8a15f74dcb286..374b67ee78a13 100644 --- a/settings.gradle +++ b/settings.gradle @@ -79,6 +79,7 @@ List projects = [ 'distribution:bwc:maintenance', 'distribution:bwc:minor', 'distribution:bwc:staged', + 'distribution:bwc:main', 'distribution:tools:java-version-checker', 'distribution:tools:cli-launcher', 'distribution:tools:server-cli', diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/distribution/SnapshotDistributionResolver.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/distribution/SnapshotDistributionResolver.java index 048a3c49fcade..a84211674f8c6 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/distribution/SnapshotDistributionResolver.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/distribution/SnapshotDistributionResolver.java @@ -38,7 +38,8 @@ public DistributionDescriptor resolve(Version version, DistributionType type) { } // Snapshot distributions are never release builds and always use the default distribution - return new DefaultDistributionDescriptor(version, true, distributionDir, DistributionType.DEFAULT); + Version realVersion = Version.fromString(System.getProperty("tests.bwc.main.version", version.toString())); + return new DefaultDistributionDescriptor(realVersion, true, distributionDir, DistributionType.DEFAULT); } return delegate.resolve(version, type); diff --git a/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java b/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java index a00dc28bd5fb6..6d2d3c33d3e94 100644 --- 
a/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java +++ b/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java @@ -43,7 +43,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas @ClassRule public static final ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(org.elasticsearch.test.cluster.util.Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index fa6a908891400..5db5abd3d60f8 100644 --- a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -17,6 +17,7 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; @@ -45,7 +46,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) .nodes(2) // some tests rely on the translog not being flushed .setting("indices.memory.shard_inactive_time", "60m") diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/AbstractXpackFullClusterRestartTestCase.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/AbstractXpackFullClusterRestartTestCase.java index 96acaaa5b41b4..87c7dedf0a409 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/AbstractXpackFullClusterRestartTestCase.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/AbstractXpackFullClusterRestartTestCase.java @@ -10,6 +10,7 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.upgrades.ParameterizedFullClusterRestartTestCase; @@ -20,7 +21,7 @@ public abstract class AbstractXpackFullClusterRestartTestCase extends Parameteri @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) + .version(Version.fromString(OLD_CLUSTER_VERSION)) 
.nodes(2) // some tests rely on the translog not being flushed .setting("indices.memory.shard_inactive_time", "60m") From 57022a1486070d729348929c4f1c6428391cff99 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 08:35:45 +1100 Subject: [PATCH 236/383] Mute org.elasticsearch.index.mapper.KeywordFieldMapperTests testFieldTypeWithSkipDocValues_LogsDbMode #121232 --- muted-tests.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/muted-tests.yml b/muted-tests.yml index 5947cf733326c..27a47b9b13388 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -310,7 +310,7 @@ tests: method: testGetUsersWithProfileUidWhenProfileIndexDoesNotExists issue: https://github.com/elastic/elasticsearch/issues/121179 - class: org.elasticsearch.xpack.ml.integration.PyTorchModelIT - issue: https://github.com/elastic/elasticsearch/issues/121165 + issue: https://github.com/elastic/elasticsearch/issues/121165 - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testSetEnabled issue: https://github.com/elastic/elasticsearch/issues/121183 @@ -319,6 +319,9 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cat.aliases/10_basic/Simple alias} issue: https://github.com/elastic/elasticsearch/issues/121186 +- class: org.elasticsearch.index.mapper.KeywordFieldMapperTests + method: testFieldTypeWithSkipDocValues_LogsDbMode + issue: https://github.com/elastic/elasticsearch/issues/121232 # Examples: # From 6486299371dbe6de1176290c7130d7c2ed44ca16 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 08:35:56 +1100 Subject: [PATCH 237/383] Mute org.elasticsearch.index.mapper.KeywordFieldMapperTests testFieldTypeDefault_ConfiguredDocValues #121233 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 27a47b9b13388..03338493a3d90 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -322,6 +322,9 @@ tests: - class: org.elasticsearch.index.mapper.KeywordFieldMapperTests method: testFieldTypeWithSkipDocValues_LogsDbMode issue: https://github.com/elastic/elasticsearch/issues/121232 +- class: org.elasticsearch.index.mapper.KeywordFieldMapperTests + method: testFieldTypeDefault_ConfiguredDocValues + issue: https://github.com/elastic/elasticsearch/issues/121233 # Examples: # From dbeb55cb3d361907c77bb6d72dbb627b47c94360 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Wed, 29 Jan 2025 21:55:53 +0000 Subject: [PATCH 238/383] Enable Mapped Field Types to Override Default Highlighter (#121176) This commit introduces the `MappedFieldType#getDefaultHighlighter`, allowing a specific highlighter to be enforced for a field. The semantic field mapper utilizes this new functionality to set the `semantic` highlighter as the default. All other fields will continue to use the `unified` highlighter by default. 
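As an illustration of the new hook, a field type opts into a different default by overriding `MappedFieldType#getDefaultHighlighter`; the snippet below mirrors the `SemanticTextFieldMapper` change in this patch, and any other field type would do the same with its own highlighter name:

    // Sketch mirroring the override added in SemanticTextFieldMapper.java by this change;
    // without an override, MappedFieldType returns DefaultHighlighter.NAME ("unified").
    @Override
    public String getDefaultHighlighter() {
        return SemanticTextHighlighter.NAME;
    }

An explicit `type` in the highlight request still takes precedence; the field-type default is only consulted when the request leaves `type` unset.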
--- .../mapping/types/semantic-text.asciidoc | 32 +++++++- .../search-your-data/highlighting.asciidoc | 18 ++++- .../index/mapper/MappedFieldType.java | 8 ++ .../elasticsearch/search/SearchModule.java | 2 +- .../highlight/DefaultHighlighter.java | 2 + .../subphase/highlight/HighlightPhase.java | 9 +-- .../xpack/inference/InferenceFeatures.java | 4 +- .../mapper/SemanticTextFieldMapper.java | 6 ++ .../90_semantic_text_highlighter.yml | 73 +++++++++++++++---- 9 files changed, 128 insertions(+), 26 deletions(-) diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc index 62e5075b9287d..c5cc24f957a44 100644 --- a/docs/reference/mapping/types/semantic-text.asciidoc +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -133,14 +133,13 @@ You can extract the most relevant fragments from a semantic text field by using POST test-index/_search { "query": { - "semantic": { - "field": "my_semantic_field" + "match": { + "my_semantic_field": "Which country is Paris in?" } }, "highlight": { "fields": { "my_semantic_field": { - "type": "semantic", "number_of_fragments": 2, <1> "order": "score" <2> } @@ -152,6 +151,33 @@ POST test-index/_search <1> Specifies the maximum number of fragments to return. <2> Sorts highlighted fragments by score when set to `score`. By default, fragments will be output in the order they appear in the field (order: none). +Highlighting is supported on fields other than semantic_text. +However, if you want to restrict highlighting to the semantic highlighter and return no fragments when the field is not of type semantic_text, +you can explicitly enforce the `semantic` highlighter in the query: + +[source,console] +------------------------------------------------------------ +PUT test-index +{ + "query": { + "match": { + "my_field": "Which country is Paris in?" + } + }, + "highlight": { + "fields": { + "my_field": { + "type": "semantic", <1> + "number_of_fragments": 2, + "order": "score" + } + } + } +} +------------------------------------------------------------ +// TEST[skip:Requires inference endpoint] +<1> Ensures that highlighting is applied exclusively to semantic_text fields. + [discrete] [[custom-indexing]] ==== Customizing `semantic_text` indexing diff --git a/docs/reference/search/search-your-data/highlighting.asciidoc b/docs/reference/search/search-your-data/highlighting.asciidoc index 63d9c632bffcf..bc81be389cf9c 100644 --- a/docs/reference/search/search-your-data/highlighting.asciidoc +++ b/docs/reference/search/search-your-data/highlighting.asciidoc @@ -37,8 +37,8 @@ GET /_search // TEST[setup:my_index] {es} supports three highlighters: `unified`, `plain`, and `fvh` (fast vector -highlighter). You can specify the highlighter `type` you want to use -for each field. +highlighter) for `text` and `keyword` fields and the `semantic` highlighter for `semantic_text` fields. +You can specify the highlighter `type` you want to use for each field or rely on the field type's default highlighter. [discrete] [[unified-highlighter]] @@ -48,7 +48,19 @@ highlighter breaks the text into sentences and uses the BM25 algorithm to score individual sentences as if they were documents in the corpus. It also supports accurate phrase and multi-term (fuzzy, prefix, regex) highlighting. The `unified` highlighter can combine matches from multiple fields into one result (see -`matched_fields`). This is the default highlighter. +`matched_fields`). + +This is the default highlighter for all `text` and `keyword` fields. 
+ +[discrete] +[[semantic-highlighter]] +==== Semantic Highlighter + +The `semantic` highlighter is specifically designed for use with the <> field. +It identifies and extracts the most relevant fragments from the field based on semantic +similarity between the query and each fragment. + +By default, <> fields use the semantic highlighter. [discrete] [[plain-highlighter]] diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 4b68e20673572..d65a8cbd8411b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -41,6 +41,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.fetch.subphase.FetchFieldsPhase; +import org.elasticsearch.search.fetch.subphase.highlight.DefaultHighlighter; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; @@ -217,6 +218,13 @@ public TimeSeriesParams.MetricType getMetricType() { return null; } + /** + * Returns the default highlighter type to use when highlighting the field. + */ + public String getDefaultHighlighter() { + return DefaultHighlighter.NAME; + } + /** Generates a query that will only match documents that contain the given value. * The default implementation returns a {@link TermQuery} over the value bytes * @throws IllegalArgumentException if {@code value} cannot be converted to the expected data type or if the field is not searchable diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 6716c03a3a935..2183ce5646293 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -913,7 +913,7 @@ private static Map setupHighlighters(Settings settings, Lis NamedRegistry highlighters = new NamedRegistry<>("highlighter"); highlighters.register("fvh", new FastVectorHighlighter(settings)); highlighters.register("plain", new PlainHighlighter()); - highlighters.register("unified", new DefaultHighlighter()); + highlighters.register(DefaultHighlighter.NAME, new DefaultHighlighter()); highlighters.extractAndRegister(plugins, SearchPlugin::getHighlighters); return unmodifiableMap(highlighters.getRegistry()); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java index c47f815c18639..9f888c1f08baa 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java @@ -50,6 +50,8 @@ public class DefaultHighlighter implements Highlighter { + public static final String NAME = "unified"; + @Override public boolean canHighlight(MappedFieldType fieldType) { return true; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index c356c383d103a..54c265deb948d 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ 
b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -66,7 +66,7 @@ public void process(HitContext hitContext) throws IOException { Map> contextBuilders = fieldContext.builders; for (String field : contextBuilders.keySet()) { FieldHighlightContext fieldContext = contextBuilders.get(field).apply(hitContext); - Highlighter highlighter = getHighlighter(fieldContext.field); + Highlighter highlighter = getHighlighter(fieldContext.field, fieldContext.fieldType); HighlightField highlightField = highlighter.highlight(fieldContext); if (highlightField != null) { // Note that we make sure to use the original field name in the response. This is because the @@ -80,10 +80,10 @@ public void process(HitContext hitContext) throws IOException { }; } - private Highlighter getHighlighter(SearchHighlightContext.Field field) { + private Highlighter getHighlighter(SearchHighlightContext.Field field, MappedFieldType fieldType) { String highlighterType = field.fieldOptions().highlighterType(); if (highlighterType == null) { - highlighterType = "unified"; + highlighterType = fieldType.getDefaultHighlighter(); } Highlighter highlighter = highlighters.get(highlighterType); if (highlighter == null) { @@ -103,8 +103,6 @@ private FieldContext contextBuilders( Map> builders = new LinkedHashMap<>(); StoredFieldsSpec storedFieldsSpec = StoredFieldsSpec.NO_REQUIREMENTS; for (SearchHighlightContext.Field field : highlightContext.fields()) { - Highlighter highlighter = getHighlighter(field); - Collection fieldNamesToHighlight = context.getSearchExecutionContext().getMatchingFieldNames(field.field()); boolean fieldNameContainsWildcards = field.field().contains("*"); @@ -112,6 +110,7 @@ private FieldContext contextBuilders( boolean sourceRequired = false; for (String fieldName : fieldNamesToHighlight) { MappedFieldType fieldType = context.getSearchExecutionContext().getFieldType(fieldName); + Highlighter highlighter = getHighlighter(field, fieldType); // We should prevent highlighting if a field is anything but a text, match_only_text, // or keyword field. 
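With the per-field default in place, a plain search over a `semantic_text` field highlights without naming a highlighter at all. A request along these lines (index and field names borrowed from the semantic-text docs example earlier in this patch; the empty per-field options object is standard highlight syntax) is enough, since the field type supplies `semantic` when the request omits `type`:

    POST test-index/_search
    {
      "query":     { "match": { "my_semantic_field": "Which country is Paris in?" } },
      "highlight": { "fields": { "my_semantic_field": {} } }
    }

Setting `"type": "semantic"` explicitly remains useful when the same highlight block also covers non-semantic fields that should be skipped rather than fall back to `unified`.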
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 4707a7824fcd1..8c2be17777cca 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -25,6 +25,7 @@ public class InferenceFeatures implements FeatureSpecification { private static final NodeFeature SEMANTIC_TEXT_HIGHLIGHTER = new NodeFeature("semantic_text.highlighter"); + private static final NodeFeature SEMANTIC_TEXT_HIGHLIGHTER_DEFAULT = new NodeFeature("semantic_text.highlighter.default"); @Override public Set getTestFeatures() { @@ -40,7 +41,8 @@ public Set getTestFeatures() { SemanticInferenceMetadataFieldsMapper.EXPLICIT_NULL_FIXES, SEMANTIC_KNN_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED, TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_ALIAS_HANDLING_FIX, - SemanticInferenceMetadataFieldsMapper.INFERENCE_METADATA_FIELDS_ENABLED_BY_DEFAULT + SemanticInferenceMetadataFieldsMapper.INFERENCE_METADATA_FIELDS_ENABLED_BY_DEFAULT, + SEMANTIC_TEXT_HIGHLIGHTER_DEFAULT ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 1acdff7a751ae..f24f407fd051d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -73,6 +73,7 @@ import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.search.SparseVectorQueryBuilder; +import org.elasticsearch.xpack.inference.highlight.SemanticTextHighlighter; import java.io.IOException; import java.io.UncheckedIOException; @@ -580,6 +581,11 @@ public String familyTypeName() { return TextFieldMapper.CONTENT_TYPE; } + @Override + public String getDefaultHighlighter() { + return SemanticTextHighlighter.NAME; + } + public String getInferenceId() { return inferenceId; } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml index ca87c97fc3acd..7765795ebfbdc 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/90_semantic_text_highlighter.yml @@ -55,22 +55,32 @@ setup: index.mapping.semantic_text.use_legacy_format: false mappings: properties: + title: + type: text body: type: semantic_text inference_id: dense-inference-id ---- -"Highlighting using a sparse embedding model": - do: index: index: test-sparse-index id: doc_1 body: + title: "Elasticsearch" body: ["ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!"] refresh: true - - match: { result: created } + - do: + index: + index: test-dense-index + id: doc_1 + body: + title: 
"Elasticsearch" + body: [ "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!" ] + refresh: true +--- +"Highlighting using a sparse embedding model": - do: search: index: test-sparse-index @@ -153,16 +163,6 @@ setup: --- "Highlighting using a dense embedding model": - - do: - index: - index: test-dense-index - id: doc_1 - body: - body: ["ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides.", "You Know, for Search!"] - refresh: true - - - match: { result: created } - - do: search: index: test-dense-index @@ -243,4 +243,51 @@ setup: - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } - match: { hits.hits.0.highlight.body.1: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } +--- +"Default highlighter for fields": + - requires: + cluster_features: "semantic_text.highlighter.default" + reason: semantic text field defaults to the semantic highlighter + + - do: + search: + index: test-dense-index + body: + query: + match: + body: "What is Elasticsearch?" + highlight: + fields: + body: + order: "score" + number_of_fragments: 2 + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - length: { hits.hits.0.highlight.body: 2 } + - match: { hits.hits.0.highlight.body.0: "You Know, for Search!" } + - match: { hits.hits.0.highlight.body.1: "ElasticSearch is an open source, distributed, RESTful, search engine which is built on top of Lucene internally and enjoys all the features it provides." } + +--- +"semantic highlighter ignores non-inference fields": + - requires: + cluster_features: "semantic_text.highlighter.default" + reason: semantic text field defaults to the semantic highlighter + + - do: + search: + index: test-dense-index + body: + query: + match: + title: "Elasticsearch" + highlight: + fields: + title: + type: semantic + number_of_fragments: 2 + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - not_exists: hits.hits.0.highlight.title From ec7c50ffc91d3dc68bff57b18cda890600749295 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 08:55:58 +1100 Subject: [PATCH 239/383] Mute org.elasticsearch.xpack.ml.integration.ClassificationIT testWithDatastreams #121236 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 03338493a3d90..91fd75be6802e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -325,6 +325,9 @@ tests: - class: org.elasticsearch.index.mapper.KeywordFieldMapperTests method: testFieldTypeDefault_ConfiguredDocValues issue: https://github.com/elastic/elasticsearch/issues/121233 +- class: org.elasticsearch.xpack.ml.integration.ClassificationIT + method: testWithDatastreams + issue: https://github.com/elastic/elasticsearch/issues/121236 # Examples: # From 50376efd42d081d6d91e56c9fc7f5bf0c22137cc Mon Sep 17 00:00:00 2001 From: Adam Demjen Date: Wed, 29 Jan 2025 17:01:21 -0500 Subject: [PATCH 240/383] [Inference API] Put back legacy EIS URL setting (#121207) * Put back legacy EIS URL setting * Update docs/changelog/121207.yaml * Fallback logic to legacy URL * Add unit tests --- docs/changelog/121207.yaml | 5 ++ .../ElasticInferenceServiceSettings.java | 11 
+++- .../ElasticInferenceServiceSettingsTests.java | 57 +++++++++++++++++++ 3 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/121207.yaml create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java diff --git a/docs/changelog/121207.yaml b/docs/changelog/121207.yaml new file mode 100644 index 0000000000000..ebb9d44d89366 --- /dev/null +++ b/docs/changelog/121207.yaml @@ -0,0 +1,5 @@ +pr: 121207 +summary: "[Inference API] Put back legacy EIS URL setting" +area: Inference +type: bug +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java index fd4a70da01fda..98d55fd799598 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.services.elastic; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings; @@ -22,14 +23,21 @@ public class ElasticInferenceServiceSettings { public static final String ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX = "xpack.inference.elastic.http.ssl."; + @Deprecated + static final Setting EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); + static final Setting ELASTIC_INFERENCE_SERVICE_URL = Setting.simpleString( "xpack.inference.elastic.url", Setting.Property.NodeScope ); + @Deprecated + private final String eisGatewayUrl; + private final String elasticInferenceServiceUrl; public ElasticInferenceServiceSettings(Settings settings) { + eisGatewayUrl = EIS_GATEWAY_URL.get(settings); elasticInferenceServiceUrl = ELASTIC_INFERENCE_SERVICE_URL.get(settings); } @@ -46,6 +54,7 @@ public ElasticInferenceServiceSettings(Settings settings) { public static List> getSettingsDefinitions() { ArrayList> settings = new ArrayList<>(); + settings.add(EIS_GATEWAY_URL); settings.add(ELASTIC_INFERENCE_SERVICE_URL); settings.add(ELASTIC_INFERENCE_SERVICE_SSL_ENABLED); settings.addAll(ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS.getEnabledSettings()); @@ -54,7 +63,7 @@ public static List> getSettingsDefinitions() { } public String getElasticInferenceServiceUrl() { - return elasticInferenceServiceUrl; + return Strings.isEmpty(elasticInferenceServiceUrl) ? eisGatewayUrl : elasticInferenceServiceUrl; } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java new file mode 100644 index 0000000000000..e477ffb10def0 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettingsTests.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elastic; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class ElasticInferenceServiceSettingsTests extends ESTestCase { + + private static final String ELASTIC_INFERENCE_SERVICE_URL = "http://elastic-inference-service"; + private static final String ELASTIC_INFERENCE_SERVICE_LEGACY_URL = "http://elastic-inference-service-legacy"; + + public void testGetElasticInferenceServiceUrl_WithUrlSetting() { + var settings = Settings.builder() + .put(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_URL.getKey(), ELASTIC_INFERENCE_SERVICE_URL) + .build(); + + var eisSettings = new ElasticInferenceServiceSettings(settings); + + assertThat(eisSettings.getElasticInferenceServiceUrl(), equalTo(ELASTIC_INFERENCE_SERVICE_URL)); + } + + public void testGetElasticInferenceServiceUrl_WithLegacyUrlSetting() { + var settings = Settings.builder() + .put(ElasticInferenceServiceSettings.EIS_GATEWAY_URL.getKey(), ELASTIC_INFERENCE_SERVICE_LEGACY_URL) + .build(); + + var eisSettings = new ElasticInferenceServiceSettings(settings); + + assertThat(eisSettings.getElasticInferenceServiceUrl(), equalTo(ELASTIC_INFERENCE_SERVICE_LEGACY_URL)); + } + + public void testGetElasticInferenceServiceUrl_WithUrlSetting_TakesPrecedenceOverLegacyUrlSetting() { + var settings = Settings.builder() + .put(ElasticInferenceServiceSettings.EIS_GATEWAY_URL.getKey(), ELASTIC_INFERENCE_SERVICE_LEGACY_URL) + .put(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_URL.getKey(), ELASTIC_INFERENCE_SERVICE_URL) + .build(); + + var eisSettings = new ElasticInferenceServiceSettings(settings); + + assertThat(eisSettings.getElasticInferenceServiceUrl(), equalTo(ELASTIC_INFERENCE_SERVICE_URL)); + } + + public void testGetElasticInferenceServiceUrl_WithoutUrlSetting() { + var eisSettings = new ElasticInferenceServiceSettings(Settings.EMPTY); + + assertThat(eisSettings.getElasticInferenceServiceUrl(), equalTo("")); + } + +} From c6e722e8d23b7a0a90bbdf342cd86f57cebb684c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 09:02:32 +1100 Subject: [PATCH 241/383] Mute org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT test {yaml=nodes.stats/11_indices_metrics/Metric - blank for indices mappings} #121238 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 91fd75be6802e..8d9bdff3d2572 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -328,6 +328,9 @@ tests: - class: org.elasticsearch.xpack.ml.integration.ClassificationIT method: testWithDatastreams issue: https://github.com/elastic/elasticsearch/issues/121236 +- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT + method: test {yaml=nodes.stats/11_indices_metrics/Metric - blank for indices mappings} + issue: https://github.com/elastic/elasticsearch/issues/121238 # Examples: # From bcd8d159d68fa13baf7c394cdeac0db7543d1fa4 Mon Sep 17 00:00:00 2001 From: Larisa Motova Date: Wed, 29 Jan 2025 12:08:46 -1000 Subject: [PATCH 242/383] [ES|QL] Support some stats on aggregate_metric_double (#120343) Adds non-grouping support for min, max, sum, and count, using CompositeBlock as the underlying block type and an internal 
FromAggregateMetricDouble function to handle converting from CompositeBlock to the correct metric subfields. Closes #110649 --- docs/changelog/120343.yaml | 6 + .../index/mapper/BlockLoader.java | 14 ++ .../elasticsearch/index/mapper/TestBlock.java | 5 + x-pack/plugin/build.gradle | 5 + .../esql/core/plugin/EsqlCorePlugin.java | 1 + .../xpack/esql/core/type/DataType.java | 8 +- .../AggregateMetricDoubleBlockBuilder.java | 165 +++++++++++++++++ .../compute/data/BlockFactory.java | 33 ++++ .../compute/data/BlockUtils.java | 9 + .../compute/data/CompositeBlock.java | 11 +- .../compute/data/ElementType.java | 4 +- .../lucene/ValuesSourceReaderOperator.java | 5 + .../xpack/esql/EsqlTestUtils.java | 5 +- .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../xpack/esql/action/PositionToXContent.java | 3 +- .../xpack/esql/action/ResponseValueUtils.java | 2 +- .../expression/function/aggregate/Count.java | 6 + .../expression/function/aggregate/Max.java | 24 ++- .../expression/function/aggregate/Min.java | 24 ++- .../expression/function/aggregate/Sum.java | 32 +++- .../scalar/ScalarFunctionWritables.java | 2 + .../convert/FromAggregateMetricDouble.java | 171 ++++++++++++++++++ .../function/scalar/nulls/Coalesce.java | 4 +- .../xpack/esql/planner/AggregateMapper.java | 4 + .../esql/planner/LocalExecutionPlanner.java | 2 +- .../xpack/esql/planner/PlannerUtils.java | 2 +- .../esql/action/EsqlQueryResponseTests.java | 18 +- .../xpack/esql/analysis/AnalyzerTests.java | 5 +- .../xpack/esql/analysis/VerifierTests.java | 2 +- .../scalar/conditional/CaseTests.java | 2 +- .../FromAggregateMetricDoubleTests.java | 78 ++++++++ .../AggregateDoubleMetricFieldMapper.java | 143 ++++++++++++++- ...AggregateDoubleMetricFieldMapperTests.java | 12 ++ .../rest-api-spec/test/esql/40_tsdb.yml | 55 +++++- .../test/esql/40_unsupported_types.yml | 133 +++++++------- .../rest-api-spec/test/esql/46_downsample.yml | 119 ++++++++++++ 36 files changed, 1019 insertions(+), 102 deletions(-) create mode 100644 docs/changelog/120343.yaml create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockBuilder.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDouble.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDoubleTests.java create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/46_downsample.yml diff --git a/docs/changelog/120343.yaml b/docs/changelog/120343.yaml new file mode 100644 index 0000000000000..f33bd215877c7 --- /dev/null +++ b/docs/changelog/120343.yaml @@ -0,0 +1,6 @@ +pr: 120343 +summary: Support some stats on aggregate_metric_double +area: "ES|QL" +type: enhancement +issues: + - 110649 diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java index 7b4ceb67f04d7..451da5bfdbaf0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java @@ -417,6 +417,8 @@ interface BlockFactory { SingletonOrdinalsBuilder singletonOrdinalsBuilder(SortedDocValues ordinals, int count); // TODO support non-singleton ords + + AggregateMetricDoubleBuilder aggregateMetricDoubleBuilder(int count); } /** @@ -501,4 +503,16 @@ interface SingletonOrdinalsBuilder extends Builder { */ 
SingletonOrdinalsBuilder appendOrd(int value); } + + interface AggregateMetricDoubleBuilder extends Builder { + + DoubleBuilder min(); + + DoubleBuilder max(); + + DoubleBuilder sum(); + + IntBuilder count(); + + } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java index 2c53fa782db85..14beb979b96cf 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java @@ -147,6 +147,11 @@ public SingletonOrdsBuilder appendOrd(int value) { } return new SingletonOrdsBuilder(); } + + @Override + public BlockLoader.AggregateMetricDoubleBuilder aggregateMetricDoubleBuilder(int count) { + return null; + } }; } diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 07e4ee9294489..d3052cb191a06 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -104,6 +104,11 @@ tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("esql/180_match_operator/match with disjunctions", "Disjunctions in full text functions work now") // Expected deprecation warning to compat yaml tests: task.addAllowedWarningRegex(".*rollup functionality will be removed in Elasticsearch.*") + task.skipTest("esql/40_tsdb/from doc with aggregate_metric_double", "TODO: support for subset of metric fields") + task.skipTest("esql/40_tsdb/stats on aggregate_metric_double", "TODO: support for subset of metric fields") + task.skipTest("esql/40_tsdb/from index pattern unsupported counter", "TODO: support for subset of metric fields") + task.skipTest("esql/40_unsupported_types/unsupported", "TODO: support for subset of metric fields") + task.skipTest("esql/40_unsupported_types/unsupported with sort", "TODO: support for subset of metric fields") }) tasks.named('yamlRestCompatTest').configure { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java index 61b480968e974..729188e2981d9 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java @@ -14,4 +14,5 @@ public class EsqlCorePlugin extends Plugin implements ExtensiblePlugin { public static final FeatureFlag SEMANTIC_TEXT_FEATURE_FLAG = new FeatureFlag("esql_semantic_text"); + public static final FeatureFlag AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG = new FeatureFlag("esql_aggregate_metric_double"); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index d86cdb0de038c..671e2df3650dd 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -307,7 +307,9 @@ public enum DataType { * loaded from the index and ESQL will load these fields as strings without their attached * chunks or embeddings. 
*/ - SEMANTIC_TEXT(builder().esType("semantic_text").unknownSize()); + SEMANTIC_TEXT(builder().esType("semantic_text").unknownSize()), + + AGGREGATE_METRIC_DOUBLE(builder().esType("aggregate_metric_double").estimatedSize(Double.BYTES * 3 + Integer.BYTES)); /** * Types that are actively being built. These types are not returned @@ -316,7 +318,8 @@ public enum DataType { * check that sending them to a function produces a sane error message. */ public static final Map UNDER_CONSTRUCTION = Map.ofEntries( - Map.entry(SEMANTIC_TEXT, EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG) + Map.entry(SEMANTIC_TEXT, EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG), + Map.entry(AGGREGATE_METRIC_DOUBLE, EsqlCorePlugin.AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG) ); private final String typeName; @@ -553,6 +556,7 @@ public static boolean isRepresentable(DataType t) { && t != SOURCE && t != HALF_FLOAT && t != PARTIAL_AGG + && t != AGGREGATE_METRIC_DOUBLE && t.isCounter() == false; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockBuilder.java new file mode 100644 index 0000000000000..d5eecc3e6ed70 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AggregateMetricDoubleBlockBuilder.java @@ -0,0 +1,165 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.core.Releasables; +import org.elasticsearch.index.mapper.BlockLoader; + +public class AggregateMetricDoubleBlockBuilder extends AbstractBlockBuilder implements BlockLoader.AggregateMetricDoubleBuilder { + + private DoubleBlockBuilder minBuilder; + private DoubleBlockBuilder maxBuilder; + private DoubleBlockBuilder sumBuilder; + private IntBlockBuilder countBuilder; + + public AggregateMetricDoubleBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + super(blockFactory); + minBuilder = null; + maxBuilder = null; + sumBuilder = null; + countBuilder = null; + try { + minBuilder = new DoubleBlockBuilder(estimatedSize, blockFactory); + maxBuilder = new DoubleBlockBuilder(estimatedSize, blockFactory); + sumBuilder = new DoubleBlockBuilder(estimatedSize, blockFactory); + countBuilder = new IntBlockBuilder(estimatedSize, blockFactory); + } finally { + if (countBuilder == null) { + Releasables.closeWhileHandlingException(minBuilder, maxBuilder, sumBuilder, countBuilder); + } + } + } + + @Override + protected int valuesLength() { + throw new UnsupportedOperationException("Not available on aggregate_metric_double"); + } + + @Override + protected void growValuesArray(int newSize) { + throw new UnsupportedOperationException("Not available on aggregate_metric_double"); + } + + @Override + protected int elementSize() { + throw new UnsupportedOperationException("Not available on aggregate_metric_double"); + } + + @Override + public Block.Builder copyFrom(Block block, int beginInclusive, int endExclusive) { + Block minBlock; + Block maxBlock; + Block sumBlock; + Block countBlock; + if (block.areAllValuesNull()) { + minBlock = block; + maxBlock = block; + sumBlock = block; + countBlock = block; + } else { + CompositeBlock composite = (CompositeBlock) block; + minBlock = 
composite.getBlock(Metric.MIN.getIndex()); + maxBlock = composite.getBlock(Metric.MAX.getIndex()); + sumBlock = composite.getBlock(Metric.SUM.getIndex()); + countBlock = composite.getBlock(Metric.COUNT.getIndex()); + } + minBuilder.copyFrom(minBlock, beginInclusive, endExclusive); + maxBuilder.copyFrom(maxBlock, beginInclusive, endExclusive); + sumBuilder.copyFrom(sumBlock, beginInclusive, endExclusive); + countBuilder.copyFrom(countBlock, beginInclusive, endExclusive); + return this; + } + + @Override + public AbstractBlockBuilder appendNull() { + minBuilder.appendNull(); + maxBuilder.appendNull(); + sumBuilder.appendNull(); + countBuilder.appendNull(); + return this; + } + + @Override + public Block.Builder mvOrdering(Block.MvOrdering mvOrdering) { + minBuilder.mvOrdering(mvOrdering); + maxBuilder.mvOrdering(mvOrdering); + sumBuilder.mvOrdering(mvOrdering); + countBuilder.mvOrdering(mvOrdering); + return this; + } + + @Override + public Block build() { + Block[] blocks = new Block[4]; + boolean success = false; + try { + finish(); + blocks[Metric.MIN.getIndex()] = minBuilder.build(); + blocks[Metric.MAX.getIndex()] = maxBuilder.build(); + blocks[Metric.SUM.getIndex()] = sumBuilder.build(); + blocks[Metric.COUNT.getIndex()] = countBuilder.build(); + CompositeBlock block = new CompositeBlock(blocks); + success = true; + return block; + } finally { + if (success == false) { + Releasables.closeExpectNoException(blocks); + } + } + } + + @Override + protected void extraClose() { + Releasables.closeExpectNoException(minBuilder, maxBuilder, sumBuilder, countBuilder); + } + + @Override + public BlockLoader.DoubleBuilder min() { + return minBuilder; + } + + @Override + public BlockLoader.DoubleBuilder max() { + return maxBuilder; + } + + @Override + public BlockLoader.DoubleBuilder sum() { + return sumBuilder; + } + + @Override + public BlockLoader.IntBuilder count() { + return countBuilder; + } + + public enum Metric { + MIN(0), + MAX(1), + SUM(2), + COUNT(3); + + private final int index; + + Metric(int index) { + this.index = index; + } + + public int getIndex() { + return index; + } + } + + public record AggregateMetricDoubleLiteral(Double min, Double max, Double sum, Integer count) { + public AggregateMetricDoubleLiteral { + min = min.isNaN() ? null : min; + max = max.isNaN() ? null : max; + sum = sum.isNaN() ? 
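            // A minimal usage sketch for the builder above (not part of this change; "blockFactory"
            // stands for any BlockFactory instance). The four sub-builders are appended in lock step
            // and build() wraps them in a CompositeBlock ordered [MIN, MAX, SUM, COUNT]:
            //
            //   try (AggregateMetricDoubleBlockBuilder b = blockFactory.newAggregateMetricDoubleBlockBuilder(1)) {
            //       b.min().appendDouble(-1.0);
            //       b.max().appendDouble(10.0);
            //       b.sum().appendDouble(20.0);
            //       b.count().appendInt(5);
            //       Block block = b.build(); // a CompositeBlock with four sub-blocks
            //   }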
null : sum; + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java index f66ae42106ca2..55053f509591d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -432,6 +432,39 @@ public Block newConstantNullBlock(int positions) { return b; } + public AggregateMetricDoubleBlockBuilder newAggregateMetricDoubleBlockBuilder(int estimatedSize) { + return new AggregateMetricDoubleBlockBuilder(estimatedSize, this); + } + + public final Block newConstantAggregateMetricDoubleBlock( + AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral value, + int positions + ) { + try (AggregateMetricDoubleBlockBuilder builder = newAggregateMetricDoubleBlockBuilder(positions)) { + if (value.min() != null) { + builder.min().appendDouble(value.min()); + } else { + builder.min().appendNull(); + } + if (value.max() != null) { + builder.max().appendDouble(value.max()); + } else { + builder.max().appendNull(); + } + if (value.sum() != null) { + builder.sum().appendDouble(value.sum()); + } else { + builder.sum().appendNull(); + } + if (value.count() != null) { + builder.count().appendInt(value.count()); + } else { + builder.count().appendNull(); + } + return builder.build(); + } + } + /** * Returns the maximum number of bytes that a Block should be backed by a primitive array before switching to using BigArrays. */ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java index 3df389135e9d3..8773a3b9785e0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Randomness; +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -233,6 +234,14 @@ private static Block constantBlock(BlockFactory blockFactory, ElementType type, case BYTES_REF -> blockFactory.newConstantBytesRefBlockWith(toBytesRef(val), size); case DOUBLE -> blockFactory.newConstantDoubleBlockWith((double) val, size); case BOOLEAN -> blockFactory.newConstantBooleanBlockWith((boolean) val, size); + case COMPOSITE -> { + if (val instanceof AggregateMetricDoubleLiteral aggregateMetricDoubleLiteral) { + yield blockFactory.newConstantAggregateMetricDoubleBlock(aggregateMetricDoubleLiteral, size); + } + throw new UnsupportedOperationException( + "Composite block but received value that wasn't AggregateMetricDoubleLiteral [" + val + "]" + ); + } default -> throw new UnsupportedOperationException("unsupported element type [" + type + "]"); }; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java index b83e2d1efc259..6dfe4c9229e76 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java @@ 
-91,17 +91,22 @@ public int getTotalValueCount() { @Override public int getFirstValueIndex(int position) { - throw new UnsupportedOperationException("Composite block"); + return blocks[0].getFirstValueIndex(position); } @Override public int getValueCount(int position) { - throw new UnsupportedOperationException("Composite block"); + return blocks[0].getValueCount(position); } @Override public boolean isNull(int position) { - throw new UnsupportedOperationException("Composite block"); + for (Block block : blocks) { + if (block.isNull(position) == false) { + return false; + } + } + return true; } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java index f38c6d70991f9..cdf6711e14058 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java @@ -33,7 +33,7 @@ public enum ElementType { /** * Composite blocks which contain array of sub-blocks. */ - COMPOSITE("Composite", (blockFactory, estimatedSize) -> { throw new UnsupportedOperationException("can't build composite blocks"); }), + COMPOSITE("Composite", BlockFactory::newAggregateMetricDoubleBlockBuilder), /** * Intermediate blocks which don't support retrieving elements. @@ -73,6 +73,8 @@ public static ElementType fromJava(Class type) { elementType = BYTES_REF; } else if (type == Boolean.class) { elementType = BOOLEAN; + } else if (type == AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral.class) { + elementType = COMPOSITE; } else if (type == null || type == Void.class) { elementType = NULL; } else { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java index 8fbb946587470..841789e8ada3c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java @@ -698,6 +698,11 @@ public BytesRefBlock constantBytes(BytesRef value) { public BlockLoader.SingletonOrdinalsBuilder singletonOrdinalsBuilder(SortedDocValues ordinals, int count) { return new SingletonOrdinalsBuilder(factory, ordinals, count); } + + @Override + public BlockLoader.AggregateMetricDoubleBuilder aggregateMetricDoubleBuilder(int count) { + return factory.newAggregateMetricDoubleBlockBuilder(count); + } } // TODO tests that mix source loaded fields and doc values in the same block diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 919a963f7fc98..3e072e9a05c20 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -782,9 +782,8 @@ public static Literal randomLiteral(DataType type) { throw new UncheckedIOException(e); } } - case UNSUPPORTED, OBJECT, DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG -> throw new IllegalArgumentException( - "can't make random values for [" + type.typeName() + "]" - ); + case UNSUPPORTED, OBJECT, DOC_DATA_TYPE, 
TSID_DATA_TYPE, PARTIAL_AGG, AGGREGATE_METRIC_DOUBLE -> + throw new IllegalArgumentException("can't make random values for [" + type.typeName() + "]"); }, type); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index b8b911afe7fd4..47a8a586bf1df 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -779,7 +779,12 @@ public enum Cap { /** * Support match options in match function */ - MATCH_FUNCTION_OPTIONS; + MATCH_FUNCTION_OPTIONS, + + /** + * Support for aggregate_metric_double type + */ + AGGREGATE_METRIC_DOUBLE; private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java index 0def56c70dc35..a065d0bd5e3a7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java @@ -148,7 +148,8 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.value(versionToString(val)); } }; - case NULL -> new PositionToXContent(block) { + // TODO: Add implementation for aggregate_metric_double + case NULL, AGGREGATE_METRIC_DOUBLE -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java index 49fcc167dce0f..710a66fb1d9f4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -132,7 +132,7 @@ private static Object valueAt(DataType dataType, Block block, int offset, BytesR case GEO_POINT, GEO_SHAPE, CARTESIAN_POINT, CARTESIAN_SHAPE -> spatialToString( ((BytesRefBlock) block).getBytesRef(offset, scratch) ); - case UNSUPPORTED -> (String) null; + case UNSUPPORTED, AGGREGATE_METRIC_DOUBLE -> (String) null; case SOURCE -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); try { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java index 3a0d616d407a3..5ce43c7b3872d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.CountAggregatorFunction; +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; import 
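// Note on the aggregate_metric_double support added to this class: count(field) over an
// aggregate_metric_double is rewritten (see surrogate() below) into a Sum over the stored
// value_count sub-metric, so it counts the original samples that were pre-aggregated into each
// document rather than the number of documents.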
org.elasticsearch.xpack.esql.core.expression.Nullability; @@ -22,6 +23,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvCount; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; @@ -71,6 +73,7 @@ public Count( optional = true, name = "field", type = { + "aggregate_metric_double", "boolean", "cartesian_point", "date", @@ -141,6 +144,9 @@ protected TypeResolution resolveType() { public Expression surrogate() { var s = source(); var field = field(); + if (field.dataType() == DataType.AGGREGATE_METRIC_DOUBLE) { + return new Sum(s, FromAggregateMetricDouble.withMetric(source(), field, AggregateMetricDoubleBlockBuilder.Metric.COUNT)); + } if (field.foldable()) { if (field instanceof Literal l) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index eb0c8abd1080b..6a8ce792ec8c1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.aggregation.MaxIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MaxIpAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MaxLongAggregatorFunctionSupplier; +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; @@ -27,6 +28,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMax; import org.elasticsearch.xpack.esql.planner.ToAggregator; @@ -73,7 +75,19 @@ public Max( Source source, @Param( name = "field", - type = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "text", "long", "version" } + type = { + "aggregate_metric_double", + "boolean", + "double", + "integer", + "long", + "date", + "date_nanos", + "ip", + "keyword", + "text", + "long", + "version" } ) Expression field ) { this(source, field, Literal.TRUE); @@ -111,7 +125,7 @@ public Max replaceChildren(List newChildren) { protected TypeResolution resolveType() { return TypeResolutions.isType( field(), - SUPPLIERS::containsKey, + dt -> SUPPLIERS.containsKey(dt) || dt == DataType.AGGREGATE_METRIC_DOUBLE, sourceText(), DEFAULT, "representable except unsigned_long and spatial types" @@ -120,6 +134,9 @@ protected TypeResolution resolveType() { @Override public DataType dataType() { + if (field().dataType() == DataType.AGGREGATE_METRIC_DOUBLE) { + return DataType.DOUBLE; + } return 
field().dataType().noText(); } @@ -135,6 +152,9 @@ public final AggregatorFunctionSupplier supplier(List inputChannels) { @Override public Expression surrogate() { + if (field().dataType() == DataType.AGGREGATE_METRIC_DOUBLE) { + return new Max(source(), FromAggregateMetricDouble.withMetric(source(), field(), AggregateMetricDoubleBlockBuilder.Metric.MAX)); + } return field().foldable() ? new MvMax(source(), field()) : null; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index 472f0b1ff5cd1..f2ae1292e47e8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.aggregation.MinIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MinIpAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MinLongAggregatorFunctionSupplier; +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; @@ -27,6 +28,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; import org.elasticsearch.xpack.esql.planner.ToAggregator; @@ -73,7 +75,19 @@ public Min( Source source, @Param( name = "field", - type = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "text", "long", "version" } + type = { + "aggregate_metric_double", + "boolean", + "double", + "integer", + "long", + "date", + "date_nanos", + "ip", + "keyword", + "text", + "long", + "version" } ) Expression field ) { this(source, field, Literal.TRUE); @@ -111,7 +125,7 @@ public Min withFilter(Expression filter) { protected TypeResolution resolveType() { return TypeResolutions.isType( field(), - SUPPLIERS::containsKey, + dt -> SUPPLIERS.containsKey(dt) || dt == DataType.AGGREGATE_METRIC_DOUBLE, sourceText(), DEFAULT, "representable except unsigned_long and spatial types" @@ -120,6 +134,9 @@ protected TypeResolution resolveType() { @Override public DataType dataType() { + if (field().dataType() == DataType.AGGREGATE_METRIC_DOUBLE) { + return DataType.DOUBLE; + } return field().dataType().noText(); } @@ -135,6 +152,9 @@ public final AggregatorFunctionSupplier supplier(List inputChannels) { @Override public Expression surrogate() { + if (field().dataType() == DataType.AGGREGATE_METRIC_DOUBLE) { + return new Min(source(), FromAggregateMetricDouble.withMetric(source(), field(), AggregateMetricDoubleBlockBuilder.Metric.MIN)); + } return field().foldable() ? 
new MvMin(source(), field()) : null; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java index 37c2abaae1e4e..1c69edb9f0da9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java @@ -12,8 +12,10 @@ import org.elasticsearch.compute.aggregation.SumDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -22,6 +24,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; @@ -29,6 +32,9 @@ import java.util.List; import static java.util.Collections.emptyList; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; +import static org.elasticsearch.xpack.esql.core.type.DataType.AGGREGATE_METRIC_DOUBLE; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; @@ -53,7 +59,7 @@ public class Sum extends NumericAggregate implements SurrogateExpression { tag = "docsStatsSumNestedExpression" ) } ) - public Sum(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { + public Sum(Source source, @Param(name = "number", type = { "aggregate_metric_double", "double", "integer", "long" }) Expression field) { this(source, field, Literal.TRUE); } @@ -106,10 +112,34 @@ protected AggregatorFunctionSupplier doubleSupplier(List inputChannels) return new SumDoubleAggregatorFunctionSupplier(inputChannels); } + @Override + protected TypeResolution resolveType() { + if (supportsDates()) { + return TypeResolutions.isType( + this, + e -> e == DataType.DATETIME || e == DataType.AGGREGATE_METRIC_DOUBLE || e.isNumeric() && e != DataType.UNSIGNED_LONG, + sourceText(), + DEFAULT, + "datetime", + "aggregate_metric_double or numeric except unsigned_long or counter types" + ); + } + return isType( + field(), + dt -> dt == DataType.AGGREGATE_METRIC_DOUBLE || dt.isNumeric() && dt != DataType.UNSIGNED_LONG, + sourceText(), + DEFAULT, + "aggregate_metric_double or numeric except unsigned_long or counter types" + ); + } + @Override public Expression surrogate() { var s = source(); var field = field(); + if 
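        // Same pattern as Count, Max and Min above: an aggregation over an aggregate_metric_double
        // field is rewritten into the same aggregation over the matching sub-metric, extracted with
        // FromAggregateMetricDouble.withMetric. Conceptually (a sketch, not literal planner output):
        //
        //   new Sum(src, aggMetricField)
        //     -> new Sum(src, FromAggregateMetricDouble.withMetric(src, aggMetricField, Metric.SUM))
        //
        // so STATS sum(agg_metric) is computed from the per-document sum values stored in the field.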
(field.dataType() == AGGREGATE_METRIC_DOUBLE) { + return new Sum(s, FromAggregateMetricDouble.withMetric(source(), field, AggregateMetricDoubleBlockBuilder.Metric.SUM)); + } // SUM(const) is equivalent to MV_SUM(const)*COUNT(*). return field.foldable() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java index 0d3bacbd47605..90152d546097c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromAggregateMetricDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; @@ -67,6 +68,7 @@ public static List getNamedWriteables() { entries.add(Concat.ENTRY); entries.add(E.ENTRY); entries.add(EndsWith.ENTRY); + entries.add(FromAggregateMetricDouble.ENTRY); entries.add(Greatest.ENTRY); entries.add(Hash.ENTRY); entries.add(Hypot.ENTRY); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDouble.java new file mode 100644 index 0000000000000..f1bde9f57b671 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDouble.java @@ -0,0 +1,171 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
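 *
 * Summary of the class below: given an aggregate_metric_double value (represented at runtime as a
 * CompositeBlock holding min/max/sum/count sub-blocks) and a foldable sub-field index, the
 * evaluator returns just the selected sub-block. The result type is INTEGER for the COUNT
 * sub-field and DOUBLE for MIN, MAX and SUM; withMetric(...) is the convenience used by the
 * aggregation surrogates to pick a metric by its index.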
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.CompositeBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; + +public class FromAggregateMetricDouble extends EsqlScalarFunction { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "FromAggregateMetricDouble", + FromAggregateMetricDouble::new + ); + + private final Expression field; + private final Expression subfieldIndex; + + @FunctionInfo(returnType = { "long", "double" }, description = "Convert aggregate double metric to a block of a single subfield.") + public FromAggregateMetricDouble( + Source source, + @Param( + name = "aggregate_metric_double", + type = { "aggregate_metric_double" }, + description = "Aggregate double metric to convert." 
+ ) Expression field, + @Param(name = "subfieldIndex", type = "int", description = "Index of subfield") Expression subfieldIndex + ) { + super(source, List.of(field, subfieldIndex)); + this.field = field; + this.subfieldIndex = subfieldIndex; + } + + public static FromAggregateMetricDouble withMetric(Source source, Expression field, AggregateMetricDoubleBlockBuilder.Metric metric) { + return new FromAggregateMetricDouble(source, field, new Literal(source, metric.getIndex(), INTEGER)); + } + + private FromAggregateMetricDouble(StreamInput in) throws IOException { + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteable(field); + out.writeNamedWriteable(subfieldIndex); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + public DataType dataType() { + if (subfieldIndex.foldable() == false) { + throw new EsqlIllegalArgumentException("Received a non-foldable value for subfield index"); + } + var folded = subfieldIndex.fold(FoldContext.small()); + if (folded == null) { + return NULL; + } + var subfield = ((Number) folded).intValue(); + if (subfield == AggregateMetricDoubleBlockBuilder.Metric.COUNT.getIndex()) { + return INTEGER; + } + return DOUBLE; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new FromAggregateMetricDouble(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, FromAggregateMetricDouble::new, field, subfieldIndex); + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + return isType(field, dt -> dt == DataType.AGGREGATE_METRIC_DOUBLE, sourceText(), DEFAULT, "aggregate_metric_double only"); + } + + @Override + public boolean foldable() { + return Expressions.foldable(children()); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { + var fieldEvaluator = toEvaluator.apply(field); + return new EvalOperator.ExpressionEvaluator.Factory() { + + @Override + public String toString() { + return "FromAggregateMetricDoubleEvaluator[" + "field=" + fieldEvaluator + ",subfieldIndex=" + subfieldIndex + "]"; + } + + @Override + public EvalOperator.ExpressionEvaluator get(DriverContext context) { + final EvalOperator.ExpressionEvaluator eval = fieldEvaluator.get(context); + + return new EvalOperator.ExpressionEvaluator() { + @Override + public Block eval(Page page) { + Block block = eval.eval(page); + if (block.areAllValuesNull()) { + return block; + } + try { + CompositeBlock compositeBlock = (CompositeBlock) block; + Block resultBlock = compositeBlock.getBlock(((Number) subfieldIndex.fold(FoldContext.small())).intValue()); + resultBlock.incRef(); + return resultBlock; + } finally { + block.close(); + } + } + + @Override + public void close() { + Releasables.closeExpectNoException(eval); + } + + @Override + public String toString() { + return "FromAggregateMetricDoubleEvaluator[field=" + eval + ",subfieldIndex=" + subfieldIndex + "]"; + } + }; + + } + }; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java 
index 611c7a456864a..a426a14b0a319 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -210,7 +210,9 @@ public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { CoalesceBytesRefEvaluator.toEvaluator(toEvaluator, children()); case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; case UNSUPPORTED, SHORT, BYTE, DATE_PERIOD, OBJECT, DOC_DATA_TYPE, SOURCE, TIME_DURATION, FLOAT, HALF_FLOAT, TSID_DATA_TYPE, - SCALED_FLOAT, PARTIAL_AGG -> throw new UnsupportedOperationException(dataType() + " can't be coalesced"); + SCALED_FLOAT, PARTIAL_AGG, AGGREGATE_METRIC_DOUBLE -> throw new UnsupportedOperationException( + dataType() + " can't be coalesced" + ); }; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index e420cd501cccd..a66a302354df2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -212,6 +212,9 @@ private static Stream groupingAndNonGrouping(Tuple, Tuple aggClass) { case CARTESIAN_POINT -> "CartesianPoint"; case GEO_SHAPE -> "GeoShape"; case CARTESIAN_SHAPE -> "CartesianShape"; + case AGGREGATE_METRIC_DOUBLE -> "AggregateMetricDouble"; case UNSUPPORTED, NULL, UNSIGNED_LONG, SHORT, BYTE, FLOAT, HALF_FLOAT, SCALED_FLOAT, OBJECT, SOURCE, DATE_PERIOD, TIME_DURATION, DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG -> throw new EsqlIllegalArgumentException( "illegal agg type: " + type.typeName() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 2e0f97c29ab13..aa24ea113cb48 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -372,7 +372,7 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte case GEO_POINT, CARTESIAN_POINT, GEO_SHAPE, CARTESIAN_SHAPE, COUNTER_LONG, COUNTER_INTEGER, COUNTER_DOUBLE, SOURCE -> TopNEncoder.DEFAULT_UNSORTABLE; // unsupported fields are encoded as BytesRef, we'll use the same encoder; all values should be null at this point - case PARTIAL_AGG, UNSUPPORTED -> TopNEncoder.UNSUPPORTED; + case PARTIAL_AGG, UNSUPPORTED, AGGREGATE_METRIC_DOUBLE -> TopNEncoder.UNSUPPORTED; }; } List orders = topNExec.order().stream().map(order -> { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 15b30f4dd6e30..c5139d45f4b37 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -292,7 +292,7 @@ public static ElementType toElementType(DataType dataType, MappedFieldType.Field case TSID_DATA_TYPE -> ElementType.BYTES_REF; case GEO_POINT, CARTESIAN_POINT -> fieldExtractPreference == DOC_VALUES ? 
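            // Planner notes for aggregate_metric_double, describing the changes in this hunk and the
            // surrounding files: the type maps to ElementType.COMPOSITE below, alongside PARTIAL_AGG,
            // so it flows through the engine as a CompositeBlock; it is excluded from TopN encoding
            // (UNSUPPORTED encoder in LocalExecutionPlanner) and cannot be coalesced.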
ElementType.LONG : ElementType.BYTES_REF; case GEO_SHAPE, CARTESIAN_SHAPE -> fieldExtractPreference == EXTRACT_SPATIAL_BOUNDS ? ElementType.INT : ElementType.BYTES_REF; - case PARTIAL_AGG -> ElementType.COMPOSITE; + case PARTIAL_AGG, AGGREGATE_METRIC_DOUBLE -> ElementType.COMPOSITE; case SHORT, BYTE, DATE_PERIOD, TIME_DURATION, OBJECT, FLOAT, HALF_FLOAT, SCALED_FLOAT -> throw EsqlIllegalArgumentException .illegalDataType(dataType); }; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 69e6d97c6daed..4fdb4a7bf042b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; +import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.transport.RemoteClusterAware; @@ -175,7 +176,8 @@ private ColumnInfoImpl randomColumnInfo() { t -> false == DataType.isPrimitiveAndSupported(t) || t == DataType.DATE_PERIOD || t == DataType.TIME_DURATION - || t == DataType.PARTIAL_AGG, + || t == DataType.PARTIAL_AGG + || t == DataType.AGGREGATE_METRIC_DOUBLE, () -> randomFrom(DataType.types()) ).widenSmallNumeric(); return new ColumnInfoImpl(randomAlphaOfLength(10), type.esType()); @@ -214,6 +216,13 @@ private Page randomPage(List columns) { case CARTESIAN_SHAPE -> ((BytesRefBlock.Builder) builder).appendBytesRef( CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(randomBoolean())) ); + case AGGREGATE_METRIC_DOUBLE -> { + BlockLoader.AggregateMetricDoubleBuilder aggBuilder = (BlockLoader.AggregateMetricDoubleBuilder) builder; + aggBuilder.min().appendDouble(randomDouble()); + aggBuilder.max().appendDouble(randomDouble()); + aggBuilder.sum().appendDouble(randomDouble()); + aggBuilder.count().appendInt(randomInt()); + } case NULL -> builder.appendNull(); case SOURCE -> { try { @@ -939,6 +948,13 @@ static Page valuesToPage(BlockFactory blockFactory, List columns BytesRef wkb = stringToSpatial(value.toString()); ((BytesRefBlock.Builder) builder).appendBytesRef(wkb); } + case AGGREGATE_METRIC_DOUBLE -> { + BlockLoader.AggregateMetricDoubleBuilder aggBuilder = (BlockLoader.AggregateMetricDoubleBuilder) builder; + aggBuilder.min().appendDouble(((Number) value).doubleValue()); + aggBuilder.max().appendDouble(((Number) value).doubleValue()); + aggBuilder.sum().appendDouble(((Number) value).doubleValue()); + aggBuilder.count().appendInt(((Number) value).intValue()); + } } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 151a91b587c1b..1c288a9bc33f9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1961,7 +1961,7 @@ public void testUnsupportedTypesInStats() { found value [x] type [unsigned_long] line 2:96: first argument of [percentile(x, 10)] must be [numeric except unsigned_long],\ found value [x] type [unsigned_long] - line 2:115: 
argument of [sum(x)] must be [numeric except unsigned_long or counter types],\ + line 2:115: argument of [sum(x)] must be [aggregate_metric_double or numeric except unsigned_long or counter types],\ found value [x] type [unsigned_long]"""); verifyUnsupported(""" @@ -1976,7 +1976,8 @@ public void testUnsupportedTypesInStats() { line 2:29: argument of [median_absolute_deviation(x)] must be [numeric except unsigned_long or counter types],\ found value [x] type [version] line 2:59: first argument of [percentile(x, 10)] must be [numeric except unsigned_long], found value [x] type [version] - line 2:78: argument of [sum(x)] must be [numeric except unsigned_long or counter types], found value [x] type [version]"""); + line 2:78: argument of [sum(x)] must be [aggregate_metric_double or numeric except unsigned_long or counter types],\ + found value [x] type [version]"""); } public void testInOnText() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 291a10d570093..4403477e51125 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -759,7 +759,7 @@ public void testUnsignedLongNegation() { public void testSumOnDate() { assertEquals( - "1:19: argument of [sum(hire_date)] must be [numeric except unsigned_long or counter types]," + "1:19: argument of [sum(hire_date)] must be [aggregate_metric_double or numeric except unsigned_long or counter types]," + " found value [hire_date] type [datetime]", error("from test | stats sum(hire_date)") ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index b196bd49f6bb2..2fa82b9f1caa2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -59,7 +59,7 @@ public class CaseTests extends AbstractScalarFunctionTestCase { DataType.NULL ).collect(Collectors.toList()); if (Build.current().isSnapshot()) { - t.addAll(DataType.UNDER_CONSTRUCTION.keySet()); + t.addAll(DataType.UNDER_CONSTRUCTION.keySet().stream().filter(type -> type != DataType.AGGREGATE_METRIC_DOUBLE).toList()); } TYPES = unmodifiableList(t); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDoubleTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDoubleTests.java new file mode 100644 index 0000000000000..94d9bd5f64cbd --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDoubleTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
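 *
 * The parameterized tests below exercise each sub-field index (MIN, MAX, SUM, COUNT) against a
 * random AggregateMetricDoubleLiteral and assert that the evaluator returns the matching value,
 * typed INTEGER for COUNT and DOUBLE otherwise.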
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.compute.data.AggregateMetricDoubleBlockBuilder; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matchers; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +@FunctionName("from_aggregate_metric_double") +public class FromAggregateMetricDoubleTests extends AbstractScalarFunctionTestCase { + public FromAggregateMetricDoubleTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @Override + protected Expression build(Source source, List args) { + assumeTrue("Test sometimes wraps literals as fields", args.get(1).foldable()); + return new FromAggregateMetricDouble(source, args.get(0), args.get(1)); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + DataType dataType = DataType.AGGREGATE_METRIC_DOUBLE; + for (int i = 0; i < 4; i++) { + int index = i; + suppliers.add(new TestCaseSupplier(List.of(dataType, DataType.INTEGER), () -> { + var agg_metric = new AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral( + randomDoubleBetween(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, true), + randomDoubleBetween(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, true), + randomDoubleBetween(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, true), + randomIntBetween(Integer.MIN_VALUE, Integer.MAX_VALUE) + ); + Double expectedValue = index == AggregateMetricDoubleBlockBuilder.Metric.MIN.getIndex() ? agg_metric.min() + : index == AggregateMetricDoubleBlockBuilder.Metric.MAX.getIndex() ? agg_metric.max() + : index == AggregateMetricDoubleBlockBuilder.Metric.SUM.getIndex() ? agg_metric.sum() + : (Double) agg_metric.count().doubleValue(); + + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(agg_metric, dataType, "agg_metric"), + new TestCaseSupplier.TypedData(index, DataType.INTEGER, "subfield_index").forceLiteral() + ), + "FromAggregateMetricDoubleEvaluator[field=Attribute[channel=0],subfieldIndex=" + index + "]", + index == AggregateMetricDoubleBlockBuilder.Metric.COUNT.getIndex() ? DataType.INTEGER : DataType.DOUBLE, + index == AggregateMetricDoubleBlockBuilder.Metric.COUNT.getIndex() ? Matchers.equalTo(agg_metric.count()) + : expectedValue == null ? Matchers.nullValue() + : Matchers.closeTo(expectedValue, Math.abs(expectedValue * 0.00001)) + ); + })); + } + + return parameterSuppliersFromTypedData( + anyNullIsNull( + suppliers, + (nullPosition, nullValueDataType, original) -> nullPosition == 1 ? DataType.NULL : original.expectedType(), + (nullPosition, nullData, original) -> nullData.isForceLiteral() ? 
Matchers.equalTo("LiteralsEvaluator[lit=null]") : original + ) + ); + } +} diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java index df4a0aed01bc2..a58f8dae8cc73 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.Query; import org.apache.lucene.search.SortField; @@ -27,6 +28,8 @@ import org.elasticsearch.index.fielddata.ScriptDocValues.DoublesSupplier; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.index.mapper.BlockDocValuesReader; +import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.CompositeSyntheticFieldLoader; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; @@ -288,7 +291,7 @@ public AggregateDoubleMetricFieldType(String name) { } public AggregateDoubleMetricFieldType(String name, Map meta, MetricType metricType) { - super(name, true, false, false, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); + super(name, true, false, true, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); this.metricType = metricType; } @@ -508,6 +511,144 @@ public ValueFetcher valueFetcher(SearchExecutionContext context, String format) return SourceValueFetcher.identity(name(), context, format); } + public class AggregateMetricDoubleBlockLoader extends BlockDocValuesReader.DocValuesBlockLoader { + NumberFieldMapper.NumberFieldType minFieldType = metricFields.get(Metric.min); + NumberFieldMapper.NumberFieldType maxFieldType = metricFields.get(Metric.max); + NumberFieldMapper.NumberFieldType sumFieldType = metricFields.get(Metric.sum); + NumberFieldMapper.NumberFieldType countFieldType = metricFields.get(Metric.value_count); + + private AggregateMetricDoubleBlockLoader() {} + + static NumericDocValues getNumericDocValues(NumberFieldMapper.NumberFieldType field, LeafReader leafReader) throws IOException { + if (field == null) { + return null; + } + String fieldName = field.name(); + var values = leafReader.getNumericDocValues(fieldName); + if (values != null) { + return values; + } + + var sortedValues = leafReader.getSortedNumericDocValues(fieldName); + return DocValues.unwrapSingleton(sortedValues); + } + + @Override + public AllReader reader(LeafReaderContext context) throws IOException { + NumericDocValues minValues = getNumericDocValues(minFieldType, context.reader()); + NumericDocValues maxValues = getNumericDocValues(maxFieldType, context.reader()); + NumericDocValues sumValues = getNumericDocValues(sumFieldType, context.reader()); + NumericDocValues valueCountValues = getNumericDocValues(countFieldType, context.reader()); + + if (minValues == null || maxValues == null || sumValues == null || valueCountValues == null) { + throw new 
UnsupportedOperationException("Must have all subfields to use aggregate double metric in ESQL"); + } + return new BlockDocValuesReader() { + + private int docID = -1; + + @Override + protected int docId() { + return docID; + } + + @Override + public String toString() { + return "BlockDocValuesReader.AggregateMetricDouble"; + } + + @Override + public Block read(BlockFactory factory, Docs docs) throws IOException { + try (var builder = factory.aggregateMetricDoubleBuilder(docs.count())) { + copyDoubleValuesToBuilder(docs, builder.min(), minValues); + copyDoubleValuesToBuilder(docs, builder.max(), maxValues); + copyDoubleValuesToBuilder(docs, builder.sum(), sumValues); + copyIntValuesToBuilder(docs, builder.count(), valueCountValues); + return builder.build(); + } + } + + private void copyDoubleValuesToBuilder(Docs docs, BlockLoader.DoubleBuilder builder, NumericDocValues values) + throws IOException { + int lastDoc = -1; + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < lastDoc) { + throw new IllegalStateException("docs within same block must be in order"); + } + if (values.advanceExact(doc)) { + double value = NumericUtils.sortableLongToDouble(values.longValue()); + lastDoc = doc; + this.docID = doc; + builder.appendDouble(value); + } else { + builder.appendNull(); + } + } + } + + private void copyIntValuesToBuilder(Docs docs, BlockLoader.IntBuilder builder, NumericDocValues values) + throws IOException { + int lastDoc = -1; + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < lastDoc) { + throw new IllegalStateException("docs within same block must be in order"); + } + if (values.advanceExact(doc)) { + int value = Math.toIntExact(values.longValue()); + lastDoc = doc; + this.docID = doc; + builder.appendInt(value); + } else { + builder.appendNull(); + } + } + } + + @Override + public void read(int docId, StoredFields storedFields, Builder builder) throws IOException { + var blockBuilder = (AggregateMetricDoubleBuilder) builder; + this.docID = docId; + read(docId, blockBuilder); + } + + private void read(int docId, AggregateMetricDoubleBuilder builder) throws IOException { + if (minValues.advanceExact(docId)) { + builder.min().appendDouble(NumericUtils.sortableLongToDouble(minValues.longValue())); + } else { + builder.min().appendNull(); + } + if (maxValues.advanceExact(docId)) { + builder.max().appendDouble(NumericUtils.sortableLongToDouble(maxValues.longValue())); + } else { + builder.max().appendNull(); + } + if (sumValues.advanceExact(docId)) { + builder.sum().appendDouble(NumericUtils.sortableLongToDouble(sumValues.longValue())); + } else { + builder.sum().appendNull(); + } + if (valueCountValues.advanceExact(docId)) { + builder.count().appendInt(Math.toIntExact(valueCountValues.longValue())); + } else { + builder.count().appendNull(); + } + } + }; + } + + @Override + public Builder builder(BlockFactory factory, int expectedCount) { + return factory.aggregateMetricDoubleBuilder(expectedCount); + } + } + + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + return new AggregateMetricDoubleBlockLoader(); + } + /** * If field is a time series metric field, returns its metric type * @return the metric type or null diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java 
index 72c2beeed3ba4..0d62e7a9c1fd2 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java @@ -36,6 +36,7 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Names.IGNORE_MALFORMED; import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Names.METRICS; @@ -618,4 +619,15 @@ public void testSyntheticSourceKeepArrays() { protected boolean supportsCopyTo() { return false; } + + @Override + protected Function loadBlockExpected() { + return n -> ((Number) n); + } + + @Override + protected Function loadBlockExpected(BlockReaderSupport blockReaderSupport, boolean columnReader) { + assumeTrue("Not supporting", false); + return null; + } } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml index b9415bce62ea9..5bdd2baf60506 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml @@ -80,8 +80,8 @@ setup: time_series_dimension: true agg_metric: type: aggregate_metric_double - metrics: - - max + # TODO: tests with a subset of metrics + metrics: [ min, max, sum, value_count ] default_metric: max k8s: properties: @@ -99,9 +99,9 @@ setup: index: test2 body: - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:04.467Z", "dim": "A", "agg_metric": {"max": 10}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "dim": "A", "agg_metric": {"max": 10, "min": -1, "sum": 20, "value_count": 5}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:24.467Z", "dim": "B", "agg_metric": {"max": 20}}' + - '{"@timestamp": "2021-04-28T18:50:24.467Z", "dim": "B", "agg_metric": {"max": 20, "min": 3, "sum": 50, "value_count": 7}}' --- load everything: @@ -201,6 +201,14 @@ cast then sort on counter: --- from doc with aggregate_metric_double: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [aggregate_metric_double] + reason: "Support for aggregate_metric_double" - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" @@ -211,7 +219,7 @@ from doc with aggregate_metric_double: - match: {columns.0.name: "@timestamp"} - match: {columns.0.type: "date"} - match: {columns.1.name: "agg_metric"} - - match: {columns.1.type: "unsupported"} + - match: {columns.1.type: "aggregate_metric_double"} - match: {columns.2.name: "dim"} - match: {columns.2.type: "keyword"} - match: {columns.3.name: "k8s.pod.ip"} @@ -222,14 +230,45 @@ from doc with aggregate_metric_double: --- stats on aggregate_metric_double: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [aggregate_metric_double] + reason: "Support for aggregate_metric_double" - do: - catch: /Cannot use field \[agg_metric\] with unsupported type \[aggregate_metric_double\]/ + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test2 | STATS max(agg_metric) BY dim' + 
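#             The expected values below follow from the two documents indexed in the setup section:
#             max = max(10, 20) = 20.0, min = min(-1, 3) = -1.0, sum = 20 + 50 = 70.0,
#             and count = 5 + 7 = 12 (the sum of the stored value_count sub-metrics).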
query: 'FROM test2 | STATS max(agg_metric), min(agg_metric), sum(agg_metric), count(agg_metric)' + - length: {values: 1} + - length: {values.0: 4} + - match: {columns.0.name: "max(agg_metric)"} + - match: {columns.0.type: "double"} + - match: {columns.1.name: "min(agg_metric)"} + - match: {columns.1.type: "double"} + - match: {columns.2.name: "sum(agg_metric)"} + - match: {columns.2.type: "double"} + - match: {columns.3.name: "count(agg_metric)"} + - match: {columns.3.type: "long"} + - match: {values.0.0: 20.0} + - match: {values.0.1: -1.0} + - match: {values.0.2: 70.0} + - match: {values.0.3: 12.0} --- from index pattern unsupported counter: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [aggregate_metric_double] + reason: "Support for aggregate_metric_double" - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" @@ -240,7 +279,7 @@ from index pattern unsupported counter: - match: {columns.0.name: "@timestamp"} - match: {columns.0.type: "date"} - match: {columns.1.name: "agg_metric"} - - match: {columns.1.type: "unsupported"} + - match: {columns.1.type: "aggregate_metric_double"} - match: {columns.2.name: "dim"} - match: {columns.2.type: "keyword"} - match: {columns.3.name: "k8s.pod.ip"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml index e100f30717aef..8e5a6e6d231d6 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml @@ -13,7 +13,7 @@ setup: properties: aggregate_metric_double: type: aggregate_metric_double - metrics: [ min, max ] + metrics: [ min, max, sum, value_count ] default_metric: max binary: type: binary @@ -81,7 +81,7 @@ setup: body: - { "index": { } } - { - "aggregate_metric_double": { "min": 1.0, "max": 3.0 }, + "aggregate_metric_double": { "min": 1.0, "max": 3.0, "sum": 10.1, "value_count": 5 }, "binary": "U29tZSBiaW5hcnkgYmxvYg==", "completion": "foo bar", "date_nanos": "2015-01-01T12:10:30.123456789Z", @@ -119,8 +119,8 @@ unsupported: - method: POST path: /_query parameters: [ ] - capabilities: [ date_nanos_type ] - reason: "support for date nanos type" + capabilities: [ aggregate_metric_double ] + reason: "support for aggregate_metric_double type" - do: allowed_warnings_regex: @@ -131,7 +131,7 @@ unsupported: query: 'from test' - match: { columns.0.name: aggregate_metric_double } - - match: { columns.0.type: unsupported } + - match: { columns.0.type: aggregate_metric_double } - match: { columns.1.name: binary } - match: { columns.1.type: unsupported } - match: { columns.2.name: completion } @@ -227,7 +227,7 @@ unsupported: body: query: 'from test | limit 0' - match: { columns.0.name: aggregate_metric_double } - - match: { columns.0.type: unsupported } + - match: { columns.0.type: aggregate_metric_double } - match: { columns.1.name: binary } - match: { columns.1.type: unsupported } - match: { columns.2.name: completion } @@ -308,8 +308,8 @@ unsupported with sort: - method: POST path: /_query parameters: [ ] - capabilities: [ date_nanos_type ] - reason: "support for date nanos type" + capabilities: [ aggregate_metric_double ] + reason: "support for aggregate_metric_double type" - do: allowed_warnings_regex: @@ -317,97 +317,94 @@ unsupported with sort: - "No limit 
defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'from test | sort some_doc.bar' + query: 'from test | drop aggregate_metric_double | sort some_doc.bar' - - match: { columns.0.name: aggregate_metric_double } + - match: { columns.0.name: binary } - match: { columns.0.type: unsupported } - - match: { columns.1.name: binary } + - match: { columns.1.name: completion } - match: { columns.1.type: unsupported } - - match: { columns.2.name: completion } - - match: { columns.2.type: unsupported } - - match: { columns.3.name: date_nanos } - - match: { columns.3.type: date_nanos } - - match: { columns.4.name: date_range } + - match: { columns.2.name: date_nanos } + - match: { columns.2.type: date_nanos } + - match: { columns.3.name: date_range } + - match: { columns.3.type: unsupported } + - match: { columns.4.name: dense_vector } - match: { columns.4.type: unsupported } - - match: { columns.5.name: dense_vector } + - match: { columns.5.name: double_range } - match: { columns.5.type: unsupported } - - match: { columns.6.name: double_range } + - match: { columns.6.name: float_range } - match: { columns.6.type: unsupported } - - match: { columns.7.name: float_range } - - match: { columns.7.type: unsupported } - - match: { columns.8.name: geo_point } + - match: { columns.7.name: geo_point } + - match: { columns.7.type: geo_point } + - match: { columns.8.name: geo_point_alias } - match: { columns.8.type: geo_point } - - match: { columns.9.name: geo_point_alias } - - match: { columns.9.type: geo_point } - - match: { columns.10.name: geo_shape } - - match: { columns.10.type: geo_shape } - - match: { columns.11.name: histogram } + - match: { columns.9.name: geo_shape } + - match: { columns.9.type: geo_shape } + - match: { columns.10.name: histogram } + - match: { columns.10.type: unsupported } + - match: { columns.11.name: integer_range } - match: { columns.11.type: unsupported } - - match: { columns.12.name: integer_range } + - match: { columns.12.name: ip_range } - match: { columns.12.type: unsupported } - - match: { columns.13.name: ip_range } + - match: { columns.13.name: long_range } - match: { columns.13.type: unsupported } - - match: { columns.14.name: long_range } - - match: { columns.14.type: unsupported } - - match: { columns.15.name: match_only_text } - - match: { columns.15.type: text } - - match: { columns.16.name: name } - - match: { columns.16.type: keyword } - - match: { columns.17.name: point } - - match: { columns.17.type: cartesian_point } - - match: { columns.18.name: rank_feature } + - match: { columns.14.name: match_only_text } + - match: { columns.14.type: text } + - match: { columns.15.name: name } + - match: { columns.15.type: keyword } + - match: { columns.16.name: point } + - match: { columns.16.type: cartesian_point } + - match: { columns.17.name: rank_feature } + - match: { columns.17.type: unsupported } + - match: { columns.18.name: rank_features } - match: { columns.18.type: unsupported } - - match: { columns.19.name: rank_features } + - match: { columns.19.name: search_as_you_type } - match: { columns.19.type: unsupported } - - match: { columns.20.name: search_as_you_type } + - match: { columns.20.name: search_as_you_type._2gram } - match: { columns.20.type: unsupported } - - match: { columns.21.name: search_as_you_type._2gram } + - match: { columns.21.name: search_as_you_type._3gram } - match: { columns.21.type: unsupported } - - match: { columns.22.name: search_as_you_type._3gram } + - match: { columns.22.name: search_as_you_type._index_prefix } - 
match: { columns.22.type: unsupported } - - match: { columns.23.name: search_as_you_type._index_prefix } - - match: { columns.23.type: unsupported } - - match: { columns.24.name: shape } - - match: { columns.24.type: cartesian_shape } - - match: { columns.25.name: some_doc.bar } - - match: { columns.25.type: long } - - match: { columns.26.name: some_doc.foo } - - match: { columns.26.type: keyword } - - match: { columns.27.name: text } - - match: { columns.27.type: text } - - match: { columns.28.name: token_count } - - match: { columns.28.type: integer } + - match: { columns.23.name: shape } + - match: { columns.23.type: cartesian_shape } + - match: { columns.24.name: some_doc.bar } + - match: { columns.24.type: long } + - match: { columns.25.name: some_doc.foo } + - match: { columns.25.type: keyword } + - match: { columns.26.name: text } + - match: { columns.26.type: text } + - match: { columns.27.name: token_count } + - match: { columns.27.type: integer } - length: { values: 1 } - match: { values.0.0: null } - match: { values.0.1: null } - - match: { values.0.2: null } - - match: { values.0.3: "2015-01-01T12:10:30.123456789Z" } + - match: { values.0.2: "2015-01-01T12:10:30.123456789Z" } + - match: { values.0.3: null } - match: { values.0.4: null } - match: { values.0.5: null } - match: { values.0.6: null } - - match: { values.0.7: null } + - match: { values.0.7: "POINT (10.0 12.0)" } - match: { values.0.8: "POINT (10.0 12.0)" } - - match: { values.0.9: "POINT (10.0 12.0)" } - - match: { values.0.10: "LINESTRING (-97.154 25.996, -97.159 25.998, -97.181 25.991, -97.187 25.985)" } + - match: { values.0.9: "LINESTRING (-97.154 25.996, -97.159 25.998, -97.181 25.991, -97.187 25.985)" } + - match: { values.0.10: null } - match: { values.0.11: null } - match: { values.0.12: null } - match: { values.0.13: null } - - match: { values.0.14: null } - - match: { values.0.15: "foo bar baz" } - - match: { values.0.16: Alice } - - match: { values.0.17: "POINT (-97.15447 25.9961525)" } + - match: { values.0.14: "foo bar baz" } + - match: { values.0.15: Alice } + - match: { values.0.16: "POINT (-97.15447 25.9961525)" } + - match: { values.0.17: null } - match: { values.0.18: null } - match: { values.0.19: null } - match: { values.0.20: null } - match: { values.0.21: null } - match: { values.0.22: null } - - match: { values.0.23: null } - - match: { values.0.24: "LINESTRING (-377.03653 389.897676, -377.009051 389.889939)" } - - match: { values.0.25: 12 } - - match: { values.0.26: xy } - - match: { values.0.27: "foo bar" } - - match: { values.0.28: 3 } + - match: { values.0.23: "LINESTRING (-377.03653 389.897676, -377.009051 389.889939)" } + - match: { values.0.24: 12 } + - match: { values.0.25: xy } + - match: { values.0.26: "foo bar" } + - match: { values.0.27: 3 } --- nested declared inline: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/46_downsample.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/46_downsample.yml new file mode 100644 index 0000000000000..5a0b8b281e88f --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/46_downsample.yml @@ -0,0 +1,119 @@ +setup: + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + index: + mode: time_series + routing_path: [ metricset, k8s.pod.uid ] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + metricset: + type: keyword + time_series_dimension: true + k8s: + 
properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + name: + type: keyword + created_at: + type: date_nanos + running: + type: boolean + number_of_containers: + type: integer + ip: + type: ip + tags: + type: keyword + values: + type: integer + network: + properties: + tx: + type: long + time_series_metric: gauge + rx: + type: long + time_series_metric: gauge + - do: + bulk: + refresh: true + index: test + body: + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001810, "rx": 802133}, "created_at": "2021-04-28T19:34:00.000Z", "running": false, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 6]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.26", "network": {"tx": 2005177, "rx": 801479}, "created_at": "2021-04-28T19:35:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west1"], "values": [1, 1, 3]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.41", "network": {"tx": 2006223, "rx": 802337}, "created_at": "2021-04-28T19:36:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west2"], "values": [4, 1, 2]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.22", "network": {"tx": 2012916, "rx": 803685}, "created_at": "2021-04-28T19:37:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 1]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.33", "network": {"tx": 1434521, "rx": 530575}, "created_at": "2021-04-28T19:42:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test"], "values": [2, 3, 4]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.56", "network": {"tx": 1434577, "rx": 530600}, "created_at": "2021-04-28T19:43:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test", "us-west2"], "values": [2, 1, 1]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.37", "network": {"tx": 1434587, "rx": 530604}, "created_at": "2021-04-28T19:44:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [4, 5, 2]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.120", "network": {"tx": 1434595, "rx": 530605}, "created_at": "2021-04-28T19:45:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [3, 2, 1]}}}' + + - do: + indices.put_settings: + index: test + body: + index.blocks.write: true + +--- +"Query stats 
on downsampled index": + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [aggregate_metric_double] + reason: "Support for aggregate_metric_double" + - do: + indices.downsample: + index: test + target_index: test-downsample + body: > + { + "fixed_interval": "1h" + } + - is_true: acknowledged + + - do: + esql.query: + body: + query: "FROM test-downsample | + STATS max(k8s.pod.network.rx), min(k8s.pod.network.rx), sum(k8s.pod.network.rx), count(k8s.pod.network.rx) + | LIMIT 100" + + - length: {values: 1} + - length: {values.0: 4} + - match: {columns.0.name: "max(k8s.pod.network.rx)"} + - match: {columns.0.type: "double"} + - match: {columns.1.name: "min(k8s.pod.network.rx)"} + - match: {columns.1.type: "double"} + - match: {columns.2.name: "sum(k8s.pod.network.rx)"} + - match: {columns.2.type: "double"} + - match: {columns.3.name: "count(k8s.pod.network.rx)"} + - match: {columns.3.type: "long"} + - match: {values.0.0: 803685.0} + - match: {values.0.1: 530575.0} + - match: {values.0.2: 5332018.0} + - match: {values.0.3: 8} + From 5d76de5ffa4d55d34be04b747d2f141b6409968f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 09:23:25 +1100 Subject: [PATCH 243/383] Mute org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT test {yaml=indices.get_alias/10_basic/Get aliases via /_alias/_all} #121242 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8d9bdff3d2572..f3a77d3783b69 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -331,6 +331,9 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=nodes.stats/11_indices_metrics/Metric - blank for indices mappings} issue: https://github.com/elastic/elasticsearch/issues/121238 +- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT + method: test {yaml=indices.get_alias/10_basic/Get aliases via /_alias/_all} + issue: https://github.com/elastic/elasticsearch/issues/121242 # Examples: # From d88ddcac185ea3796e978c000b442ab0f0044d7d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 09:49:58 +1100 Subject: [PATCH 244/383] Mute org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT test {yaml=cluster.stats/10_basic/Sparse vector stats} #121246 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f3a77d3783b69..440a67256ac45 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -334,6 +334,9 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=indices.get_alias/10_basic/Get aliases via /_alias/_all} issue: https://github.com/elastic/elasticsearch/issues/121242 +- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT + method: test {yaml=cluster.stats/10_basic/Sparse vector stats} + issue: https://github.com/elastic/elasticsearch/issues/121246 # Examples: # From afdd45364aeb4ef6f488aa767c02b58623538df9 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Thu, 30 Jan 2025 08:58:14 +1000 Subject: [PATCH 245/383] Run `TransportGetEnrichPolicyAction` on local node (#121124) This action solely needs the cluster state, it can run on any node. 
Additionally, it needs to be cancellable to avoid doing unnecessary work after a client failure or timeout. --- docs/changelog/121124.yaml | 5 + .../rest-api-spec/api/enrich.get_policy.json | 2 +- .../enrich/action/GetEnrichPolicyAction.java | 33 ++-- x-pack/plugin/enrich/build.gradle | 1 + .../EnrichRestActionCancellationIT.java | 141 ++++++++++++++++++ .../TransportGetEnrichPolicyAction.java | 42 +++--- .../rest/RestGetEnrichPolicyAction.java | 7 +- .../GetEnrichPolicyActionRequestTests.java | 29 ---- .../GetEnrichPolicyActionResponseTests.java | 82 ---------- .../TransportGetEnrichPolicyActionTests.java | 34 +++-- 10 files changed, 222 insertions(+), 154 deletions(-) create mode 100644 docs/changelog/121124.yaml create mode 100644 x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichRestActionCancellationIT.java delete mode 100644 x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionRequestTests.java delete mode 100644 x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionResponseTests.java diff --git a/docs/changelog/121124.yaml b/docs/changelog/121124.yaml new file mode 100644 index 0000000000000..066145386ecb4 --- /dev/null +++ b/docs/changelog/121124.yaml @@ -0,0 +1,5 @@ +pr: 121124 +summary: Run `TransportGetEnrichPolicyAction` on local node +area: Ingest Node +type: enhancement +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.get_policy.json b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.get_policy.json index aed7397877393..e735a75f67ee9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.get_policy.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/enrich.get_policy.json @@ -30,7 +30,7 @@ "params": { "master_timeout":{ "type":"time", - "description":"Timeout for processing on master node" + "description":"Timeout for waiting for new cluster state in case it is blocked" } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java index 7f138dec7ee23..ae02dc781e0dd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java @@ -9,10 +9,14 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.action.support.local.LocalClusterStateRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; @@ -33,7 +37,7 @@ private GetEnrichPolicyAction() { super(NAME); } - public static class Request extends MasterNodeReadRequest { + public static class Request extends LocalClusterStateRequest { private final List names; @@ -42,6 +46,11 @@ public 
Request(TimeValue masterNodeTimeout, String... names) { this.names = List.of(names); } + /** + * NB prior to 9.0 this was a TransportMasterNodeReadAction so for BwC we must remain able to read these requests until + * we no longer need to support calling this action remotely. + */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) public Request(StreamInput in) throws IOException { super(in); this.names = in.readStringCollectionAsImmutableList(); @@ -52,14 +61,13 @@ public ActionRequestValidationException validate() { return null; } - public List getNames() { - return names; + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, "", parentTaskId, headers); } - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringCollection(names); + public List getNames() { + return names; } @Override @@ -89,10 +97,11 @@ public Response(Map policies) { .collect(Collectors.toList()); } - public Response(StreamInput in) throws IOException { - policies = in.readCollectionAsList(EnrichPolicy.NamedPolicy::new); - } - + /** + * NB prior to 9.0 this was a TransportMasterNodeReadAction so for BwC we must remain able to write these responses until + * we no longer need to support calling this action remotely. + */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(policies); diff --git a/x-pack/plugin/enrich/build.gradle b/x-pack/plugin/enrich/build.gradle index 352b7a3e64171..46972578ae1fc 100644 --- a/x-pack/plugin/enrich/build.gradle +++ b/x-pack/plugin/enrich/build.gradle @@ -19,6 +19,7 @@ dependencies { testImplementation project(path: ':modules:legacy-geo') testImplementation project(xpackModule('spatial')) testImplementation(testArtifact(project(xpackModule('monitoring')))) + internalClusterTestImplementation project(':modules:rest-root') } addQaCheckDependencies(project) diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichRestActionCancellationIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichRestActionCancellationIT.java new file mode 100644 index 0000000000000..a75dd26eaceeb --- /dev/null +++ b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichRestActionCancellationIT.java @@ -0,0 +1,141 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.enrich; + +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.action.support.CancellableActionTestPlugin; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.root.MainRestPlugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.transport.netty4.Netty4Plugin; +import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.CancellationException; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.action.support.ActionTestUtils.wrapAsRestResponseListener; +import static org.elasticsearch.test.TaskAssertions.assertAllTasksHaveFinished; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.oneOf; + +public class EnrichRestActionCancellationIT extends ESIntegTestCase { + + @Override + protected boolean addMockHttpTransport() { + return false; // enable http + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(NetworkModule.TRANSPORT_TYPE_KEY, Netty4Plugin.NETTY_TRANSPORT_NAME) + .put(NetworkModule.HTTP_TYPE_KEY, Netty4Plugin.NETTY_HTTP_TRANSPORT_NAME) + .build(); + } + + @Override + protected Collection> nodePlugins() { + return List.of(getTestTransportPlugin(), MainRestPlugin.class, CancellableActionTestPlugin.class, EnrichPlugin.class); + } + + public void testGetEnrichPolicyCancellation() throws IOException { + runRestActionCancellationTest(new Request(HttpGet.METHOD_NAME, "/_enrich/policy"), GetEnrichPolicyAction.NAME); + } + + private void runRestActionCancellationTest(Request request, String actionName) { + final var node = usually() ? 
internalCluster().getRandomNodeName() : internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); + + try ( + var restClient = createRestClient(node); + var capturingAction = CancellableActionTestPlugin.capturingActionOnNode(actionName, node) + ) { + final var responseFuture = new PlainActionFuture(); + final var restInvocation = restClient.performRequestAsync(request, wrapAsRestResponseListener(responseFuture)); + + if (randomBoolean()) { + // cancel by aborting the REST request + capturingAction.captureAndCancel(restInvocation::cancel); + expectThrows(ExecutionException.class, CancellationException.class, () -> responseFuture.get(10, TimeUnit.SECONDS)); + } else { + // cancel via the task management API + final var cancelFuture = new PlainActionFuture(); + capturingAction.captureAndCancel( + () -> SubscribableListener + + .newForked( + l -> restClient.performRequestAsync( + getListTasksRequest(node, actionName), + wrapAsRestResponseListener(l.map(ObjectPath::createFromResponse)) + ) + ) + + .andThen((l, listTasksResponse) -> { + final var taskCount = listTasksResponse.evaluateArraySize("tasks"); + assertThat(taskCount, greaterThan(0)); + try (var listeners = new RefCountingListener(l)) { + for (int i = 0; i < taskCount; i++) { + final var taskPrefix = "tasks." + i + "."; + assertTrue(listTasksResponse.evaluate(taskPrefix + "cancellable")); + assertFalse(listTasksResponse.evaluate(taskPrefix + "cancelled")); + restClient.performRequestAsync( + getCancelTaskRequest( + listTasksResponse.evaluate(taskPrefix + "node"), + listTasksResponse.evaluate(taskPrefix + "id") + ), + wrapAsRestResponseListener(listeners.acquire(EnrichRestActionCancellationIT::assertOK)) + ); + } + } + }) + + .addListener(cancelFuture) + ); + cancelFuture.get(10, TimeUnit.SECONDS); + expectThrows(Exception.class, () -> responseFuture.get(10, TimeUnit.SECONDS)); + } + + assertAllTasksHaveFinished(actionName); + } catch (Exception e) { + fail(e); + } + } + + private static Request getListTasksRequest(String taskNode, String actionName) { + final var listTasksRequest = new Request(HttpGet.METHOD_NAME, "/_tasks"); + listTasksRequest.addParameter("nodes", taskNode); + listTasksRequest.addParameter("actions", actionName); + listTasksRequest.addParameter("group_by", "none"); + return listTasksRequest; + } + + private static Request getCancelTaskRequest(String taskNode, int taskId) { + final var cancelTaskRequest = new Request(HttpPost.METHOD_NAME, Strings.format("/_tasks/%s:%d/_cancel", taskNode, taskId)); + cancelTaskRequest.addParameter("wait_for_completion", null); + return cancelTaskRequest; + } + + public static void assertOK(Response response) { + assertThat(response.getStatusLine().getStatusCode(), oneOf(200, 201)); + } + +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyAction.java index cff0ff60c599b..3af102e481e38 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyAction.java @@ -8,16 +8,17 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.action.support.ChannelActionListener; +import 
org.elasticsearch.action.support.local.TransportLocalClusterStateAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; @@ -26,32 +27,38 @@ import java.util.HashMap; import java.util.Map; -public class TransportGetEnrichPolicyAction extends TransportMasterNodeReadAction< +public class TransportGetEnrichPolicyAction extends TransportLocalClusterStateAction< GetEnrichPolicyAction.Request, GetEnrichPolicyAction.Response> { + /** + * NB prior to 9.0 this was a TransportMasterNodeReadAction so for BwC it must be registered with the TransportService until + * we no longer need to support calling this action remotely. + */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) + @SuppressWarnings("this-escape") @Inject - public TransportGetEnrichPolicyAction( - TransportService transportService, - ClusterService clusterService, - ThreadPool threadPool, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver - ) { + public TransportGetEnrichPolicyAction(TransportService transportService, ClusterService clusterService, ActionFilters actionFilters) { super( GetEnrichPolicyAction.NAME, - transportService, - clusterService, - threadPool, actionFilters, - GetEnrichPolicyAction.Request::new, - GetEnrichPolicyAction.Response::new, + transportService.getTaskManager(), + clusterService, EsExecutors.DIRECT_EXECUTOR_SERVICE ); + + transportService.registerRequestHandler( + actionName, + executor, + false, + true, + GetEnrichPolicyAction.Request::new, + (request, channel, task) -> executeDirect(task, request, new ChannelActionListener<>(channel)) + ); } @Override - protected void masterOperation( + protected void localClusterStateOperation( Task task, GetEnrichPolicyAction.Request request, ClusterState state, @@ -71,6 +78,7 @@ protected void masterOperation( } } } + ((CancellableTask) task).ensureNotCancelled(); listener.onResponse(new GetEnrichPolicyAction.Response(policies)); } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestGetEnrichPolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestGetEnrichPolicyAction.java index 2fb9f63c1eb4a..4796bfcdbfeb0 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestGetEnrichPolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestGetEnrichPolicyAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; @@ -39,6 +40,10 @@ protected RestChannelConsumer prepareRequest(final RestRequest 
restRequest, fina RestUtils.getMasterNodeTimeout(restRequest), Strings.splitStringByCommaToArray(restRequest.param("name")) ); - return channel -> client.execute(GetEnrichPolicyAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> new RestCancellableNodeClient(client, restRequest.getHttpChannel()).execute( + GetEnrichPolicyAction.INSTANCE, + request, + new RestToXContentListener<>(channel) + ); } } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionRequestTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionRequestTests.java deleted file mode 100644 index 051eadac48467..0000000000000 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionRequestTests.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.enrich.action; - -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; - -public class GetEnrichPolicyActionRequestTests extends AbstractWireSerializingTestCase { - - @Override - protected GetEnrichPolicyAction.Request createTestInstance() { - return new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, generateRandomStringArray(0, 4, false)); - } - - @Override - protected GetEnrichPolicyAction.Request mutateInstance(GetEnrichPolicyAction.Request instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 - } - - @Override - protected Writeable.Reader instanceReader() { - return GetEnrichPolicyAction.Request::new; - } -} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionResponseTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionResponseTests.java deleted file mode 100644 index c46005163fa12..0000000000000 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionResponseTests.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.enrich.action; - -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.test.AbstractXContentSerializingTestCase; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static org.elasticsearch.xpack.enrich.EnrichPolicyTests.assertEqualPolicies; -import static org.elasticsearch.xpack.enrich.EnrichPolicyTests.randomEnrichPolicy; -import static org.hamcrest.core.IsEqual.equalTo; - -public class GetEnrichPolicyActionResponseTests extends AbstractXContentSerializingTestCase { - - @Override - protected GetEnrichPolicyAction.Response doParseInstance(XContentParser parser) throws IOException { - Map policies = new HashMap<>(); - assert parser.nextToken() == XContentParser.Token.START_OBJECT; - assert parser.nextToken() == XContentParser.Token.FIELD_NAME; - assert parser.currentName().equals("policies"); - assert parser.nextToken() == XContentParser.Token.START_ARRAY; - - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - assert token == XContentParser.Token.START_OBJECT; - assert parser.nextToken() == XContentParser.Token.FIELD_NAME; - assert parser.currentName().equals("config"); - assert parser.nextToken() == XContentParser.Token.START_OBJECT; - EnrichPolicy.NamedPolicy policy = EnrichPolicy.NamedPolicy.fromXContent(parser); - policies.put(policy.getName(), policy.getPolicy()); - assert parser.nextToken() == XContentParser.Token.END_OBJECT; - } - - return new GetEnrichPolicyAction.Response(policies); - } - - @Override - protected GetEnrichPolicyAction.Response createTestInstance() { - Map items = new HashMap<>(); - for (int i = 0; i < randomIntBetween(0, 3); i++) { - EnrichPolicy policy = randomEnrichPolicy(XContentType.JSON); - items.put(randomAlphaOfLength(3), policy); - } - return new GetEnrichPolicyAction.Response(items); - } - - @Override - protected GetEnrichPolicyAction.Response mutateInstance(GetEnrichPolicyAction.Response instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 - } - - @Override - protected Writeable.Reader instanceReader() { - return GetEnrichPolicyAction.Response::new; - } - - @Override - protected void assertEqualInstances(GetEnrichPolicyAction.Response expectedInstance, GetEnrichPolicyAction.Response newInstance) { - assertNotSame(expectedInstance, newInstance); - // the tests shuffle around the policy query source xcontent type, so this is needed here - assertThat(expectedInstance.getPolicies().size(), equalTo(newInstance.getPolicies().size())); - // since the backing store is a treemap the list will be sorted so we can just check each - // instance is the same - for (int i = 0; i < expectedInstance.getPolicies().size(); i++) { - EnrichPolicy.NamedPolicy expected = expectedInstance.getPolicies().get(i); - EnrichPolicy.NamedPolicy newed = newInstance.getPolicies().get(i); - assertThat(expected.getName(), equalTo(newed.getName())); - assertEqualPolicies(expected.getPolicy(), newed.getPolicy()); - } - } -} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyActionTests.java 
index 6a3c1eb2555b1..448f6d42a992c 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyActionTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyActionTests.java @@ -10,6 +10,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; @@ -17,6 +19,7 @@ import org.elasticsearch.xpack.enrich.EnrichPolicyLocks; import org.junit.After; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; @@ -34,7 +37,8 @@ public void cleanupPolicies() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); final AtomicReference reference = new AtomicReference<>(); final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); - ActionTestUtils.execute(transportAction, null, new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT), new ActionListener<>() { + final var task = createTask(); + ActionTestUtils.execute(transportAction, task, new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT), new ActionListener<>() { @Override public void onResponse(GetEnrichPolicyAction.Response response) { reference.set(response); @@ -43,7 +47,7 @@ public void onResponse(GetEnrichPolicyAction.Response response) { } public void onFailure(final Exception e) { - fail(); + fail(e); } }); latch.await(); @@ -74,7 +78,8 @@ public void testListPolicies() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); final AtomicReference reference = new AtomicReference<>(); final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); - ActionTestUtils.execute(transportAction, null, new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT), new ActionListener<>() { + final var task = createTask(); + ActionTestUtils.execute(transportAction, task, new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT), new ActionListener<>() { @Override public void onResponse(GetEnrichPolicyAction.Response response) { reference.set(response); @@ -83,7 +88,7 @@ public void onResponse(GetEnrichPolicyAction.Response response) { } public void onFailure(final Exception e) { - fail(); + fail(e); } }); latch.await(); @@ -101,7 +106,8 @@ public void testListEmptyPolicies() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); final AtomicReference reference = new AtomicReference<>(); final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); - ActionTestUtils.execute(transportAction, null, new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT), new ActionListener<>() { + final var task = createTask(); + ActionTestUtils.execute(transportAction, task, new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT), new ActionListener<>() { @Override public void onResponse(GetEnrichPolicyAction.Response response) { reference.set(response); @@ -110,7 +116,7 @@ public void onResponse(GetEnrichPolicyAction.Response response) { } public void onFailure(final Exception e) 
{ - fail(); + fail(e); } }); latch.await(); @@ -137,7 +143,7 @@ public void testGetPolicy() throws InterruptedException { final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); ActionTestUtils.execute( transportAction, - null, + createTask(), new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, name), new ActionListener<>() { @Override @@ -147,7 +153,7 @@ public void onResponse(GetEnrichPolicyAction.Response response) { } public void onFailure(final Exception e) { - fail(); + fail(e); } } ); @@ -184,7 +190,7 @@ public void testGetMultiplePolicies() throws InterruptedException { final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); ActionTestUtils.execute( transportAction, - null, + createTask(), new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, name, anotherName), new ActionListener<>() { @Override @@ -194,7 +200,7 @@ public void onResponse(GetEnrichPolicyAction.Response response) { } public void onFailure(final Exception e) { - fail(); + fail(e); } } ); @@ -218,7 +224,7 @@ public void testGetPolicyThrowsError() throws InterruptedException { final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); ActionTestUtils.execute( transportAction, - null, + createTask(), new GetEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "non-exists"), new ActionListener<>() { @Override @@ -228,7 +234,7 @@ public void onResponse(GetEnrichPolicyAction.Response response) { } public void onFailure(final Exception e) { - fail(); + fail(e); } } ); @@ -236,4 +242,8 @@ public void onFailure(final Exception e) { assertNotNull(reference.get()); assertThat(reference.get().getPolicies().size(), equalTo(0)); } + + private static CancellableTask createTask() { + return new CancellableTask(randomNonNegativeLong(), "test", GetEnrichPolicyAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()); + } } From 06b23a665d00dc428e11ffd4741284a0ff6e90b6 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Thu, 30 Jan 2025 09:09:00 +1000 Subject: [PATCH 246/383] Avoid unnecessarily copying enrich policies map (#121127) Instead of always copying the map of enrich policies, we should return the (already read-only) map straight from the `EnrichMetadata` and make a modifiable copy only when necessary. 
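In condensed form, the read-vs-write pattern this change applies (a sketch distilled from the EnrichStore diff below; index-expression validation and policy locking are omitted here):

    // Read path: hand out the already read-only map from cluster state, no defensive copy.
    public static Map<String, EnrichPolicy> getPolicies(ClusterState state) {
        final EnrichMetadata metadata = state.metadata().custom(EnrichMetadata.TYPE, EnrichMetadata.EMPTY);
        return metadata.getPolicies();
    }

    // Write path: copy the map only when a mutation is actually performed.
    final Map<String, EnrichPolicy> originalPolicies = getPolicies(current);
    if (originalPolicies.containsKey(name)) {
        throw new ResourceAlreadyExistsException("policy [{}] already exists", name);
    }
    final Map<String, EnrichPolicy> updatedPolicies = new HashMap<>(originalPolicies);
    updatedPolicies.put(name, policy);
    return updatedPolicies;
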
--- .../xpack/core/enrich/EnrichMetadata.java | 2 ++ .../xpack/enrich/EnrichStore.java | 35 ++++++++----------- .../esql/enrich/EnrichPolicyResolver.java | 4 +-- 3 files changed, 19 insertions(+), 22 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java index e433093cdfb9a..6bfa7e5bdb2bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichMetadata.java @@ -36,6 +36,8 @@ public final class EnrichMetadata extends AbstractNamedDiffable static final ParseField POLICIES = new ParseField("policies"); + public static final EnrichMetadata EMPTY = new EnrichMetadata(Collections.emptyMap()); + @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "enrich_metadata", diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichStore.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichStore.java index 82f9877826a5c..6e7f3846963ca 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichStore.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichStore.java @@ -81,6 +81,10 @@ public static void putPolicy( } updateClusterState(clusterService, handler, current -> { + final Map originalPolicies = getPolicies(current); + if (originalPolicies.containsKey(name)) { + throw new ResourceAlreadyExistsException("policy [{}] already exists", name); + } for (String indexExpression : policy.getIndices()) { // indices field in policy can contain wildcards, aliases etc. String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames( @@ -101,12 +105,9 @@ public static void putPolicy( } } - final Map policies = getPolicies(current); - EnrichPolicy existing = policies.putIfAbsent(name, policy); - if (existing != null) { - throw new ResourceAlreadyExistsException("policy [{}] already exists", name); - } - return policies; + final Map updatedPolicies = new HashMap<>(originalPolicies); + updatedPolicies.put(name, policy); + return updatedPolicies; }); } @@ -125,13 +126,14 @@ public static void deletePolicy(String name, ClusterService clusterService, Cons } updateClusterState(clusterService, handler, current -> { - final Map policies = getPolicies(current); - if (policies.containsKey(name) == false) { + final Map originalPolicies = getPolicies(current); + if (originalPolicies.containsKey(name) == false) { throw new ResourceNotFoundException("policy [{}] not found", name); } - policies.remove(name); - return policies; + final Map updatedPolicies = new HashMap<>(originalPolicies); + updatedPolicies.remove(name); + return updatedPolicies; }); } @@ -153,18 +155,11 @@ public static EnrichPolicy getPolicy(String name, ClusterState state) { * Gets all policies in the cluster. 
* * @param state the cluster state - * @return a Map of policyName, EnrichPolicy of the policies + * @return a read-only Map of policyName, EnrichPolicy of the policies */ public static Map getPolicies(ClusterState state) { - final Map policies; - final EnrichMetadata enrichMetadata = state.metadata().custom(EnrichMetadata.TYPE); - if (enrichMetadata != null) { - // Make a copy, because policies map inside custom metadata is read only: - policies = new HashMap<>(enrichMetadata.getPolicies()); - } else { - policies = new HashMap<>(); - } - return policies; + final EnrichMetadata metadata = state.metadata().custom(EnrichMetadata.TYPE, EnrichMetadata.EMPTY); + return metadata.getPolicies(); } private static void updateClusterState( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java index c8e993b7dbf0b..cd571ebb676ac 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java @@ -434,8 +434,8 @@ public void messageReceived(LookupRequest request, TransportChannel channel, Tas } protected Map availablePolicies() { - final EnrichMetadata metadata = clusterService.state().metadata().custom(EnrichMetadata.TYPE); - return metadata == null ? Map.of() : metadata.getPolicies(); + final EnrichMetadata metadata = clusterService.state().metadata().custom(EnrichMetadata.TYPE, EnrichMetadata.EMPTY); + return metadata.getPolicies(); } protected void getRemoteConnection(String cluster, ActionListener listener) { From 231a616b23b9504a172c08c5530e554ae3030c0d Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Wed, 29 Jan 2025 15:16:35 -0800 Subject: [PATCH 247/383] Enable preview features for entitlements projects in IDEA (#121235) --- build-tools-internal/src/main/groovy/elasticsearch.ide.gradle | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index 90a4f74b5e9f4..fd973c3b0502e 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -153,6 +153,10 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { doLast { enablePreview('.idea/modules/libs/native/elasticsearch.libs.native.main.iml', 'JDK_21_PREVIEW') enablePreview('.idea/modules/libs/native/elasticsearch.libs.native.test.iml', 'JDK_21_PREVIEW') + enablePreview('.idea/modules/libs/entitlement/elasticsearch.libs.entitlement.main.iml', 'JDK_21_PREVIEW') + enablePreview('.idea/modules/libs/entitlement/elasticsearch.libs.entitlement.test.iml', 'JDK_21_PREVIEW') + enablePreview('.idea/modules/libs/entitlement/bridge/elasticsearch.libs.entitlement.bridge.main.iml', 'JDK_21_PREVIEW') + enablePreview('.idea/modules/libs/entitlement/bridge/elasticsearch.libs.entitlement.bridge.test.iml', 'JDK_21_PREVIEW') } } From 55f2db853e81dbb694b899c90c51cb4fd1580632 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 10:20:17 +1100 Subject: [PATCH 248/383] Mute org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityEsqlIT testCrossClusterAsyncQueryStop #121249 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml 
b/muted-tests.yml index 440a67256ac45..53ec9de7fd46b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -337,6 +337,9 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cluster.stats/10_basic/Sparse vector stats} issue: https://github.com/elastic/elasticsearch/issues/121246 +- class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityEsqlIT + method: testCrossClusterAsyncQueryStop + issue: https://github.com/elastic/elasticsearch/issues/121249 # Examples: # From 748801eb57d20d46301333778dd777eb32dc9b3d Mon Sep 17 00:00:00 2001 From: Svilen Mihaylov Date: Wed, 29 Jan 2025 18:29:42 -0500 Subject: [PATCH 249/383] Make test setup more reliable #121110 Make test setup more reliable Resolves #119197 --- .../test/search.vectors/42_knn_search_int4_flat.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml index f9f8d56e1d9c9..73e54532ca152 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int4_flat.yml @@ -59,6 +59,17 @@ setup: - do: indices.refresh: {} + # For added test reliability, pending the resolution of https://github.com/elastic/elasticsearch/issues/109416. + - do: + indices.forcemerge: + max_num_segments: 1 + index: int4_flat + - do: + indices.refresh: {} + - do: + indices.forcemerge: + max_num_segments: 1 + index: int4_flat --- "kNN search only": - do: From a40370ab563e203ae97f410c42ca0179fb74cdb6 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Thu, 30 Jan 2025 00:50:17 +0100 Subject: [PATCH 250/383] [Inference API] Add node-local rate limiting for the inference API (#120400) * Add node-local rate limiting for the inference API * Fix integration tests by using new LocalStateInferencePlugin instead of InferencePlugin and adjust formatting. 
* Correct feature flag name * Add more docs, reorganize methods and make some methods package private * Clarify comment in BaseInferenceActionRequest * Fix wrong merge * Fix checkstyle * Fix checkstyle in tests * Check that the service we want to the read the rate limit config for actually exists * [CI] Auto commit changes from spotless * checkStyle apply * Update docs/changelog/120400.yaml * Move rate limit division logic to RequestExecutorService * Spotless apply * Remove debug sout * Adding a few suggestions * Adam feedback * Fix compilation error * [CI] Auto commit changes from spotless * Add BWC test case to InferenceActionRequestTests * Add BWC test case to UnifiedCompletionActionRequestTests * Update x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculator.java Co-authored-by: Adam Demjen * Update x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculator.java Co-authored-by: Adam Demjen * Remove addressed TODO * Spotless apply * Only use new rate limit specific feature flag * Use ThreadLocalRandom * [CI] Auto commit changes from spotless * Use Randomness.get() * [CI] Auto commit changes from spotless * Fix import * Use ConcurrentHashMap in InferenceServiceNodeLocalRateLimitCalculator * Check for null value in getRateLimitAssignment and remove AtomicReference * Remove newAssignments * Up the default rate limit for completions * Put deprecated feature flag back in * Check feature flag in BaseTransportInferenceAction * spotlessApply * Export inference.common * Do not export inference.common * Provide noop rate limit calculator, if feature flag is disabled * Add proper dependency injection --------- Co-authored-by: elasticsearchmachine Co-authored-by: Jonathan Buttner Co-authored-by: Adam Demjen --- docs/changelog/120400.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../action/BaseInferenceActionRequest.java | 31 +++ .../action/InferenceActionRequestTests.java | 23 ++ .../UnifiedCompletionActionRequestTests.java | 20 ++ .../xpack/inference/InferencePlugin.java | 37 +++- .../action/BaseTransportInferenceAction.java | 178 +++++++++++++-- .../action/TransportInferenceAction.java | 13 +- ...sportUnifiedCompletionInferenceAction.java | 13 +- ...nceAPIClusterAwareRateLimitingFeature.java | 28 +++ ...ceServiceNodeLocalRateLimitCalculator.java | 197 +++++++++++++++++ .../InferenceServiceRateLimitCalculator.java | 18 ++ .../NoopNodeLocalRateLimitCalculator.java | 27 +++ .../inference/common/RateLimitAssignment.java | 19 ++ .../xpack/inference/common/RateLimiter.java | 2 +- .../AmazonBedrockRequestSender.java | 5 + .../external/http/RequestExecutor.java | 2 + .../http/sender/HttpRequestSender.java | 4 + .../http/sender/RequestExecutorService.java | 56 ++++- .../external/http/sender/RequestManager.java | 2 + .../external/http/sender/Sender.java | 2 + .../inference/services/SenderService.java | 2 +- ...renceServiceCompletionServiceSettings.java | 2 +- .../BaseTransportInferenceActionTestCase.java | 22 +- .../action/TransportInferenceActionTests.java | 130 ++++++++++- ...TransportUnifiedCompletionActionTests.java | 13 +- ...viceNodeLocalRateLimitCalculatorTests.java | 205 ++++++++++++++++++ .../AmazonBedrockMockRequestSender.java | 5 + ...ServiceCompletionServiceSettingsTests.java | 2 +- 29 files changed, 1015 insertions(+), 49 deletions(-) create mode 100644 docs/changelog/120400.yaml create mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceAPIClusterAwareRateLimitingFeature.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculator.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceRateLimitCalculator.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/NoopNodeLocalRateLimitCalculator.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimitAssignment.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java diff --git a/docs/changelog/120400.yaml b/docs/changelog/120400.yaml new file mode 100644 index 0000000000000..57d40730e0c8d --- /dev/null +++ b/docs/changelog/120400.yaml @@ -0,0 +1,5 @@ +pr: 120400 +summary: "[Inference API] Add node-local rate limiting for the inference API" +area: Machine Learning +type: feature +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 94e018535908c..8f747a59ae5e0 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -170,6 +170,7 @@ static TransportVersion def(int id) { public static final TransportVersion RESOURCE_DEPRECATION_CHECKS = def(8_836_00_0); public static final TransportVersion LINEAR_RETRIEVER_SUPPORT = def(8_837_00_0); public static final TransportVersion TIMEOUT_GET_PARAM_FOR_RESOLVE_CLUSTER = def(8_838_00_0); + public static final TransportVersion INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING = def(8_839_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/BaseInferenceActionRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/BaseInferenceActionRequest.java index e426574c52ce6..855b0bdebb417 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/BaseInferenceActionRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/BaseInferenceActionRequest.java @@ -7,20 +7,35 @@ package org.elasticsearch.xpack.core.inference.action; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.inference.TaskType; import java.io.IOException; +/** + * Base class for inference action requests. Tracks request routing state to prevent potential routing loops + * and supports both streaming and non-streaming inference operations. 
+ */ public abstract class BaseInferenceActionRequest extends ActionRequest { + private boolean hasBeenRerouted; + public BaseInferenceActionRequest() { super(); } public BaseInferenceActionRequest(StreamInput in) throws IOException { super(in); + if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING)) { + this.hasBeenRerouted = in.readBoolean(); + } else { + // For backwards compatibility, we treat all inference requests coming from ES nodes having + // a version pre-node-local-rate-limiting as already rerouted to maintain pre-node-local-rate-limiting behavior. + this.hasBeenRerouted = true; + } } public abstract boolean isStreaming(); @@ -28,4 +43,20 @@ public BaseInferenceActionRequest(StreamInput in) throws IOException { public abstract TaskType getTaskType(); public abstract String getInferenceEntityId(); + + public void setHasBeenRerouted(boolean hasBeenRerouted) { + this.hasBeenRerouted = hasBeenRerouted; + } + + public boolean hasBeenRerouted() { + return hasBeenRerouted; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING)) { + out.writeBoolean(hasBeenRerouted); + } + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java index 01c0ff88be222..e9f4df7a523ad 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java @@ -386,6 +386,29 @@ public void testWriteTo_WhenVersionIsBeforeInputTypeAdded_ShouldSetInputTypeToUn assertThat(deserializedInstance.getInputType(), is(InputType.UNSPECIFIED)); } + public void testWriteTo_WhenVersionIsBeforeAdaptiveRateLimiting_ShouldSetHasBeenReroutedToTrue() throws IOException { + var instance = new InferenceAction.Request( + TaskType.TEXT_EMBEDDING, + "model", + null, + List.of("input"), + Map.of(), + InputType.UNSPECIFIED, + InferenceAction.Request.DEFAULT_TIMEOUT, + false + ); + + InferenceAction.Request deserializedInstance = copyWriteable( + instance, + getNamedWriteableRegistry(), + instanceReader(), + TransportVersions.V_8_13_0 + ); + + // Verify that hasBeenRerouted is true after deserializing a request coming from an older transport version + assertTrue(deserializedInstance.hasBeenRerouted()); + } + public void testGetInputTypeToWrite_ReturnsIngest_WhenInputTypeIsUnspecified_VersionBeforeUnspecifiedIntroduced() { assertThat(getInputTypeToWrite(InputType.UNSPECIFIED, TransportVersions.V_8_12_1), is(InputType.INGEST)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionActionRequestTests.java index f548bfa0709ed..ceb7c9853a0f4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionActionRequestTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.inference.action; import org.elasticsearch.TransportVersion; +import 
org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; @@ -65,6 +66,25 @@ public void testValidation_ReturnsNull_When_TaskType_IsAny() { assertNull(request.validate()); } + public void testWriteTo_WhenVersionIsBeforeAdaptiveRateLimiting_ShouldSetHasBeenReroutedToTrue() throws IOException { + var instance = new UnifiedCompletionAction.Request( + "model", + TaskType.ANY, + UnifiedCompletionRequest.of(List.of(UnifiedCompletionRequestTests.randomMessage())), + TimeValue.timeValueSeconds(10) + ); + + UnifiedCompletionAction.Request deserializedInstance = copyWriteable( + instance, + getNamedWriteableRegistry(), + instanceReader(), + TransportVersions.ELASTIC_INFERENCE_SERVICE_UNIFIED_CHAT_COMPLETIONS_INTEGRATION + ); + + // Verify that hasBeenRerouted is true after deserializing a request coming from an older transport version + assertTrue(deserializedInstance.hasBeenRerouted()); + } + @Override protected UnifiedCompletionAction.Request mutateInstanceForVersion(UnifiedCompletionAction.Request instance, TransportVersion version) { return instance; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index adea09adb8afc..6f302f944c005 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -72,6 +72,9 @@ import org.elasticsearch.xpack.inference.action.TransportUnifiedCompletionInferenceAction; import org.elasticsearch.xpack.inference.action.TransportUpdateInferenceModelAction; import org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter; +import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.NoopNodeLocalRateLimitCalculator; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockRequestSender; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; @@ -133,6 +136,7 @@ import java.util.function.Supplier; import static java.util.Collections.singletonList; +import static org.elasticsearch.xpack.inference.common.InferenceAPIClusterAwareRateLimitingFeature.INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG; public class InferencePlugin extends Plugin implements @@ -229,6 +233,7 @@ public List getRestHandlers( @Override public Collection createComponents(PluginServices services) { + var components = new ArrayList<>(); var throttlerManager = new ThrottlerManager(settings, services.threadPool(), services.clusterService()); var truncator = new Truncator(settings, services.clusterService()); serviceComponents.set(new ServiceComponents(services.threadPool(), throttlerManager, settings, truncator)); @@ -297,20 +302,38 @@ public Collection createComponents(PluginServices services) { // This must be done after the HttpRequestSenderFactory is created so that the services can get the // reference correctly - var registry = new InferenceServiceRegistry(inferenceServices, factoryContext); - registry.init(services.client()); - for (var service : registry.getServices().values()) { + 
var serviceRegistry = new InferenceServiceRegistry(inferenceServices, factoryContext); + serviceRegistry.init(services.client()); + for (var service : serviceRegistry.getServices().values()) { service.defaultConfigIds().forEach(modelRegistry::addDefaultIds); } - inferenceServiceRegistry.set(registry); + inferenceServiceRegistry.set(serviceRegistry); - var actionFilter = new ShardBulkInferenceActionFilter(services.clusterService(), registry, modelRegistry); + var actionFilter = new ShardBulkInferenceActionFilter(services.clusterService(), serviceRegistry, modelRegistry); shardBulkInferenceActionFilter.set(actionFilter); var meterRegistry = services.telemetryProvider().getMeterRegistry(); - var stats = new PluginComponentBinding<>(InferenceStats.class, InferenceStats.create(meterRegistry)); + var inferenceStats = new PluginComponentBinding<>(InferenceStats.class, InferenceStats.create(meterRegistry)); + + components.add(serviceRegistry); + components.add(modelRegistry); + components.add(httpClientManager); + components.add(inferenceStats); + + // Only add InferenceServiceNodeLocalRateLimitCalculator (which is a ClusterStateListener) for cluster aware rate limiting, + // if the rate limiting feature flags are enabled, otherwise provide noop implementation + InferenceServiceRateLimitCalculator calculator; + if (INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG.isEnabled()) { + calculator = new InferenceServiceNodeLocalRateLimitCalculator(services.clusterService(), serviceRegistry); + } else { + calculator = new NoopNodeLocalRateLimitCalculator(); + } + + // Add binding for interface -> implementation + components.add(new PluginComponentBinding<>(InferenceServiceRateLimitCalculator.class, calculator)); + components.add(calculator); - return List.of(modelRegistry, registry, httpClientManager, stats); + return components; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java index b6c7d26b36f9a..08d74a36d6503 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java @@ -13,6 +13,10 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.ChunkedToXContent; @@ -27,24 +31,42 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.inference.action.BaseInferenceActionRequest; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import 
org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; import org.elasticsearch.xpack.inference.common.DelegatingProcessor; +import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; import org.elasticsearch.xpack.inference.telemetry.InferenceTimer; +import java.io.IOException; +import java.util.Random; +import java.util.concurrent.Executor; import java.util.function.Supplier; import java.util.stream.Collectors; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.InferencePlugin.INFERENCE_API_FEATURE; +import static org.elasticsearch.xpack.inference.common.InferenceAPIClusterAwareRateLimitingFeature.INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG; import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.modelAttributes; import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.responseAttributes; +/** + * Base class for transport actions that handle inference requests. + * Works in conjunction with {@link InferenceServiceNodeLocalRateLimitCalculator} to + * route requests to specific nodes, iff they support "node-local" rate limiting, which is described in detail + * in {@link InferenceServiceNodeLocalRateLimitCalculator}. + * + * @param The specific type of inference request being handled + */ public abstract class BaseTransportInferenceAction extends HandledTransportAction< Request, InferenceAction.Response> { @@ -57,6 +79,11 @@ public abstract class BaseTransportInferenceAction requestReader + Writeable.Reader requestReader, + InferenceServiceRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + NodeClient nodeClient, + ThreadPool threadPool ) { super(inferenceActionName, transportService, actionFilters, requestReader, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.licenseState = licenseState; @@ -75,8 +105,24 @@ public BaseTransportInferenceAction( this.serviceRegistry = serviceRegistry; this.inferenceStats = inferenceStats; this.streamingTaskManager = streamingTaskManager; + this.inferenceServiceRateLimitCalculator = inferenceServiceNodeLocalRateLimitCalculator; + this.nodeClient = nodeClient; + this.threadPool = threadPool; + this.transportService = transportService; + this.random = Randomness.get(); } + protected abstract boolean isInvalidTaskTypeForInferenceEndpoint(Request request, UnparsedModel unparsedModel); + + protected abstract ElasticsearchStatusException createInvalidTaskTypeException(Request request, UnparsedModel unparsedModel); + + protected abstract void doInference( + Model model, + Request request, + InferenceService service, + ActionListener listener + ); + @Override protected void doExecute(Task task, Request request, ActionListener listener) { if (INFERENCE_API_FEATURE.check(licenseState) == false) { @@ -87,31 +133,32 @@ protected void doExecute(Task task, Request request, ActionListener { - var service = serviceRegistry.getService(unparsedModel.service()); + var serviceName = unparsedModel.service(); + try { - validationHelper(service::isEmpty, () -> unknownServiceException(unparsedModel.service(), request.getInferenceEntityId())); - validationHelper( - () -> request.getTaskType().isAnyOrSame(unparsedModel.taskType()) == false, - () -> 
requestModelTaskTypeMismatchException(request.getTaskType(), unparsedModel.taskType()) - ); - validationHelper( - () -> isInvalidTaskTypeForInferenceEndpoint(request, unparsedModel), - () -> createInvalidTaskTypeException(request, unparsedModel) - ); + validateRequest(request, unparsedModel); } catch (Exception e) { recordMetrics(unparsedModel, timer, e); listener.onFailure(e); return; } - var model = service.get() - .parsePersistedConfigWithSecrets( + var service = serviceRegistry.getService(serviceName).get(); + var routingDecision = determineRouting(serviceName, request, unparsedModel); + + if (routingDecision.currentNodeShouldHandleRequest()) { + var model = service.parsePersistedConfigWithSecrets( unparsedModel.inferenceEntityId(), unparsedModel.taskType(), unparsedModel.settings(), unparsedModel.secrets() ); - inferOnServiceWithMetrics(model, request, service.get(), timer, listener); + inferOnServiceWithMetrics(model, request, service, timer, listener); + } else { + // Reroute request + request.setHasBeenRerouted(true); + rerouteRequest(request, listener, routingDecision.targetNode); + } }, e -> { try { inferenceStats.inferenceDuration().record(timer.elapsedMillis(), responseAttributes(e)); @@ -124,15 +171,95 @@ protected void doExecute(Task task, Request request, ActionListener unknownServiceException(serviceName, request.getInferenceEntityId())); + validationHelper( + () -> request.getTaskType().isAnyOrSame(unparsedModel.taskType()) == false, + () -> requestModelTaskTypeMismatchException(requestTaskType, unparsedModel.taskType()) + ); + validationHelper( + () -> isInvalidTaskTypeForInferenceEndpoint(request, unparsedModel), + () -> createInvalidTaskTypeException(request, unparsedModel) + ); + } + + private NodeRoutingDecision determineRouting(String serviceName, Request request, UnparsedModel unparsedModel) { + if (INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG.isEnabled() == false) { + return NodeRoutingDecision.handleLocally(); + } + + var modelTaskType = unparsedModel.taskType(); + + // Rerouting not supported or request was already rerouted + if (inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceName, modelTaskType) == false + || request.hasBeenRerouted()) { + return NodeRoutingDecision.handleLocally(); + } + + var rateLimitAssignment = inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceName, modelTaskType); + + // No assignment yet + if (rateLimitAssignment == null) { + return NodeRoutingDecision.handleLocally(); + } + + var responsibleNodes = rateLimitAssignment.responsibleNodes(); + + // Empty assignment + if (responsibleNodes == null || responsibleNodes.isEmpty()) { + return NodeRoutingDecision.handleLocally(); + } + + var nodeToHandleRequest = responsibleNodes.get(random.nextInt(responsibleNodes.size())); + String localNodeId = nodeClient.getLocalNodeId(); + + // The drawn node is the current node + if (nodeToHandleRequest.getId().equals(localNodeId)) { + return NodeRoutingDecision.handleLocally(); + } + + // Reroute request + return NodeRoutingDecision.routeTo(nodeToHandleRequest); + } + private static void validationHelper(Supplier validationFailure, Supplier exceptionCreator) { if (validationFailure.get()) { throw exceptionCreator.get(); } } - protected abstract boolean isInvalidTaskTypeForInferenceEndpoint(Request request, UnparsedModel unparsedModel); - - protected abstract ElasticsearchStatusException createInvalidTaskTypeException(Request request, UnparsedModel unparsedModel); + private void rerouteRequest(Request 
request, ActionListener listener, DiscoveryNode nodeToHandleRequest) { + transportService.sendRequest( + nodeToHandleRequest, + InferenceAction.NAME, + request, + new TransportResponseHandler() { + @Override + public Executor executor() { + return threadPool.executor(InferencePlugin.UTILITY_THREAD_POOL_NAME); + } + + @Override + public void handleResponse(InferenceAction.Response response) { + listener.onResponse(response); + } + + @Override + public void handleException(TransportException exp) { + listener.onFailure(exp); + } + + @Override + public InferenceAction.Response read(StreamInput in) throws IOException { + return new InferenceAction.Response(in); + } + } + ); + } private void recordMetrics(UnparsedModel model, InferenceTimer timer, @Nullable Throwable t) { try { @@ -185,13 +312,6 @@ private void inferOnService(Model model, Request request, InferenceService servi } } - protected abstract void doInference( - Model model, - Request request, - InferenceService service, - ActionListener listener - ); - private ElasticsearchStatusException unsupportedStreamingTaskException(Request request, InferenceService service) { var supportedTasks = service.supportedStreamingTasks(); if (supportedTasks.isEmpty()) { @@ -259,4 +379,14 @@ public void onComplete() { super.onComplete(); } } + + private record NodeRoutingDecision(boolean currentNodeShouldHandleRequest, DiscoveryNode targetNode) { + static NodeRoutingDecision handleLocally() { + return new NodeRoutingDecision(true, null); + } + + static NodeRoutingDecision routeTo(DiscoveryNode node) { + return new NodeRoutingDecision(false, node); + } + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index 24ef0d7d610d0..e8f52e42f5708 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.InferenceServiceResults; @@ -17,9 +18,11 @@ import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; @@ -33,7 +36,10 @@ public TransportInferenceAction( ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, - StreamingTaskManager streamingTaskManager + StreamingTaskManager streamingTaskManager, + InferenceServiceRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + NodeClient nodeClient, + ThreadPool threadPool ) { 
super( InferenceAction.NAME, @@ -44,7 +50,10 @@ public TransportInferenceAction( serviceRegistry, inferenceStats, streamingTaskManager, - InferenceAction.Request::new + InferenceAction.Request::new, + inferenceServiceNodeLocalRateLimitCalculator, + nodeClient, + threadPool ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java index 9354ac2a83182..2e3090f2afd59 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.InferenceServiceResults; @@ -19,9 +20,11 @@ import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; @@ -35,7 +38,10 @@ public TransportUnifiedCompletionInferenceAction( ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, - StreamingTaskManager streamingTaskManager + StreamingTaskManager streamingTaskManager, + InferenceServiceRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + NodeClient nodeClient, + ThreadPool threadPool ) { super( UnifiedCompletionAction.NAME, @@ -46,7 +52,10 @@ public TransportUnifiedCompletionInferenceAction( serviceRegistry, inferenceStats, streamingTaskManager, - UnifiedCompletionAction.Request::new + UnifiedCompletionAction.Request::new, + inferenceServiceNodeLocalRateLimitCalculator, + nodeClient, + threadPool ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceAPIClusterAwareRateLimitingFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceAPIClusterAwareRateLimitingFeature.java new file mode 100644 index 0000000000000..22de92526ba89 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceAPIClusterAwareRateLimitingFeature.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.common.util.FeatureFlag; +import org.elasticsearch.xpack.inference.InferencePlugin; + +/** + * Cluster aware rate limiting feature flag. When the feature is complete and fully rolled out, this flag will be removed. + * Enable feature via JVM option: `-Des.inference_cluster_aware_rate_limiting_feature_flag_enabled=true`. + * + * This controls, whether {@link InferenceServiceNodeLocalRateLimitCalculator} gets instantiated and + * added as injectable {@link InferencePlugin} component. + */ +public class InferenceAPIClusterAwareRateLimitingFeature { + + public static final FeatureFlag INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG = new FeatureFlag( + "inference_cluster_aware_rate_limiting" + ); + + private InferenceAPIClusterAwareRateLimitingFeature() {} + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculator.java new file mode 100644 index 0000000000000..4778e4cc6d30c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculator.java @@ -0,0 +1,197 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.xpack.core.inference.action.BaseInferenceActionRequest; +import org.elasticsearch.xpack.inference.action.BaseTransportInferenceAction; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.SenderService; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Note: {@link InferenceAPIClusterAwareRateLimitingFeature} needs to be enabled for this class to get + * instantiated inside {@link org.elasticsearch.xpack.inference.InferencePlugin} and be available via dependency injection. + * + * Calculates and manages node-local rate limits for inference services based on changes in the cluster topology. + * This calculator calculates a "node-local" rate-limit, which essentially divides the rate limit for a service/task type + * through the number of nodes, which got assigned to this service/task type pair. 
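As a quick worked example of that division (a sketch with purely illustrative numbers and a hypothetical helper name): the per-node limit is simply the configured limit shared evenly by the nodes responsible for the service/task type pair.

// Hypothetical helper illustrating the arithmetic only.
final class NodeLocalLimitExample {
    static double nodeLocalRequestsPerTimeUnit(long configuredRequestsPerTimeUnit, int numResponsibleNodes) {
        // e.g. a limit of 600 requests per time unit shared by 3 responsible nodes -> 200 per node,
        // so the effective cluster-wide limit stays at the configured 600.
        return (double) configuredRequestsPerTimeUnit / numResponsibleNodes;
    }
}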
Without this calculator the rate limit stored + * in the inference endpoint configuration would get effectively multiplied by the number of nodes in a cluster (assuming a ~ uniform + * distribution of requests to the nodes in the cluster). + * + * The calculator works in conjunction with several other components: + * - {@link BaseTransportInferenceAction} - Uses the calculator to determine, whether to reroute a request or not + * - {@link BaseInferenceActionRequest} - Tracks, if the request (an instance of a subclass of {@link BaseInferenceActionRequest}) + * already got re-routed at least once + * - {@link HttpRequestSender} - Provides original rate limits that this calculator divides through the number of nodes + * responsible for a service/task type + */ +public class InferenceServiceNodeLocalRateLimitCalculator implements InferenceServiceRateLimitCalculator { + + public static final Integer DEFAULT_MAX_NODES_PER_GROUPING = 3; + + /** + * Configuration mapping services to their task type rate limiting settings. + * Each service can have multiple configs defining: + * - Which task types support request re-routing and "node-local" rate limit calculation + * - How many nodes should handle requests for each task type, based on cluster size (dynamically calculated or statically provided) + **/ + static final Map> SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS = Map.of( + ElasticInferenceService.NAME, + // TODO: should probably be a map/set + List.of(new NodeLocalRateLimitConfig(TaskType.SPARSE_EMBEDDING, (numNodesInCluster) -> DEFAULT_MAX_NODES_PER_GROUPING)) + ); + + record NodeLocalRateLimitConfig(TaskType taskType, MaxNodesPerGroupingStrategy maxNodesPerGroupingStrategy) {} + + @FunctionalInterface + private interface MaxNodesPerGroupingStrategy { + + Integer calculate(Integer numberOfNodesInCluster); + + } + + private static final Logger logger = LogManager.getLogger(InferenceServiceNodeLocalRateLimitCalculator.class); + + private final InferenceServiceRegistry serviceRegistry; + + private final ConcurrentHashMap> serviceAssignments; + + @Inject + public InferenceServiceNodeLocalRateLimitCalculator(ClusterService clusterService, InferenceServiceRegistry serviceRegistry) { + clusterService.addListener(this); + this.serviceRegistry = serviceRegistry; + this.serviceAssignments = new ConcurrentHashMap<>(); + } + + @Override + public void clusterChanged(ClusterChangedEvent event) { + boolean clusterTopologyChanged = event.nodesChanged(); + + // TODO: feature flag per node? We should not reroute to nodes not having eis and/or the inference plugin enabled + // Every node should land on the same grouping by calculation, so no need to put anything into the cluster state + if (clusterTopologyChanged) { + updateAssignments(event); + } + } + + public boolean isTaskTypeReroutingSupported(String serviceName, TaskType taskType) { + return SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.getOrDefault(serviceName, Collections.emptyList()) + .stream() + .anyMatch(rateLimitConfig -> taskType.equals(rateLimitConfig.taskType)); + } + + public RateLimitAssignment getRateLimitAssignment(String service, TaskType taskType) { + var assignmentsPerTaskType = serviceAssignments.get(service); + + if (assignmentsPerTaskType == null) { + return null; + } + + return assignmentsPerTaskType.get(taskType); + } + + /** + * Updates instances of {@link RateLimitAssignment} for each service and task type when the cluster topology changes. 
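The grouping itself is computed deterministically on every node: the cluster's nodes are sorted by id and the first min(clusterSize, maxNodesPerGrouping) of them are assigned, so all nodes arrive at the same result without anything being written to the cluster state. Below is a simplified sketch of that selection, using plain node-id strings instead of DiscoveryNode; it is equivalent to the modulo loop further down, since the grouping size never exceeds the node count.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

// Simplified sketch of the deterministic node selection; node ids stand in for DiscoveryNode.
final class AssignmentExample {
    static List<String> assignNodes(List<String> nodeIdsInCluster, int maxNodesPerGrouping) {
        var sorted = new ArrayList<>(nodeIdsInCluster);
        sorted.sort(Comparator.naturalOrder()); // every node computes the same order
        int nodesPerGrouping = Math.min(sorted.size(), maxNodesPerGrouping);
        return List.copyOf(sorted.subList(0, nodesPerGrouping)); // first N nodes handle this service/task type
    }
}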
+ * For each service and supported task type, calculates which nodes should handle requests + * and what their local rate limits should be per inference endpoint. + */ + private void updateAssignments(ClusterChangedEvent event) { + var newClusterState = event.state(); + var nodes = newClusterState.nodes().getAllNodes(); + + // Sort nodes by id (every node lands on the same result) + var sortedNodes = nodes.stream().sorted(Comparator.comparing(DiscoveryNode::getId)).toList(); + + // Sort inference services by name (every node lands on the same result) + var sortedServices = new ArrayList<>(serviceRegistry.getServices().values()); + sortedServices.sort(Comparator.comparing(InferenceService::name)); + + for (String serviceName : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet()) { + Optional service = serviceRegistry.getService(serviceName); + + if (service.isPresent()) { + var inferenceService = service.get(); + + for (NodeLocalRateLimitConfig rateLimitConfig : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.get(serviceName)) { + Map perTaskTypeAssignments = new HashMap<>(); + TaskType taskType = rateLimitConfig.taskType(); + + // Calculate node assignments needed for re-routing + var assignedNodes = calculateServiceAssignment(rateLimitConfig.maxNodesPerGroupingStrategy(), sortedNodes); + + // Update rate limits to be "node-local" + var numAssignedNodes = assignedNodes.size(); + updateRateLimits(inferenceService, numAssignedNodes); + + perTaskTypeAssignments.put(taskType, new RateLimitAssignment(assignedNodes)); + serviceAssignments.put(serviceName, perTaskTypeAssignments); + } + } else { + logger.warn( + "Service [{}] is configured for node-local rate limiting but was not found in the service registry", + serviceName + ); + } + } + } + + private List calculateServiceAssignment( + MaxNodesPerGroupingStrategy maxNodesPerGroupingStrategy, + List sortedNodes + ) { + int numberOfNodes = sortedNodes.size(); + int nodesPerGrouping = Math.min(numberOfNodes, maxNodesPerGroupingStrategy.calculate(numberOfNodes)); + + List assignedNodes = new ArrayList<>(); + + // TODO: here we can probably be smarter: if |num nodes in cluster| > |num nodes per task types| + // -> make sure a service provider is not assigned the same nodes for all task types; only relevant as soon as we support more task + // types + for (int j = 0; j < nodesPerGrouping; j++) { + var assignedNode = sortedNodes.get(j % numberOfNodes); + assignedNodes.add(assignedNode); + } + + return assignedNodes; + } + + private void updateRateLimits(InferenceService service, int responsibleNodes) { + if ((service instanceof SenderService) == false) { + return; + } + + SenderService senderService = (SenderService) service; + Sender sender = senderService.getSender(); + // TODO: this needs to take in service and task type as soon as multiple services/task types are supported + sender.updateRateLimitDivisor(responsibleNodes); + } + + InferenceServiceRegistry serviceRegistry() { + return serviceRegistry; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceRateLimitCalculator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceRateLimitCalculator.java new file mode 100644 index 0000000000000..e05637f629ec6 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/InferenceServiceRateLimitCalculator.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.inference.TaskType; + +public interface InferenceServiceRateLimitCalculator extends ClusterStateListener { + + boolean isTaskTypeReroutingSupported(String serviceName, TaskType taskType); + + RateLimitAssignment getRateLimitAssignment(String service, TaskType taskType); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/NoopNodeLocalRateLimitCalculator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/NoopNodeLocalRateLimitCalculator.java new file mode 100644 index 0000000000000..a07217d9e9af7 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/NoopNodeLocalRateLimitCalculator.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.inference.TaskType; + +public class NoopNodeLocalRateLimitCalculator implements InferenceServiceRateLimitCalculator { + + @Override + public void clusterChanged(ClusterChangedEvent event) { + // Do nothing + } + + public boolean isTaskTypeReroutingSupported(String serviceName, TaskType taskType) { + return false; + } + + public RateLimitAssignment getRateLimitAssignment(String service, TaskType taskType) { + return null; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimitAssignment.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimitAssignment.java new file mode 100644 index 0000000000000..de8d85c96271c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimitAssignment.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.cluster.node.DiscoveryNode; + +import java.util.List; + +/** + * Record for storing rate limit assignment information. 
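Taken together with the calculator interface above, the routing decision a transport action has to make reduces to: handle the request locally if rerouting is unsupported for the task type, the request was already rerouted, or no non-empty assignment exists yet; otherwise draw one responsible node at random and only forward when it is not the local node. A condensed, illustrative version of that decision follows (the class and method names are made up).

import java.util.Random;

import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.inference.TaskType;
import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator;
import org.elasticsearch.xpack.inference.common.RateLimitAssignment;

// Condensed illustration of the routing decision; returns the node to forward to, or null to handle locally.
final class RoutingDecisionExample {
    static DiscoveryNode pickTargetNode(
        InferenceServiceRateLimitCalculator calculator,
        String serviceName,
        TaskType taskType,
        boolean alreadyRerouted,
        String localNodeId,
        Random random
    ) {
        if (alreadyRerouted || calculator.isTaskTypeReroutingSupported(serviceName, taskType) == false) {
            return null;
        }
        RateLimitAssignment assignment = calculator.getRateLimitAssignment(serviceName, taskType);
        if (assignment == null || assignment.responsibleNodes() == null || assignment.responsibleNodes().isEmpty()) {
            return null;
        }
        var responsibleNodes = assignment.responsibleNodes();
        DiscoveryNode candidate = responsibleNodes.get(random.nextInt(responsibleNodes.size()));
        return candidate.getId().equals(localNodeId) ? null : candidate;
    }
}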
+ * + * @param responsibleNodes - nodes responsible for a certain service and task type + */ +public record RateLimitAssignment(List responsibleNodes) {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java index b74e473155aec..c7509b44ac1a8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java @@ -63,7 +63,7 @@ public RateLimiter(double accumulatedTokensLimit, double tokensPerTimeUnit, Time setRate(accumulatedTokensLimit, tokensPerTimeUnit, unit); } - public final synchronized void setRate(double newAccumulatedTokensLimit, double newTokensPerTimeUnit, TimeUnit newUnit) { + public synchronized void setRate(double newAccumulatedTokensLimit, double newTokensPerTimeUnit, TimeUnit newUnit) { Objects.requireNonNull(newUnit); if (newAccumulatedTokensLimit < 0) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java index ec4550b036d23..c8e544c26f293 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java @@ -88,6 +88,11 @@ protected AmazonBedrockRequestSender( ); } + @Override + public void updateRateLimitDivisor(int rateLimitDivisor) { + executorService.updateRateLimitDivisor(rateLimitDivisor); + } + @Override public void start() { if (started.compareAndSet(false, true)) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java index 63c042ce8a623..6c7c6e0d114c7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java @@ -21,6 +21,8 @@ public interface RequestExecutor { void shutdown(); + void updateRateLimitDivisor(int newDivisor); + boolean isShutdown(); boolean isTerminated(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java index 42671b8166537..689c9e2ec8fc1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java @@ -111,6 +111,10 @@ public void start() { } } + public void updateRateLimitDivisor(int rateLimitDivisor) { + service.updateRateLimitDivisor(rateLimitDivisor); + } + private void waitForStartToComplete() { try { if (startCompleted.await(START_COMPLETED_WAIT_TIME.getSeconds(), TimeUnit.SECONDS) == false) { diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java index ad1324d0a315f..5ec2acab70596 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java @@ -19,6 +19,7 @@ import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.common.AdjustableCapacityBlockingQueue; +import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; import org.elasticsearch.xpack.inference.common.RateLimiter; import org.elasticsearch.xpack.inference.external.http.RequestExecutor; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; @@ -36,6 +37,7 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; @@ -92,12 +94,22 @@ interface RateLimiterCreator { RateLimiter create(double accumulatedTokensLimit, double tokensPerTimeUnit, TimeUnit unit); } + // TODO: for later (after 8.18) + // TODO: pass in divisor to RateLimiterCreator + // TODO: another map for service/task-type-key -> set of RateLimitingEndpointHandler (used for updates; update divisor and then update + // all endpoint handlers) + // TODO: one map for service/task-type-key -> divisor (this gets also read when we create an inference endpoint) + // TODO: divisor value read/writes need to be synchronized in some way + // default for testing static final RateLimiterCreator DEFAULT_RATE_LIMIT_CREATOR = RateLimiter::new; private static final Logger logger = LogManager.getLogger(RequestExecutorService.class); private static final TimeValue RATE_LIMIT_GROUP_CLEANUP_INTERVAL = TimeValue.timeValueDays(1); private final ConcurrentMap rateLimitGroupings = new ConcurrentHashMap<>(); + // TODO: add one atomic integer (number of nodes); also explain the assumption and why this works + // TODO: document that this impacts chat completion (and increase the default rate limit) + private final AtomicInteger rateLimitDivisor = new AtomicInteger(1); private final ThreadPool threadPool; private final CountDownLatch startupLatch; private final CountDownLatch terminationLatch = new CountDownLatch(1); @@ -174,6 +186,19 @@ public int queueSize() { return rateLimitGroupings.values().stream().mapToInt(RateLimitingEndpointHandler::queueSize).sum(); } + @Override + public void updateRateLimitDivisor(int numResponsibleNodes) { + // in the unlikely case where we get an invalid value, we'll just ignore it + if (numResponsibleNodes <= 0) { + return; + } + + rateLimitDivisor.set(numResponsibleNodes); + for (var rateLimitingEndpointHandler : rateLimitGroupings.values()) { + rateLimitingEndpointHandler.updateTokensPerTimeUnit(rateLimitDivisor.get()); + } + } + /** * Begin servicing tasks. *
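The divisor handling added to this class can be read as the following stand-alone sketch (simplified names, not the production code): the executor keeps the latest divisor in an AtomicInteger, ignores non-positive values, rescales every existing endpoint handler, and hands the current value to handlers created later.

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;

// Stand-alone illustration of how a divisor update propagates; names are simplified.
final class DivisorPropagationExample {
    private final AtomicInteger rateLimitDivisor = new AtomicInteger(1);
    private final List<EndpointHandler> handlers = new CopyOnWriteArrayList<>();

    void updateRateLimitDivisor(int numResponsibleNodes) {
        if (numResponsibleNodes <= 0) {
            return; // ignore invalid values, mirroring the guard above
        }
        rateLimitDivisor.set(numResponsibleNodes);
        for (EndpointHandler handler : handlers) {
            handler.updateTokensPerTimeUnit(rateLimitDivisor.get());
        }
    }

    EndpointHandler newHandler(long configuredRequestsPerTimeUnit) {
        // Handlers created after an update start out with the current divisor already applied.
        var handler = new EndpointHandler(configuredRequestsPerTimeUnit, rateLimitDivisor.get());
        handlers.add(handler);
        return handler;
    }

    static final class EndpointHandler {
        private final long originalRequestsPerTimeUnit;
        private volatile double tokensPerTimeUnit;

        EndpointHandler(long originalRequestsPerTimeUnit, int divisor) {
            this.originalRequestsPerTimeUnit = originalRequestsPerTimeUnit;
            updateTokensPerTimeUnit(divisor);
        }

        void updateTokensPerTimeUnit(int divisor) {
            // "Node-local" rate: the configured limit shared by the nodes responsible for this service/task type.
            tokensPerTimeUnit = (double) originalRequestsPerTimeUnit / divisor;
        }

        double tokensPerTimeUnit() {
            return tokensPerTimeUnit;
        }
    }
}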

@@ -299,9 +324,12 @@ public void execute( clock, requestManager.rateLimitSettings(), this::isShutdown, - rateLimiterCreator + rateLimiterCreator, + rateLimitDivisor.get() ); + // TODO: add or create/compute if absent set for new map (service/task-type-key -> rate limit endpoint handler) + endpointHandler.init(); return endpointHandler; }); @@ -314,7 +342,7 @@ public void execute( * This allows many requests to be serialized if they are being sent too fast. If the rate limit has not been met they will be sent * as soon as a thread is available. */ - private static class RateLimitingEndpointHandler { + static class RateLimitingEndpointHandler { private static final TimeValue NO_TASKS_AVAILABLE = TimeValue.MAX_VALUE; private static final TimeValue EXECUTED_A_TASK = TimeValue.ZERO; @@ -329,6 +357,8 @@ private static class RateLimitingEndpointHandler { private final Clock clock; private final RateLimiter rateLimiter; private final RequestExecutorServiceSettings requestExecutorServiceSettings; + private final RateLimitSettings rateLimitSettings; + private final Long originalRequestsPerTimeUnit; RateLimitingEndpointHandler( String id, @@ -338,7 +368,8 @@ private static class RateLimitingEndpointHandler { Clock clock, RateLimitSettings rateLimitSettings, Supplier isShutdownMethod, - RateLimiterCreator rateLimiterCreator + RateLimiterCreator rateLimiterCreator, + Integer rateLimitDivisor ) { this.requestExecutorServiceSettings = Objects.requireNonNull(settings); this.id = Objects.requireNonNull(id); @@ -346,6 +377,8 @@ private static class RateLimitingEndpointHandler { this.requestSender = Objects.requireNonNull(requestSender); this.clock = Objects.requireNonNull(clock); this.isShutdownMethod = Objects.requireNonNull(isShutdownMethod); + this.rateLimitSettings = Objects.requireNonNull(rateLimitSettings); + this.originalRequestsPerTimeUnit = rateLimitSettings.requestsPerTimeUnit(); Objects.requireNonNull(rateLimitSettings); Objects.requireNonNull(rateLimiterCreator); @@ -355,12 +388,29 @@ private static class RateLimitingEndpointHandler { rateLimitSettings.timeUnit() ); + this.updateTokensPerTimeUnit(rateLimitDivisor); } public void init() { requestExecutorServiceSettings.registerQueueCapacityCallback(id, this::onCapacityChange); } + /** + * This method is solely called by {@link InferenceServiceNodeLocalRateLimitCalculator} to update + * rate limits, so they're "node-local". + * The general idea is described in {@link InferenceServiceNodeLocalRateLimitCalculator} in more detail. 
+ * + * @param divisor - divisor to divide the initial requests per time unit by + */ + public synchronized void updateTokensPerTimeUnit(Integer divisor) { + double updatedTokensPerTimeUnit = (double) originalRequestsPerTimeUnit / divisor; + rateLimiter.setRate(ACCUMULATED_TOKENS_LIMIT, updatedTokensPerTimeUnit, rateLimitSettings.timeUnit()); + } + + public String id() { + return id; + } + private void onCapacityChange(int capacity) { logger.debug(() -> Strings.format("Executor service grouping [%s] setting queue capacity to [%s]", id, capacity)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java index 853d6fdcb2473..aa606e8c7cc5c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java @@ -30,4 +30,6 @@ void execute( // executePreparedRequest() which will execute all prepared requests aka sends the batch String inferenceEntityId(); + + // TODO: add service() and taskType() } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java index 3975a554586b7..fed92263f9999 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java @@ -27,6 +27,8 @@ void send( ActionListener listener ); + void updateRateLimitDivisor(int rateLimitDivisor); + void sendWithoutQueuing( Logger logger, Request request, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java index ac6e57d31b740..9695dbf0d210c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java @@ -47,7 +47,7 @@ public SenderService(HttpRequestSender.Factory factory, ServiceComponents servic this.serviceComponents = Objects.requireNonNull(serviceComponents); } - protected Sender getSender() { + public Sender getSender() { return sender; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettings.java index 3c8182a7d41a4..293ca1bcb41c0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettings.java @@ -36,7 +36,7 @@ public class ElasticInferenceServiceCompletionServiceSettings extends FilteredXC public static final String NAME = "elastic_inference_service_completion_service_settings"; // TODO what value do we put here? 
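Two small API changes earlier in this diff (adding updateRateLimitDivisor to Sender and making SenderService#getSender public) exist so the calculator can reach each service's sender, and the default completion rate limit in the hunk below is raised from 240 to 720, which keeps the per-node share at the previous 240 when the maximum grouping of three nodes is assigned. A rough sketch of the propagation path, applicable only to services backed by a SenderService:

import org.elasticsearch.inference.InferenceService;
import org.elasticsearch.xpack.inference.external.http.sender.Sender;
import org.elasticsearch.xpack.inference.services.SenderService;

// Illustrative propagation path: calculator -> SenderService -> Sender -> RequestExecutorService -> endpoint handlers.
final class DivisorCallPathExample {
    static void propagate(InferenceService service, int numResponsibleNodes) {
        if (service instanceof SenderService senderService) {
            Sender sender = senderService.getSender();
            // HttpRequestSender forwards this to its RequestExecutorService, which rescales all endpoint handlers.
            sender.updateRateLimitDivisor(numResponsibleNodes);
        }
    }
}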
- private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(240L); + private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(720L); public static ElasticInferenceServiceCompletionServiceSettings fromMap(Map map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java index c0fc818e421d0..4fa0a1ec49c74 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; @@ -21,11 +22,13 @@ import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.BaseInferenceActionRequest; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; +import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; import org.junit.Before; @@ -61,6 +64,9 @@ public abstract class BaseTransportInferenceActionTestCase createAction( ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, - StreamingTaskManager streamingTaskManager + StreamingTaskManager streamingTaskManager, + InferenceServiceNodeLocalRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + NodeClient nodeClient, + ThreadPool threadPool ); protected abstract Request createRequest(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java index c303e029cb415..e71d15dbe0420 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java @@ -8,16 +8,32 @@ package org.elasticsearch.xpack.inference.action; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.TaskType; import org.elasticsearch.license.MockLicenseState; +import 
org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; +import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.RateLimitAssignment; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; +import java.util.List; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class TransportInferenceActionTests extends BaseTransportInferenceActionTestCase { @@ -33,7 +49,10 @@ protected BaseTransportInferenceAction createAction( ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, - StreamingTaskManager streamingTaskManager + StreamingTaskManager streamingTaskManager, + InferenceServiceNodeLocalRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + NodeClient nodeClient, + ThreadPool threadPool ) { return new TransportInferenceAction( transportService, @@ -42,7 +61,10 @@ protected BaseTransportInferenceAction createAction( modelRegistry, serviceRegistry, inferenceStats, - streamingTaskManager + streamingTaskManager, + inferenceServiceNodeLocalRateLimitCalculator, + nodeClient, + threadPool ); } @@ -50,4 +72,108 @@ protected BaseTransportInferenceAction createAction( protected InferenceAction.Request createRequest() { return mock(); } + + public void testNoRerouting_WhenTaskTypeNotSupported() { + TaskType unsupportedTaskType = TaskType.COMPLETION; + mockService(listener -> listener.onResponse(mock())); + + when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, unsupportedTaskType)).thenReturn(false); + + var listener = doExecute(unsupportedTaskType); + + verify(listener).onResponse(any()); + // Verify request was handled locally (not rerouted using TransportService) + verify(transportService, never()).sendRequest(any(), any(), any(), any()); + } + + public void testNoRerouting_WhenNoGroupingCalculatedYet() { + mockService(listener -> listener.onResponse(mock())); + + when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(null); + + var listener = doExecute(taskType); + + verify(listener).onResponse(any()); + // Verify request was handled locally (not rerouted using TransportService) + verify(transportService, never()).sendRequest(any(), any(), any(), any()); + } + + public void testNoRerouting_WhenEmptyNodeList() { + mockService(listener -> listener.onResponse(mock())); + + when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn( + new RateLimitAssignment(List.of()) + ); + + var listener = 
doExecute(taskType); + + verify(listener).onResponse(any()); + // Verify request was handled locally (not rerouted using TransportService) + verify(transportService, never()).sendRequest(any(), any(), any(), any()); + } + + public void testRerouting_ToOtherNode() { + DiscoveryNode otherNode = mock(DiscoveryNode.class); + when(otherNode.getId()).thenReturn("other-node"); + + // The local node is different to the "other-node" responsible for serviceId + when(nodeClient.getLocalNodeId()).thenReturn("local-node"); + when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + // Requests for serviceId are always routed to "other-node" + var assignment = new RateLimitAssignment(List.of(otherNode)); + when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); + + mockService(listener -> listener.onResponse(mock())); + var listener = doExecute(taskType); + + // Verify request was rerouted + verify(transportService).sendRequest(same(otherNode), eq(InferenceAction.NAME), any(), any()); + // Verify local execution didn't happen + verify(listener, never()).onResponse(any()); + } + + public void testRerouting_ToLocalNode_WithoutGoingThroughTransportLayerAgain() { + DiscoveryNode localNode = mock(DiscoveryNode.class); + String localNodeId = "local-node"; + when(localNode.getId()).thenReturn(localNodeId); + + // The local node is the only one responsible for serviceId + when(nodeClient.getLocalNodeId()).thenReturn(localNodeId); + when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + var assignment = new RateLimitAssignment(List.of(localNode)); + when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); + + mockService(listener -> listener.onResponse(mock())); + var listener = doExecute(taskType); + + verify(listener).onResponse(any()); + // Verify request was handled locally (not rerouted using TransportService) + verify(transportService, never()).sendRequest(any(), any(), any(), any()); + } + + public void testRerouting_HandlesTransportException_FromOtherNode() { + DiscoveryNode otherNode = mock(DiscoveryNode.class); + when(otherNode.getId()).thenReturn("other-node"); + + when(nodeClient.getLocalNodeId()).thenReturn("local-node"); + when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + var assignment = new RateLimitAssignment(List.of(otherNode)); + when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); + + mockService(listener -> listener.onResponse(mock())); + + TransportException expectedException = new TransportException("Failed to route"); + doAnswer(invocation -> { + TransportResponseHandler handler = invocation.getArgument(3); + handler.handleException(expectedException); + return null; + }).when(transportService).sendRequest(any(), any(), any(), any()); + + var listener = doExecute(taskType); + + // Verify exception was propagated from "other-node" to "local-node" + verify(listener).onFailure(same(expectedException)); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java index e8e7d9ac21bed..4ed69e5abe537 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java @@ -9,13 +9,16 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.TaskType; import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; +import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; @@ -45,7 +48,10 @@ protected BaseTransportInferenceAction createAc ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, - StreamingTaskManager streamingTaskManager + StreamingTaskManager streamingTaskManager, + InferenceServiceNodeLocalRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + NodeClient nodeClient, + ThreadPool threadPool ) { return new TransportUnifiedCompletionInferenceAction( transportService, @@ -54,7 +60,10 @@ protected BaseTransportInferenceAction createAc modelRegistry, serviceRegistry, inferenceStats, - streamingTaskManager + streamingTaskManager, + inferenceServiceNodeLocalRateLimitCalculator, + nodeClient, + threadPool ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java new file mode 100644 index 0000000000000..f6bc7e5981411 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java @@ -0,0 +1,205 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.services.SenderService; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Set; + +import static org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator.DEFAULT_MAX_NODES_PER_GROUPING; +import static org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator.SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS; +import static org.hamcrest.Matchers.equalTo; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 0) +public class InferenceServiceNodeLocalRateLimitCalculatorTests extends ESIntegTestCase { + + public void setUp() throws Exception { + super.setUp(); + } + + public void testInitialClusterGrouping_Correct() { + // Start with 2-5 nodes + var numNodes = randomIntBetween(2, 5); + var nodeNames = internalCluster().startNodes(numNodes); + ensureStableCluster(numNodes); + + RateLimitAssignment firstAssignment = null; + + for (String nodeName : nodeNames) { + var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeName); + + // Check first node's assignments + if (firstAssignment == null) { + // Get assignment for a specific service (e.g., EIS) + firstAssignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, TaskType.SPARSE_EMBEDDING); + + assertNotNull(firstAssignment); + // Verify there are assignments for this service + assertFalse(firstAssignment.responsibleNodes().isEmpty()); + } else { + // Verify other nodes see the same assignment + var currentAssignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, TaskType.SPARSE_EMBEDDING); + assertEquals(firstAssignment, currentAssignment); + } + } + } + + public void testNumberOfNodesPerGroup_Decreases_When_NodeLeavesCluster() throws IOException { + // Start with 3-5 nodes + var numNodes = randomIntBetween(3, 5); + var nodeNames = internalCluster().startNodes(numNodes); + ensureStableCluster(numNodes); + + var nodeLeftInCluster = nodeNames.getFirst(); + var currentNumberOfNodes = numNodes; + + // Stop all nodes except one + for (String nodeName : nodeNames) { + if (nodeName.equals(nodeLeftInCluster)) { + continue; + } + internalCluster().stopNode(nodeName); + currentNumberOfNodes--; + ensureStableCluster(currentNumberOfNodes); + } + + var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeLeftInCluster); + + Set supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); + + // Check assignments for each supported service + for (var service : supportedServices) { + var assignment = calculator.getRateLimitAssignment(service, TaskType.SPARSE_EMBEDDING); + + assertNotNull(assignment); + // Should have exactly one responsible node + assertEquals(1, assignment.responsibleNodes().size()); + // That node should be our remaining node + assertEquals(nodeLeftInCluster, assignment.responsibleNodes().get(0).getName()); + } + } + + public void testGrouping_RespectsMaxNodesPerGroupingLimit() { + // Start with more nodes possible 
per grouping + var numNodes = DEFAULT_MAX_NODES_PER_GROUPING + randomIntBetween(1, 3); + var nodeNames = internalCluster().startNodes(numNodes); + ensureStableCluster(numNodes); + + var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + + Set supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); + + for (var service : supportedServices) { + var assignment = calculator.getRateLimitAssignment(service, TaskType.SPARSE_EMBEDDING); + + assertNotNull(assignment); + assertThat(DEFAULT_MAX_NODES_PER_GROUPING, equalTo(assignment.responsibleNodes().size())); + } + } + + public void testInitialRateLimitsCalculation_Correct() throws IOException { + // Start with max nodes per grouping (=3) + int numNodes = DEFAULT_MAX_NODES_PER_GROUPING; + var nodeNames = internalCluster().startNodes(numNodes); + ensureStableCluster(numNodes); + + var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + + Set supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); + + for (var serviceName : supportedServices) { + try (var serviceRegistry = calculator.serviceRegistry()) { + var serviceOptional = serviceRegistry.getService(serviceName); + assertTrue(serviceOptional.isPresent()); + var service = serviceOptional.get(); + + if ((service instanceof SenderService senderService)) { + var sender = senderService.getSender(); + if (sender instanceof HttpRequestSender httpSender) { + var assignment = calculator.getRateLimitAssignment(service.name(), TaskType.SPARSE_EMBEDDING); + + assertNotNull(assignment); + assertThat(DEFAULT_MAX_NODES_PER_GROUPING, equalTo(assignment.responsibleNodes().size())); + } + } + } + + } + } + + public void testRateLimits_Decrease_OnNodeJoin() { + // Start with 2 nodes + var initialNodes = 2; + var nodeNames = internalCluster().startNodes(initialNodes); + ensureStableCluster(initialNodes); + + var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + + for (var serviceName : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet()) { + var configs = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.get(serviceName); + for (var config : configs) { + // Get initial assignments and rate limits + var initialAssignment = calculator.getRateLimitAssignment(serviceName, config.taskType()); + assertEquals(2, initialAssignment.responsibleNodes().size()); + + // Add a new node + internalCluster().startNode(); + ensureStableCluster(initialNodes + 1); + + // Get updated assignments + var updatedAssignment = calculator.getRateLimitAssignment(serviceName, config.taskType()); + + // Verify number of responsible nodes increased + assertEquals(3, updatedAssignment.responsibleNodes().size()); + } + } + } + + public void testRateLimits_Increase_OnNodeLeave() throws IOException { + // Start with max nodes per grouping (=3) + int numNodes = DEFAULT_MAX_NODES_PER_GROUPING; + var nodeNames = internalCluster().startNodes(numNodes); + ensureStableCluster(numNodes); + + var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + + for (var serviceName : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet()) { + var configs = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.get(serviceName); + for (var config : configs) { + // Get initial assignments and rate limits + var initialAssignment = calculator.getRateLimitAssignment(serviceName, config.taskType()); + 
assertThat(DEFAULT_MAX_NODES_PER_GROUPING, equalTo(initialAssignment.responsibleNodes().size())); + + // Remove a node + var nodeToRemove = nodeNames.get(numNodes - 1); + internalCluster().stopNode(nodeToRemove); + ensureStableCluster(numNodes - 1); + + // Get updated assignments + var updatedAssignment = calculator.getRateLimitAssignment(serviceName, config.taskType()); + + // Verify number of responsible nodes decreased + assertThat(2, equalTo(updatedAssignment.responsibleNodes().size())); + } + } + } + + @Override + protected Collection> nodePlugins() { + return Arrays.asList(LocalStateInferencePlugin.class); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java index ed5aa5ba7bea9..57b9b03b9781b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java @@ -63,6 +63,11 @@ public void start() { // do nothing } + @Override + public void updateRateLimitDivisor(int rateLimitDivisor) { + // do nothing + } + @Override public void send( RequestManager requestCreator, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettingsTests.java index 0f6386f670338..c530ff5c03482 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettingsTests.java @@ -53,7 +53,7 @@ public void testFromMap() { ConfigurationParseContext.REQUEST ); - assertThat(serviceSettings, is(new ElasticInferenceServiceCompletionServiceSettings(modelId, new RateLimitSettings(240L)))); + assertThat(serviceSettings, is(new ElasticInferenceServiceCompletionServiceSettings(modelId, new RateLimitSettings(720L)))); } public void testFromMap_MissingModelId_ThrowsException() { From 716614d08c2c7563ea23b2c15884ff979f7df8b2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 11:00:05 +1100 Subject: [PATCH 251/383] Add 8.18 to branches.json --- branches.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/branches.json b/branches.json index 95fbdb1efd655..01e79708d4b60 100644 --- a/branches.json +++ b/branches.json @@ -7,6 +7,9 @@ { "branch": "8.16" }, + { + "branch": "8.18" + }, { "branch": "8.17" }, From eb6f95d6bb8b607faf0ea1ab794142b16e6f304d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 11:00:07 +1100 Subject: [PATCH 252/383] [renovate] Update branches config --- renovate.json | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/renovate.json b/renovate.json index 71c6301f8e0c2..1d05594395159 100644 --- a/renovate.json +++ b/renovate.json @@ -7,8 +7,20 @@ "schedule": [ "after 
1pm on tuesday" ], - "labels": [">non-issue", ":Delivery/Packaging", "Team:Delivery", "auto-merge-without-approval"], - "baseBranches": ["main", "8.x", "8.17", "8.16"], + "labels": [ + ">non-issue", + ":Delivery/Packaging", + "Team:Delivery", + "auto-merge-without-approval" + ], + "baseBranches": [ + "main", + "8.16", + "8.18", + "8.17", + "8.x", + "7.17" + ], "packageRules": [ { "groupName": "wolfi (versioned)", From 74e8ae5448f39e6024842285626a0e1049e6d0de Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Wed, 29 Jan 2025 16:15:46 -0800 Subject: [PATCH 253/383] Add single flag entitlement validation (#121234) This adds basic flag entitlement validation when creating PolicyManager. If a module has the same flag entitlement as part of it's policy multiple times we will throw an IllegalArgumentException. With this validation we can safely assume FileEntitlement is the only one we currently have that allows multiple entitlements in a policy. --- .../runtime/policy/PolicyManager.java | 32 +++++++++ .../runtime/policy/PolicyManagerTests.java | 72 +++++++++++++++++++ 2 files changed, 104 insertions(+) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index 04942e15d10a4..73e9d0c446bef 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -20,6 +20,7 @@ import java.lang.module.ModuleFinder; import java.lang.module.ModuleReference; import java.nio.file.Path; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; @@ -120,12 +121,43 @@ public PolicyManager( this.pluginResolver = pluginResolver; this.agentsPackageName = agentsPackageName; this.entitlementsModule = entitlementsModule; + + for (var e : serverEntitlements.entrySet()) { + validateEntitlementsPerModule("server", e.getKey(), e.getValue()); + } + validateEntitlementsPerModule("agent", "unnamed", agentEntitlements); + for (var p : pluginsEntitlements.entrySet()) { + for (var m : p.getValue().entrySet()) { + validateEntitlementsPerModule(p.getKey(), m.getKey(), m.getValue()); + } + } } private static Map> buildScopeEntitlementsMap(Policy policy) { return policy.scopes().stream().collect(toUnmodifiableMap(Scope::moduleName, Scope::entitlements)); } + private static void validateEntitlementsPerModule(String sourceName, String moduleName, List entitlements) { + Set> flagEntitlements = new HashSet<>(); + for (var e : entitlements) { + if (e instanceof FileEntitlement) { + continue; + } + if (flagEntitlements.contains(e.getClass())) { + throw new IllegalArgumentException( + "[" + + sourceName + + "] using module [" + + moduleName + + "] found duplicate flag entitlements [" + + e.getClass().getName() + + "]" + ); + } + flagEntitlements.add(e.getClass()); + } + } + public void checkStartProcess(Class callerClass) { neverEntitled(callerClass, "start process"); } diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index 20035d0bb258b..6854ef54ca5f0 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ 
b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -286,6 +286,78 @@ public void testAgentsEntitlements() throws IOException, ClassNotFoundException } } + public void testDuplicateFlagEntitlements() { + IllegalArgumentException iae = expectThrows( + IllegalArgumentException.class, + () -> new PolicyManager( + new Policy( + "server", + List.of(new Scope("test", List.of(new CreateClassLoaderEntitlement(), new CreateClassLoaderEntitlement()))) + ), + List.of(), + Map.of(), + c -> "test", + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ) + ); + assertEquals( + "[server] using module [test] found duplicate flag entitlements " + + "[org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement]", + iae.getMessage() + ); + + iae = expectThrows( + IllegalArgumentException.class, + () -> new PolicyManager( + createEmptyTestServerPolicy(), + List.of(new CreateClassLoaderEntitlement(), new CreateClassLoaderEntitlement()), + Map.of(), + c -> "test", + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ) + ); + assertEquals( + "[agent] using module [unnamed] found duplicate flag entitlements " + + "[org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement]", + iae.getMessage() + ); + + iae = expectThrows( + IllegalArgumentException.class, + () -> new PolicyManager( + createEmptyTestServerPolicy(), + List.of(), + Map.of( + "plugin1", + new Policy( + "test", + List.of( + new Scope( + "test", + List.of( + new FileEntitlement("/test/path", FileEntitlement.Mode.READ), + new CreateClassLoaderEntitlement(), + new FileEntitlement("/test/test", FileEntitlement.Mode.READ), + new CreateClassLoaderEntitlement() + ) + ) + ) + ) + ), + c -> "plugin1", + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ) + ); + assertEquals( + "[plugin1] using module [test] found duplicate flag entitlements " + + "[org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement]", + iae.getMessage() + ); + } + private static Class makeClassInItsOwnModule() throws IOException, ClassNotFoundException { final Path home = createTempDir(); Path jar = createMockPluginJar(home); From 10aec784fec2c6ca35667586484eaae5840f3eea Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Wed, 29 Jan 2025 16:19:55 -0800 Subject: [PATCH 254/383] Ensure that feature flag is enabled in new KeywordFieldMapperTests (#121248) `DOC_VALUES_SPARSE_INDEX` is needed for tests to work. 
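A note on the guard this patch adds below: `assumeTrue` turns a disabled feature flag into a skipped test rather than a failure, so the flag-gated keyword-mapper assertions only run when `DOC_VALUES_SPARSE_INDEX` is enabled. A minimal sketch of the pattern, assuming the same flag constant and assertion helper used in the patch (the test name and body here are illustrative only):

```
// Sketch of the guard pattern added throughout KeywordFieldMapperTests below.
public void testFlagGatedKeywordMapping() throws IOException {
    // If the feature flag is off, assumeTrue aborts the test as "skipped" instead of
    // letting it fail on mapper logic that only exists behind the flag.
    assumeTrue("Needs feature flag to be enabled", FieldMapper.DOC_VALUES_SPARSE_INDEX.isEnabled());

    // ... build a LOGSDB-mode MapperService and assert on the keyword field type, as in the patch.
}
```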
--- muted-tests.yml | 6 ------ .../index/mapper/KeywordFieldMapperTests.java | 16 ++++++++++++++++ 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 53ec9de7fd46b..10197fdd19b2f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -319,12 +319,6 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cat.aliases/10_basic/Simple alias} issue: https://github.com/elastic/elasticsearch/issues/121186 -- class: org.elasticsearch.index.mapper.KeywordFieldMapperTests - method: testFieldTypeWithSkipDocValues_LogsDbMode - issue: https://github.com/elastic/elasticsearch/issues/121232 -- class: org.elasticsearch.index.mapper.KeywordFieldMapperTests - method: testFieldTypeDefault_ConfiguredDocValues - issue: https://github.com/elastic/elasticsearch/issues/121233 - class: org.elasticsearch.xpack.ml.integration.ClassificationIT method: testWithDatastreams issue: https://github.com/elastic/elasticsearch/issues/121236 diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index d78f2110daa67..924c32f527f7a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -773,6 +773,8 @@ public void testDocValuesLoadedFromStoredSynthetic() throws IOException { } public void testFieldTypeWithSkipDocValues_LogsDbMode() throws IOException { + assumeTrue("Needs feature flag to be enabled", FieldMapper.DOC_VALUES_SPARSE_INDEX.isEnabled()); + final MapperService mapperService = createMapperService( Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) @@ -792,6 +794,8 @@ public void testFieldTypeWithSkipDocValues_LogsDbMode() throws IOException { } public void testFieldTypeDefault_StandardMode() throws IOException { + assumeTrue("Needs feature flag to be enabled", FieldMapper.DOC_VALUES_SPARSE_INDEX.isEnabled()); + final MapperService mapperService = createMapperService( Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.name()) @@ -811,6 +815,8 @@ public void testFieldTypeDefault_StandardMode() throws IOException { } public void testFieldTypeDefault_NonMatchingFieldName() throws IOException { + assumeTrue("Needs feature flag to be enabled", FieldMapper.DOC_VALUES_SPARSE_INDEX.isEnabled()); + final MapperService mapperService = createMapperService( Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) @@ -830,6 +836,8 @@ public void testFieldTypeDefault_NonMatchingFieldName() throws IOException { } public void testFieldTypeDefault_ConfiguredIndexed() throws IOException { + assumeTrue("Needs feature flag to be enabled", FieldMapper.DOC_VALUES_SPARSE_INDEX.isEnabled()); + final MapperService mapperService = createMapperService( Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) @@ -850,6 +858,8 @@ public void testFieldTypeDefault_ConfiguredIndexed() throws IOException { } public void testFieldTypeDefault_ConfiguredDocValues() throws IOException { + assumeTrue("Needs feature flag to be enabled", FieldMapper.DOC_VALUES_SPARSE_INDEX.isEnabled()); + final MapperService mapperService = createMapperService( Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) @@ -870,6 +880,8 @@ public void testFieldTypeDefault_ConfiguredDocValues() throws IOException { } 
public void testFieldTypeDefault_LogsDbMode_NonSortField() throws IOException { + assumeTrue("Needs feature flag to be enabled", FieldMapper.DOC_VALUES_SPARSE_INDEX.isEnabled()); + final MapperService mapperService = createMapperService( Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()).build(), mapping(b -> { @@ -886,6 +898,8 @@ public void testFieldTypeDefault_LogsDbMode_NonSortField() throws IOException { } public void testFieldTypeWithSkipDocValues_IndexedFalseDocValuesTrue() throws IOException { + assumeTrue("Needs feature flag to be enabled", FieldMapper.DOC_VALUES_SPARSE_INDEX.isEnabled()); + final MapperService mapperService = createMapperService( Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) @@ -907,6 +921,8 @@ public void testFieldTypeWithSkipDocValues_IndexedFalseDocValuesTrue() throws IO } public void testFieldTypeDefault_IndexedFalseDocValuesFalse() throws IOException { + assumeTrue("Needs feature flag to be enabled", FieldMapper.DOC_VALUES_SPARSE_INDEX.isEnabled()); + final MapperService mapperService = createMapperService( Settings.builder() .put(IndexSettings.MODE.getKey(), IndexMode.LOGSDB.name()) From a92e724b202b87bb40585e6056e67394df36b307 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 11:30:10 +1100 Subject: [PATCH 255/383] Add 9.0 to branches.json --- branches.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/branches.json b/branches.json index 01e79708d4b60..81d5a46991445 100644 --- a/branches.json +++ b/branches.json @@ -7,6 +7,9 @@ { "branch": "8.16" }, + { + "branch": "9.0" + }, { "branch": "8.18" }, From d67167def775ced047d832bd72ffcd4e1d946b98 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 11:30:11 +1100 Subject: [PATCH 256/383] [renovate] Update branches config --- renovate.json | 1 + 1 file changed, 1 insertion(+) diff --git a/renovate.json b/renovate.json index 1d05594395159..29406c488f94e 100644 --- a/renovate.json +++ b/renovate.json @@ -16,6 +16,7 @@ "baseBranches": [ "main", "8.16", + "9.0", "8.18", "8.17", "8.x", From 66aac2d03eb69d90ad0b1a0448c72c42f94da166 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 11:33:27 +1100 Subject: [PATCH 257/383] Mute org.elasticsearch.xpack.test.rest.XPackRestIT org.elasticsearch.xpack.test.rest.XPackRestIT #120816 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 10197fdd19b2f..26d3ecff7e700 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -334,6 +334,8 @@ tests: - class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityEsqlIT method: testCrossClusterAsyncQueryStop issue: https://github.com/elastic/elasticsearch/issues/121249 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + issue: https://github.com/elastic/elasticsearch/issues/120816 # Examples: # From 7fd7d6467aa286146e5f93376d4b67f143893af9 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 11:59:25 +1100 Subject: [PATCH 258/383] Mute org.elasticsearch.upgrades.VectorSearchIT testBBQVectorSearch {upgradedNodes=0} #121253 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 26d3ecff7e700..dade2bd712c05 100644 --- a/muted-tests.yml 
+++ b/muted-tests.yml @@ -336,6 +336,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/121249 - class: org.elasticsearch.xpack.test.rest.XPackRestIT issue: https://github.com/elastic/elasticsearch/issues/120816 +- class: org.elasticsearch.upgrades.VectorSearchIT + method: testBBQVectorSearch {upgradedNodes=0} + issue: https://github.com/elastic/elasticsearch/issues/121253 # Examples: # From fc500d147cdbc6bf7fd9522af95290e5250fa9da Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 18:10:11 +1100 Subject: [PATCH 259/383] Mute org.elasticsearch.lucene.FullClusterRestartLuceneIndexCompatibilityIT org.elasticsearch.lucene.FullClusterRestartLuceneIndexCompatibilityIT #121257 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index dade2bd712c05..1b492c4efaf3d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -339,6 +339,8 @@ tests: - class: org.elasticsearch.upgrades.VectorSearchIT method: testBBQVectorSearch {upgradedNodes=0} issue: https://github.com/elastic/elasticsearch/issues/121253 +- class: org.elasticsearch.lucene.FullClusterRestartLuceneIndexCompatibilityIT + issue: https://github.com/elastic/elasticsearch/issues/121257 # Examples: # From d537a1f32312abc7526b509416d2c511e31c5136 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 30 Jan 2025 10:03:49 +0100 Subject: [PATCH 260/383] Remove redundant LatchedActionListener from ESIntegTestCase (#121244) This is effectively the same as the other class. The logging is irrelevant and the dead `addError` is too => lets remove this. --- .../elasticsearch/test/ESIntegTestCase.java | 36 ++++--------------- 1 file changed, 6 insertions(+), 30 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index caa66e928827a..6dbcc798c3ae6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainRequest; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainResponse; import org.elasticsearch.action.admin.cluster.allocation.TransportClusterAllocationExplainAction; @@ -1753,7 +1754,8 @@ public void indexRandom(boolean forceRefresh, boolean dummyDocuments, boolean ma logger.info("Index [{}] docs async: [{}] bulk: [{}]", builders.size(), true, false); for (IndexRequestBuilder indexRequestBuilder : builders) { indexRequestBuilder.execute( - new LatchedActionListener(newLatch(inFlightAsyncOperations)).delegateResponse((l, e) -> fail(e)) + new LatchedActionListener(ActionListener.noop(), newLatch(inFlightAsyncOperations)) + .delegateResponse((l, e) -> fail(e)) ); postIndexAsyncActions(indicesArray, inFlightAsyncOperations, maybeFlush); } @@ -1845,17 +1847,17 @@ private void postIndexAsyncActions(String[] indices, List inFlig if (rarely()) { indicesAdmin().prepareRefresh(indices) .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .execute(new LatchedActionListener<>(newLatch(inFlightAsyncOperations))); + .execute(new 
LatchedActionListener<>(ActionListener.noop(), newLatch(inFlightAsyncOperations))); } else if (maybeFlush && rarely()) { indicesAdmin().prepareFlush(indices) .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .execute(new LatchedActionListener<>(newLatch(inFlightAsyncOperations))); + .execute(new LatchedActionListener<>(ActionListener.noop(), newLatch(inFlightAsyncOperations))); } else if (rarely()) { indicesAdmin().prepareForceMerge(indices) .setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setMaxNumSegments(between(1, 10)) .setFlush(maybeFlush && randomBoolean()) - .execute(new LatchedActionListener<>(newLatch(inFlightAsyncOperations))); + .execute(new LatchedActionListener<>(ActionListener.noop(), newLatch(inFlightAsyncOperations))); } } while (inFlightAsyncOperations.size() > MAX_IN_FLIGHT_ASYNC_INDEXES) { @@ -1939,32 +1941,6 @@ public enum Scope { int numClientNodes() default InternalTestCluster.DEFAULT_NUM_CLIENT_NODES; } - private class LatchedActionListener implements ActionListener { - private final CountDownLatch latch; - - LatchedActionListener(CountDownLatch latch) { - this.latch = latch; - } - - @Override - public final void onResponse(Response response) { - latch.countDown(); - } - - @Override - public final void onFailure(Exception t) { - try { - logger.info("Action Failed", t); - addError(t); - } finally { - latch.countDown(); - } - } - - protected void addError(Exception e) {} - - } - /** * Clears the given scroll Ids */ From a45d589a78d650c964deb8da7331e6f10a2576b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mariusz=20J=C3=B3zala?= <377355+jozala@users.noreply.github.com> Date: Thu, 30 Jan 2025 11:13:15 +0100 Subject: [PATCH 261/383] Revert "Add 8.18 to branches.json" (#121265) This reverts commit 716614d0 This is to temporarily mitigate the issue with CI builds failing due to missing ML 8.19 snapshots. 
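Returning to the `ESIntegTestCase` cleanup patch (#121244) above: the stock `org.elasticsearch.action.LatchedActionListener` counts its latch down after delegating to the wrapped listener, so wrapping `ActionListener.noop()` and routing failures to `fail(e)` via `delegateResponse` is effectively equivalent to the removed private wrapper for these tests. A sketch of the resulting call-site pattern, assuming the test-class helpers (`newLatch`, `fail`, `inFlightAsyncOperations`) already present in that patch:

```
// Wrap a no-op listener so the in-flight latch is released when the request completes,
// and turn any failure into an immediate test failure via delegateResponse.
indexRequestBuilder.execute(
    new LatchedActionListener<>(ActionListener.noop(), newLatch(inFlightAsyncOperations))
        .delegateResponse((l, e) -> fail(e))
);
```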
--- branches.json | 3 --- 1 file changed, 3 deletions(-) diff --git a/branches.json b/branches.json index 81d5a46991445..5084f5a4544a0 100644 --- a/branches.json +++ b/branches.json @@ -10,9 +10,6 @@ { "branch": "9.0" }, - { - "branch": "8.18" - }, { "branch": "8.17" }, From 894fa241a3ae5fe6eb2a804f0fa5f643c842fcc5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 21:32:33 +1100 Subject: [PATCH 262/383] Mute org.elasticsearch.test.rest.ClientYamlTestSuiteIT org.elasticsearch.test.rest.ClientYamlTestSuiteIT #121269 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1b492c4efaf3d..58e1ec165d7c4 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -341,6 +341,8 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/121253 - class: org.elasticsearch.lucene.FullClusterRestartLuceneIndexCompatibilityIT issue: https://github.com/elastic/elasticsearch/issues/121257 +- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT + issue: https://github.com/elastic/elasticsearch/issues/121269 # Examples: # From 80ebc6b924cc9559c2af6bf38cad34149adbb486 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 22:17:09 +1100 Subject: [PATCH 263/383] Mute org.elasticsearch.upgrades.VectorSearchIT testBBQVectorSearch {upgradedNodes=1} #121271 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 58e1ec165d7c4..89eb394e29fe9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -343,6 +343,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/121257 - class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/121269 +- class: org.elasticsearch.upgrades.VectorSearchIT + method: testBBQVectorSearch {upgradedNodes=1} + issue: https://github.com/elastic/elasticsearch/issues/121271 # Examples: # From df4173cd813e28059b89b94f8241c7c760c82743 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 22:17:19 +1100 Subject: [PATCH 264/383] Mute org.elasticsearch.upgrades.VectorSearchIT testBBQVectorSearch {upgradedNodes=2} #121272 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 89eb394e29fe9..55655c6dc6cd1 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -346,6 +346,9 @@ tests: - class: org.elasticsearch.upgrades.VectorSearchIT method: testBBQVectorSearch {upgradedNodes=1} issue: https://github.com/elastic/elasticsearch/issues/121271 +- class: org.elasticsearch.upgrades.VectorSearchIT + method: testBBQVectorSearch {upgradedNodes=2} + issue: https://github.com/elastic/elasticsearch/issues/121272 # Examples: # From 2d4a385cd40b72f469c8384bc73bd7e810293c8e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 30 Jan 2025 22:17:29 +1100 Subject: [PATCH 265/383] Mute org.elasticsearch.upgrades.VectorSearchIT testBBQVectorSearch {upgradedNodes=3} #121273 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 55655c6dc6cd1..df39bd09bfa2b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -349,6 +349,9 @@ tests: - class: org.elasticsearch.upgrades.VectorSearchIT method: testBBQVectorSearch {upgradedNodes=2} 
issue: https://github.com/elastic/elasticsearch/issues/121272 +- class: org.elasticsearch.upgrades.VectorSearchIT + method: testBBQVectorSearch {upgradedNodes=3} + issue: https://github.com/elastic/elasticsearch/issues/121273 # Examples: # From e260f29464f3d19e39b3c999c9cb75496166074e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Thu, 30 Jan 2025 12:29:24 +0100 Subject: [PATCH 266/383] [Entitlements] "dynamic" instrumentation method keys (#120811) --- libs/entitlement/asm-provider/build.gradle | 1 + .../src/main/java/module-info.java | 2 + .../impl/InstrumentationServiceImpl.java | 160 +++++++++++++++++- .../impl/InstrumentationServiceImplTests.java | 121 ++++++++++++- .../bridge/EntitlementChecker.java | 5 + .../EntitlementInitialization.java | 25 ++- .../InstrumentationService.java | 14 ++ .../api/ElasticsearchEntitlementChecker.java | 7 + .../runtime/policy/PolicyManager.java | 3 +- 9 files changed, 324 insertions(+), 14 deletions(-) diff --git a/libs/entitlement/asm-provider/build.gradle b/libs/entitlement/asm-provider/build.gradle index dcec0579a5bae..c6b51b7da3dff 100644 --- a/libs/entitlement/asm-provider/build.gradle +++ b/libs/entitlement/asm-provider/build.gradle @@ -11,6 +11,7 @@ apply plugin: 'elasticsearch.build' dependencies { compileOnly project(':libs:entitlement') + compileOnly project(':libs:core') implementation 'org.ow2.asm:asm:9.7.1' testImplementation project(":test:framework") testImplementation project(":libs:entitlement:bridge") diff --git a/libs/entitlement/asm-provider/src/main/java/module-info.java b/libs/entitlement/asm-provider/src/main/java/module-info.java index 8cbeafc9013aa..f953454f93b91 100644 --- a/libs/entitlement/asm-provider/src/main/java/module-info.java +++ b/libs/entitlement/asm-provider/src/main/java/module-info.java @@ -14,5 +14,7 @@ requires org.objectweb.asm; requires org.elasticsearch.entitlement; + requires static org.elasticsearch.base; // for SuppressForbidden + provides InstrumentationService with InstrumentationServiceImpl; } diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java index eaf4d0ad98ef5..5a7868325e20d 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.instrumentation.impl; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.instrumentation.CheckMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.instrumentation.Instrumenter; @@ -20,11 +21,15 @@ import org.objectweb.asm.Type; import java.io.IOException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; public class InstrumentationServiceImpl implements InstrumentationService { @@ -48,15 +53,15 @@ public MethodVisitor visitMethod( String[] exceptions ) { var mv = super.visitMethod(access, checkerMethodName, checkerMethodDescriptor, signature, exceptions); + if 
(checkerMethodName.startsWith(InstrumentationService.CHECK_METHOD_PREFIX)) { + var checkerMethodArgumentTypes = Type.getArgumentTypes(checkerMethodDescriptor); + var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes); - var checkerMethodArgumentTypes = Type.getArgumentTypes(checkerMethodDescriptor); - var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes); - - var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList(); - var checkMethod = new CheckMethod(Type.getInternalName(checkerClass), checkerMethodName, checkerParameterDescriptors); - - methodsToInstrument.put(methodToInstrument, checkMethod); + var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList(); + var checkMethod = new CheckMethod(Type.getInternalName(checkerClass), checkerMethodName, checkerParameterDescriptors); + methodsToInstrument.put(methodToInstrument, checkMethod); + } return mv; } }; @@ -64,6 +69,143 @@ public MethodVisitor visitMethod( return methodsToInstrument; } + @SuppressForbidden(reason = "Need access to abstract methods (protected/package internal) in base class") + @Override + public InstrumentationInfo lookupImplementationMethod( + Class targetSuperclass, + String methodName, + Class implementationClass, + Class checkerClass, + String checkMethodName, + Class... parameterTypes + ) throws NoSuchMethodException, ClassNotFoundException { + + var targetMethod = targetSuperclass.getDeclaredMethod(methodName, parameterTypes); + validateTargetMethod(implementationClass, targetMethod); + + var checkerAdditionalArguments = Stream.of(Class.class, targetSuperclass); + var checkMethodArgumentTypes = Stream.concat(checkerAdditionalArguments, Arrays.stream(parameterTypes)) + .map(Type::getType) + .toArray(Type[]::new); + + CheckMethod[] checkMethod = new CheckMethod[1]; + + try { + InstrumenterImpl.ClassFileInfo classFileInfo = InstrumenterImpl.getClassFileInfo(checkerClass); + ClassReader reader = new ClassReader(classFileInfo.bytecodes()); + ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) { + @Override + public MethodVisitor visitMethod( + int access, + String methodName, + String methodDescriptor, + String signature, + String[] exceptions + ) { + var mv = super.visitMethod(access, methodName, methodDescriptor, signature, exceptions); + if (methodName.equals(checkMethodName)) { + var methodArgumentTypes = Type.getArgumentTypes(methodDescriptor); + if (Arrays.equals(methodArgumentTypes, checkMethodArgumentTypes)) { + var checkerParameterDescriptors = Arrays.stream(methodArgumentTypes).map(Type::getDescriptor).toList(); + checkMethod[0] = new CheckMethod(Type.getInternalName(checkerClass), methodName, checkerParameterDescriptors); + } + } + return mv; + } + }; + reader.accept(visitor, 0); + } catch (IOException e) { + throw new ClassNotFoundException("Cannot find a definition for class [" + checkerClass.getName() + "]", e); + } + + if (checkMethod[0] == null) { + throw new NoSuchMethodException( + String.format( + Locale.ROOT, + "Cannot find a method with name [%s] and arguments [%s] in class [%s]", + checkMethodName, + Arrays.stream(checkMethodArgumentTypes).map(Type::toString).collect(Collectors.joining()), + checkerClass.getName() + ) + ); + } + + return new InstrumentationInfo( + new MethodKey( + Type.getInternalName(implementationClass), + targetMethod.getName(), + Arrays.stream(parameterTypes).map(c -> 
Type.getType(c).getInternalName()).toList() + ), + checkMethod[0] + ); + } + + private static void validateTargetMethod(Class implementationClass, Method targetMethod) { + if (targetMethod.getDeclaringClass().isAssignableFrom(implementationClass) == false) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Not an implementation class for %s: %s does not implement %s", + targetMethod.getName(), + implementationClass.getName(), + targetMethod.getDeclaringClass().getName() + ) + ); + } + if (Modifier.isPrivate(targetMethod.getModifiers())) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Not a valid instrumentation method: %s is private in %s", + targetMethod.getName(), + targetMethod.getDeclaringClass().getName() + ) + ); + } + if (Modifier.isStatic(targetMethod.getModifiers())) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Not a valid instrumentation method: %s is static in %s", + targetMethod.getName(), + targetMethod.getDeclaringClass().getName() + ) + ); + } + try { + var implementationMethod = implementationClass.getMethod(targetMethod.getName(), targetMethod.getParameterTypes()); + var methodModifiers = implementationMethod.getModifiers(); + if (Modifier.isAbstract(methodModifiers)) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Not a valid instrumentation method: %s is abstract in %s", + targetMethod.getName(), + implementationClass.getName() + ) + ); + } + if (Modifier.isPublic(methodModifiers) == false) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Not a valid instrumentation method: %s is not public in %s", + targetMethod.getName(), + implementationClass.getName() + ) + ); + } + } catch (NoSuchMethodException e) { + assert false + : String.format( + Locale.ROOT, + "Not a valid instrumentation method: %s cannot be found in %s", + targetMethod.getName(), + implementationClass.getName() + ); + } + } + private static final Type CLASS_TYPE = Type.getType(Class.class); static ParsedCheckerMethod parseCheckerMethodName(String checkerMethodName) { @@ -85,8 +227,8 @@ static ParsedCheckerMethod parseCheckerMethodName(String checkerMethodName) { String.format( Locale.ROOT, "Checker method %s has incorrect name format. 
" - + "It should be either check$$methodName (instance), check$package_ClassName$methodName (static) or " - + "check$package_ClassName$ (ctor)", + + "It should be either check$package_ClassName$methodName (instance), check$package_ClassName$$methodName (static) " + + "or check$package_ClassName$ (ctor)", checkerMethodName ) ); diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java index ab0d96a8df96d..33c48fb8823ca 100644 --- a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java @@ -29,7 +29,23 @@ public class InstrumentationServiceImplTests extends ESTestCase { final InstrumentationService instrumentationService = new InstrumentationServiceImpl(); - static class TestTargetClass {} + interface TestTargetInterface { + void instanceMethod(int x, String y); + } + + static class TestTargetClass implements TestTargetInterface { + @Override + public void instanceMethod(int x, String y) {} + } + + abstract static class TestTargetBaseClass { + abstract void instanceMethod(int x, String y); + } + + static class TestTargetImplementationClass extends TestTargetBaseClass { + @Override + public void instanceMethod(int x, String y) {} + } interface TestChecker { void check$org_example_TestTargetClass$$staticMethod(Class clazz, int arg0, String arg1, Object arg2); @@ -51,6 +67,14 @@ interface TestCheckerCtors { void check$org_example_TestTargetClass$(Class clazz, int x, String y); } + interface TestCheckerMixed { + void check$org_example_TestTargetClass$$staticMethod(Class clazz, int arg0, String arg1, Object arg2); + + void checkInstanceMethodManual(Class clazz, TestTargetInterface that, int x, String y); + + void checkInstanceMethodManual(Class clazz, TestTargetBaseClass that, int x, String y); + } + public void testInstrumentationTargetLookup() throws IOException { Map checkMethods = instrumentationService.lookupMethods(TestChecker.class); @@ -168,6 +192,101 @@ public void testInstrumentationTargetLookupWithCtors() throws IOException { ); } + public void testInstrumentationTargetLookupWithExtraMethods() throws IOException { + Map checkMethods = instrumentationService.lookupMethods(TestCheckerMixed.class); + + assertThat(checkMethods, aMapWithSize(1)); + assertThat( + checkMethods, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "staticMethod", List.of("I", "java/lang/String", "java/lang/Object"))), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerMixed", + "check$org_example_TestTargetClass$$staticMethod", + List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;", "Ljava/lang/Object;") + ) + ) + ) + ); + } + + public void testLookupImplementationMethodWithInterface() throws ClassNotFoundException, NoSuchMethodException { + var info = instrumentationService.lookupImplementationMethod( + TestTargetInterface.class, + "instanceMethod", + TestTargetClass.class, + TestCheckerMixed.class, + "checkInstanceMethodManual", + int.class, + String.class + ); + + assertThat( + info.targetMethod(), + equalTo( + new MethodKey( + 
"org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethod", + List.of("I", "java/lang/String") + ) + ) + ); + assertThat( + info.checkMethod(), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerMixed", + "checkInstanceMethodManual", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetInterface;", + "I", + "Ljava/lang/String;" + ) + ) + ) + ); + } + + public void testLookupImplementationMethodWithBaseClass() throws ClassNotFoundException, NoSuchMethodException { + var info = instrumentationService.lookupImplementationMethod( + TestTargetBaseClass.class, + "instanceMethod", + TestTargetImplementationClass.class, + TestCheckerMixed.class, + "checkInstanceMethodManual", + int.class, + String.class + ); + + assertThat( + info.targetMethod(), + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetImplementationClass", + "instanceMethod", + List.of("I", "java/lang/String") + ) + ) + ); + assertThat( + info.checkMethod(), + equalTo( + new CheckMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerMixed", + "checkInstanceMethodManual", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetBaseClass;", + "I", + "Ljava/lang/String;" + ) + ) + ) + ); + } + public void testParseCheckerMethodSignatureStaticMethod() { var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( "check$org_example_TestClass$$staticMethod", diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index de47e88aa8e95..bde467d20f0aa 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -49,8 +49,10 @@ import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.nio.charset.Charset; +import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.attribute.UserPrincipal; +import java.nio.file.spi.FileSystemProvider; import java.security.cert.CertStoreParameters; import java.util.List; import java.util.Locale; @@ -510,4 +512,7 @@ public interface EntitlementChecker { void check$java_nio_file_Files$$probeContentType(Class callerClass, Path path); void check$java_nio_file_Files$$setOwner(Class callerClass, Path path, UserPrincipal principal); + + // hand-wired methods + void checkNewInputStream(Class callerClass, FileSystemProvider that, Path path, OpenOption... 
options); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 4bc7c54e4cfda..5b80afa521e57 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -31,11 +31,17 @@ import java.lang.instrument.Instrumentation; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; +import java.nio.file.FileSystems; +import java.nio.file.OpenOption; +import java.nio.file.Path; +import java.nio.file.spi.FileSystemProvider; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import java.util.stream.Stream; /** * Called by the agent during {@code agentmain} to configure the entitlement system, @@ -60,11 +66,24 @@ public static EntitlementChecker checker() { public static void initialize(Instrumentation inst) throws Exception { manager = initChecker(); - Map checkMethods = INSTRUMENTER_FACTORY.lookupMethods(EntitlementChecker.class); + Map checkMethods = new HashMap<>(INSTRUMENTATION_SERVICE.lookupMethods(EntitlementChecker.class)); + + var fileSystemProviderClass = FileSystems.getDefault().provider().getClass(); + Stream.of( + INSTRUMENTATION_SERVICE.lookupImplementationMethod( + FileSystemProvider.class, + "newInputStream", + fileSystemProviderClass, + EntitlementChecker.class, + "checkNewInputStream", + Path.class, + OpenOption[].class + ) + ).forEach(instrumentation -> checkMethods.put(instrumentation.targetMethod(), instrumentation.checkMethod())); var classesToTransform = checkMethods.keySet().stream().map(MethodKey::className).collect(Collectors.toSet()); - Instrumenter instrumenter = INSTRUMENTER_FACTORY.newInstrumenter(EntitlementChecker.class, checkMethods); + Instrumenter instrumenter = INSTRUMENTATION_SERVICE.newInstrumenter(EntitlementChecker.class, checkMethods); inst.addTransformer(new Transformer(instrumenter, classesToTransform), true); inst.retransformClasses(findClassesToRetransform(inst.getAllLoadedClasses(), classesToTransform)); } @@ -141,7 +160,7 @@ private static ElasticsearchEntitlementChecker initChecker() { } } - private static final InstrumentationService INSTRUMENTER_FACTORY = new ProviderLocator<>( + private static final InstrumentationService INSTRUMENTATION_SERVICE = new ProviderLocator<>( "entitlement", InstrumentationService.class, "org.elasticsearch.entitlement.instrumentation", diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java index 66d8ad9488cfa..79673418eb321 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java @@ -16,7 +16,21 @@ * The SPI service entry point for instrumentation. 
*/ public interface InstrumentationService { + + String CHECK_METHOD_PREFIX = "check$"; + + record InstrumentationInfo(MethodKey targetMethod, CheckMethod checkMethod) {} + Instrumenter newInstrumenter(Class clazz, Map methods); Map lookupMethods(Class clazz) throws IOException; + + InstrumentationInfo lookupImplementationMethod( + Class targetSuperclass, + String methodName, + Class implementationClass, + Class checkerClass, + String checkMethodName, + Class... parameterTypes + ) throws NoSuchMethodException, ClassNotFoundException; } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 48a7400a1db7b..d1318845c22f4 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -54,8 +54,10 @@ import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.nio.charset.Charset; +import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.attribute.UserPrincipal; +import java.nio.file.spi.FileSystemProvider; import java.security.cert.CertStoreParameters; import java.util.List; import java.util.Locale; @@ -918,4 +920,9 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { public void check$java_nio_file_Files$$setOwner(Class callerClass, Path path, UserPrincipal principal) { policyManager.checkFileWrite(callerClass, path); } + + @Override + public void checkNewInputStream(Class callerClass, FileSystemProvider that, Path path, OpenOption... options) { + // TODO: policyManger.checkFileSystemRead(path); + } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index 73e9d0c446bef..b08db25a2430b 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -11,6 +11,7 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.runtime.api.NotEntitledException; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -217,7 +218,7 @@ public void checkChangeJVMGlobalState(Class callerClass) { Optional checkMethodName = StackWalker.getInstance() .walk( frames -> frames.map(StackFrame::getMethodName) - .dropWhile(not(methodName -> methodName.startsWith("check$"))) + .dropWhile(not(methodName -> methodName.startsWith(InstrumentationService.CHECK_METHOD_PREFIX))) .findFirst() ); return checkMethodName.map(this::operationDescription).orElse("change JVM global state"); From 6e471eaaf3d2f1b1ced256fade12d09c9a66cf76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mariusz=20J=C3=B3zala?= <377355+jozala@users.noreply.github.com> Date: Thu, 30 Jan 2025 12:49:25 +0100 Subject: [PATCH 267/383] Revert "Add 9.0 to branches.json" (#121274) This reverts commit a92e724b This is to temporarily mitigate the issue with CI builds failing due to missing 8.18 artifacts. 
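For orientation, `branches.json` is a flat list of `{ "branch": ... }` entries that the CI tooling reads to decide which branches to build. A rough sketch of the shape of the file after this revert is shown below; only the entries visible in the hunk are listed, and the wrapper object around the list is an assumption, not part of the diff:

```json
{
  "branches": [
    { "branch": "8.16" },
    { "branch": "8.17" }
  ]
}
```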
--- branches.json | 3 --- 1 file changed, 3 deletions(-) diff --git a/branches.json b/branches.json index 5084f5a4544a0..95fbdb1efd655 100644 --- a/branches.json +++ b/branches.json @@ -7,9 +7,6 @@ { "branch": "8.16" }, - { - "branch": "9.0" - }, { "branch": "8.17" }, From c8dfb4ea9e66ba28031d6da9a9cd3a38b30719f1 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Thu, 30 Jan 2025 12:52:37 +0100 Subject: [PATCH 268/383] [DOCS] Fix missing id syntax (#121264) * [DOCS] Fix missing id syntax * Update docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc * fix id --- .../common-issues/disk-usage-exceeded.asciidoc | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc b/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc index def89b37eb316..67a872c883afe 100644 --- a/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc +++ b/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc @@ -127,12 +127,14 @@ its {cloud}/ec-api-console.html[Elasticsearch API Console] to later with this resolution flow on {ess}, kindly reach out to https://support.elastic.co[Elastic Support] for assistance. -== Prevent watermark errors +[discrete] +[[fix-watermark-errors-prevent]] +=== Prevent watermark errors -To avoid watermark errors in future, , perform one of the following actions: +To avoid watermark errors in future, perform one of the following actions: * If you're using {ess}, {ece}, or {eck}: Enable <>. * Set up {kibana-ref}/kibana-alerts.html[stack monitoring alerts] on top of <> to be notified before -the flood-stage watermark is reached. \ No newline at end of file +the flood-stage watermark is reached. From f2118388599cccae9439057601823c2e8a454d26 Mon Sep 17 00:00:00 2001 From: Jan Calanog Date: Thu, 30 Jan 2025 13:58:28 +0100 Subject: [PATCH 269/383] github-action: Add AsciiDoc freeze warning (#121032) --- .../workflows/comment-on-asciidoc-changes.yml | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 .github/workflows/comment-on-asciidoc-changes.yml diff --git a/.github/workflows/comment-on-asciidoc-changes.yml b/.github/workflows/comment-on-asciidoc-changes.yml new file mode 100644 index 0000000000000..8e5f836b1489a --- /dev/null +++ b/.github/workflows/comment-on-asciidoc-changes.yml @@ -0,0 +1,21 @@ +--- +name: Comment on PR for .asciidoc changes + +on: + # We need to use pull_request_target to be able to comment on PRs from forks + pull_request_target: + types: + - synchronize + - opened + - reopened + branches: + - main + - master + - "9.0" + +jobs: + comment-on-asciidoc-change: + permissions: + contents: read + pull-requests: write + uses: elastic/docs-builder/.github/workflows/comment-on-asciidoc-changes.yml@main From 2f3053d117e55d2afa6fb16e2513e645651ceaa2 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Thu, 30 Jan 2025 15:01:52 +0200 Subject: [PATCH 270/383] Fix NPE in deprecation API (#121263) **Reproduction path** 1. Create an composable index template that does not have a `template` section. 2. Set some settings in `deprecation.skip_deprecated_settings` 3. Run the deprecation API `GET _migration/deprecations?error_trace` 4. **Result:** we receive an error `500` with the following error and stack trace: ``` .... 
"reason": "Cannot invoke \"org.elasticsearch.cluster.metadata.Template.settings()\" because \"template\" is null", "stack_trace": "java.lang.NullPointerException: Cannot invoke \"org.elasticsearch.cluster.metadata.Template.settings()\" because \"template\" is null org.elasticsearch.xpack.deprecation.DeprecationInfoAction.lambda$removeSkippedSettings$9(DeprecationInfoAction.java:408) .... ``` **Fix** There was a typo when we were performing the null-check, we used `templateName` instead of `template`. In this PR we fix this and we extend the current test to capture this case as well. The bug is not released so it's marked as a non-issue. --- .../xpack/deprecation/DeprecationInfoAction.java | 4 ++-- .../DeprecationInfoActionResponseTests.java | 15 +++++++++++++-- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index c89e61fbcf24d..62d2a231face4 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -353,7 +353,7 @@ public static DeprecationInfoAction.Response from( } /** - * + * Removes the skipped settings from the selected indices and the component and index templates. * @param state The cluster state to modify * @param indexNames The names of the indexes whose settings need to be filtered * @param skipTheseDeprecatedSettings The settings that will be removed from cluster metadata and the index metadata of all the @@ -405,7 +405,7 @@ private static ClusterState removeSkippedSettings(ClusterState state, String[] i String templateName = entry.getKey(); ComposableIndexTemplate indexTemplate = entry.getValue(); Template template = indexTemplate.template(); - if (templateName == null || template.settings() == null || template.settings().isEmpty()) { + if (template == null || template.settings() == null || template.settings().isEmpty()) { return Tuple.tuple(templateName, indexTemplate); } return Tuple.tuple( diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java index 28fd14abecbc1..a06bb2f2718cb 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java @@ -362,7 +362,14 @@ public void testRemoveSkippedSettings() { .put(IndexMetadata.builder("test").settings(inputSettings).numberOfShards(1).numberOfReplicas(0)) .put(dataStreamIndexMetadata, true) .put(DataStream.builder("ds-test", List.of(dataStreamIndexMetadata.getIndex())).build()) - .indexTemplates(Map.of("my-index-template", indexTemplate)) + .indexTemplates( + Map.of( + "my-index-template", + indexTemplate, + "empty-template", + ComposableIndexTemplate.builder().indexPatterns(List.of("random")).build() + ) + ) .componentTemplates(Map.of("my-component-template", componentTemplate)) .persistentSettings(inputSettings) .build(); @@ -391,7 +398,11 @@ public void testRemoveSkippedSettings() { .componentTemplates() .values() .forEach(template -> 
visibleComponentTemplateSettings.set(template.template().settings())); - cs.metadata().templatesV2().values().forEach(template -> visibleIndexTemplateSettings.set(template.template().settings())); + cs.metadata().templatesV2().values().forEach(template -> { + if (template.template() != null && template.template().settings() != null) { + visibleIndexTemplateSettings.set(template.template().settings()); + } + }); return Map.of(); })); From cdc16120fddbc3549eff45b43ae58b975a3bd0cb Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 00:39:53 +1100 Subject: [PATCH 271/383] Mute org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySessionFactoryTests org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySessionFactoryTests #121285 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index df39bd09bfa2b..ba8860e7bdd83 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -352,6 +352,8 @@ tests: - class: org.elasticsearch.upgrades.VectorSearchIT method: testBBQVectorSearch {upgradedNodes=3} issue: https://github.com/elastic/elasticsearch/issues/121273 +- class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySessionFactoryTests + issue: https://github.com/elastic/elasticsearch/issues/121285 # Examples: # From 4baffe4de13a4d2b152aa6e41bbd94142e142de9 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Thu, 30 Jan 2025 13:41:02 +0000 Subject: [PATCH 272/383] Upgrade to Lucene 10.1.0 (#119308) This commit upgrades to Lucene 10.1.0. --- build-tools-internal/version.properties | 2 +- docs/Versions.asciidoc | 4 +- docs/changelog/119308.yaml | 5 + docs/reference/search/profile.asciidoc | 12 +- gradle/verification-metadata.xml | 200 +++++++++++------- .../extras/SourceConfirmedTextQuery.java | 32 ++- rest-api-spec/build.gradle | 2 + .../rest-api-spec/test/search/370_profile.yml | 43 +++- .../search/stats/FieldUsageStatsIT.java | 2 +- server/src/main/java/module-info.java | 3 +- .../diskusage/IndexDiskUsageAnalyzer.java | 6 +- .../elasticsearch/common/lucene/Lucene.java | 26 +-- .../elasticsearch/index/IndexVersions.java | 1 + .../index/codec/CodecService.java | 6 +- .../index/codec/Elasticsearch816Codec.java | 2 +- .../index/codec/Elasticsearch900Codec.java | 8 +- .../codec/Elasticsearch900Lucene101Codec.java | 131 ++++++++++++ .../codec/LegacyPerFieldMapperCodec.java | 6 +- .../index/codec/PerFieldFormatSupplier.java | 10 +- .../index/codec/PerFieldMapperCodec.java | 2 +- .../index/query/IntervalBuilder.java | 2 +- .../elasticsearch/index/query/XIntervals.java | 106 ---------- .../elasticsearch/search/SearchFeatures.java | 3 +- .../bucket/filter/FiltersAggregator.java | 2 +- .../services/org.apache.lucene.codecs.Codec | 1 + .../IndexDiskUsageAnalyzerTests.java | 25 ++- .../elasticsearch/index/codec/CodecTests.java | 2 +- .../vectors/ES813FlatVectorFormatTests.java | 4 +- .../ES813Int8FlatVectorFormatTests.java | 4 +- ...HnswScalarQuantizedVectorsFormatTests.java | 4 +- .../ES815BitFlatVectorFormatTests.java | 4 +- .../ES815HnswBitVectorsFormatTests.java | 4 +- ...S816BinaryQuantizedVectorsFormatTests.java | 4 +- ...HnswBinaryQuantizedVectorsFormatTests.java | 4 +- ...S818BinaryQuantizedVectorsFormatTests.java | 4 +- ...HnswBinaryQuantizedVectorsFormatTests.java | 4 +- .../codec/zstd/StoredFieldCodecDuelTests.java | 6 +- ...estCompressionStoredFieldsFormatTests.java | 4 +- 
...td814BestSpeedStoredFieldsFormatTests.java | 4 +- .../engine/CompletionStatsCacheTests.java | 8 +- .../mapper/CompletionFieldMapperTests.java | 10 +- .../mapper/DocumentParserContextTests.java | 1 - .../vectors/SparseVectorFieldMapperTests.java | 4 - .../index/query/IntervalBuilderTests.java | 14 +- .../query/IntervalQueryBuilderTests.java | 16 +- .../search/dfs/DfsPhaseTests.java | 4 +- .../query/ProfileCollectorManagerTests.java | 2 +- .../sourceonly/SourceOnlySnapshot.java | 4 +- .../inference/ltr/QueryFeatureExtractor.java | 2 +- .../mapper/WildcardFieldMapperTests.java | 4 +- 50 files changed, 437 insertions(+), 326 deletions(-) create mode 100644 docs/changelog/119308.yaml create mode 100644 server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Lucene101Codec.java delete mode 100644 server/src/main/java/org/elasticsearch/index/query/XIntervals.java diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 57882fa842b41..c041bd2dd2156 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0 +lucene = 10.1.0 bundled_jdk_vendor = openjdk bundled_jdk = 23+37@3c5b90190c68498b986a97f276efd28a diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index f2e61861bd3a6..c2e14a399b70e 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 10.0.0 -:lucene_version_path: 10_0_0 +:lucene_version: 10.1.0 +:lucene_version_path: 10_1_0 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar diff --git a/docs/changelog/119308.yaml b/docs/changelog/119308.yaml new file mode 100644 index 0000000000000..bb47cac148a82 --- /dev/null +++ b/docs/changelog/119308.yaml @@ -0,0 +1,5 @@ +pr: 119308 +summary: Upgrade to Lucene 10.1.0 +area: Search +type: upgrade +issues: [] diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 4fbe5ea1bb9f8..1e03279878fdf 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -176,7 +176,7 @@ The API returns the following result: "time_in_nanos": 775274, "children" : [ { - "name": "SimpleTopScoreDocCollector", + "name": "TopScoreDocCollector", "reason": "search_top_hits", "time_in_nanos": 775274 } @@ -537,7 +537,7 @@ Looking at the previous example: "time_in_nanos": 775274, "children" : [ { - "name": "SimpleTopScoreDocCollector", + "name": "TopScoreDocCollector", "reason": "search_top_hits", "time_in_nanos": 775274 } @@ -551,7 +551,7 @@ Looking at the previous example: We see a top-level collector named `QueryPhaseCollector` which holds a child -`SimpleTopScoreDocCollector`. `SimpleTopScoreDocCollector` is the default +`TopScoreDocCollector`. `TopScoreDocCollector` is the default "scoring and sorting" `Collector` used by {es}. The `reason` field attempts to give a plain English description of the class name. The `time_in_nanos` is similar to the time in the Query tree: a wall-clock time inclusive of all @@ -751,7 +751,7 @@ The API returns the following result: "time_in_nanos": 1945072, "children": [ { - "name": "SimpleTopScoreDocCollector", + "name": "TopScoreDocCollector", "reason": "search_top_hits", "time_in_nanos": 22577 }, @@ -788,7 +788,7 @@ major portions of the query are represented: 2. The second `TermQuery` (message:search) represents the `post_filter` query. 
The Collector tree is fairly straightforward, showing how a single -QueryPhaseCollector that holds the normal scoring SimpleTopScoreDocCollector +QueryPhaseCollector that holds the normal scoring TopScoreDocCollector used to collect top hits, as well as BucketCollectorWrapper to run all scoped aggregations. @@ -1332,7 +1332,7 @@ One of the `dfs.knn` sections for a shard looks like the following: "rewrite_time" : 1275732, "collector" : [ { - "name" : "SimpleTopScoreDocCollector", + "name" : "TopScoreDocCollector", "reason" : "search_top_hits", "time_in_nanos" : 17163 } diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2c46c4642e56e..b1db86f3a7b28 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2920,129 +2920,179 @@ - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + - - - + + + + + diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java index a992f68d93d9e..153ca8c832ceb 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java @@ -12,6 +12,7 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.FieldInvertState; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermStates; import org.apache.lucene.index.memory.MemoryIndex; @@ -23,7 +24,6 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.LeafSimScorer; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Matches; @@ -214,7 +214,6 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo // No need to ever look at the _source for non-scoring term queries return in.createWeight(searcher, scoreMode, boost); } - // We use a LinkedHashSet here to preserve the ordering of terms to ensure that // later summing of float scores per term is consistent final Set terms = new LinkedHashSet<>(); @@ -267,6 +266,7 @@ public boolean isCacheable(LeafReaderContext ctx) { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { + NumericDocValues norms = context.reader().getNormValues(field); RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0); if (scorer == null) { return Explanation.noMatch("No matching phrase"); @@ -277,8 +277,7 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } float phraseFreq = scorer.freq(); Explanation freqExplanation = Explanation.match(phraseFreq, "phraseFreq=" + phraseFreq); - final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); - Explanation scoreExplanation = 
leafSimScorer.explain(doc, freqExplanation); + Explanation scoreExplanation = simScorer.explain(freqExplanation, getNormValue(norms, doc)); return Explanation.match( scoreExplanation.getValue(), "weight(" + getQuery() + " in " + doc + ") [" + searcher.getSimilarity().getClass().getSimpleName() + "], result of:", @@ -297,9 +296,9 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti public Scorer get(long leadCost) throws IOException { final Scorer approximationScorer = approximationSupplier.get(leadCost); final DocIdSetIterator approximation = approximationScorer.iterator(); - final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); final CheckedIntFunction, IOException> valueFetcher = valueFetcherProvider.apply(context); - return new RuntimePhraseScorer(approximation, leafSimScorer, valueFetcher, field, in); + NumericDocValues norms = context.reader().getNormValues(field); + return new RuntimePhraseScorer(approximation, simScorer, norms, valueFetcher, field, in); } @Override @@ -335,12 +334,23 @@ public Matches matches(LeafReaderContext context, int doc) throws IOException { }; } + private static long getNormValue(NumericDocValues norms, int doc) throws IOException { + if (norms != null) { + boolean found = norms.advanceExact(doc); + assert found; + return norms.longValue(); + } else { + return 1L; // default norm + } + } + private class RuntimePhraseScorer extends Scorer { - private final LeafSimScorer scorer; + private final SimScorer scorer; private final CheckedIntFunction, IOException> valueFetcher; private final String field; private final Query query; private final TwoPhaseIterator twoPhase; + private final NumericDocValues norms; private final MemoryIndexEntry cacheEntry = new MemoryIndexEntry(); @@ -349,12 +359,14 @@ private class RuntimePhraseScorer extends Scorer { private RuntimePhraseScorer( DocIdSetIterator approximation, - LeafSimScorer scorer, + SimScorer scorer, + NumericDocValues norms, CheckedIntFunction, IOException> valueFetcher, String field, Query query ) { this.scorer = scorer; + this.norms = norms; this.valueFetcher = valueFetcher; this.field = field; this.query = query; @@ -386,12 +398,12 @@ public TwoPhaseIterator twoPhaseIterator() { @Override public float getMaxScore(int upTo) throws IOException { - return scorer.getSimScorer().score(Float.MAX_VALUE, 1L); + return scorer.score(Float.MAX_VALUE, 1L); } @Override public float score() throws IOException { - return scorer.score(docID(), freq()); + return scorer.score(freq(), getNormValue(norms, doc)); } @Override diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 68da320923898..2452265e336a9 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -57,6 +57,8 @@ tasks.named("precommit").configure { tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling") task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.collector.0.name", "TopScoreDocCollector", "dfs knn vector profiling") + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.collector.0.name", "TopScoreDocCollector", "dfs knn vector profiling with vector_operations_count") 
task.skipTest("cat.aliases/10_basic/Deprecated local parameter", "CAT APIs not covered by compatibility policy") task.skipTest("cat.shards/10_basic/Help", "sync_id is removed in 9.0") task.skipTest("search/500_date_range/from, to, include_lower, include_upper deprecated", "deprecated parameters are removed in 9.0") diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index 81ca84a06f815..c1fdb8adc8ee9 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -230,7 +230,6 @@ dfs knn vector profiling: - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.create_weight: 0 } - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.create_weight_count: 0 } - gt: { profile.shards.0.dfs.knn.0.rewrite_time: 0 } - - match: { profile.shards.0.dfs.knn.0.collector.0.name: "SimpleTopScoreDocCollector" } - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } @@ -275,6 +274,47 @@ dfs knn vector profiling description: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScoreQuery[0,...][0.009673266,...],0.009673266" } +--- +dfs knn vector profiling collector name: + - requires: + cluster_features: "lucene_10_1_upgrade" + reason: collector name changed with lucene 10.1 + + - do: + indices.create: + index: images + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + image: + type: "dense_vector" + dims: 3 + index: true + similarity: "l2_norm" + + - do: + index: + index: images + id: "1" + refresh: true + body: + image: [1, 5, -20] + + - do: + search: + index: images + body: + profile: true + knn: + field: "image" + query_vector: [-5, 9, -12] + k: 1 + num_candidates: 100 + + - match: { profile.shards.0.dfs.knn.0.collector.0.name: "TopScoreDocCollector" } + --- dfs knn vector profiling with vector_operations_count: - requires: @@ -335,7 +375,6 @@ dfs knn vector profiling with vector_operations_count: - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.create_weight: 0 } - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.create_weight_count: 0 } - gt: { profile.shards.0.dfs.knn.0.rewrite_time: 0 } - - match: { profile.shards.0.dfs.knn.0.collector.0.name: "SimpleTopScoreDocCollector" } - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java index 3d5120226ebed..3d05b0e953959 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java @@ -105,7 +105,7 @@ public void testFieldUsageStats() throws ExecutionException, InterruptedExceptio assertTrue(stats.hasField("field2")); // positions because of span query assertEquals( - Set.of(UsageContext.TERMS, UsageContext.POSTINGS, UsageContext.FREQS, UsageContext.POSITIONS), + Set.of(UsageContext.TERMS, UsageContext.POSTINGS, UsageContext.FREQS, UsageContext.POSITIONS, UsageContext.NORMS), stats.get("field2").keySet() ); assertEquals(1L * numShards, 
stats.get("field2").getTerms()); diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 73e9a2058b2b4..01acabca2bc00 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -461,7 +461,8 @@ with org.elasticsearch.index.codec.Elasticsearch814Codec, org.elasticsearch.index.codec.Elasticsearch816Codec, - org.elasticsearch.index.codec.Elasticsearch900Codec; + org.elasticsearch.index.codec.Elasticsearch900Codec, + org.elasticsearch.index.codec.Elasticsearch900Lucene101Codec; provides org.apache.logging.log4j.core.util.ContextDataProvider with org.elasticsearch.common.logging.DynamicContextDataProvider; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java index e668624440351..a9eb8d4f8ed66 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java @@ -13,6 +13,7 @@ import org.apache.lucene.backward_codecs.lucene50.Lucene50PostingsFormat; import org.apache.lucene.backward_codecs.lucene84.Lucene84PostingsFormat; import org.apache.lucene.backward_codecs.lucene90.Lucene90PostingsFormat; +import org.apache.lucene.backward_codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.backward_codecs.lucene99.Lucene99PostingsFormat; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.FieldsProducer; @@ -21,7 +22,7 @@ import org.apache.lucene.codecs.PointsReader; import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.codecs.TermVectorsReader; -import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; +import org.apache.lucene.codecs.lucene101.Lucene101PostingsFormat; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; @@ -306,6 +307,9 @@ private static void readProximity(Terms terms, PostingsEnum postings) throws IOE private static BlockTermState getBlockTermState(TermsEnum termsEnum, BytesRef term) throws IOException { if (term != null && termsEnum.seekExact(term)) { final TermState termState = termsEnum.termState(); + if (termState instanceof final Lucene101PostingsFormat.IntBlockTermState blockTermState) { + return new BlockTermState(blockTermState.docStartFP, blockTermState.posStartFP, blockTermState.payStartFP); + } if (termState instanceof final Lucene912PostingsFormat.IntBlockTermState blockTermState) { return new BlockTermState(blockTermState.docStartFP, blockTermState.posStartFP, blockTermState.payStartFP); } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index bd48572a8bc11..073000979918e 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -70,7 +70,6 @@ import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -89,10 +88,9 @@ import java.util.Map; import 
java.util.Objects; -import static org.apache.lucene.util.Version.LUCENE_10_0_0; - public class Lucene { - public static final String LATEST_CODEC = "Lucene100"; + + public static final String LATEST_CODEC = "Lucene101"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; @@ -156,25 +154,7 @@ public static SegmentInfos readSegmentInfos(IndexCommit commit) throws IOExcepti * Reads the segments infos from the given segments file name, failing if it fails to load */ private static SegmentInfos readSegmentInfos(String segmentsFileName, Directory directory) throws IOException { - // TODO Use readCommit(Directory directory, String segmentFileName, int minSupportedMajorVersion) once Lucene 10.1 is available - // and remove the try-catch block for IndexFormatTooOldException - assert IndexVersion.current().luceneVersion().equals(LUCENE_10_0_0) : "remove the try-catch block below"; - try { - return SegmentInfos.readCommit(directory, segmentsFileName); - } catch (IndexFormatTooOldException e) { - try { - // Temporary workaround until Lucene 10.1 is available: try to leverage min. read-only compatibility to read the last commit - // and then check if this is the commit we want. This should always work for the case we are interested in (archive and - // searchable snapshots indices in N-2 version) as no newer commit should be ever written. - var segmentInfos = readSegmentInfos(directory); - if (segmentsFileName.equals(segmentInfos.getSegmentsFileName())) { - return segmentInfos; - } - } catch (Exception suppressed) { - e.addSuppressed(suppressed); - } - throw e; - } + return SegmentInfos.readCommit(directory, segmentsFileName, IndexVersions.MINIMUM_READONLY_COMPATIBLE.luceneVersion().major); } /** diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 309b8a4f143d3..2470bfb7e5c56 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -144,6 +144,7 @@ private static Version parseUnchecked(String version) { public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME = def(9_006_00_0, Version.LUCENE_10_0_0); public static final IndexVersion SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP = def(9_007_00_0, Version.LUCENE_10_0_0); public static final IndexVersion HOSTNAME_DOC_VALUES_SPARSE_INDEX = def(9_008_00_0, Version.LUCENE_10_0_0); + public static final IndexVersion UPGRADE_TO_LUCENE_10_1_0 = def(9_009_00_0, Version.LUCENE_10_1_0); /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index c1c392ac07f18..06949a967eccd 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -12,7 +12,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.Nullable; @@ -46,7 +46,7 @@ public class CodecService implements CodecProvider { public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) { final var codecs = new HashMap(); - Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_SPEED, mapperService, bigArrays); + Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene101Codec.Mode.BEST_SPEED, mapperService, bigArrays); if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, bigArrays)); } else { @@ -58,7 +58,7 @@ public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, mapperService, bigArrays) ); - Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); + Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene101Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); codecs.put(LEGACY_BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java index 9f46050f68f99..d58c4e2cdc34a 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java @@ -10,12 +10,12 @@ package org.elasticsearch.index.codec; import org.apache.lucene.backward_codecs.lucene912.Lucene912Codec; +import org.apache.lucene.backward_codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.StoredFieldsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java index 4154a242c15ed..04428d5b37fba 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java +++ 
b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java @@ -9,13 +9,13 @@ package org.elasticsearch.index.codec; +import org.apache.lucene.backward_codecs.lucene100.Lucene100Codec; +import org.apache.lucene.backward_codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.StoredFieldsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; @@ -23,8 +23,8 @@ import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; /** - * Elasticsearch codec as of 9.0. This extends the Lucene 10.0 codec to compressed stored fields with ZSTD instead of LZ4/DEFLATE. See - * {@link Zstd814StoredFieldsFormat}. + * Elasticsearch codec as of 9.0-snapshot relying on Lucene 10.0. This extends the Lucene 10.0 codec to compressed stored fields + * with ZSTD instead of LZ4/DEFLATE. See {@link Zstd814StoredFieldsFormat}. */ public class Elasticsearch900Codec extends CodecService.DeduplicateFieldInfosCodec { diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Lucene101Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Lucene101Codec.java new file mode 100644 index 0000000000000..ae7fa481a1caa --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Lucene101Codec.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; +import org.apache.lucene.codecs.lucene101.Lucene101PostingsFormat; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; + +/** + * Elasticsearch codec as of 9.0 relying on Lucene 10.1. This extends the Lucene 10.1 codec to compressed + * stored fields with ZSTD instead of LZ4/DEFLATE. See {@link Zstd814StoredFieldsFormat}. 
+ */ +public class Elasticsearch900Lucene101Codec extends CodecService.DeduplicateFieldInfosCodec { + + private final StoredFieldsFormat storedFieldsFormat; + + private final PostingsFormat defaultPostingsFormat; + private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return Elasticsearch900Lucene101Codec.this.getPostingsFormatForField(field); + } + }; + + private final DocValuesFormat defaultDVFormat; + private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return Elasticsearch900Lucene101Codec.this.getDocValuesFormatForField(field); + } + }; + + private final KnnVectorsFormat defaultKnnVectorsFormat; + private final KnnVectorsFormat knnVectorsFormat = new PerFieldKnnVectorsFormat() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return Elasticsearch900Lucene101Codec.this.getKnnVectorsFormatForField(field); + } + }; + + /** Public no-arg constructor, needed for SPI loading at read-time. */ + public Elasticsearch900Lucene101Codec() { + this(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + } + + /** + * Constructor. Takes a {@link Zstd814StoredFieldsFormat.Mode} that describes whether to optimize for retrieval speed at the expense of + * worse space-efficiency or vice-versa. + */ + public Elasticsearch900Lucene101Codec(Zstd814StoredFieldsFormat.Mode mode) { + super("Elasticsearch900Lucene101", new Lucene101Codec()); + this.storedFieldsFormat = mode.getFormat(); + this.defaultPostingsFormat = new Lucene101PostingsFormat(); + this.defaultDVFormat = new Lucene90DocValuesFormat(); + this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + } + + @Override + public StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public final PostingsFormat postingsFormat() { + return postingsFormat; + } + + @Override + public final DocValuesFormat docValuesFormat() { + return docValuesFormat; + } + + @Override + public final KnnVectorsFormat knnVectorsFormat() { + return knnVectorsFormat; + } + + /** + * Returns the postings format that should be used for writing new segments of field. + * + *
<p>The default implementation always returns "Lucene912". + * + * <p>WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation, + */ + public PostingsFormat getPostingsFormatForField(String field) { + return defaultPostingsFormat; + } + + /** + * Returns the docvalues format that should be used for writing new segments of field + * . + * + *
<p>The default implementation always returns "Lucene912". + * + * <p>WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public DocValuesFormat getDocValuesFormatForField(String field) { + return defaultDVFormat; + } + + /** + * Returns the vectors format that should be used for writing new segments of field + * + *
<p>The default implementation always returns "Lucene912". + * + * <p>
WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return defaultKnnVectorsFormat; + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java index bf2c5a9f01e29..9e4ecb1a46c17 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java @@ -13,7 +13,7 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.mapper.MapperService; @@ -22,11 +22,11 @@ * Legacy version of {@link PerFieldMapperCodec}. This codec is preserved to give an escape hatch in case we encounter issues with new * changes in {@link PerFieldMapperCodec}. */ -public final class LegacyPerFieldMapperCodec extends Lucene100Codec { +public final class LegacyPerFieldMapperCodec extends Lucene101Codec { private final PerFieldFormatSupplier formatSupplier; - public LegacyPerFieldMapperCodec(Lucene100Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + public LegacyPerFieldMapperCodec(Lucene101Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); // If the below assertion fails, it is a sign that Lucene released a new codec. You must create a copy of the current Elasticsearch diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java index 4d3d37ab4f3af..21721b68dbcac 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java @@ -60,18 +60,18 @@ private PostingsFormat internalGetPostingsFormatForField(String field) { if (mapperService != null) { Mapper mapper = mapperService.mappingLookup().getMapper(field); if (mapper instanceof CompletionFieldMapper) { - return PostingsFormatHolder.POSTINGS_FORMAT; + return CompletionPostingsFormatHolder.POSTINGS_FORMAT; } } // return our own posting format using PFOR return es812PostingsFormat; } - private static class PostingsFormatHolder { - private static final PostingsFormat POSTINGS_FORMAT = getPostingsFormat(); + private static class CompletionPostingsFormatHolder { + private static final PostingsFormat POSTINGS_FORMAT = getCompletionPostingsFormat(); - private static PostingsFormat getPostingsFormat() { - String defaultName = "Completion912"; // Caution: changing this name will result in exceptions if a field is created during a + private static PostingsFormat getCompletionPostingsFormat() { + String defaultName = "Completion101"; // Caution: changing this name will result in exceptions if a field is created during a // rolling upgrade and the new codec (specified by the name) is not available on all nodes in the cluster. 
String codecName = ExtensionLoader.loadSingleton(ServiceLoader.load(CompletionsPostingsFormatExtension.class)) .map(CompletionsPostingsFormatExtension::getFormatName) diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index b60b88da5949d..9a3055f96bba8 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -26,7 +26,7 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. */ -public final class PerFieldMapperCodec extends Elasticsearch900Codec { +public final class PerFieldMapperCodec extends Elasticsearch900Lucene101Codec { private final PerFieldFormatSupplier formatSupplier; diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java index b2b37ad834178..96c39ed356f90 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java @@ -126,7 +126,7 @@ protected static IntervalsSource combineSources(List sources, i if (maxGaps == 0 && ordered) { return Intervals.phrase(sourcesArray); } - IntervalsSource inner = ordered ? XIntervals.ordered(sourcesArray) : XIntervals.unordered(sourcesArray); + IntervalsSource inner = ordered ? Intervals.ordered(sourcesArray) : Intervals.unordered(sourcesArray); if (maxGaps == -1) { return inner; } diff --git a/server/src/main/java/org/elasticsearch/index/query/XIntervals.java b/server/src/main/java/org/elasticsearch/index/query/XIntervals.java deleted file mode 100644 index 7d8552e18f790..0000000000000 --- a/server/src/main/java/org/elasticsearch/index/query/XIntervals.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.index.query; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.queries.intervals.IntervalIterator; -import org.apache.lucene.queries.intervals.IntervalMatchesIterator; -import org.apache.lucene.queries.intervals.Intervals; -import org.apache.lucene.queries.intervals.IntervalsSource; -import org.apache.lucene.search.QueryVisitor; - -import java.io.IOException; -import java.util.Collection; -import java.util.Objects; - -/** - * Copy of {@link Intervals} that exposes versions of {@link Intervals#ordered} and {@link Intervals#unordered} - * that preserve their inner gaps. - * NOTE: Remove this hack when a version of Lucene with https://github.com/apache/lucene/pull/13819 is used (10.1.0). - */ -public final class XIntervals { - - /** - * Create an ordered {@link IntervalsSource} - * - *
<p>Returns intervals in which the subsources all appear in the given order - * - * @param subSources an ordered set of {@link IntervalsSource} objects - */ - public static IntervalsSource ordered(IntervalsSource... subSources) { - return new DelegateIntervalsSource(Intervals.ordered(subSources)); - } - - /** - * Create an ordered {@link IntervalsSource} - * - * <p>
Returns intervals in which the subsources all appear in the given order - * - * @param subSources an ordered set of {@link IntervalsSource} objects - */ - public static IntervalsSource unordered(IntervalsSource... subSources) { - return new DelegateIntervalsSource(Intervals.unordered(subSources)); - } - - /** - * Wraps a source to avoid aggressive flattening of the ordered and unordered sources. - * The flattening modifies the final gap and is removed in the latest unreleased version of Lucene (10.1). - */ - private static class DelegateIntervalsSource extends IntervalsSource { - private final IntervalsSource delegate; - - private DelegateIntervalsSource(IntervalsSource delegate) { - this.delegate = delegate; - } - - @Override - public IntervalIterator intervals(String field, LeafReaderContext ctx) throws IOException { - return delegate.intervals(field, ctx); - } - - @Override - public IntervalMatchesIterator matches(String field, LeafReaderContext ctx, int doc) throws IOException { - return delegate.matches(field, ctx, doc); - } - - @Override - public void visit(String field, QueryVisitor visitor) { - delegate.visit(field, visitor); - } - - @Override - public int minExtent() { - return delegate.minExtent(); - } - - @Override - public Collection pullUpDisjunctions() { - return delegate.pullUpDisjunctions(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DelegateIntervalsSource that = (DelegateIntervalsSource) o; - return Objects.equals(delegate, that.delegate); - } - - @Override - public int hashCode() { - return Objects.hash(delegate); - } - - @Override - public String toString() { - return delegate.toString(); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java index 98dd7f9388c1f..8077da130c34e 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java +++ b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java @@ -17,10 +17,11 @@ public final class SearchFeatures implements FeatureSpecification { public static final NodeFeature LUCENE_10_0_0_UPGRADE = new NodeFeature("lucene_10_upgrade"); + public static final NodeFeature LUCENE_10_1_0_UPGRADE = new NodeFeature("lucene_10_1_upgrade"); @Override public Set getFeatures() { - return Set.of(LUCENE_10_0_0_UPGRADE); + return Set.of(LUCENE_10_0_0_UPGRADE, LUCENE_10_1_0_UPGRADE); } public static final NodeFeature RETRIEVER_RESCORER_ENABLED = new NodeFeature("search.retriever.rescorer.enabled"); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java index a9ec0ba878ec0..bf6fb39d43c4b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java @@ -462,7 +462,7 @@ private static class FilterMatchingDisiWrapper extends DisiWrapper { final int filterOrd; FilterMatchingDisiWrapper(Scorer scorer, int ord) { - super(scorer); + super(scorer, false); this.filterOrd = ord; } diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 33c8081971202..1fbdaea9c772a 100644 --- 
a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1,3 +1,4 @@ org.elasticsearch.index.codec.Elasticsearch814Codec org.elasticsearch.index.codec.Elasticsearch816Codec org.elasticsearch.index.codec.Elasticsearch900Codec +org.elasticsearch.index.codec.Elasticsearch900Lucene101Codec diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index bf4a28b9c60b2..d9edb5db52662 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -12,7 +12,7 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; @@ -55,12 +55,11 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; -import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion101PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionPostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; -import org.apache.lucene.store.IOContext; import org.apache.lucene.tests.geo.GeoTestUtil; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BitSetIterator; @@ -328,11 +327,11 @@ public void testTriangle() throws Exception { public void testCompletionField() throws Exception { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(false) - .setCodec(new Lucene100Codec(Lucene100Codec.Mode.BEST_SPEED) { + .setCodec(new Lucene101Codec(Lucene101Codec.Mode.BEST_SPEED) { @Override public PostingsFormat getPostingsFormatForField(String field) { if (field.startsWith("suggest_")) { - return new Completion912PostingsFormat(randomFrom(CompletionPostingsFormat.FSTLoadMode.values())); + return new Completion101PostingsFormat(randomFrom(CompletionPostingsFormat.FSTLoadMode.values())); } else { return super.postingsFormat(); } @@ -415,25 +414,25 @@ private static void addFieldsToDoc(Document doc, IndexableField[] fields) { enum CodecMode { BEST_SPEED { @Override - Lucene100Codec.Mode mode() { - return Lucene100Codec.Mode.BEST_SPEED; + Lucene101Codec.Mode mode() { + return Lucene101Codec.Mode.BEST_SPEED; } }, BEST_COMPRESSION { @Override - Lucene100Codec.Mode mode() { - return Lucene100Codec.Mode.BEST_COMPRESSION; + Lucene101Codec.Mode mode() { + return Lucene101Codec.Mode.BEST_COMPRESSION; } }; - abstract Lucene100Codec.Mode mode(); + abstract Lucene101Codec.Mode mode(); } static void indexRandomly(Directory directory, CodecMode codecMode, int numDocs, Consumer addFields) throws IOException { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) 
.setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene100Codec(codecMode.mode())); + .setCodec(new Lucene101Codec(codecMode.mode())); try (IndexWriter writer = new IndexWriter(directory, config)) { for (int i = 0; i < numDocs; i++) { final Document doc = new Document(); @@ -641,7 +640,7 @@ static void rewriteIndexWithPerFieldCodec(Directory source, CodecMode mode, Dire try (DirectoryReader reader = DirectoryReader.open(source)) { IndexWriterConfig config = new IndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene100Codec(mode.mode()) { + .setCodec(new Lucene101Codec(mode.mode()) { @Override public PostingsFormat getPostingsFormatForField(String field) { return new ES812PostingsFormat(); @@ -688,7 +687,7 @@ static void collectPerFieldStats(SegmentReader reader, IndexDiskUsageStats stats final String[] files; final Directory directory; if (sis.getUseCompoundFile()) { - directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis, IOContext.DEFAULT); + directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis); files = directory.listAll(); } else { directory = reader.directory(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 6b1ffc3693636..9c9492df24cf3 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -50,7 +50,7 @@ public void testResolveDefaultCodecs() throws Exception { assumeTrue("Only when zstd_stored_fields feature flag is enabled", CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()); CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Elasticsearch900Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Elasticsearch900Lucene101Codec.class)); } public void testDefault() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java index 57cca6eea86ec..69d2cc21a6a2d 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 +24,7 @@ public class ES813FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase { @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java index 9069b094ee483..3f750ab5d7cbc 100644 --- 
a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 +24,7 @@ public class ES813Int8FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813Int8FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java index 549a14ca6c31b..eaf59b7028b80 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.KnnFloatVectorField; @@ -42,7 +42,7 @@ public class ES814HnswScalarQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES814HnswScalarQuantizedVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java index 034d428b25209..a4c3697726cb2 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815BitFlatVectorFormatTests extends BaseKnnBitVectorsFormatTestCa @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815BitFlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java index 4af6a405c7705..b5f56b6b42b7c 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815HnswBitVectorsFormatTests extends BaseKnnBitVectorsFormatTestC @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815HnswBitVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormatTests.java index 48ba566353f5d..e11775e2cdedb 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.index.DirectoryReader; @@ -60,7 +60,7 @@ public class ES816BinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFormat @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES816BinaryQuantizedRWVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormatTests.java index 03aa847f3a5d4..5c78aa5367f23 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsReader; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; @@ -56,7 +56,7 @@ public class ES816HnswBinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES816HnswBinaryQuantizedRWVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormatTests.java index 397cc472592b6..6b8b64b235252 100644 --- 
a/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818BinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.index.DirectoryReader; @@ -60,7 +60,7 @@ public class ES818BinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFormat @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES818BinaryQuantizedVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818HnswBinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818HnswBinaryQuantizedVectorsFormatTests.java index 09304b3ba4c91..c0f66adda4b94 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818HnswBinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es818/ES818HnswBinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsReader; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; @@ -56,7 +56,7 @@ public class ES818HnswBinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene100Codec() { + return new Lucene101Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES818HnswBinaryQuantizedVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java index 437ba1cecc11d..0e5732ec09e5b 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.index.codec.zstd; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; @@ -35,13 +35,13 @@ public class StoredFieldCodecDuelTests extends ESTestCase { private static final String DOUBLE_FIELD = "double_field_5"; public void testDuelBestSpeed() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene101Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, 
null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } public void testDuelBestCompression() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene101Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java index 77a7585e3b518..b6fefcb9a4e98 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch900Codec; +import org.elasticsearch.index.codec.Elasticsearch900Lucene101Codec; public class Zstd814BestCompressionStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); + private final Codec codec = new Elasticsearch900Lucene101Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java index 3d6cfea70d121..98318707f6c4b 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch900Codec; +import org.elasticsearch.index.codec.Elasticsearch900Lucene101Codec; public class Zstd814BestSpeedStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + private final Codec codec = new Elasticsearch900Lucene101Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java index 6d205a22433b4..1343078906d6f 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java @@ -9,12 +9,12 @@ package org.elasticsearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene101.Lucene101Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; 
-import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion101PostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; import org.elasticsearch.ElasticsearchException; @@ -44,8 +44,8 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); - final PostingsFormat postingsFormat = new Completion912PostingsFormat(); - indexWriterConfig.setCodec(new Lucene100Codec() { + final PostingsFormat postingsFormat = new Completion101PostingsFormat(); + indexWriterConfig.setCodec(new Lucene101Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index c9fe314056331..755d5dde2f162 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -16,7 +16,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; -import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion101PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionAnalyzer; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; @@ -148,21 +148,19 @@ protected IndexAnalyzers createIndexAnalyzers(IndexSettings indexSettings) { } public void testPostingsFormat() throws IOException { + final Class latestLuceneCPClass = Completion101PostingsFormat.class; MapperService mapperService = createMapperService(fieldMapping(this::minimalMapping)); CodecService codecService = new CodecService(mapperService, BigArrays.NON_RECYCLING_INSTANCE); Codec codec = codecService.codec("default"); if (CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { assertThat(codec, instanceOf(PerFieldMapperCodec.class)); - assertThat(((PerFieldMapperCodec) codec).getPostingsFormatForField("field"), instanceOf(Completion912PostingsFormat.class)); + assertThat(((PerFieldMapperCodec) codec).getPostingsFormatForField("field"), instanceOf(latestLuceneCPClass)); } else { if (codec instanceof CodecService.DeduplicateFieldInfosCodec deduplicateFieldInfosCodec) { codec = deduplicateFieldInfosCodec.delegate(); } assertThat(codec, instanceOf(LegacyPerFieldMapperCodec.class)); - assertThat( - ((LegacyPerFieldMapperCodec) codec).getPostingsFormatForField("field"), - instanceOf(Completion912PostingsFormat.class) - ); + assertThat(((LegacyPerFieldMapperCodec) codec).getPostingsFormatForField("field"), instanceOf(latestLuceneCPClass)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java index 5da7d6100bf4b..75c984d6f4305 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java @@ -129,7 +129,6 @@ 
protected Settings getIndexSettings() { var resultFromParserContext = parserContext.createDynamicMapperBuilderContext(); assertEquals("foo.hey", resultFromParserContext.buildFullName("hey")); - assertTrue(resultFromParserContext.isSourceSynthetic()); assertTrue(resultFromParserContext.isDataStream()); assertTrue(resultFromParserContext.parentObjectContainsDimensions()); assertEquals(ObjectMapper.Defaults.DYNAMIC, resultFromParserContext.getDynamic()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java index 8e0cd97e518fa..b2379ba579204 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java @@ -102,10 +102,6 @@ public void testDefaults() throws Exception { List fields = doc1.rootDoc().getFields("field"); assertEquals(2, fields.size()); - if (IndexVersion.current().luceneVersion().major == 10) { - // TODO: Update to use Lucene's FeatureField after upgrading to Lucene 10.1. - assertThat(IndexVersion.current().luceneVersion().minor, equalTo(0)); - } assertThat(fields.get(0), Matchers.instanceOf(XFeatureField.class)); XFeatureField featureField1 = null; XFeatureField featureField2 = null; diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java index 7005f17663d0d..3476655c705ae 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalBuilderTests.java @@ -46,7 +46,7 @@ public void testOrdered() throws IOException { CannedTokenStream ts = new CannedTokenStream(new Token("term1", 1, 2), new Token("term2", 3, 4), new Token("term3", 5, 6)); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); - IntervalsSource expected = XIntervals.ordered(Intervals.term("term1"), Intervals.term("term2"), Intervals.term("term3")); + IntervalsSource expected = Intervals.ordered(Intervals.term("term1"), Intervals.term("term2"), Intervals.term("term3")); assertEquals(expected, source); @@ -57,7 +57,7 @@ public void testUnordered() throws IOException { CannedTokenStream ts = new CannedTokenStream(new Token("term1", 1, 2), new Token("term2", 3, 4), new Token("term3", 5, 6)); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, false); - IntervalsSource expected = XIntervals.unordered(Intervals.term("term1"), Intervals.term("term2"), Intervals.term("term3")); + IntervalsSource expected = Intervals.unordered(Intervals.term("term1"), Intervals.term("term2"), Intervals.term("term3")); assertEquals(expected, source); @@ -101,7 +101,7 @@ public void testSimpleSynonyms() throws IOException { ); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); - IntervalsSource expected = XIntervals.ordered( + IntervalsSource expected = Intervals.ordered( Intervals.term("term1"), Intervals.or(Intervals.term("term2"), Intervals.term("term4")), Intervals.term("term3") @@ -122,7 +122,7 @@ public void testSimpleSynonymsWithGap() throws IOException { ); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); - IntervalsSource expected = XIntervals.ordered( + IntervalsSource expected = Intervals.ordered( Intervals.term("term1"), 
Intervals.extend(Intervals.or(Intervals.term("term2"), Intervals.term("term3"), Intervals.term("term4")), 1, 0), Intervals.term("term5") @@ -143,7 +143,7 @@ public void testGraphSynonyms() throws IOException { ); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); - IntervalsSource expected = XIntervals.ordered( + IntervalsSource expected = Intervals.ordered( Intervals.term("term1"), Intervals.or(Intervals.term("term2"), Intervals.phrase("term3", "term4")), Intervals.term("term5") @@ -166,7 +166,7 @@ public void testGraphSynonymsWithGaps() throws IOException { ); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); - IntervalsSource expected = XIntervals.ordered( + IntervalsSource expected = Intervals.ordered( Intervals.term("term1"), Intervals.or( Intervals.extend(Intervals.term("term2"), 1, 0), @@ -190,7 +190,7 @@ public void testGraphTerminatesOnGap() throws IOException { ); IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, true); - IntervalsSource expected = XIntervals.ordered( + IntervalsSource expected = Intervals.ordered( Intervals.term("term1"), Intervals.or(Intervals.term("term2"), Intervals.phrase("term3", "term4")), Intervals.extend(Intervals.term("term5"), 1, 0) diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index f0084f4f24e98..aad8275f4749d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -203,7 +203,7 @@ public void testMatchInterval() throws IOException { }""", TEXT_FIELD_NAME); IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); - Query expected = new IntervalQuery(TEXT_FIELD_NAME, XIntervals.unordered(Intervals.term("hello"), Intervals.term("world"))); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.unordered(Intervals.term("hello"), Intervals.term("world"))); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); @@ -222,7 +222,7 @@ public void testMatchInterval() throws IOException { builder = (IntervalQueryBuilder) parseQuery(json); expected = new IntervalQuery( TEXT_FIELD_NAME, - Intervals.maxgaps(40, XIntervals.unordered(Intervals.term("hello"), Intervals.term("world"))) + Intervals.maxgaps(40, Intervals.unordered(Intervals.term("hello"), Intervals.term("world"))) ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); @@ -241,7 +241,7 @@ public void testMatchInterval() throws IOException { builder = (IntervalQueryBuilder) parseQuery(json); expected = new BoostQuery( - new IntervalQuery(TEXT_FIELD_NAME, XIntervals.ordered(Intervals.term("hello"), Intervals.term("world"))), + new IntervalQuery(TEXT_FIELD_NAME, Intervals.ordered(Intervals.term("hello"), Intervals.term("world"))), 2 ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); @@ -263,7 +263,7 @@ public void testMatchInterval() throws IOException { builder = (IntervalQueryBuilder) parseQuery(json); expected = new IntervalQuery( TEXT_FIELD_NAME, - Intervals.maxgaps(10, XIntervals.ordered(Intervals.term("Hello"), Intervals.term("world"))) + Intervals.maxgaps(10, Intervals.ordered(Intervals.term("Hello"), Intervals.term("world"))) ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); @@ -285,7 +285,7 @@ public void testMatchInterval() throws 
IOException { builder = (IntervalQueryBuilder) parseQuery(json); expected = new IntervalQuery( TEXT_FIELD_NAME, - Intervals.fixField(MASKED_FIELD, Intervals.maxgaps(10, XIntervals.ordered(Intervals.term("Hello"), Intervals.term("world")))) + Intervals.fixField(MASKED_FIELD, Intervals.maxgaps(10, Intervals.ordered(Intervals.term("Hello"), Intervals.term("world")))) ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); @@ -314,7 +314,7 @@ public void testMatchInterval() throws IOException { expected = new IntervalQuery( TEXT_FIELD_NAME, Intervals.containing( - Intervals.maxgaps(10, XIntervals.ordered(Intervals.term("Hello"), Intervals.term("world"))), + Intervals.maxgaps(10, Intervals.ordered(Intervals.term("Hello"), Intervals.term("world"))), Intervals.term("blah") ) ); @@ -426,7 +426,7 @@ public void testCombineInterval() throws IOException { Intervals.containedBy( Intervals.maxgaps( 30, - XIntervals.ordered(Intervals.term("one"), XIntervals.unordered(Intervals.term("two"), Intervals.term("three"))) + Intervals.ordered(Intervals.term("one"), Intervals.unordered(Intervals.term("two"), Intervals.term("three"))) ), Intervals.term("SENTENCE") ) @@ -486,7 +486,7 @@ public void testCombineDisjunctionInterval() throws IOException { Intervals.notContainedBy( Intervals.maxgaps( 30, - XIntervals.ordered(Intervals.term("atmosphere"), Intervals.or(Intervals.term("cold"), Intervals.term("outside"))) + Intervals.ordered(Intervals.term("atmosphere"), Intervals.or(Intervals.term("cold"), Intervals.term("outside"))) ), Intervals.term("freeze") ) diff --git a/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java index 0abf34d800dca..d28bb98547cec 100644 --- a/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java @@ -86,14 +86,14 @@ public void testSingleKnnSearch() throws IOException { List queryProfileShardResult = searchProfileDfsPhaseResult.getQueryProfileShardResult(); assertNotNull(queryProfileShardResult); CollectorResult collectorResult = queryProfileShardResult.get(0).getCollectorResult(); - assertEquals("SimpleTopScoreDocCollector", (collectorResult.getName())); + assertEquals("TopScoreDocCollector", (collectorResult.getName())); assertEquals("search_top_hits", (collectorResult.getReason())); assertTrue(collectorResult.getTime() > 0); List children = collectorResult.getChildrenResults(); if (children.size() > 0) { long totalTime = 0L; for (CollectorResult child : children) { - assertEquals("SimpleTopScoreDocCollector", (child.getName())); + assertEquals("TopScoreDocCollector", (child.getName())); assertEquals("search_top_hits", (child.getReason())); totalTime += child.getTime(); } diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java index b728d40900570..aa34968813229 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java @@ -135,7 +135,7 @@ public void testManagerWithSearcher() throws IOException { assertEquals(numDocs, topDocs.totalHits.value()); CollectorResult result = profileCollectorManager.getCollectorTree(); assertEquals("profiler_reason", result.getReason()); - 
assertEquals("SimpleTopScoreDocCollector", result.getName()); + assertEquals("TopScoreDocCollector", result.getName()); assertTrue(result.getTime() > 0); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index 421a306babf29..c76af6b0cfa09 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -216,9 +216,7 @@ private SegmentCommitInfo syncSegment( Codec codec = si.getCodec(); Directory sourceDir = si.dir; if (si.getUseCompoundFile()) { - sourceDir = new LinkedFilesDirectory.CloseMePleaseWrapper( - codec.compoundFormat().getCompoundReader(sourceDir, si, IOContext.DEFAULT) - ); + sourceDir = new LinkedFilesDirectory.CloseMePleaseWrapper(codec.compoundFormat().getCompoundReader(sourceDir, si)); toClose = sourceDir; } final String segmentSuffix = ""; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractor.java index 08c141c0858ca..26d5125c94c32 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/QueryFeatureExtractor.java @@ -51,7 +51,7 @@ public void setNextReader(LeafReaderContext segmentContext) throws IOException { } Scorer scorer = weight.scorer(segmentContext); if (scorer != null) { - disiPriorityQueue.add(new DisiWrapper(scorer)); + disiPriorityQueue.add(new DisiWrapper(scorer, false)); } scorers.add(scorer); } diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index 0b31e96ece84a..b7a9b8af057e0 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -570,11 +570,11 @@ public void testRegexAcceleration() throws IOException, ParseException { { "(maynotexist)?foobar", "+eoo +ooa +oaa +aaq +aq_ +q__" }, { ".*/etc/passw.*", "+\\/es +esc +sc\\/ +c\\/o +\\/oa +oas +ass +ssw" }, { ".*etc/passwd", " +esc +sc\\/ +c\\/o +\\/oa +oas +ass +ssw +swc +wc_ +c__" }, - { "(http|ftp)://foo.*", "+((+gss +sso) eso) +(+\\/\\/\\/ +\\/\\/e +\\/eo +eoo)" }, + { "(http|ftp)://foo.*", "+\\/\\/\\/ +\\/\\/e +\\/eo +eoo +((+gss +sso) eso)" }, { "[Pp][Oo][Ww][Ee][Rr][Ss][Hh][Ee][Ll][Ll]\\.[Ee][Xx][Ee]", "+_oo +oow +owe +weq +eqs +qsg +sge +gek +ekk +kk\\/ +k\\/e +\\/ew +ewe +we_ +e__" }, - { "foo<1-100>bar", "+(+_eo +eoo) +(+aaq +aq_ +q__)" }, + { "foo<1-100>bar", "+_eo +eoo +aaq +aq_ +q__" }, { "(aaa.+&.+bbb)cat", "+cas +as_ +s__" }, { ".a", "a__" } }; for (String[] test : acceleratedTests) { From d98ee4e488b97d63ec80642512c7af1deac3ad20 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 00:48:53 +1100 Subject: [PATCH 273/383] Mute org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_357} #121287 --- muted-tests.yml | 3 
+++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index ba8860e7bdd83..d879ef618c268 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -354,6 +354,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/121273 - class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySessionFactoryTests issue: https://github.com/elastic/elasticsearch/issues/121285 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_357} + issue: https://github.com/elastic/elasticsearch/issues/121287 # Examples: # From d4c2f8883239b2ac5ddd69bd3cd2b2f4bdb901d1 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 00:51:42 +1100 Subject: [PATCH 274/383] Mute org.elasticsearch.test.rest.yaml.RcsCcsCommonYamlTestSuiteIT test {p0=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only} #115475 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index d879ef618c268..a55dbe77418f5 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -357,6 +357,9 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_357} issue: https://github.com/elastic/elasticsearch/issues/121287 +- class: org.elasticsearch.test.rest.yaml.RcsCcsCommonYamlTestSuiteIT + method: test {p0=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only} + issue: https://github.com/elastic/elasticsearch/issues/115475 # Examples: # From 2e2fe7d5f94d302e689782df2274696c5c3c8624 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 00:54:57 +1100 Subject: [PATCH 275/383] Mute org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT test {yaml=reference/index-modules/slowlog/line_102} #121288 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index a55dbe77418f5..b84fa6f5c98a1 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -360,6 +360,9 @@ tests: - class: org.elasticsearch.test.rest.yaml.RcsCcsCommonYamlTestSuiteIT method: test {p0=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only} issue: https://github.com/elastic/elasticsearch/issues/115475 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/index-modules/slowlog/line_102} + issue: https://github.com/elastic/elasticsearch/issues/121288 # Examples: # From fb3c6666632cb3c92b98ad4ce5b96be79391b76a Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 30 Jan 2025 14:01:55 +0000 Subject: [PATCH 276/383] Remove outdated reference to internal semantic text format (#121276) The semantic text format was updated in #119183. This commit removes the last remaining reference to the old format from the documentation to ensure consistency. 
--- .../semantic-text-hybrid-search | 44 +++++++------------ 1 file changed, 15 insertions(+), 29 deletions(-) diff --git a/docs/reference/search/search-your-data/semantic-text-hybrid-search b/docs/reference/search/search-your-data/semantic-text-hybrid-search index 4b49a7c3155db..0dfe97628faf5 100644 --- a/docs/reference/search/search-your-data/semantic-text-hybrid-search +++ b/docs/reference/search/search-your-data/semantic-text-hybrid-search @@ -113,6 +113,7 @@ POST _tasks//_cancel ==== Perform hybrid search After reindexing the data into the `semantic-embeddings` index, you can perform hybrid search by using <>. RRF is a technique that merges the rankings from both semantic and lexical queries, giving more weight to results that rank high in either search. This ensures that the final results are balanced and relevant. +To extract the most relevant fragments from the original text and query, you can use the <>: [source,console] ------------------------------------------------------------ @@ -142,6 +143,13 @@ GET semantic-embeddings/_search } ] } + }, + "highlight": { + "fields": { + "semantic_text": { + "number_of_fragments": 2 <5> + } + } } } ------------------------------------------------------------ @@ -150,7 +158,7 @@ GET semantic-embeddings/_search <2> Lexical search is performed on the `content` field using the specified phrase. <3> The second `standard` retriever refers to the semantic search. <4> The `semantic_text` field is used to perform the semantic search. - +<5> Specifies the maximum number of fragments to return. See <> for a more complete example. After performing the hybrid search, the query will return the top 10 documents that match both semantic and lexical search criteria. The results include detailed information about each document: @@ -178,36 +186,14 @@ After performing the hybrid search, the query will return the top 10 documents t "_score": 0.032786883, "_rank": 1, "_source": { - "semantic_text": { - "inference": { - "inference_id": "my-elser-endpoint", - "model_settings": { - "task_type": "sparse_embedding" - }, - "chunks": [ - { - "text": "What so many out there do not realize is the importance of what you do after you work out. You may have done the majority of the work, but how you treat your body in the minutes and hours after you exercise has a direct effect on muscle soreness, muscle strength and growth, and staying hydrated. Cool Down. After your last exercise, your workout is not over. The first thing you need to do is cool down. Even if running was all that you did, you still should do light cardio for a few minutes. This brings your heart rate down at a slow and steady pace, which helps you avoid feeling sick after a workout.", - "embeddings": { - "exercise": 1.571044, - "after": 1.3603843, - "sick": 1.3281639, - "cool": 1.3227621, - "muscle": 1.2645415, - "sore": 1.2561599, - "cooling": 1.2335974, - "running": 1.1750668, - "hours": 1.1104802, - "out": 1.0991782, - "##io": 1.0794281, - "last": 1.0474665, - (...) - } - } - ] - } - }, "id": 8408852, "content": "What so many out there do not realize is the importance of (...)" + }, + "highlight" : { + "semantic_text" : [ + "... fragment_1 ...", + "... fragment_2 ..." 
+ ] } } ] From 07b4461538a7f67b5511bb85d775729bb360f70d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 01:05:21 +1100 Subject: [PATCH 277/383] Mute org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT test {yaml=indices.get_alias/10_basic/Get aliases via /*/_alias/} #121290 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b84fa6f5c98a1..2e4df7c8c0556 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -363,6 +363,9 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/index-modules/slowlog/line_102} issue: https://github.com/elastic/elasticsearch/issues/121288 +- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT + method: test {yaml=indices.get_alias/10_basic/Get aliases via /*/_alias/} + issue: https://github.com/elastic/elasticsearch/issues/121290 # Examples: # From e84bc5d8d9eedbd3b3bd95b5d9724172a9a0c316 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 30 Jan 2025 09:14:24 -0500 Subject: [PATCH 278/383] Add 8.18 back to branches.json --- branches.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/branches.json b/branches.json index 95fbdb1efd655..01e79708d4b60 100644 --- a/branches.json +++ b/branches.json @@ -7,6 +7,9 @@ { "branch": "8.16" }, + { + "branch": "8.18" + }, { "branch": "8.17" }, From d920fee680fdbc581bfc632bb5a68dbfdfdd6541 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 01:16:45 +1100 Subject: [PATCH 279/383] Mute org.elasticsearch.xpack.inference.action.TransportInferenceActionTests testRerouting_HandlesTransportException_FromOtherNode #121292 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2e4df7c8c0556..9f38374cef8d5 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -366,6 +366,9 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=indices.get_alias/10_basic/Get aliases via /*/_alias/} issue: https://github.com/elastic/elasticsearch/issues/121290 +- class: org.elasticsearch.xpack.inference.action.TransportInferenceActionTests + method: testRerouting_HandlesTransportException_FromOtherNode + issue: https://github.com/elastic/elasticsearch/issues/121292 # Examples: # From 2c83d8d90eef5edb549e1c638c0280933b00ff05 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 01:16:56 +1100 Subject: [PATCH 280/383] Mute org.elasticsearch.xpack.inference.action.TransportInferenceActionTests testRerouting_ToOtherNode #121293 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 9f38374cef8d5..6f9e1ecb1c68b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -369,6 +369,9 @@ tests: - class: org.elasticsearch.xpack.inference.action.TransportInferenceActionTests method: testRerouting_HandlesTransportException_FromOtherNode issue: https://github.com/elastic/elasticsearch/issues/121292 +- class: org.elasticsearch.xpack.inference.action.TransportInferenceActionTests + method: testRerouting_ToOtherNode + issue: https://github.com/elastic/elasticsearch/issues/121293 # Examples: # From 766cbb23eb7bf3f4214bcb01a1f55ec930c06b09 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine 
<58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 01:17:04 +1100 Subject: [PATCH 281/383] Mute org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests #121294 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 6f9e1ecb1c68b..6093166a0d061 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -372,6 +372,8 @@ tests: - class: org.elasticsearch.xpack.inference.action.TransportInferenceActionTests method: testRerouting_ToOtherNode issue: https://github.com/elastic/elasticsearch/issues/121293 +- class: org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests + issue: https://github.com/elastic/elasticsearch/issues/121294 # Examples: # From a9522a0cbea7779f2914c5da36d83bd78c9ad31a Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 30 Jan 2025 09:36:03 -0500 Subject: [PATCH 282/383] Add 9.0 back to branches.json --- branches.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/branches.json b/branches.json index 01e79708d4b60..81d5a46991445 100644 --- a/branches.json +++ b/branches.json @@ -7,6 +7,9 @@ { "branch": "8.16" }, + { + "branch": "9.0" + }, { "branch": "8.18" }, From 329651a84cfd22520b550432ca43d20b30d24c28 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Thu, 30 Jan 2025 16:48:40 +0200 Subject: [PATCH 283/383] Skip tests in rest-compat that fail due to versioning issues (#121282) --- muted-tests.yml | 2 -- rest-api-spec/build.gradle | 12 ++++++++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 6093166a0d061..4355226ffadc7 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -341,8 +341,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/121253 - class: org.elasticsearch.lucene.FullClusterRestartLuceneIndexCompatibilityIT issue: https://github.com/elastic/elasticsearch/issues/121257 -- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT - issue: https://github.com/elastic/elasticsearch/issues/121269 - class: org.elasticsearch.upgrades.VectorSearchIT method: testBBQVectorSearch {upgradedNodes=1} issue: https://github.com/elastic/elasticsearch/issues/121271 diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 2452265e336a9..c8861ecaea4b2 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -100,4 +100,16 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("index/91_metrics_no_subobjects/Metrics object indexing with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("index/91_metrics_no_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("logsdb/10_settings/routing path allowed in logs mode with routing on sort fields", "Unknown feature routing.logsb_route_on_sort_fields") + + task.skipTest("search/520_fetch_fields/fetch _seq_no via fields", "Skip until versioning between 8.x and 9.x is restored") + task.skipTest("search/90_search_after/_shard_doc sort", "Skip until versioning between 8.x and 9.x is restored") + task.skipTest("tsdb/90_unsupported_operations/search with routing", "Skip until versioning between 8.x and 9.x is restored") + task.skipTest("tsdb/90_unsupported_operations/noop update", "Skip until versioning 
between 8.x and 9.x is restored") + task.skipTest("tsdb/80_index_resize/split", "Skip until versioning between 8.x and 9.x is restored") + task.skipTest("tsdb/90_unsupported_operations/index with routing over _bulk", "Skip until versioning between 8.x and 9.x is restored") + task.skipTest("tsdb/90_unsupported_operations/index with routing", "Skip until versioning between 8.x and 9.x is restored") + task.skipTest("tsdb/90_unsupported_operations/update over _bulk", "Skip until versioning between 8.x and 9.x is restored") + task.skipTest("tsdb/90_unsupported_operations/regular update", "Skip until versioning between 8.x and 9.x is restored") + task.skipTest("tsdb/25_id_generation/delete over _bulk", "Skip until versioning between 8.x and 9.x is restored") + task.skipTest("tsdb/20_mapping/exact match object type", "Skip until versioning between 8.x and 9.x is restored") }) From 45c191ec554c76b37e1d37adf386f8b8e7d5b443 Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Thu, 30 Jan 2025 10:00:12 -0500 Subject: [PATCH 284/383] Refactor: separate package for entitlement records (#121204) --- libs/entitlement/src/main/java/module-info.java | 1 + .../initialization/EntitlementInitialization.java | 12 ++++++------ .../runtime/policy/ExternalEntitlement.java | 2 ++ .../entitlement/runtime/policy/FileAccessTree.java | 11 ++++++++--- .../entitlement/runtime/policy/PolicyManager.java | 11 ++++++++++- .../entitlement/runtime/policy/PolicyParser.java | 8 ++++++++ .../runtime/policy/PolicyValidationException.java | 6 +++--- .../entitlement/runtime/policy/Scope.java | 2 ++ .../CreateClassLoaderEntitlement.java | 4 +++- .../policy/{ => entitlements}/Entitlement.java | 4 +++- .../{ => entitlements}/ExitVMEntitlement.java | 2 +- .../policy/{ => entitlements}/FileEntitlement.java | 5 ++++- .../InboundNetworkEntitlement.java | 4 +++- .../LoadNativeLibrariesEntitlement.java | 4 +++- .../OutboundNetworkEntitlement.java | 4 +++- .../SetHttpsConnectionPropertiesEntitlement.java | 4 +++- .../WriteAllSystemPropertiesEntitlement.java | 4 +++- .../WriteSystemPropertiesEntitlement.java | 4 +++- .../runtime/policy/FileAccessTreeTests.java | 13 +++++++------ .../runtime/policy/PolicyManagerTests.java | 3 +++ .../runtime/policy/PolicyParserTests.java | 8 ++++++++ .../org/elasticsearch/bootstrap/Elasticsearch.java | 2 +- .../elasticsearch/bootstrap/ElasticsearchTests.java | 6 +++--- 23 files changed, 91 insertions(+), 33 deletions(-) rename libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/{ => entitlements}/CreateClassLoaderEntitlement.java (81%) rename libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/{ => entitlements}/Entitlement.java (83%) rename libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/{ => entitlements}/ExitVMEntitlement.java (90%) rename libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/{ => entitlements}/FileEntitlement.java (86%) rename libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/{ => entitlements}/InboundNetworkEntitlement.java (83%) rename libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/{ => entitlements}/LoadNativeLibrariesEntitlement.java (83%) rename libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/{ => entitlements}/OutboundNetworkEntitlement.java (83%) rename libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/{ => 
entitlements}/SetHttpsConnectionPropertiesEntitlement.java (84%) rename libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/{ => entitlements}/WriteAllSystemPropertiesEntitlement.java (83%) rename libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/{ => entitlements}/WriteSystemPropertiesEntitlement.java (86%) diff --git a/libs/entitlement/src/main/java/module-info.java b/libs/entitlement/src/main/java/module-info.java index c0959f212558a..5c8441bcecb9c 100644 --- a/libs/entitlement/src/main/java/module-info.java +++ b/libs/entitlement/src/main/java/module-info.java @@ -19,6 +19,7 @@ exports org.elasticsearch.entitlement.runtime.api; exports org.elasticsearch.entitlement.runtime.policy; + exports org.elasticsearch.entitlement.runtime.policy.entitlements to org.elasticsearch.server; exports org.elasticsearch.entitlement.instrumentation; exports org.elasticsearch.entitlement.bootstrap to org.elasticsearch.server; exports org.elasticsearch.entitlement.initialization to java.base; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 5b80afa521e57..cb3bc5c78eefb 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -18,15 +18,15 @@ import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.entitlement.instrumentation.Transformer; import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker; -import org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement; -import org.elasticsearch.entitlement.runtime.policy.Entitlement; -import org.elasticsearch.entitlement.runtime.policy.ExitVMEntitlement; -import org.elasticsearch.entitlement.runtime.policy.InboundNetworkEntitlement; -import org.elasticsearch.entitlement.runtime.policy.LoadNativeLibrariesEntitlement; -import org.elasticsearch.entitlement.runtime.policy.OutboundNetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.Policy; import org.elasticsearch.entitlement.runtime.policy.PolicyManager; import org.elasticsearch.entitlement.runtime.policy.Scope; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ExitVMEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; import java.lang.instrument.Instrumentation; import java.lang.reflect.Constructor; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java index 768babdb840f5..b58e0d2fb87e7 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java @@ -9,6 +9,8 @@ package 
org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java index 55813df28b6f8..c16f776176d88 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java @@ -10,6 +10,7 @@ package org.elasticsearch.entitlement.runtime.policy; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FileEntitlement; import java.io.File; import java.nio.file.Path; @@ -18,13 +19,13 @@ import java.util.List; import java.util.Objects; -final class FileAccessTree { - static final FileAccessTree EMPTY = new FileAccessTree(List.of()); +public final class FileAccessTree { + public static final FileAccessTree EMPTY = new FileAccessTree(List.of()); private final String[] readPaths; private final String[] writePaths; - FileAccessTree(List fileEntitlements) { + private FileAccessTree(List fileEntitlements) { List readPaths = new ArrayList<>(); List writePaths = new ArrayList<>(); for (FileEntitlement fileEntitlement : fileEntitlements) { @@ -42,6 +43,10 @@ final class FileAccessTree { this.writePaths = writePaths.toArray(new String[0]); } + public static FileAccessTree of(List fileEntitlements) { + return new FileAccessTree(fileEntitlements); + } + boolean canRead(Path path) { return checkPath(normalize(path), readPaths); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index b08db25a2430b..2243d94911ca4 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -13,6 +13,15 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.runtime.api.NotEntitledException; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ExitVMEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FileEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -55,7 +64,7 @@ public static ModuleEntitlements from(List entitlements) { .toList(); return new ModuleEntitlements( 
entitlements.stream().collect(groupingBy(Entitlement::getClass)), - new FileAccessTree(fileEntitlements) + FileAccessTree.of(fileEntitlements) ); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java index 95437027239b0..992728b68186e 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java @@ -9,6 +9,14 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FileEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement; import org.elasticsearch.xcontent.XContentLocation; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java index a2bc49d99b44f..5f21db011884d 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java @@ -15,13 +15,13 @@ * parser is able to wrap this exception with a line/character number for * additional useful error information. 
*/ -class PolicyValidationException extends RuntimeException { +public class PolicyValidationException extends RuntimeException { - PolicyValidationException(String message) { + public PolicyValidationException(String message) { super(message); } - PolicyValidationException(String message, Throwable cause) { + public PolicyValidationException(String message, Throwable cause) { super(message, cause); } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java index 55e257797d603..6342a155da940 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Scope.java @@ -9,6 +9,8 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; + import java.util.List; import java.util.Objects; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/CreateClassLoaderEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/CreateClassLoaderEntitlement.java similarity index 81% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/CreateClassLoaderEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/CreateClassLoaderEntitlement.java index 55e4b66595642..4b7137f8c7cd6 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/CreateClassLoaderEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/CreateClassLoaderEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; public record CreateClassLoaderEntitlement() implements Entitlement { @ExternalEntitlement diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/Entitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/Entitlement.java index 5b53c399cc1b7..996b8a19ac8b0 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/Entitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/Entitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.Policy; /** * Marker interface to ensure that only {@link Entitlement} are diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExitVMEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ExitVMEntitlement.java similarity index 90% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExitVMEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ExitVMEntitlement.java index e5c836ea22b20..470277c482461 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExitVMEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/ExitVMEntitlement.java @@ -7,7 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; /** * Internal policy type (not-parseable -- not available to plugins). diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FileEntitlement.java similarity index 86% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FileEntitlement.java index 4bd1dc10c85bb..f3a0ee1758a04 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FileEntitlement.java @@ -7,7 +7,10 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; +import org.elasticsearch.entitlement.runtime.policy.PolicyValidationException; import java.nio.file.Paths; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/InboundNetworkEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/InboundNetworkEntitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/InboundNetworkEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/InboundNetworkEntitlement.java index 482d4e5100c0b..7c00a53cc16cb 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/InboundNetworkEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/InboundNetworkEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * Describes an entitlement for inbound network actions (listen/accept/receive) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/LoadNativeLibrariesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/LoadNativeLibrariesEntitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/LoadNativeLibrariesEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/LoadNativeLibrariesEntitlement.java index 9a840c4e3e32e..b297685876925 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/LoadNativeLibrariesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/LoadNativeLibrariesEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * An Entitlement to allow loading native libraries diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/OutboundNetworkEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/OutboundNetworkEntitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/OutboundNetworkEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/OutboundNetworkEntitlement.java index 50d9a47f580e5..dbdd6840f2ebe 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/OutboundNetworkEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/OutboundNetworkEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * Describes an entitlement for outbound network actions (connect/send) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/SetHttpsConnectionPropertiesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/SetHttpsConnectionPropertiesEntitlement.java similarity index 84% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/SetHttpsConnectionPropertiesEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/SetHttpsConnectionPropertiesEntitlement.java index bb2f65def9e18..abfcfdf18db20 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/SetHttpsConnectionPropertiesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/SetHttpsConnectionPropertiesEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * An Entitlement to allow setting properties to a single Https connection after this has been created diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteAllSystemPropertiesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteAllSystemPropertiesEntitlement.java similarity index 83% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteAllSystemPropertiesEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteAllSystemPropertiesEntitlement.java index f0d1d14177332..f0b02e82d3cb5 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteAllSystemPropertiesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteAllSystemPropertiesEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; /** * An Entitlement to allow writing all properties such as system properties. 
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteSystemPropertiesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteSystemPropertiesEntitlement.java similarity index 86% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteSystemPropertiesEntitlement.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteSystemPropertiesEntitlement.java index 654ebbda9dab3..b7818bb14030b 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteSystemPropertiesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/WriteSystemPropertiesEntitlement.java @@ -7,7 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.entitlement.runtime.policy; +package org.elasticsearch.entitlement.runtime.policy.entitlements; + +import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement; import java.util.List; import java.util.Set; diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java index 1521c80341b9d..c133cf0f1242e 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FileEntitlement; import org.elasticsearch.test.ESTestCase; import org.junit.BeforeClass; @@ -31,13 +32,13 @@ private static Path path(String s) { } public void testEmpty() { - var tree = new FileAccessTree(List.of()); + var tree = FileAccessTree.of(List.of()); assertThat(tree.canRead(path("path")), is(false)); assertThat(tree.canWrite(path("path")), is(false)); } public void testRead() { - var tree = new FileAccessTree(List.of(entitlement("foo", "read"))); + var tree = FileAccessTree.of(List.of(entitlement("foo", "read"))); assertThat(tree.canRead(path("foo")), is(true)); assertThat(tree.canRead(path("foo/subdir")), is(true)); assertThat(tree.canWrite(path("foo")), is(false)); @@ -47,7 +48,7 @@ public void testRead() { } public void testWrite() { - var tree = new FileAccessTree(List.of(entitlement("foo", "read_write"))); + var tree = FileAccessTree.of(List.of(entitlement("foo", "read_write"))); assertThat(tree.canWrite(path("foo")), is(true)); assertThat(tree.canWrite(path("foo/subdir")), is(true)); assertThat(tree.canRead(path("foo")), is(true)); @@ -57,7 +58,7 @@ public void testWrite() { } public void testTwoPaths() { - var tree = new FileAccessTree(List.of(entitlement("foo", "read"), entitlement("bar", "read"))); + var tree = FileAccessTree.of(List.of(entitlement("foo", "read"), entitlement("bar", "read"))); assertThat(tree.canRead(path("a")), is(false)); assertThat(tree.canRead(path("bar")), is(true)); assertThat(tree.canRead(path("bar/subdir")), is(true)); @@ -68,7 +69,7 @@ public void testTwoPaths() { } public void testReadWriteUnderRead() { - var tree = new FileAccessTree(List.of(entitlement("foo", "read"), entitlement("foo/bar", "read_write"))); + var tree = FileAccessTree.of(List.of(entitlement("foo", "read"), entitlement("foo/bar", 
"read_write"))); assertThat(tree.canRead(path("foo")), is(true)); assertThat(tree.canWrite(path("foo")), is(false)); assertThat(tree.canRead(path("foo/bar")), is(true)); @@ -76,7 +77,7 @@ public void testReadWriteUnderRead() { } public void testNormalizePath() { - var tree = new FileAccessTree(List.of(entitlement("foo/../bar", "read"))); + var tree = FileAccessTree.of(List.of(entitlement("foo/../bar", "read"))); assertThat(tree.canRead(path("foo/../bar")), is(true)); assertThat(tree.canRead(path("foo")), is(false)); assertThat(tree.canRead(path("")), is(false)); diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index 6854ef54ca5f0..dc13703d85d2e 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -12,6 +12,9 @@ import org.elasticsearch.entitlement.runtime.policy.PolicyManager.ModuleEntitlements; import org.elasticsearch.entitlement.runtime.policy.agent.TestAgent; import org.elasticsearch.entitlement.runtime.policy.agent.inner.TestInnerAgent; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.ExitVMEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FileEntitlement; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.compiler.InMemoryJavaCompiler; import org.elasticsearch.test.jar.JarUtils; diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java index 191b3afcdc674..53cd5ee8aae08 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java @@ -9,6 +9,14 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FileEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement; import org.elasticsearch.test.ESTestCase; import java.io.ByteArrayInputStream; diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 311df05f9b07b..1c959d4157f9b 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -32,9 +32,9 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; import 
org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; -import org.elasticsearch.entitlement.runtime.policy.LoadNativeLibrariesEntitlement; import org.elasticsearch.entitlement.runtime.policy.Policy; import org.elasticsearch.entitlement.runtime.policy.PolicyParserUtils; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.jdk.JarHell; diff --git a/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java b/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java index d84ee0267251a..8eaff521068e8 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java @@ -9,11 +9,11 @@ package org.elasticsearch.bootstrap; -import org.elasticsearch.entitlement.runtime.policy.InboundNetworkEntitlement; -import org.elasticsearch.entitlement.runtime.policy.LoadNativeLibrariesEntitlement; -import org.elasticsearch.entitlement.runtime.policy.OutboundNetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.Policy; import org.elasticsearch.entitlement.runtime.policy.Scope; +import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; import org.elasticsearch.test.ESTestCase; import java.util.List; From 70b397cf5718027410323c3dec3f3718df702de9 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Thu, 30 Jan 2025 16:08:49 +0100 Subject: [PATCH 285/383] ESQL: Enable LOOKUP JOIN in non-snapshot builds (#121193) This effectively releases LOOKUP JOIN into tech preview. Docs will follow in a separate PR. * Enable the lexing/grammar for LOOKUP JOIN in non-snapshot builds. * Remove the grammar for the unsupported | JOIN ... command (without LOOKUP as first keyword). The way the lexer modes work, otherwise we'd also have to enable | JOIN ... syntax on non-snapshot builds and would have to add additional validation to provide appropriate error messages. * Remove grammar for LOOKUP JOIN index AS ... because qualifiers are not yet supported. Otherwise we'd have to put in additional validation as well to prevent such queries. 
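For illustration, the query shape that release (non-snapshot) builds now accept is the one already exercised by the TelemetryIT test in this change; the source index name below is only a placeholder, while lookup_idx and host are taken from that test:

    FROM idx
    | EVAL y = to_str(host)
    | LOOKUP JOIN lookup_idx ON host

The LOOKUP keyword still has to come first: a bare | JOIN ... pipe and the LOOKUP JOIN index AS ... form remain unsupported, as described in the points above.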
--------- Co-authored-by: Bogdan Pintea Co-authored-by: elasticsearchmachine Co-authored-by: Nik Everett --- docs/changelog/121193.yaml | 18 + muted-tests.yml | 2 - .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 2 +- .../xpack/esql/action/TelemetryIT.java | 8 +- .../esql/src/main/antlr/EsqlBaseLexer.g4 | 5 +- .../esql/src/main/antlr/EsqlBaseLexer.tokens | 294 +-- .../esql/src/main/antlr/EsqlBaseParser.g4 | 6 +- .../esql/src/main/antlr/EsqlBaseParser.tokens | 294 +-- .../xpack/esql/action/EsqlCapabilities.java | 2 +- .../xpack/esql/parser/EsqlBaseLexer.interp | 15 +- .../xpack/esql/parser/EsqlBaseLexer.java | 2099 ++++++++--------- .../xpack/esql/parser/EsqlBaseParser.interp | 10 +- .../xpack/esql/parser/EsqlBaseParser.java | 1569 ++++++------ .../xpack/esql/parser/LogicalPlanBuilder.java | 5 +- .../GrammarInDevelopmentParsingTests.java | 2 +- 15 files changed, 2144 insertions(+), 2187 deletions(-) create mode 100644 docs/changelog/121193.yaml diff --git a/docs/changelog/121193.yaml b/docs/changelog/121193.yaml new file mode 100644 index 0000000000000..af45b0656265f --- /dev/null +++ b/docs/changelog/121193.yaml @@ -0,0 +1,18 @@ +pr: 121193 +summary: Enable LOOKUP JOIN in non-snapshot builds +area: ES|QL +type: enhancement +issues: + - 121185 +highlight: + title: Enable LOOKUP JOIN in non-snapshot builds + body: |- + This effectively releases LOOKUP JOIN into tech preview. Docs will + follow in a separate PR. + + - Enable the lexing/grammar for LOOKUP JOIN in non-snapshot builds. + - Remove the grammar for the unsupported `| JOIN ...` command (without `LOOKUP` as first keyword). The way the lexer modes work, otherwise we'd also have to enable `| JOIN ...` syntax on non-snapshot builds and would have to add additional validation to provide appropriate error messages. + - Remove grammar for `LOOKUP JOIN index AS ...` because qualifiers are not yet supported. Otherwise we'd have to put in additional validation as well to prevent such queries. 
+ + Also fix https://github.com/elastic/elasticsearch/issues/121185 + notable: true diff --git a/muted-tests.yml b/muted-tests.yml index 4355226ffadc7..d093c905bde51 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -314,8 +314,6 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testSetEnabled issue: https://github.com/elastic/elasticsearch/issues/121183 -- class: org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests - issue: https://github.com/elastic/elasticsearch/issues/121185 - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cat.aliases/10_basic/Simple alias} issue: https://github.com/elastic/elasticsearch/issues/121186 diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 18bfb6b8676ce..5911da0326901 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -232,7 +232,7 @@ protected boolean supportsIndexModeLookup() throws IOException { protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); - if (testCase.query.toUpperCase(Locale.ROOT).contains("LOOKUP")) { + if (testCase.query.toUpperCase(Locale.ROOT).contains("LOOKUP_\uD83D\uDC14")) { builder.tables(tables()); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java index 7036216ebbbcf..c8dc134e0e706 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TelemetryIT.java @@ -142,11 +142,9 @@ public static Iterable parameters() { | EVAL y = to_str(host) | LOOKUP JOIN lookup_idx ON host """, - Build.current().isSnapshot() - ? Map.ofEntries(Map.entry("FROM", 1), Map.entry("EVAL", 1), Map.entry("LOOKUP JOIN", 1)) - : Collections.emptyMap(), - Build.current().isSnapshot() ? Map.ofEntries(Map.entry("TO_STRING", 1)) : Collections.emptyMap(), - Build.current().isSnapshot() + Map.ofEntries(Map.entry("FROM", 1), Map.entry("EVAL", 1), Map.entry("LOOKUP JOIN", 1)), + Map.ofEntries(Map.entry("TO_STRING", 1)), + true ) }, new Object[] { new Test( diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 5b731b5dac9d2..e8879f9cfdaa5 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -73,6 +73,7 @@ SHOW : 'show' -> pushMode(SHOW_MODE); SORT : 'sort' -> pushMode(EXPRESSION_MODE); STATS : 'stats' -> pushMode(EXPRESSION_MODE); WHERE : 'where' -> pushMode(EXPRESSION_MODE); +JOIN_LOOKUP : 'lookup' -> pushMode(JOIN_MODE); // // in development // @@ -88,11 +89,9 @@ DEV_INLINESTATS : {this.isDevVersion()}? 'inlinestats' -> pushMode(EXPRESSION_ DEV_LOOKUP : {this.isDevVersion()}? 'lookup_🐔' -> pushMode(LOOKUP_MODE); DEV_METRICS : {this.isDevVersion()}? 'metrics' -> pushMode(METRICS_MODE); // list of all JOIN commands -DEV_JOIN : {this.isDevVersion()}? 
'join' -> pushMode(JOIN_MODE); DEV_JOIN_FULL : {this.isDevVersion()}? 'full' -> pushMode(JOIN_MODE); DEV_JOIN_LEFT : {this.isDevVersion()}? 'left' -> pushMode(JOIN_MODE); DEV_JOIN_RIGHT : {this.isDevVersion()}? 'right' -> pushMode(JOIN_MODE); -DEV_JOIN_LOOKUP : {this.isDevVersion()}? 'lookup' -> pushMode(JOIN_MODE); // @@ -556,7 +555,7 @@ LOOKUP_FIELD_WS // mode JOIN_MODE; JOIN_PIPE : PIPE -> type(PIPE), popMode; -JOIN_JOIN : DEV_JOIN -> type(DEV_JOIN); +JOIN : 'join'; JOIN_AS : AS -> type(AS); JOIN_ON : ON -> type(ON), popMode, pushMode(EXPRESSION_MODE); USING : 'USING' -> popMode, pushMode(EXPRESSION_MODE); diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 218884913960f..02af324872fc0 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -14,110 +14,110 @@ SHOW=13 SORT=14 STATS=15 WHERE=16 -DEV_INLINESTATS=17 -DEV_LOOKUP=18 -DEV_METRICS=19 -DEV_JOIN=20 +JOIN_LOOKUP=17 +DEV_INLINESTATS=18 +DEV_LOOKUP=19 +DEV_METRICS=20 DEV_JOIN_FULL=21 DEV_JOIN_LEFT=22 DEV_JOIN_RIGHT=23 -DEV_JOIN_LOOKUP=24 -UNKNOWN_CMD=25 -LINE_COMMENT=26 -MULTILINE_COMMENT=27 -WS=28 -PIPE=29 -QUOTED_STRING=30 -INTEGER_LITERAL=31 -DECIMAL_LITERAL=32 -BY=33 -AND=34 -ASC=35 -ASSIGN=36 -CAST_OP=37 -COLON=38 -COMMA=39 -DESC=40 -DOT=41 -FALSE=42 -FIRST=43 -IN=44 -IS=45 -LAST=46 -LIKE=47 -LP=48 -NOT=49 -NULL=50 -NULLS=51 -OR=52 -PARAM=53 -RLIKE=54 -RP=55 -TRUE=56 -EQ=57 -CIEQ=58 -NEQ=59 -LT=60 -LTE=61 -GT=62 -GTE=63 -PLUS=64 -MINUS=65 -ASTERISK=66 -SLASH=67 -PERCENT=68 -LEFT_BRACES=69 -RIGHT_BRACES=70 -NAMED_OR_POSITIONAL_PARAM=71 -OPENING_BRACKET=72 -CLOSING_BRACKET=73 -UNQUOTED_IDENTIFIER=74 -QUOTED_IDENTIFIER=75 -EXPR_LINE_COMMENT=76 -EXPR_MULTILINE_COMMENT=77 -EXPR_WS=78 -EXPLAIN_WS=79 -EXPLAIN_LINE_COMMENT=80 -EXPLAIN_MULTILINE_COMMENT=81 -METADATA=82 -UNQUOTED_SOURCE=83 -FROM_LINE_COMMENT=84 -FROM_MULTILINE_COMMENT=85 -FROM_WS=86 -ID_PATTERN=87 -PROJECT_LINE_COMMENT=88 -PROJECT_MULTILINE_COMMENT=89 -PROJECT_WS=90 -AS=91 -RENAME_LINE_COMMENT=92 -RENAME_MULTILINE_COMMENT=93 -RENAME_WS=94 -ON=95 -WITH=96 -ENRICH_POLICY_NAME=97 -ENRICH_LINE_COMMENT=98 -ENRICH_MULTILINE_COMMENT=99 -ENRICH_WS=100 -ENRICH_FIELD_LINE_COMMENT=101 -ENRICH_FIELD_MULTILINE_COMMENT=102 -ENRICH_FIELD_WS=103 -MVEXPAND_LINE_COMMENT=104 -MVEXPAND_MULTILINE_COMMENT=105 -MVEXPAND_WS=106 -INFO=107 -SHOW_LINE_COMMENT=108 -SHOW_MULTILINE_COMMENT=109 -SHOW_WS=110 -SETTING=111 -SETTING_LINE_COMMENT=112 -SETTTING_MULTILINE_COMMENT=113 -SETTING_WS=114 -LOOKUP_LINE_COMMENT=115 -LOOKUP_MULTILINE_COMMENT=116 -LOOKUP_WS=117 -LOOKUP_FIELD_LINE_COMMENT=118 -LOOKUP_FIELD_MULTILINE_COMMENT=119 -LOOKUP_FIELD_WS=120 +UNKNOWN_CMD=24 +LINE_COMMENT=25 +MULTILINE_COMMENT=26 +WS=27 +PIPE=28 +QUOTED_STRING=29 +INTEGER_LITERAL=30 +DECIMAL_LITERAL=31 +BY=32 +AND=33 +ASC=34 +ASSIGN=35 +CAST_OP=36 +COLON=37 +COMMA=38 +DESC=39 +DOT=40 +FALSE=41 +FIRST=42 +IN=43 +IS=44 +LAST=45 +LIKE=46 +LP=47 +NOT=48 +NULL=49 +NULLS=50 +OR=51 +PARAM=52 +RLIKE=53 +RP=54 +TRUE=55 +EQ=56 +CIEQ=57 +NEQ=58 +LT=59 +LTE=60 +GT=61 +GTE=62 +PLUS=63 +MINUS=64 +ASTERISK=65 +SLASH=66 +PERCENT=67 +LEFT_BRACES=68 +RIGHT_BRACES=69 +NAMED_OR_POSITIONAL_PARAM=70 +OPENING_BRACKET=71 +CLOSING_BRACKET=72 +UNQUOTED_IDENTIFIER=73 +QUOTED_IDENTIFIER=74 +EXPR_LINE_COMMENT=75 +EXPR_MULTILINE_COMMENT=76 +EXPR_WS=77 +EXPLAIN_WS=78 +EXPLAIN_LINE_COMMENT=79 +EXPLAIN_MULTILINE_COMMENT=80 +METADATA=81 +UNQUOTED_SOURCE=82 +FROM_LINE_COMMENT=83 +FROM_MULTILINE_COMMENT=84 
+FROM_WS=85 +ID_PATTERN=86 +PROJECT_LINE_COMMENT=87 +PROJECT_MULTILINE_COMMENT=88 +PROJECT_WS=89 +AS=90 +RENAME_LINE_COMMENT=91 +RENAME_MULTILINE_COMMENT=92 +RENAME_WS=93 +ON=94 +WITH=95 +ENRICH_POLICY_NAME=96 +ENRICH_LINE_COMMENT=97 +ENRICH_MULTILINE_COMMENT=98 +ENRICH_WS=99 +ENRICH_FIELD_LINE_COMMENT=100 +ENRICH_FIELD_MULTILINE_COMMENT=101 +ENRICH_FIELD_WS=102 +MVEXPAND_LINE_COMMENT=103 +MVEXPAND_MULTILINE_COMMENT=104 +MVEXPAND_WS=105 +INFO=106 +SHOW_LINE_COMMENT=107 +SHOW_MULTILINE_COMMENT=108 +SHOW_WS=109 +SETTING=110 +SETTING_LINE_COMMENT=111 +SETTTING_MULTILINE_COMMENT=112 +SETTING_WS=113 +LOOKUP_LINE_COMMENT=114 +LOOKUP_MULTILINE_COMMENT=115 +LOOKUP_WS=116 +LOOKUP_FIELD_LINE_COMMENT=117 +LOOKUP_FIELD_MULTILINE_COMMENT=118 +LOOKUP_FIELD_WS=119 +JOIN=120 USING=121 JOIN_LINE_COMMENT=122 JOIN_MULTILINE_COMMENT=123 @@ -144,49 +144,51 @@ CLOSING_METRICS_WS=130 'sort'=14 'stats'=15 'where'=16 -'|'=29 -'by'=33 -'and'=34 -'asc'=35 -'='=36 -'::'=37 -':'=38 -','=39 -'desc'=40 -'.'=41 -'false'=42 -'first'=43 -'in'=44 -'is'=45 -'last'=46 -'like'=47 -'('=48 -'not'=49 -'null'=50 -'nulls'=51 -'or'=52 -'?'=53 -'rlike'=54 -')'=55 -'true'=56 -'=='=57 -'=~'=58 -'!='=59 -'<'=60 -'<='=61 -'>'=62 -'>='=63 -'+'=64 -'-'=65 -'*'=66 -'/'=67 -'%'=68 -'{'=69 -'}'=70 -']'=73 -'metadata'=82 -'as'=91 -'on'=95 -'with'=96 -'info'=107 +'lookup'=17 +'|'=28 +'by'=32 +'and'=33 +'asc'=34 +'='=35 +'::'=36 +':'=37 +','=38 +'desc'=39 +'.'=40 +'false'=41 +'first'=42 +'in'=43 +'is'=44 +'last'=45 +'like'=46 +'('=47 +'not'=48 +'null'=49 +'nulls'=50 +'or'=51 +'?'=52 +'rlike'=53 +')'=54 +'true'=55 +'=='=56 +'=~'=57 +'!='=58 +'<'=59 +'<='=60 +'>'=61 +'>='=62 +'+'=63 +'-'=64 +'*'=65 +'/'=66 +'%'=67 +'{'=68 +'}'=69 +']'=72 +'metadata'=81 +'as'=90 +'on'=94 +'with'=95 +'info'=106 +'join'=120 'USING'=121 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index c66da879a5709..9ea0d67b3801c 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -51,10 +51,10 @@ processingCommand | grokCommand | enrichCommand | mvExpandCommand + | joinCommand // in development | {this.isDevVersion()}? inlinestatsCommand | {this.isDevVersion()}? lookupCommand - | {this.isDevVersion()}? joinCommand ; whereCommand @@ -324,11 +324,11 @@ inlinestatsCommand ; joinCommand - : type=(DEV_JOIN_LOOKUP | DEV_JOIN_LEFT | DEV_JOIN_RIGHT)? DEV_JOIN joinTarget joinCondition + : type=(JOIN_LOOKUP | DEV_JOIN_LEFT | DEV_JOIN_RIGHT) JOIN joinTarget joinCondition ; joinTarget - : index=indexPattern (AS alias=identifier)? 
+ : index=indexPattern ; joinCondition diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 218884913960f..02af324872fc0 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -14,110 +14,110 @@ SHOW=13 SORT=14 STATS=15 WHERE=16 -DEV_INLINESTATS=17 -DEV_LOOKUP=18 -DEV_METRICS=19 -DEV_JOIN=20 +JOIN_LOOKUP=17 +DEV_INLINESTATS=18 +DEV_LOOKUP=19 +DEV_METRICS=20 DEV_JOIN_FULL=21 DEV_JOIN_LEFT=22 DEV_JOIN_RIGHT=23 -DEV_JOIN_LOOKUP=24 -UNKNOWN_CMD=25 -LINE_COMMENT=26 -MULTILINE_COMMENT=27 -WS=28 -PIPE=29 -QUOTED_STRING=30 -INTEGER_LITERAL=31 -DECIMAL_LITERAL=32 -BY=33 -AND=34 -ASC=35 -ASSIGN=36 -CAST_OP=37 -COLON=38 -COMMA=39 -DESC=40 -DOT=41 -FALSE=42 -FIRST=43 -IN=44 -IS=45 -LAST=46 -LIKE=47 -LP=48 -NOT=49 -NULL=50 -NULLS=51 -OR=52 -PARAM=53 -RLIKE=54 -RP=55 -TRUE=56 -EQ=57 -CIEQ=58 -NEQ=59 -LT=60 -LTE=61 -GT=62 -GTE=63 -PLUS=64 -MINUS=65 -ASTERISK=66 -SLASH=67 -PERCENT=68 -LEFT_BRACES=69 -RIGHT_BRACES=70 -NAMED_OR_POSITIONAL_PARAM=71 -OPENING_BRACKET=72 -CLOSING_BRACKET=73 -UNQUOTED_IDENTIFIER=74 -QUOTED_IDENTIFIER=75 -EXPR_LINE_COMMENT=76 -EXPR_MULTILINE_COMMENT=77 -EXPR_WS=78 -EXPLAIN_WS=79 -EXPLAIN_LINE_COMMENT=80 -EXPLAIN_MULTILINE_COMMENT=81 -METADATA=82 -UNQUOTED_SOURCE=83 -FROM_LINE_COMMENT=84 -FROM_MULTILINE_COMMENT=85 -FROM_WS=86 -ID_PATTERN=87 -PROJECT_LINE_COMMENT=88 -PROJECT_MULTILINE_COMMENT=89 -PROJECT_WS=90 -AS=91 -RENAME_LINE_COMMENT=92 -RENAME_MULTILINE_COMMENT=93 -RENAME_WS=94 -ON=95 -WITH=96 -ENRICH_POLICY_NAME=97 -ENRICH_LINE_COMMENT=98 -ENRICH_MULTILINE_COMMENT=99 -ENRICH_WS=100 -ENRICH_FIELD_LINE_COMMENT=101 -ENRICH_FIELD_MULTILINE_COMMENT=102 -ENRICH_FIELD_WS=103 -MVEXPAND_LINE_COMMENT=104 -MVEXPAND_MULTILINE_COMMENT=105 -MVEXPAND_WS=106 -INFO=107 -SHOW_LINE_COMMENT=108 -SHOW_MULTILINE_COMMENT=109 -SHOW_WS=110 -SETTING=111 -SETTING_LINE_COMMENT=112 -SETTTING_MULTILINE_COMMENT=113 -SETTING_WS=114 -LOOKUP_LINE_COMMENT=115 -LOOKUP_MULTILINE_COMMENT=116 -LOOKUP_WS=117 -LOOKUP_FIELD_LINE_COMMENT=118 -LOOKUP_FIELD_MULTILINE_COMMENT=119 -LOOKUP_FIELD_WS=120 +UNKNOWN_CMD=24 +LINE_COMMENT=25 +MULTILINE_COMMENT=26 +WS=27 +PIPE=28 +QUOTED_STRING=29 +INTEGER_LITERAL=30 +DECIMAL_LITERAL=31 +BY=32 +AND=33 +ASC=34 +ASSIGN=35 +CAST_OP=36 +COLON=37 +COMMA=38 +DESC=39 +DOT=40 +FALSE=41 +FIRST=42 +IN=43 +IS=44 +LAST=45 +LIKE=46 +LP=47 +NOT=48 +NULL=49 +NULLS=50 +OR=51 +PARAM=52 +RLIKE=53 +RP=54 +TRUE=55 +EQ=56 +CIEQ=57 +NEQ=58 +LT=59 +LTE=60 +GT=61 +GTE=62 +PLUS=63 +MINUS=64 +ASTERISK=65 +SLASH=66 +PERCENT=67 +LEFT_BRACES=68 +RIGHT_BRACES=69 +NAMED_OR_POSITIONAL_PARAM=70 +OPENING_BRACKET=71 +CLOSING_BRACKET=72 +UNQUOTED_IDENTIFIER=73 +QUOTED_IDENTIFIER=74 +EXPR_LINE_COMMENT=75 +EXPR_MULTILINE_COMMENT=76 +EXPR_WS=77 +EXPLAIN_WS=78 +EXPLAIN_LINE_COMMENT=79 +EXPLAIN_MULTILINE_COMMENT=80 +METADATA=81 +UNQUOTED_SOURCE=82 +FROM_LINE_COMMENT=83 +FROM_MULTILINE_COMMENT=84 +FROM_WS=85 +ID_PATTERN=86 +PROJECT_LINE_COMMENT=87 +PROJECT_MULTILINE_COMMENT=88 +PROJECT_WS=89 +AS=90 +RENAME_LINE_COMMENT=91 +RENAME_MULTILINE_COMMENT=92 +RENAME_WS=93 +ON=94 +WITH=95 +ENRICH_POLICY_NAME=96 +ENRICH_LINE_COMMENT=97 +ENRICH_MULTILINE_COMMENT=98 +ENRICH_WS=99 +ENRICH_FIELD_LINE_COMMENT=100 +ENRICH_FIELD_MULTILINE_COMMENT=101 +ENRICH_FIELD_WS=102 +MVEXPAND_LINE_COMMENT=103 +MVEXPAND_MULTILINE_COMMENT=104 +MVEXPAND_WS=105 +INFO=106 +SHOW_LINE_COMMENT=107 +SHOW_MULTILINE_COMMENT=108 +SHOW_WS=109 +SETTING=110 +SETTING_LINE_COMMENT=111 
+SETTTING_MULTILINE_COMMENT=112 +SETTING_WS=113 +LOOKUP_LINE_COMMENT=114 +LOOKUP_MULTILINE_COMMENT=115 +LOOKUP_WS=116 +LOOKUP_FIELD_LINE_COMMENT=117 +LOOKUP_FIELD_MULTILINE_COMMENT=118 +LOOKUP_FIELD_WS=119 +JOIN=120 USING=121 JOIN_LINE_COMMENT=122 JOIN_MULTILINE_COMMENT=123 @@ -144,49 +144,51 @@ CLOSING_METRICS_WS=130 'sort'=14 'stats'=15 'where'=16 -'|'=29 -'by'=33 -'and'=34 -'asc'=35 -'='=36 -'::'=37 -':'=38 -','=39 -'desc'=40 -'.'=41 -'false'=42 -'first'=43 -'in'=44 -'is'=45 -'last'=46 -'like'=47 -'('=48 -'not'=49 -'null'=50 -'nulls'=51 -'or'=52 -'?'=53 -'rlike'=54 -')'=55 -'true'=56 -'=='=57 -'=~'=58 -'!='=59 -'<'=60 -'<='=61 -'>'=62 -'>='=63 -'+'=64 -'-'=65 -'*'=66 -'/'=67 -'%'=68 -'{'=69 -'}'=70 -']'=73 -'metadata'=82 -'as'=91 -'on'=95 -'with'=96 -'info'=107 +'lookup'=17 +'|'=28 +'by'=32 +'and'=33 +'asc'=34 +'='=35 +'::'=36 +':'=37 +','=38 +'desc'=39 +'.'=40 +'false'=41 +'first'=42 +'in'=43 +'is'=44 +'last'=45 +'like'=46 +'('=47 +'not'=48 +'null'=49 +'nulls'=50 +'or'=51 +'?'=52 +'rlike'=53 +')'=54 +'true'=55 +'=='=56 +'=~'=57 +'!='=58 +'<'=59 +'<='=60 +'>'=61 +'>='=62 +'+'=63 +'-'=64 +'*'=65 +'/'=66 +'%'=67 +'{'=68 +'}'=69 +']'=72 +'metadata'=81 +'as'=90 +'on'=94 +'with'=95 +'info'=106 +'join'=120 'USING'=121 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 47a8a586bf1df..20de3e443107d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -694,7 +694,7 @@ public enum Cap { /** * LOOKUP JOIN */ - JOIN_LOOKUP_V12(Build.current().isSnapshot()), + JOIN_LOOKUP_V12, /** * LOOKUP JOIN with TEXT fields on the right (right side of the join) (#119473) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 1d050bd91e66c..f0df3817ac658 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -16,8 +16,7 @@ null 'sort' 'stats' 'where' -null -null +'lookup' null null null @@ -120,6 +119,7 @@ null null null null +'join' 'USING' null null @@ -149,14 +149,13 @@ SHOW SORT STATS WHERE +JOIN_LOOKUP DEV_INLINESTATS DEV_LOOKUP DEV_METRICS -DEV_JOIN DEV_JOIN_FULL DEV_JOIN_LEFT DEV_JOIN_RIGHT -DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT @@ -253,6 +252,7 @@ LOOKUP_WS LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS +JOIN USING JOIN_LINE_COMMENT JOIN_MULTILINE_COMMENT @@ -281,14 +281,13 @@ SHOW SORT STATS WHERE +JOIN_LOOKUP DEV_INLINESTATS DEV_LOOKUP DEV_METRICS -DEV_JOIN DEV_JOIN_FULL DEV_JOIN_LEFT DEV_JOIN_RIGHT -DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT @@ -456,7 +455,7 @@ LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS JOIN_PIPE -JOIN_JOIN +JOIN JOIN_AS JOIN_ON USING @@ -507,4 +506,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 130, 1627, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 
15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 2, 214, 7, 214, 2, 215, 7, 215, 2, 216, 7, 216, 2, 217, 7, 217, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 
1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 664, 8, 24, 11, 24, 12, 24, 665, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 674, 8, 25, 10, 25, 12, 25, 677, 9, 25, 1, 25, 3, 25, 680, 8, 25, 1, 25, 3, 25, 683, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 692, 8, 26, 10, 26, 12, 26, 695, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 703, 8, 27, 11, 27, 12, 27, 704, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 724, 8, 33, 1, 33, 4, 33, 727, 8, 33, 11, 33, 12, 33, 728, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 738, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 745, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 750, 8, 39, 10, 39, 12, 39, 753, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 761, 8, 39, 10, 39, 12, 39, 764, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 771, 8, 39, 1, 39, 3, 39, 774, 8, 39, 3, 39, 776, 8, 39, 1, 40, 4, 40, 779, 8, 40, 11, 40, 12, 40, 780, 1, 41, 4, 41, 784, 8, 41, 11, 41, 12, 41, 785, 1, 41, 1, 41, 5, 41, 790, 8, 41, 10, 41, 12, 41, 793, 9, 41, 1, 41, 1, 41, 4, 41, 797, 8, 41, 11, 41, 12, 41, 798, 1, 41, 4, 41, 802, 8, 41, 11, 41, 12, 41, 803, 1, 41, 1, 41, 5, 41, 808, 8, 41, 10, 41, 12, 41, 811, 9, 41, 3, 41, 813, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 819, 8, 41, 11, 41, 12, 41, 820, 1, 41, 1, 41, 3, 41, 825, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 3, 81, 957, 8, 81, 1, 81, 5, 81, 960, 8, 81, 10, 81, 12, 81, 963, 9, 81, 1, 81, 1, 81, 4, 81, 967, 8, 81, 11, 81, 12, 81, 968, 3, 81, 971, 8, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 5, 84, 985, 8, 84, 10, 84, 12, 84, 988, 9, 84, 1, 84, 1, 84, 3, 84, 992, 8, 84, 1, 84, 4, 84, 995, 8, 84, 11, 84, 12, 84, 996, 3, 84, 999, 8, 
84, 1, 85, 1, 85, 4, 85, 1003, 8, 85, 11, 85, 12, 85, 1004, 1, 85, 1, 85, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 3, 102, 1082, 8, 102, 1, 103, 4, 103, 1085, 8, 103, 11, 103, 12, 103, 1086, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 3, 114, 1136, 8, 114, 1, 115, 1, 115, 3, 115, 1140, 8, 115, 1, 115, 5, 115, 1143, 8, 115, 10, 115, 12, 115, 1146, 9, 115, 1, 115, 1, 115, 3, 115, 1150, 8, 115, 1, 115, 4, 115, 1153, 8, 115, 11, 115, 12, 115, 1154, 3, 115, 1157, 8, 115, 1, 116, 1, 116, 4, 116, 1161, 8, 116, 11, 116, 12, 116, 1162, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 136, 4, 136, 1248, 8, 136, 11, 136, 12, 136, 1249, 1, 136, 1, 136, 3, 136, 1254, 8, 136, 1, 136, 4, 136, 1257, 8, 136, 11, 136, 12, 136, 1258, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 4, 169, 1404, 8, 169, 11, 169, 12, 169, 1405, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 
176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 1, 217, 1, 217, 1, 217, 1, 217, 1, 217, 2, 693, 762, 0, 218, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 69, 174, 70, 176, 0, 178, 71, 180, 72, 182, 73, 184, 74, 186, 0, 188, 75, 190, 76, 192, 77, 194, 78, 196, 0, 198, 0, 200, 79, 202, 80, 204, 81, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 82, 220, 0, 222, 83, 224, 0, 226, 0, 228, 84, 230, 85, 232, 86, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 0, 246, 0, 248, 87, 250, 88, 252, 89, 254, 90, 256, 0, 258, 0, 260, 0, 262, 0, 264, 0, 266, 0, 268, 91, 270, 0, 272, 92, 274, 93, 276, 94, 278, 0, 280, 0, 282, 95, 284, 96, 286, 0, 288, 97, 290, 0, 292, 98, 294, 99, 296, 100, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 0, 314, 0, 316, 101, 318, 102, 320, 103, 322, 0, 324, 0, 326, 0, 328, 0, 330, 0, 332, 0, 334, 104, 336, 105, 338, 106, 340, 0, 342, 107, 344, 108, 346, 109, 348, 110, 350, 0, 352, 0, 354, 111, 356, 112, 358, 113, 360, 114, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 0, 374, 0, 376, 115, 378, 116, 380, 117, 382, 0, 384, 0, 386, 0, 388, 0, 390, 118, 392, 119, 394, 120, 396, 0, 398, 0, 400, 0, 402, 0, 404, 121, 406, 0, 408, 0, 410, 0, 412, 0, 414, 0, 416, 122, 418, 123, 420, 124, 422, 0, 424, 0, 426, 0, 428, 125, 430, 126, 432, 127, 434, 0, 436, 0, 438, 128, 440, 129, 442, 130, 444, 0, 446, 0, 448, 0, 450, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 
115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1654, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 182, 1, 0, 0, 0, 1, 184, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 1, 192, 1, 0, 0, 0, 1, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 2, 202, 1, 0, 0, 0, 2, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 3, 230, 1, 0, 0, 0, 3, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 240, 1, 0, 0, 0, 4, 242, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 4, 252, 1, 0, 0, 0, 4, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 5, 274, 1, 0, 0, 0, 5, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 282, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 6, 294, 1, 0, 0, 0, 6, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 
0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 7, 318, 1, 0, 0, 0, 7, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 8, 336, 1, 0, 0, 0, 8, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 9, 346, 1, 0, 0, 0, 9, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 10, 358, 1, 0, 0, 0, 10, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 11, 378, 1, 0, 0, 0, 11, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 12, 392, 1, 0, 0, 0, 12, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 13, 412, 1, 0, 0, 0, 13, 414, 1, 0, 0, 0, 13, 416, 1, 0, 0, 0, 13, 418, 1, 0, 0, 0, 13, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 14, 424, 1, 0, 0, 0, 14, 426, 1, 0, 0, 0, 14, 428, 1, 0, 0, 0, 14, 430, 1, 0, 0, 0, 14, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 15, 442, 1, 0, 0, 0, 15, 444, 1, 0, 0, 0, 15, 446, 1, 0, 0, 0, 15, 448, 1, 0, 0, 0, 15, 450, 1, 0, 0, 0, 16, 452, 1, 0, 0, 0, 18, 462, 1, 0, 0, 0, 20, 469, 1, 0, 0, 0, 22, 478, 1, 0, 0, 0, 24, 485, 1, 0, 0, 0, 26, 495, 1, 0, 0, 0, 28, 502, 1, 0, 0, 0, 30, 509, 1, 0, 0, 0, 32, 516, 1, 0, 0, 0, 34, 524, 1, 0, 0, 0, 36, 536, 1, 0, 0, 0, 38, 545, 1, 0, 0, 0, 40, 551, 1, 0, 0, 0, 42, 558, 1, 0, 0, 0, 44, 565, 1, 0, 0, 0, 46, 573, 1, 0, 0, 0, 48, 581, 1, 0, 0, 0, 50, 596, 1, 0, 0, 0, 52, 608, 1, 0, 0, 0, 54, 619, 1, 0, 0, 0, 56, 627, 1, 0, 0, 0, 58, 635, 1, 0, 0, 0, 60, 643, 1, 0, 0, 0, 62, 652, 1, 0, 0, 0, 64, 663, 1, 0, 0, 0, 66, 669, 1, 0, 0, 0, 68, 686, 1, 0, 0, 0, 70, 702, 1, 0, 0, 0, 72, 708, 1, 0, 0, 0, 74, 712, 1, 0, 0, 0, 76, 714, 1, 0, 0, 0, 78, 716, 1, 0, 0, 0, 80, 719, 1, 0, 0, 0, 82, 721, 1, 0, 0, 0, 84, 730, 1, 0, 0, 0, 86, 732, 1, 0, 0, 0, 88, 737, 1, 0, 0, 0, 90, 739, 1, 0, 0, 0, 92, 744, 1, 0, 0, 0, 94, 775, 1, 0, 0, 0, 96, 778, 1, 0, 0, 0, 98, 824, 1, 0, 0, 0, 100, 826, 1, 0, 0, 0, 102, 829, 1, 0, 0, 0, 104, 833, 1, 0, 0, 0, 106, 837, 1, 0, 0, 0, 108, 839, 1, 0, 0, 0, 110, 842, 1, 0, 0, 0, 112, 844, 1, 0, 0, 0, 114, 846, 1, 0, 0, 0, 116, 851, 1, 0, 0, 0, 118, 853, 1, 0, 0, 0, 120, 859, 1, 0, 0, 0, 122, 865, 1, 0, 0, 0, 124, 868, 1, 0, 0, 0, 126, 871, 1, 0, 0, 0, 128, 876, 1, 0, 0, 0, 130, 881, 1, 0, 0, 0, 132, 883, 1, 0, 0, 0, 134, 887, 1, 0, 0, 0, 136, 892, 1, 0, 0, 0, 138, 898, 1, 0, 0, 0, 140, 901, 1, 0, 0, 0, 142, 903, 1, 0, 0, 0, 144, 909, 1, 0, 0, 0, 146, 911, 1, 0, 0, 0, 148, 916, 1, 0, 0, 0, 150, 919, 1, 0, 0, 0, 152, 922, 1, 0, 0, 0, 154, 925, 1, 0, 0, 0, 156, 927, 1, 0, 0, 0, 158, 930, 1, 0, 0, 0, 160, 932, 1, 0, 0, 0, 162, 935, 1, 0, 0, 0, 164, 937, 1, 0, 0, 0, 166, 939, 1, 0, 0, 0, 168, 941, 1, 0, 0, 0, 170, 943, 1, 0, 0, 0, 172, 945, 1, 0, 0, 0, 174, 947, 1, 0, 0, 0, 176, 949, 1, 0, 0, 0, 178, 970, 1, 0, 0, 0, 180, 972, 1, 0, 0, 0, 182, 977, 1, 0, 0, 0, 184, 998, 1, 0, 0, 0, 186, 1000, 1, 0, 0, 0, 188, 1008, 1, 0, 0, 0, 190, 1010, 1, 0, 0, 0, 192, 1014, 1, 0, 0, 0, 194, 1018, 1, 0, 0, 0, 196, 1022, 1, 0, 0, 0, 198, 1027, 1, 0, 0, 0, 200, 1032, 1, 0, 0, 0, 
202, 1036, 1, 0, 0, 0, 204, 1040, 1, 0, 0, 0, 206, 1044, 1, 0, 0, 0, 208, 1049, 1, 0, 0, 0, 210, 1053, 1, 0, 0, 0, 212, 1057, 1, 0, 0, 0, 214, 1061, 1, 0, 0, 0, 216, 1065, 1, 0, 0, 0, 218, 1069, 1, 0, 0, 0, 220, 1081, 1, 0, 0, 0, 222, 1084, 1, 0, 0, 0, 224, 1088, 1, 0, 0, 0, 226, 1092, 1, 0, 0, 0, 228, 1096, 1, 0, 0, 0, 230, 1100, 1, 0, 0, 0, 232, 1104, 1, 0, 0, 0, 234, 1108, 1, 0, 0, 0, 236, 1113, 1, 0, 0, 0, 238, 1117, 1, 0, 0, 0, 240, 1121, 1, 0, 0, 0, 242, 1126, 1, 0, 0, 0, 244, 1135, 1, 0, 0, 0, 246, 1156, 1, 0, 0, 0, 248, 1160, 1, 0, 0, 0, 250, 1164, 1, 0, 0, 0, 252, 1168, 1, 0, 0, 0, 254, 1172, 1, 0, 0, 0, 256, 1176, 1, 0, 0, 0, 258, 1181, 1, 0, 0, 0, 260, 1185, 1, 0, 0, 0, 262, 1189, 1, 0, 0, 0, 264, 1193, 1, 0, 0, 0, 266, 1198, 1, 0, 0, 0, 268, 1203, 1, 0, 0, 0, 270, 1206, 1, 0, 0, 0, 272, 1210, 1, 0, 0, 0, 274, 1214, 1, 0, 0, 0, 276, 1218, 1, 0, 0, 0, 278, 1222, 1, 0, 0, 0, 280, 1227, 1, 0, 0, 0, 282, 1232, 1, 0, 0, 0, 284, 1237, 1, 0, 0, 0, 286, 1244, 1, 0, 0, 0, 288, 1253, 1, 0, 0, 0, 290, 1260, 1, 0, 0, 0, 292, 1264, 1, 0, 0, 0, 294, 1268, 1, 0, 0, 0, 296, 1272, 1, 0, 0, 0, 298, 1276, 1, 0, 0, 0, 300, 1282, 1, 0, 0, 0, 302, 1286, 1, 0, 0, 0, 304, 1290, 1, 0, 0, 0, 306, 1294, 1, 0, 0, 0, 308, 1298, 1, 0, 0, 0, 310, 1302, 1, 0, 0, 0, 312, 1306, 1, 0, 0, 0, 314, 1311, 1, 0, 0, 0, 316, 1316, 1, 0, 0, 0, 318, 1320, 1, 0, 0, 0, 320, 1324, 1, 0, 0, 0, 322, 1328, 1, 0, 0, 0, 324, 1333, 1, 0, 0, 0, 326, 1337, 1, 0, 0, 0, 328, 1342, 1, 0, 0, 0, 330, 1347, 1, 0, 0, 0, 332, 1351, 1, 0, 0, 0, 334, 1355, 1, 0, 0, 0, 336, 1359, 1, 0, 0, 0, 338, 1363, 1, 0, 0, 0, 340, 1367, 1, 0, 0, 0, 342, 1372, 1, 0, 0, 0, 344, 1377, 1, 0, 0, 0, 346, 1381, 1, 0, 0, 0, 348, 1385, 1, 0, 0, 0, 350, 1389, 1, 0, 0, 0, 352, 1394, 1, 0, 0, 0, 354, 1403, 1, 0, 0, 0, 356, 1407, 1, 0, 0, 0, 358, 1411, 1, 0, 0, 0, 360, 1415, 1, 0, 0, 0, 362, 1419, 1, 0, 0, 0, 364, 1424, 1, 0, 0, 0, 366, 1428, 1, 0, 0, 0, 368, 1432, 1, 0, 0, 0, 370, 1436, 1, 0, 0, 0, 372, 1441, 1, 0, 0, 0, 374, 1445, 1, 0, 0, 0, 376, 1449, 1, 0, 0, 0, 378, 1453, 1, 0, 0, 0, 380, 1457, 1, 0, 0, 0, 382, 1461, 1, 0, 0, 0, 384, 1467, 1, 0, 0, 0, 386, 1471, 1, 0, 0, 0, 388, 1475, 1, 0, 0, 0, 390, 1479, 1, 0, 0, 0, 392, 1483, 1, 0, 0, 0, 394, 1487, 1, 0, 0, 0, 396, 1491, 1, 0, 0, 0, 398, 1496, 1, 0, 0, 0, 400, 1500, 1, 0, 0, 0, 402, 1504, 1, 0, 0, 0, 404, 1510, 1, 0, 0, 0, 406, 1519, 1, 0, 0, 0, 408, 1523, 1, 0, 0, 0, 410, 1527, 1, 0, 0, 0, 412, 1531, 1, 0, 0, 0, 414, 1535, 1, 0, 0, 0, 416, 1539, 1, 0, 0, 0, 418, 1543, 1, 0, 0, 0, 420, 1547, 1, 0, 0, 0, 422, 1551, 1, 0, 0, 0, 424, 1556, 1, 0, 0, 0, 426, 1562, 1, 0, 0, 0, 428, 1568, 1, 0, 0, 0, 430, 1572, 1, 0, 0, 0, 432, 1576, 1, 0, 0, 0, 434, 1580, 1, 0, 0, 0, 436, 1586, 1, 0, 0, 0, 438, 1592, 1, 0, 0, 0, 440, 1596, 1, 0, 0, 0, 442, 1600, 1, 0, 0, 0, 444, 1604, 1, 0, 0, 0, 446, 1610, 1, 0, 0, 0, 448, 1616, 1, 0, 0, 0, 450, 1622, 1, 0, 0, 0, 452, 453, 7, 0, 0, 0, 453, 454, 7, 1, 0, 0, 454, 455, 7, 2, 0, 0, 455, 456, 7, 2, 0, 0, 456, 457, 7, 3, 0, 0, 457, 458, 7, 4, 0, 0, 458, 459, 7, 5, 0, 0, 459, 460, 1, 0, 0, 0, 460, 461, 6, 0, 0, 0, 461, 17, 1, 0, 0, 0, 462, 463, 7, 0, 0, 0, 463, 464, 7, 6, 0, 0, 464, 465, 7, 7, 0, 0, 465, 466, 7, 8, 0, 0, 466, 467, 1, 0, 0, 0, 467, 468, 6, 1, 1, 0, 468, 19, 1, 0, 0, 0, 469, 470, 7, 3, 0, 0, 470, 471, 7, 9, 0, 0, 471, 472, 7, 6, 0, 0, 472, 473, 7, 1, 0, 0, 473, 474, 7, 4, 0, 0, 474, 475, 7, 10, 0, 0, 475, 476, 1, 0, 0, 0, 476, 477, 6, 2, 2, 0, 477, 21, 1, 0, 0, 0, 478, 479, 7, 3, 0, 0, 479, 480, 7, 11, 0, 0, 480, 481, 7, 12, 0, 0, 481, 482, 7, 13, 0, 0, 482, 483, 1, 0, 0, 
0, 483, 484, 6, 3, 0, 0, 484, 23, 1, 0, 0, 0, 485, 486, 7, 3, 0, 0, 486, 487, 7, 14, 0, 0, 487, 488, 7, 8, 0, 0, 488, 489, 7, 13, 0, 0, 489, 490, 7, 12, 0, 0, 490, 491, 7, 1, 0, 0, 491, 492, 7, 9, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 4, 3, 0, 494, 25, 1, 0, 0, 0, 495, 496, 7, 15, 0, 0, 496, 497, 7, 6, 0, 0, 497, 498, 7, 7, 0, 0, 498, 499, 7, 16, 0, 0, 499, 500, 1, 0, 0, 0, 500, 501, 6, 5, 4, 0, 501, 27, 1, 0, 0, 0, 502, 503, 7, 17, 0, 0, 503, 504, 7, 6, 0, 0, 504, 505, 7, 7, 0, 0, 505, 506, 7, 18, 0, 0, 506, 507, 1, 0, 0, 0, 507, 508, 6, 6, 0, 0, 508, 29, 1, 0, 0, 0, 509, 510, 7, 18, 0, 0, 510, 511, 7, 3, 0, 0, 511, 512, 7, 3, 0, 0, 512, 513, 7, 8, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 6, 7, 1, 0, 515, 31, 1, 0, 0, 0, 516, 517, 7, 13, 0, 0, 517, 518, 7, 1, 0, 0, 518, 519, 7, 16, 0, 0, 519, 520, 7, 1, 0, 0, 520, 521, 7, 5, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 8, 0, 0, 523, 33, 1, 0, 0, 0, 524, 525, 7, 16, 0, 0, 525, 526, 7, 11, 0, 0, 526, 527, 5, 95, 0, 0, 527, 528, 7, 3, 0, 0, 528, 529, 7, 14, 0, 0, 529, 530, 7, 8, 0, 0, 530, 531, 7, 12, 0, 0, 531, 532, 7, 9, 0, 0, 532, 533, 7, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 535, 6, 9, 5, 0, 535, 35, 1, 0, 0, 0, 536, 537, 7, 6, 0, 0, 537, 538, 7, 3, 0, 0, 538, 539, 7, 9, 0, 0, 539, 540, 7, 12, 0, 0, 540, 541, 7, 16, 0, 0, 541, 542, 7, 3, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 6, 10, 6, 0, 544, 37, 1, 0, 0, 0, 545, 546, 7, 6, 0, 0, 546, 547, 7, 7, 0, 0, 547, 548, 7, 19, 0, 0, 548, 549, 1, 0, 0, 0, 549, 550, 6, 11, 0, 0, 550, 39, 1, 0, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 7, 10, 0, 0, 553, 554, 7, 7, 0, 0, 554, 555, 7, 19, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 6, 12, 7, 0, 557, 41, 1, 0, 0, 0, 558, 559, 7, 2, 0, 0, 559, 560, 7, 7, 0, 0, 560, 561, 7, 6, 0, 0, 561, 562, 7, 5, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 13, 0, 0, 564, 43, 1, 0, 0, 0, 565, 566, 7, 2, 0, 0, 566, 567, 7, 5, 0, 0, 567, 568, 7, 12, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 2, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 6, 14, 0, 0, 572, 45, 1, 0, 0, 0, 573, 574, 7, 19, 0, 0, 574, 575, 7, 10, 0, 0, 575, 576, 7, 3, 0, 0, 576, 577, 7, 6, 0, 0, 577, 578, 7, 3, 0, 0, 578, 579, 1, 0, 0, 0, 579, 580, 6, 15, 0, 0, 580, 47, 1, 0, 0, 0, 581, 582, 4, 16, 0, 0, 582, 583, 7, 1, 0, 0, 583, 584, 7, 9, 0, 0, 584, 585, 7, 13, 0, 0, 585, 586, 7, 1, 0, 0, 586, 587, 7, 9, 0, 0, 587, 588, 7, 3, 0, 0, 588, 589, 7, 2, 0, 0, 589, 590, 7, 5, 0, 0, 590, 591, 7, 12, 0, 0, 591, 592, 7, 5, 0, 0, 592, 593, 7, 2, 0, 0, 593, 594, 1, 0, 0, 0, 594, 595, 6, 16, 0, 0, 595, 49, 1, 0, 0, 0, 596, 597, 4, 17, 1, 0, 597, 598, 7, 13, 0, 0, 598, 599, 7, 7, 0, 0, 599, 600, 7, 7, 0, 0, 600, 601, 7, 18, 0, 0, 601, 602, 7, 20, 0, 0, 602, 603, 7, 8, 0, 0, 603, 604, 5, 95, 0, 0, 604, 605, 5, 128020, 0, 0, 605, 606, 1, 0, 0, 0, 606, 607, 6, 17, 8, 0, 607, 51, 1, 0, 0, 0, 608, 609, 4, 18, 2, 0, 609, 610, 7, 16, 0, 0, 610, 611, 7, 3, 0, 0, 611, 612, 7, 5, 0, 0, 612, 613, 7, 6, 0, 0, 613, 614, 7, 1, 0, 0, 614, 615, 7, 4, 0, 0, 615, 616, 7, 2, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 6, 18, 9, 0, 618, 53, 1, 0, 0, 0, 619, 620, 4, 19, 3, 0, 620, 621, 7, 21, 0, 0, 621, 622, 7, 7, 0, 0, 622, 623, 7, 1, 0, 0, 623, 624, 7, 9, 0, 0, 624, 625, 1, 0, 0, 0, 625, 626, 6, 19, 10, 0, 626, 55, 1, 0, 0, 0, 627, 628, 4, 20, 4, 0, 628, 629, 7, 15, 0, 0, 629, 630, 7, 20, 0, 0, 630, 631, 7, 13, 0, 0, 631, 632, 7, 13, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 6, 20, 10, 0, 634, 57, 1, 0, 0, 0, 635, 636, 4, 21, 5, 0, 636, 637, 7, 13, 0, 0, 637, 638, 7, 3, 0, 0, 638, 639, 7, 15, 0, 0, 639, 640, 7, 5, 0, 0, 640, 641, 1, 0, 0, 0, 641, 642, 6, 21, 10, 0, 642, 59, 
1, 0, 0, 0, 643, 644, 4, 22, 6, 0, 644, 645, 7, 6, 0, 0, 645, 646, 7, 1, 0, 0, 646, 647, 7, 17, 0, 0, 647, 648, 7, 10, 0, 0, 648, 649, 7, 5, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 6, 22, 10, 0, 651, 61, 1, 0, 0, 0, 652, 653, 4, 23, 7, 0, 653, 654, 7, 13, 0, 0, 654, 655, 7, 7, 0, 0, 655, 656, 7, 7, 0, 0, 656, 657, 7, 18, 0, 0, 657, 658, 7, 20, 0, 0, 658, 659, 7, 8, 0, 0, 659, 660, 1, 0, 0, 0, 660, 661, 6, 23, 10, 0, 661, 63, 1, 0, 0, 0, 662, 664, 8, 22, 0, 0, 663, 662, 1, 0, 0, 0, 664, 665, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 667, 1, 0, 0, 0, 667, 668, 6, 24, 0, 0, 668, 65, 1, 0, 0, 0, 669, 670, 5, 47, 0, 0, 670, 671, 5, 47, 0, 0, 671, 675, 1, 0, 0, 0, 672, 674, 8, 23, 0, 0, 673, 672, 1, 0, 0, 0, 674, 677, 1, 0, 0, 0, 675, 673, 1, 0, 0, 0, 675, 676, 1, 0, 0, 0, 676, 679, 1, 0, 0, 0, 677, 675, 1, 0, 0, 0, 678, 680, 5, 13, 0, 0, 679, 678, 1, 0, 0, 0, 679, 680, 1, 0, 0, 0, 680, 682, 1, 0, 0, 0, 681, 683, 5, 10, 0, 0, 682, 681, 1, 0, 0, 0, 682, 683, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 684, 685, 6, 25, 11, 0, 685, 67, 1, 0, 0, 0, 686, 687, 5, 47, 0, 0, 687, 688, 5, 42, 0, 0, 688, 693, 1, 0, 0, 0, 689, 692, 3, 68, 26, 0, 690, 692, 9, 0, 0, 0, 691, 689, 1, 0, 0, 0, 691, 690, 1, 0, 0, 0, 692, 695, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 693, 691, 1, 0, 0, 0, 694, 696, 1, 0, 0, 0, 695, 693, 1, 0, 0, 0, 696, 697, 5, 42, 0, 0, 697, 698, 5, 47, 0, 0, 698, 699, 1, 0, 0, 0, 699, 700, 6, 26, 11, 0, 700, 69, 1, 0, 0, 0, 701, 703, 7, 24, 0, 0, 702, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 702, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 707, 6, 27, 11, 0, 707, 71, 1, 0, 0, 0, 708, 709, 5, 124, 0, 0, 709, 710, 1, 0, 0, 0, 710, 711, 6, 28, 12, 0, 711, 73, 1, 0, 0, 0, 712, 713, 7, 25, 0, 0, 713, 75, 1, 0, 0, 0, 714, 715, 7, 26, 0, 0, 715, 77, 1, 0, 0, 0, 716, 717, 5, 92, 0, 0, 717, 718, 7, 27, 0, 0, 718, 79, 1, 0, 0, 0, 719, 720, 8, 28, 0, 0, 720, 81, 1, 0, 0, 0, 721, 723, 7, 3, 0, 0, 722, 724, 7, 29, 0, 0, 723, 722, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 726, 1, 0, 0, 0, 725, 727, 3, 74, 29, 0, 726, 725, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 726, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 83, 1, 0, 0, 0, 730, 731, 5, 64, 0, 0, 731, 85, 1, 0, 0, 0, 732, 733, 5, 96, 0, 0, 733, 87, 1, 0, 0, 0, 734, 738, 8, 30, 0, 0, 735, 736, 5, 96, 0, 0, 736, 738, 5, 96, 0, 0, 737, 734, 1, 0, 0, 0, 737, 735, 1, 0, 0, 0, 738, 89, 1, 0, 0, 0, 739, 740, 5, 95, 0, 0, 740, 91, 1, 0, 0, 0, 741, 745, 3, 76, 30, 0, 742, 745, 3, 74, 29, 0, 743, 745, 3, 90, 37, 0, 744, 741, 1, 0, 0, 0, 744, 742, 1, 0, 0, 0, 744, 743, 1, 0, 0, 0, 745, 93, 1, 0, 0, 0, 746, 751, 5, 34, 0, 0, 747, 750, 3, 78, 31, 0, 748, 750, 3, 80, 32, 0, 749, 747, 1, 0, 0, 0, 749, 748, 1, 0, 0, 0, 750, 753, 1, 0, 0, 0, 751, 749, 1, 0, 0, 0, 751, 752, 1, 0, 0, 0, 752, 754, 1, 0, 0, 0, 753, 751, 1, 0, 0, 0, 754, 776, 5, 34, 0, 0, 755, 756, 5, 34, 0, 0, 756, 757, 5, 34, 0, 0, 757, 758, 5, 34, 0, 0, 758, 762, 1, 0, 0, 0, 759, 761, 8, 23, 0, 0, 760, 759, 1, 0, 0, 0, 761, 764, 1, 0, 0, 0, 762, 763, 1, 0, 0, 0, 762, 760, 1, 0, 0, 0, 763, 765, 1, 0, 0, 0, 764, 762, 1, 0, 0, 0, 765, 766, 5, 34, 0, 0, 766, 767, 5, 34, 0, 0, 767, 768, 5, 34, 0, 0, 768, 770, 1, 0, 0, 0, 769, 771, 5, 34, 0, 0, 770, 769, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 773, 1, 0, 0, 0, 772, 774, 5, 34, 0, 0, 773, 772, 1, 0, 0, 0, 773, 774, 1, 0, 0, 0, 774, 776, 1, 0, 0, 0, 775, 746, 1, 0, 0, 0, 775, 755, 1, 0, 0, 0, 776, 95, 1, 0, 0, 0, 777, 779, 3, 74, 29, 0, 778, 777, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 778, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 97, 1, 0, 0, 0, 782, 784, 3, 
74, 29, 0, 783, 782, 1, 0, 0, 0, 784, 785, 1, 0, 0, 0, 785, 783, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 787, 1, 0, 0, 0, 787, 791, 3, 116, 50, 0, 788, 790, 3, 74, 29, 0, 789, 788, 1, 0, 0, 0, 790, 793, 1, 0, 0, 0, 791, 789, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 825, 1, 0, 0, 0, 793, 791, 1, 0, 0, 0, 794, 796, 3, 116, 50, 0, 795, 797, 3, 74, 29, 0, 796, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 796, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 825, 1, 0, 0, 0, 800, 802, 3, 74, 29, 0, 801, 800, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 801, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 812, 1, 0, 0, 0, 805, 809, 3, 116, 50, 0, 806, 808, 3, 74, 29, 0, 807, 806, 1, 0, 0, 0, 808, 811, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 813, 1, 0, 0, 0, 811, 809, 1, 0, 0, 0, 812, 805, 1, 0, 0, 0, 812, 813, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 815, 3, 82, 33, 0, 815, 825, 1, 0, 0, 0, 816, 818, 3, 116, 50, 0, 817, 819, 3, 74, 29, 0, 818, 817, 1, 0, 0, 0, 819, 820, 1, 0, 0, 0, 820, 818, 1, 0, 0, 0, 820, 821, 1, 0, 0, 0, 821, 822, 1, 0, 0, 0, 822, 823, 3, 82, 33, 0, 823, 825, 1, 0, 0, 0, 824, 783, 1, 0, 0, 0, 824, 794, 1, 0, 0, 0, 824, 801, 1, 0, 0, 0, 824, 816, 1, 0, 0, 0, 825, 99, 1, 0, 0, 0, 826, 827, 7, 31, 0, 0, 827, 828, 7, 32, 0, 0, 828, 101, 1, 0, 0, 0, 829, 830, 7, 12, 0, 0, 830, 831, 7, 9, 0, 0, 831, 832, 7, 0, 0, 0, 832, 103, 1, 0, 0, 0, 833, 834, 7, 12, 0, 0, 834, 835, 7, 2, 0, 0, 835, 836, 7, 4, 0, 0, 836, 105, 1, 0, 0, 0, 837, 838, 5, 61, 0, 0, 838, 107, 1, 0, 0, 0, 839, 840, 5, 58, 0, 0, 840, 841, 5, 58, 0, 0, 841, 109, 1, 0, 0, 0, 842, 843, 5, 58, 0, 0, 843, 111, 1, 0, 0, 0, 844, 845, 5, 44, 0, 0, 845, 113, 1, 0, 0, 0, 846, 847, 7, 0, 0, 0, 847, 848, 7, 3, 0, 0, 848, 849, 7, 2, 0, 0, 849, 850, 7, 4, 0, 0, 850, 115, 1, 0, 0, 0, 851, 852, 5, 46, 0, 0, 852, 117, 1, 0, 0, 0, 853, 854, 7, 15, 0, 0, 854, 855, 7, 12, 0, 0, 855, 856, 7, 13, 0, 0, 856, 857, 7, 2, 0, 0, 857, 858, 7, 3, 0, 0, 858, 119, 1, 0, 0, 0, 859, 860, 7, 15, 0, 0, 860, 861, 7, 1, 0, 0, 861, 862, 7, 6, 0, 0, 862, 863, 7, 2, 0, 0, 863, 864, 7, 5, 0, 0, 864, 121, 1, 0, 0, 0, 865, 866, 7, 1, 0, 0, 866, 867, 7, 9, 0, 0, 867, 123, 1, 0, 0, 0, 868, 869, 7, 1, 0, 0, 869, 870, 7, 2, 0, 0, 870, 125, 1, 0, 0, 0, 871, 872, 7, 13, 0, 0, 872, 873, 7, 12, 0, 0, 873, 874, 7, 2, 0, 0, 874, 875, 7, 5, 0, 0, 875, 127, 1, 0, 0, 0, 876, 877, 7, 13, 0, 0, 877, 878, 7, 1, 0, 0, 878, 879, 7, 18, 0, 0, 879, 880, 7, 3, 0, 0, 880, 129, 1, 0, 0, 0, 881, 882, 5, 40, 0, 0, 882, 131, 1, 0, 0, 0, 883, 884, 7, 9, 0, 0, 884, 885, 7, 7, 0, 0, 885, 886, 7, 5, 0, 0, 886, 133, 1, 0, 0, 0, 887, 888, 7, 9, 0, 0, 888, 889, 7, 20, 0, 0, 889, 890, 7, 13, 0, 0, 890, 891, 7, 13, 0, 0, 891, 135, 1, 0, 0, 0, 892, 893, 7, 9, 0, 0, 893, 894, 7, 20, 0, 0, 894, 895, 7, 13, 0, 0, 895, 896, 7, 13, 0, 0, 896, 897, 7, 2, 0, 0, 897, 137, 1, 0, 0, 0, 898, 899, 7, 7, 0, 0, 899, 900, 7, 6, 0, 0, 900, 139, 1, 0, 0, 0, 901, 902, 5, 63, 0, 0, 902, 141, 1, 0, 0, 0, 903, 904, 7, 6, 0, 0, 904, 905, 7, 13, 0, 0, 905, 906, 7, 1, 0, 0, 906, 907, 7, 18, 0, 0, 907, 908, 7, 3, 0, 0, 908, 143, 1, 0, 0, 0, 909, 910, 5, 41, 0, 0, 910, 145, 1, 0, 0, 0, 911, 912, 7, 5, 0, 0, 912, 913, 7, 6, 0, 0, 913, 914, 7, 20, 0, 0, 914, 915, 7, 3, 0, 0, 915, 147, 1, 0, 0, 0, 916, 917, 5, 61, 0, 0, 917, 918, 5, 61, 0, 0, 918, 149, 1, 0, 0, 0, 919, 920, 5, 61, 0, 0, 920, 921, 5, 126, 0, 0, 921, 151, 1, 0, 0, 0, 922, 923, 5, 33, 0, 0, 923, 924, 5, 61, 0, 0, 924, 153, 1, 0, 0, 0, 925, 926, 5, 60, 0, 0, 926, 155, 1, 0, 0, 0, 927, 928, 5, 60, 0, 0, 928, 929, 5, 61, 0, 0, 929, 157, 1, 0, 0, 0, 930, 931, 5, 62, 0, 0, 
931, 159, 1, 0, 0, 0, 932, 933, 5, 62, 0, 0, 933, 934, 5, 61, 0, 0, 934, 161, 1, 0, 0, 0, 935, 936, 5, 43, 0, 0, 936, 163, 1, 0, 0, 0, 937, 938, 5, 45, 0, 0, 938, 165, 1, 0, 0, 0, 939, 940, 5, 42, 0, 0, 940, 167, 1, 0, 0, 0, 941, 942, 5, 47, 0, 0, 942, 169, 1, 0, 0, 0, 943, 944, 5, 37, 0, 0, 944, 171, 1, 0, 0, 0, 945, 946, 5, 123, 0, 0, 946, 173, 1, 0, 0, 0, 947, 948, 5, 125, 0, 0, 948, 175, 1, 0, 0, 0, 949, 950, 3, 46, 15, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 80, 13, 0, 952, 177, 1, 0, 0, 0, 953, 956, 3, 140, 62, 0, 954, 957, 3, 76, 30, 0, 955, 957, 3, 90, 37, 0, 956, 954, 1, 0, 0, 0, 956, 955, 1, 0, 0, 0, 957, 961, 1, 0, 0, 0, 958, 960, 3, 92, 38, 0, 959, 958, 1, 0, 0, 0, 960, 963, 1, 0, 0, 0, 961, 959, 1, 0, 0, 0, 961, 962, 1, 0, 0, 0, 962, 971, 1, 0, 0, 0, 963, 961, 1, 0, 0, 0, 964, 966, 3, 140, 62, 0, 965, 967, 3, 74, 29, 0, 966, 965, 1, 0, 0, 0, 967, 968, 1, 0, 0, 0, 968, 966, 1, 0, 0, 0, 968, 969, 1, 0, 0, 0, 969, 971, 1, 0, 0, 0, 970, 953, 1, 0, 0, 0, 970, 964, 1, 0, 0, 0, 971, 179, 1, 0, 0, 0, 972, 973, 5, 91, 0, 0, 973, 974, 1, 0, 0, 0, 974, 975, 6, 82, 0, 0, 975, 976, 6, 82, 0, 0, 976, 181, 1, 0, 0, 0, 977, 978, 5, 93, 0, 0, 978, 979, 1, 0, 0, 0, 979, 980, 6, 83, 12, 0, 980, 981, 6, 83, 12, 0, 981, 183, 1, 0, 0, 0, 982, 986, 3, 76, 30, 0, 983, 985, 3, 92, 38, 0, 984, 983, 1, 0, 0, 0, 985, 988, 1, 0, 0, 0, 986, 984, 1, 0, 0, 0, 986, 987, 1, 0, 0, 0, 987, 999, 1, 0, 0, 0, 988, 986, 1, 0, 0, 0, 989, 992, 3, 90, 37, 0, 990, 992, 3, 84, 34, 0, 991, 989, 1, 0, 0, 0, 991, 990, 1, 0, 0, 0, 992, 994, 1, 0, 0, 0, 993, 995, 3, 92, 38, 0, 994, 993, 1, 0, 0, 0, 995, 996, 1, 0, 0, 0, 996, 994, 1, 0, 0, 0, 996, 997, 1, 0, 0, 0, 997, 999, 1, 0, 0, 0, 998, 982, 1, 0, 0, 0, 998, 991, 1, 0, 0, 0, 999, 185, 1, 0, 0, 0, 1000, 1002, 3, 86, 35, 0, 1001, 1003, 3, 88, 36, 0, 1002, 1001, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1002, 1, 0, 0, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1007, 3, 86, 35, 0, 1007, 187, 1, 0, 0, 0, 1008, 1009, 3, 186, 85, 0, 1009, 189, 1, 0, 0, 0, 1010, 1011, 3, 66, 25, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 87, 11, 0, 1013, 191, 1, 0, 0, 0, 1014, 1015, 3, 68, 26, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 88, 11, 0, 1017, 193, 1, 0, 0, 0, 1018, 1019, 3, 70, 27, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 89, 11, 0, 1021, 195, 1, 0, 0, 0, 1022, 1023, 3, 180, 82, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 90, 14, 0, 1025, 1026, 6, 90, 15, 0, 1026, 197, 1, 0, 0, 0, 1027, 1028, 3, 72, 28, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1030, 6, 91, 16, 0, 1030, 1031, 6, 91, 12, 0, 1031, 199, 1, 0, 0, 0, 1032, 1033, 3, 70, 27, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1035, 6, 92, 11, 0, 1035, 201, 1, 0, 0, 0, 1036, 1037, 3, 66, 25, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 93, 11, 0, 1039, 203, 1, 0, 0, 0, 1040, 1041, 3, 68, 26, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 94, 11, 0, 1043, 205, 1, 0, 0, 0, 1044, 1045, 3, 72, 28, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 95, 16, 0, 1047, 1048, 6, 95, 12, 0, 1048, 207, 1, 0, 0, 0, 1049, 1050, 3, 180, 82, 0, 1050, 1051, 1, 0, 0, 0, 1051, 1052, 6, 96, 14, 0, 1052, 209, 1, 0, 0, 0, 1053, 1054, 3, 182, 83, 0, 1054, 1055, 1, 0, 0, 0, 1055, 1056, 6, 97, 17, 0, 1056, 211, 1, 0, 0, 0, 1057, 1058, 3, 110, 47, 0, 1058, 1059, 1, 0, 0, 0, 1059, 1060, 6, 98, 18, 0, 1060, 213, 1, 0, 0, 0, 1061, 1062, 3, 112, 48, 0, 1062, 1063, 1, 0, 0, 0, 1063, 1064, 6, 99, 19, 0, 1064, 215, 1, 0, 0, 0, 1065, 1066, 3, 106, 45, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1068, 6, 100, 20, 0, 1068, 217, 1, 0, 0, 0, 1069, 1070, 7, 16, 0, 0, 1070, 1071, 7, 3, 0, 0, 1071, 1072, 7, 5, 0, 
0, 1072, 1073, 7, 12, 0, 0, 1073, 1074, 7, 0, 0, 0, 1074, 1075, 7, 12, 0, 0, 1075, 1076, 7, 5, 0, 0, 1076, 1077, 7, 12, 0, 0, 1077, 219, 1, 0, 0, 0, 1078, 1082, 8, 33, 0, 0, 1079, 1080, 5, 47, 0, 0, 1080, 1082, 8, 34, 0, 0, 1081, 1078, 1, 0, 0, 0, 1081, 1079, 1, 0, 0, 0, 1082, 221, 1, 0, 0, 0, 1083, 1085, 3, 220, 102, 0, 1084, 1083, 1, 0, 0, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1084, 1, 0, 0, 0, 1086, 1087, 1, 0, 0, 0, 1087, 223, 1, 0, 0, 0, 1088, 1089, 3, 222, 103, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 104, 21, 0, 1091, 225, 1, 0, 0, 0, 1092, 1093, 3, 94, 39, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 105, 22, 0, 1095, 227, 1, 0, 0, 0, 1096, 1097, 3, 66, 25, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 106, 11, 0, 1099, 229, 1, 0, 0, 0, 1100, 1101, 3, 68, 26, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 107, 11, 0, 1103, 231, 1, 0, 0, 0, 1104, 1105, 3, 70, 27, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 108, 11, 0, 1107, 233, 1, 0, 0, 0, 1108, 1109, 3, 72, 28, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 109, 16, 0, 1111, 1112, 6, 109, 12, 0, 1112, 235, 1, 0, 0, 0, 1113, 1114, 3, 116, 50, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 110, 23, 0, 1116, 237, 1, 0, 0, 0, 1117, 1118, 3, 112, 48, 0, 1118, 1119, 1, 0, 0, 0, 1119, 1120, 6, 111, 19, 0, 1120, 239, 1, 0, 0, 0, 1121, 1122, 4, 112, 8, 0, 1122, 1123, 3, 140, 62, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 112, 24, 0, 1125, 241, 1, 0, 0, 0, 1126, 1127, 4, 113, 9, 0, 1127, 1128, 3, 178, 81, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 113, 25, 0, 1130, 243, 1, 0, 0, 0, 1131, 1136, 3, 76, 30, 0, 1132, 1136, 3, 74, 29, 0, 1133, 1136, 3, 90, 37, 0, 1134, 1136, 3, 166, 75, 0, 1135, 1131, 1, 0, 0, 0, 1135, 1132, 1, 0, 0, 0, 1135, 1133, 1, 0, 0, 0, 1135, 1134, 1, 0, 0, 0, 1136, 245, 1, 0, 0, 0, 1137, 1140, 3, 76, 30, 0, 1138, 1140, 3, 166, 75, 0, 1139, 1137, 1, 0, 0, 0, 1139, 1138, 1, 0, 0, 0, 1140, 1144, 1, 0, 0, 0, 1141, 1143, 3, 244, 114, 0, 1142, 1141, 1, 0, 0, 0, 1143, 1146, 1, 0, 0, 0, 1144, 1142, 1, 0, 0, 0, 1144, 1145, 1, 0, 0, 0, 1145, 1157, 1, 0, 0, 0, 1146, 1144, 1, 0, 0, 0, 1147, 1150, 3, 90, 37, 0, 1148, 1150, 3, 84, 34, 0, 1149, 1147, 1, 0, 0, 0, 1149, 1148, 1, 0, 0, 0, 1150, 1152, 1, 0, 0, 0, 1151, 1153, 3, 244, 114, 0, 1152, 1151, 1, 0, 0, 0, 1153, 1154, 1, 0, 0, 0, 1154, 1152, 1, 0, 0, 0, 1154, 1155, 1, 0, 0, 0, 1155, 1157, 1, 0, 0, 0, 1156, 1139, 1, 0, 0, 0, 1156, 1149, 1, 0, 0, 0, 1157, 247, 1, 0, 0, 0, 1158, 1161, 3, 246, 115, 0, 1159, 1161, 3, 186, 85, 0, 1160, 1158, 1, 0, 0, 0, 1160, 1159, 1, 0, 0, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1160, 1, 0, 0, 0, 1162, 1163, 1, 0, 0, 0, 1163, 249, 1, 0, 0, 0, 1164, 1165, 3, 66, 25, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1167, 6, 117, 11, 0, 1167, 251, 1, 0, 0, 0, 1168, 1169, 3, 68, 26, 0, 1169, 1170, 1, 0, 0, 0, 1170, 1171, 6, 118, 11, 0, 1171, 253, 1, 0, 0, 0, 1172, 1173, 3, 70, 27, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1175, 6, 119, 11, 0, 1175, 255, 1, 0, 0, 0, 1176, 1177, 3, 72, 28, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 120, 16, 0, 1179, 1180, 6, 120, 12, 0, 1180, 257, 1, 0, 0, 0, 1181, 1182, 3, 106, 45, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1184, 6, 121, 20, 0, 1184, 259, 1, 0, 0, 0, 1185, 1186, 3, 112, 48, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 122, 19, 0, 1188, 261, 1, 0, 0, 0, 1189, 1190, 3, 116, 50, 0, 1190, 1191, 1, 0, 0, 0, 1191, 1192, 6, 123, 23, 0, 1192, 263, 1, 0, 0, 0, 1193, 1194, 4, 124, 10, 0, 1194, 1195, 3, 140, 62, 0, 1195, 1196, 1, 0, 0, 0, 1196, 1197, 6, 124, 24, 0, 1197, 265, 1, 0, 0, 0, 1198, 1199, 4, 125, 11, 0, 1199, 1200, 3, 178, 81, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 125, 25, 0, 
1202, 267, 1, 0, 0, 0, 1203, 1204, 7, 12, 0, 0, 1204, 1205, 7, 2, 0, 0, 1205, 269, 1, 0, 0, 0, 1206, 1207, 3, 248, 116, 0, 1207, 1208, 1, 0, 0, 0, 1208, 1209, 6, 127, 26, 0, 1209, 271, 1, 0, 0, 0, 1210, 1211, 3, 66, 25, 0, 1211, 1212, 1, 0, 0, 0, 1212, 1213, 6, 128, 11, 0, 1213, 273, 1, 0, 0, 0, 1214, 1215, 3, 68, 26, 0, 1215, 1216, 1, 0, 0, 0, 1216, 1217, 6, 129, 11, 0, 1217, 275, 1, 0, 0, 0, 1218, 1219, 3, 70, 27, 0, 1219, 1220, 1, 0, 0, 0, 1220, 1221, 6, 130, 11, 0, 1221, 277, 1, 0, 0, 0, 1222, 1223, 3, 72, 28, 0, 1223, 1224, 1, 0, 0, 0, 1224, 1225, 6, 131, 16, 0, 1225, 1226, 6, 131, 12, 0, 1226, 279, 1, 0, 0, 0, 1227, 1228, 3, 180, 82, 0, 1228, 1229, 1, 0, 0, 0, 1229, 1230, 6, 132, 14, 0, 1230, 1231, 6, 132, 27, 0, 1231, 281, 1, 0, 0, 0, 1232, 1233, 7, 7, 0, 0, 1233, 1234, 7, 9, 0, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1236, 6, 133, 28, 0, 1236, 283, 1, 0, 0, 0, 1237, 1238, 7, 19, 0, 0, 1238, 1239, 7, 1, 0, 0, 1239, 1240, 7, 5, 0, 0, 1240, 1241, 7, 10, 0, 0, 1241, 1242, 1, 0, 0, 0, 1242, 1243, 6, 134, 28, 0, 1243, 285, 1, 0, 0, 0, 1244, 1245, 8, 35, 0, 0, 1245, 287, 1, 0, 0, 0, 1246, 1248, 3, 286, 135, 0, 1247, 1246, 1, 0, 0, 0, 1248, 1249, 1, 0, 0, 0, 1249, 1247, 1, 0, 0, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1251, 1, 0, 0, 0, 1251, 1252, 3, 110, 47, 0, 1252, 1254, 1, 0, 0, 0, 1253, 1247, 1, 0, 0, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1256, 1, 0, 0, 0, 1255, 1257, 3, 286, 135, 0, 1256, 1255, 1, 0, 0, 0, 1257, 1258, 1, 0, 0, 0, 1258, 1256, 1, 0, 0, 0, 1258, 1259, 1, 0, 0, 0, 1259, 289, 1, 0, 0, 0, 1260, 1261, 3, 288, 136, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 137, 29, 0, 1263, 291, 1, 0, 0, 0, 1264, 1265, 3, 66, 25, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 138, 11, 0, 1267, 293, 1, 0, 0, 0, 1268, 1269, 3, 68, 26, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 139, 11, 0, 1271, 295, 1, 0, 0, 0, 1272, 1273, 3, 70, 27, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 140, 11, 0, 1275, 297, 1, 0, 0, 0, 1276, 1277, 3, 72, 28, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 141, 16, 0, 1279, 1280, 6, 141, 12, 0, 1280, 1281, 6, 141, 12, 0, 1281, 299, 1, 0, 0, 0, 1282, 1283, 3, 106, 45, 0, 1283, 1284, 1, 0, 0, 0, 1284, 1285, 6, 142, 20, 0, 1285, 301, 1, 0, 0, 0, 1286, 1287, 3, 112, 48, 0, 1287, 1288, 1, 0, 0, 0, 1288, 1289, 6, 143, 19, 0, 1289, 303, 1, 0, 0, 0, 1290, 1291, 3, 116, 50, 0, 1291, 1292, 1, 0, 0, 0, 1292, 1293, 6, 144, 23, 0, 1293, 305, 1, 0, 0, 0, 1294, 1295, 3, 284, 134, 0, 1295, 1296, 1, 0, 0, 0, 1296, 1297, 6, 145, 30, 0, 1297, 307, 1, 0, 0, 0, 1298, 1299, 3, 248, 116, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 146, 26, 0, 1301, 309, 1, 0, 0, 0, 1302, 1303, 3, 188, 86, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 147, 31, 0, 1305, 311, 1, 0, 0, 0, 1306, 1307, 4, 148, 12, 0, 1307, 1308, 3, 140, 62, 0, 1308, 1309, 1, 0, 0, 0, 1309, 1310, 6, 148, 24, 0, 1310, 313, 1, 0, 0, 0, 1311, 1312, 4, 149, 13, 0, 1312, 1313, 3, 178, 81, 0, 1313, 1314, 1, 0, 0, 0, 1314, 1315, 6, 149, 25, 0, 1315, 315, 1, 0, 0, 0, 1316, 1317, 3, 66, 25, 0, 1317, 1318, 1, 0, 0, 0, 1318, 1319, 6, 150, 11, 0, 1319, 317, 1, 0, 0, 0, 1320, 1321, 3, 68, 26, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1323, 6, 151, 11, 0, 1323, 319, 1, 0, 0, 0, 1324, 1325, 3, 70, 27, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 152, 11, 0, 1327, 321, 1, 0, 0, 0, 1328, 1329, 3, 72, 28, 0, 1329, 1330, 1, 0, 0, 0, 1330, 1331, 6, 153, 16, 0, 1331, 1332, 6, 153, 12, 0, 1332, 323, 1, 0, 0, 0, 1333, 1334, 3, 116, 50, 0, 1334, 1335, 1, 0, 0, 0, 1335, 1336, 6, 154, 23, 0, 1336, 325, 1, 0, 0, 0, 1337, 1338, 4, 155, 14, 0, 1338, 1339, 3, 140, 62, 0, 1339, 1340, 1, 0, 0, 0, 1340, 1341, 6, 
155, 24, 0, 1341, 327, 1, 0, 0, 0, 1342, 1343, 4, 156, 15, 0, 1343, 1344, 3, 178, 81, 0, 1344, 1345, 1, 0, 0, 0, 1345, 1346, 6, 156, 25, 0, 1346, 329, 1, 0, 0, 0, 1347, 1348, 3, 188, 86, 0, 1348, 1349, 1, 0, 0, 0, 1349, 1350, 6, 157, 31, 0, 1350, 331, 1, 0, 0, 0, 1351, 1352, 3, 184, 84, 0, 1352, 1353, 1, 0, 0, 0, 1353, 1354, 6, 158, 32, 0, 1354, 333, 1, 0, 0, 0, 1355, 1356, 3, 66, 25, 0, 1356, 1357, 1, 0, 0, 0, 1357, 1358, 6, 159, 11, 0, 1358, 335, 1, 0, 0, 0, 1359, 1360, 3, 68, 26, 0, 1360, 1361, 1, 0, 0, 0, 1361, 1362, 6, 160, 11, 0, 1362, 337, 1, 0, 0, 0, 1363, 1364, 3, 70, 27, 0, 1364, 1365, 1, 0, 0, 0, 1365, 1366, 6, 161, 11, 0, 1366, 339, 1, 0, 0, 0, 1367, 1368, 3, 72, 28, 0, 1368, 1369, 1, 0, 0, 0, 1369, 1370, 6, 162, 16, 0, 1370, 1371, 6, 162, 12, 0, 1371, 341, 1, 0, 0, 0, 1372, 1373, 7, 1, 0, 0, 1373, 1374, 7, 9, 0, 0, 1374, 1375, 7, 15, 0, 0, 1375, 1376, 7, 7, 0, 0, 1376, 343, 1, 0, 0, 0, 1377, 1378, 3, 66, 25, 0, 1378, 1379, 1, 0, 0, 0, 1379, 1380, 6, 164, 11, 0, 1380, 345, 1, 0, 0, 0, 1381, 1382, 3, 68, 26, 0, 1382, 1383, 1, 0, 0, 0, 1383, 1384, 6, 165, 11, 0, 1384, 347, 1, 0, 0, 0, 1385, 1386, 3, 70, 27, 0, 1386, 1387, 1, 0, 0, 0, 1387, 1388, 6, 166, 11, 0, 1388, 349, 1, 0, 0, 0, 1389, 1390, 3, 182, 83, 0, 1390, 1391, 1, 0, 0, 0, 1391, 1392, 6, 167, 17, 0, 1392, 1393, 6, 167, 12, 0, 1393, 351, 1, 0, 0, 0, 1394, 1395, 3, 110, 47, 0, 1395, 1396, 1, 0, 0, 0, 1396, 1397, 6, 168, 18, 0, 1397, 353, 1, 0, 0, 0, 1398, 1404, 3, 84, 34, 0, 1399, 1404, 3, 74, 29, 0, 1400, 1404, 3, 116, 50, 0, 1401, 1404, 3, 76, 30, 0, 1402, 1404, 3, 90, 37, 0, 1403, 1398, 1, 0, 0, 0, 1403, 1399, 1, 0, 0, 0, 1403, 1400, 1, 0, 0, 0, 1403, 1401, 1, 0, 0, 0, 1403, 1402, 1, 0, 0, 0, 1404, 1405, 1, 0, 0, 0, 1405, 1403, 1, 0, 0, 0, 1405, 1406, 1, 0, 0, 0, 1406, 355, 1, 0, 0, 0, 1407, 1408, 3, 66, 25, 0, 1408, 1409, 1, 0, 0, 0, 1409, 1410, 6, 170, 11, 0, 1410, 357, 1, 0, 0, 0, 1411, 1412, 3, 68, 26, 0, 1412, 1413, 1, 0, 0, 0, 1413, 1414, 6, 171, 11, 0, 1414, 359, 1, 0, 0, 0, 1415, 1416, 3, 70, 27, 0, 1416, 1417, 1, 0, 0, 0, 1417, 1418, 6, 172, 11, 0, 1418, 361, 1, 0, 0, 0, 1419, 1420, 3, 72, 28, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 173, 16, 0, 1422, 1423, 6, 173, 12, 0, 1423, 363, 1, 0, 0, 0, 1424, 1425, 3, 110, 47, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 6, 174, 18, 0, 1427, 365, 1, 0, 0, 0, 1428, 1429, 3, 112, 48, 0, 1429, 1430, 1, 0, 0, 0, 1430, 1431, 6, 175, 19, 0, 1431, 367, 1, 0, 0, 0, 1432, 1433, 3, 116, 50, 0, 1433, 1434, 1, 0, 0, 0, 1434, 1435, 6, 176, 23, 0, 1435, 369, 1, 0, 0, 0, 1436, 1437, 3, 282, 133, 0, 1437, 1438, 1, 0, 0, 0, 1438, 1439, 6, 177, 33, 0, 1439, 1440, 6, 177, 34, 0, 1440, 371, 1, 0, 0, 0, 1441, 1442, 3, 222, 103, 0, 1442, 1443, 1, 0, 0, 0, 1443, 1444, 6, 178, 21, 0, 1444, 373, 1, 0, 0, 0, 1445, 1446, 3, 94, 39, 0, 1446, 1447, 1, 0, 0, 0, 1447, 1448, 6, 179, 22, 0, 1448, 375, 1, 0, 0, 0, 1449, 1450, 3, 66, 25, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 180, 11, 0, 1452, 377, 1, 0, 0, 0, 1453, 1454, 3, 68, 26, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 181, 11, 0, 1456, 379, 1, 0, 0, 0, 1457, 1458, 3, 70, 27, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 182, 11, 0, 1460, 381, 1, 0, 0, 0, 1461, 1462, 3, 72, 28, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 183, 16, 0, 1464, 1465, 6, 183, 12, 0, 1465, 1466, 6, 183, 12, 0, 1466, 383, 1, 0, 0, 0, 1467, 1468, 3, 112, 48, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 6, 184, 19, 0, 1470, 385, 1, 0, 0, 0, 1471, 1472, 3, 116, 50, 0, 1472, 1473, 1, 0, 0, 0, 1473, 1474, 6, 185, 23, 0, 1474, 387, 1, 0, 0, 0, 1475, 1476, 3, 248, 116, 0, 1476, 1477, 1, 0, 0, 
0, 1477, 1478, 6, 186, 26, 0, 1478, 389, 1, 0, 0, 0, 1479, 1480, 3, 66, 25, 0, 1480, 1481, 1, 0, 0, 0, 1481, 1482, 6, 187, 11, 0, 1482, 391, 1, 0, 0, 0, 1483, 1484, 3, 68, 26, 0, 1484, 1485, 1, 0, 0, 0, 1485, 1486, 6, 188, 11, 0, 1486, 393, 1, 0, 0, 0, 1487, 1488, 3, 70, 27, 0, 1488, 1489, 1, 0, 0, 0, 1489, 1490, 6, 189, 11, 0, 1490, 395, 1, 0, 0, 0, 1491, 1492, 3, 72, 28, 0, 1492, 1493, 1, 0, 0, 0, 1493, 1494, 6, 190, 16, 0, 1494, 1495, 6, 190, 12, 0, 1495, 397, 1, 0, 0, 0, 1496, 1497, 3, 54, 19, 0, 1497, 1498, 1, 0, 0, 0, 1498, 1499, 6, 191, 35, 0, 1499, 399, 1, 0, 0, 0, 1500, 1501, 3, 268, 126, 0, 1501, 1502, 1, 0, 0, 0, 1502, 1503, 6, 192, 36, 0, 1503, 401, 1, 0, 0, 0, 1504, 1505, 3, 282, 133, 0, 1505, 1506, 1, 0, 0, 0, 1506, 1507, 6, 193, 33, 0, 1507, 1508, 6, 193, 12, 0, 1508, 1509, 6, 193, 0, 0, 1509, 403, 1, 0, 0, 0, 1510, 1511, 7, 20, 0, 0, 1511, 1512, 7, 2, 0, 0, 1512, 1513, 7, 1, 0, 0, 1513, 1514, 7, 9, 0, 0, 1514, 1515, 7, 17, 0, 0, 1515, 1516, 1, 0, 0, 0, 1516, 1517, 6, 194, 12, 0, 1517, 1518, 6, 194, 0, 0, 1518, 405, 1, 0, 0, 0, 1519, 1520, 3, 222, 103, 0, 1520, 1521, 1, 0, 0, 0, 1521, 1522, 6, 195, 21, 0, 1522, 407, 1, 0, 0, 0, 1523, 1524, 3, 94, 39, 0, 1524, 1525, 1, 0, 0, 0, 1525, 1526, 6, 196, 22, 0, 1526, 409, 1, 0, 0, 0, 1527, 1528, 3, 110, 47, 0, 1528, 1529, 1, 0, 0, 0, 1529, 1530, 6, 197, 18, 0, 1530, 411, 1, 0, 0, 0, 1531, 1532, 3, 184, 84, 0, 1532, 1533, 1, 0, 0, 0, 1533, 1534, 6, 198, 32, 0, 1534, 413, 1, 0, 0, 0, 1535, 1536, 3, 188, 86, 0, 1536, 1537, 1, 0, 0, 0, 1537, 1538, 6, 199, 31, 0, 1538, 415, 1, 0, 0, 0, 1539, 1540, 3, 66, 25, 0, 1540, 1541, 1, 0, 0, 0, 1541, 1542, 6, 200, 11, 0, 1542, 417, 1, 0, 0, 0, 1543, 1544, 3, 68, 26, 0, 1544, 1545, 1, 0, 0, 0, 1545, 1546, 6, 201, 11, 0, 1546, 419, 1, 0, 0, 0, 1547, 1548, 3, 70, 27, 0, 1548, 1549, 1, 0, 0, 0, 1549, 1550, 6, 202, 11, 0, 1550, 421, 1, 0, 0, 0, 1551, 1552, 3, 72, 28, 0, 1552, 1553, 1, 0, 0, 0, 1553, 1554, 6, 203, 16, 0, 1554, 1555, 6, 203, 12, 0, 1555, 423, 1, 0, 0, 0, 1556, 1557, 3, 222, 103, 0, 1557, 1558, 1, 0, 0, 0, 1558, 1559, 6, 204, 21, 0, 1559, 1560, 6, 204, 12, 0, 1560, 1561, 6, 204, 37, 0, 1561, 425, 1, 0, 0, 0, 1562, 1563, 3, 94, 39, 0, 1563, 1564, 1, 0, 0, 0, 1564, 1565, 6, 205, 22, 0, 1565, 1566, 6, 205, 12, 0, 1566, 1567, 6, 205, 37, 0, 1567, 427, 1, 0, 0, 0, 1568, 1569, 3, 66, 25, 0, 1569, 1570, 1, 0, 0, 0, 1570, 1571, 6, 206, 11, 0, 1571, 429, 1, 0, 0, 0, 1572, 1573, 3, 68, 26, 0, 1573, 1574, 1, 0, 0, 0, 1574, 1575, 6, 207, 11, 0, 1575, 431, 1, 0, 0, 0, 1576, 1577, 3, 70, 27, 0, 1577, 1578, 1, 0, 0, 0, 1578, 1579, 6, 208, 11, 0, 1579, 433, 1, 0, 0, 0, 1580, 1581, 3, 110, 47, 0, 1581, 1582, 1, 0, 0, 0, 1582, 1583, 6, 209, 18, 0, 1583, 1584, 6, 209, 12, 0, 1584, 1585, 6, 209, 9, 0, 1585, 435, 1, 0, 0, 0, 1586, 1587, 3, 112, 48, 0, 1587, 1588, 1, 0, 0, 0, 1588, 1589, 6, 210, 19, 0, 1589, 1590, 6, 210, 12, 0, 1590, 1591, 6, 210, 9, 0, 1591, 437, 1, 0, 0, 0, 1592, 1593, 3, 66, 25, 0, 1593, 1594, 1, 0, 0, 0, 1594, 1595, 6, 211, 11, 0, 1595, 439, 1, 0, 0, 0, 1596, 1597, 3, 68, 26, 0, 1597, 1598, 1, 0, 0, 0, 1598, 1599, 6, 212, 11, 0, 1599, 441, 1, 0, 0, 0, 1600, 1601, 3, 70, 27, 0, 1601, 1602, 1, 0, 0, 0, 1602, 1603, 6, 213, 11, 0, 1603, 443, 1, 0, 0, 0, 1604, 1605, 3, 188, 86, 0, 1605, 1606, 1, 0, 0, 0, 1606, 1607, 6, 214, 12, 0, 1607, 1608, 6, 214, 0, 0, 1608, 1609, 6, 214, 31, 0, 1609, 445, 1, 0, 0, 0, 1610, 1611, 3, 184, 84, 0, 1611, 1612, 1, 0, 0, 0, 1612, 1613, 6, 215, 12, 0, 1613, 1614, 6, 215, 0, 0, 1614, 1615, 6, 215, 32, 0, 1615, 447, 1, 0, 0, 0, 1616, 1617, 3, 100, 42, 0, 1617, 
1618, 1, 0, 0, 0, 1618, 1619, 6, 216, 12, 0, 1619, 1620, 6, 216, 0, 0, 1620, 1621, 6, 216, 38, 0, 1621, 449, 1, 0, 0, 0, 1622, 1623, 3, 72, 28, 0, 1623, 1624, 1, 0, 0, 0, 1624, 1625, 6, 217, 16, 0, 1625, 1626, 6, 217, 12, 0, 1626, 451, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 665, 675, 679, 682, 691, 693, 704, 723, 728, 737, 744, 749, 751, 762, 770, 773, 775, 780, 785, 791, 798, 803, 809, 812, 820, 824, 956, 961, 968, 970, 986, 991, 996, 998, 1004, 1081, 1086, 1135, 1139, 1144, 1149, 1154, 1156, 1160, 1162, 1249, 1253, 1258, 1403, 1405, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 72, 0, 5, 0, 0, 7, 29, 0, 7, 73, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 83, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 71, 0, 7, 87, 0, 5, 10, 0, 5, 7, 0, 7, 97, 0, 7, 96, 0, 7, 75, 0, 7, 74, 0, 7, 95, 0, 5, 12, 0, 7, 20, 0, 7, 91, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file +[4, 0, 130, 1617, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 
166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 2, 214, 7, 214, 2, 215, 7, 215, 2, 216, 7, 216, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 4, 23, 653, 8, 23, 11, 23, 12, 23, 654, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 663, 8, 24, 10, 24, 12, 24, 666, 9, 24, 1, 24, 3, 24, 669, 8, 24, 1, 24, 3, 24, 672, 8, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 681, 8, 25, 10, 25, 12, 25, 684, 9, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 4, 26, 692, 8, 26, 11, 26, 12, 26, 693, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 713, 8, 32, 1, 32, 4, 32, 716, 8, 32, 11, 32, 12, 32, 717, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 727, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 734, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 739, 8, 38, 10, 38, 12, 38, 742, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 750, 8, 38, 10, 38, 12, 38, 753, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 760, 8, 38, 1, 38, 3, 38, 763, 8, 38, 3, 38, 765, 8, 38, 1, 39, 4, 39, 768, 8, 39, 11, 39, 12, 39, 769, 1, 40, 4, 40, 773, 8, 40, 11, 40, 12, 40, 774, 1, 40, 1, 40, 5, 40, 779, 8, 40, 10, 40, 12, 40, 782, 9, 40, 1, 40, 1, 40, 4, 40, 786, 8, 40, 11, 40, 12, 40, 787, 1, 40, 4, 40, 791, 8, 40, 11, 40, 12, 40, 792, 1, 40, 1, 40, 5, 40, 797, 8, 40, 10, 40, 12, 40, 800, 9, 40, 3, 40, 802, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 808, 8, 40, 11, 40, 12, 40, 809, 1, 40, 1, 40, 3, 40, 814, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 
43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 3, 80, 946, 8, 80, 1, 80, 5, 80, 949, 8, 80, 10, 80, 12, 80, 952, 9, 80, 1, 80, 1, 80, 4, 80, 956, 8, 80, 11, 80, 12, 80, 957, 3, 80, 960, 8, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 5, 83, 974, 8, 83, 10, 83, 12, 83, 977, 9, 83, 1, 83, 1, 83, 3, 83, 981, 8, 83, 1, 83, 4, 83, 984, 8, 83, 11, 83, 12, 83, 985, 3, 83, 988, 8, 83, 1, 84, 1, 84, 4, 84, 992, 8, 84, 11, 84, 12, 84, 993, 1, 84, 1, 84, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 3, 101, 1071, 8, 101, 1, 102, 4, 102, 1074, 8, 102, 11, 102, 12, 102, 1075, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 3, 113, 1125, 8, 113, 1, 114, 1, 114, 3, 114, 1129, 8, 114, 1, 114, 5, 114, 1132, 8, 114, 10, 114, 12, 114, 1135, 9, 114, 1, 114, 1, 114, 3, 114, 1139, 8, 114, 1, 114, 4, 114, 1142, 8, 114, 11, 114, 12, 114, 1143, 3, 114, 1146, 8, 114, 1, 115, 1, 115, 4, 115, 1150, 8, 115, 11, 115, 12, 115, 1151, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 135, 4, 135, 1237, 8, 135, 11, 135, 12, 135, 1238, 1, 135, 1, 135, 3, 135, 1243, 8, 135, 1, 135, 4, 135, 1246, 8, 135, 11, 135, 12, 135, 1247, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 
1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 4, 168, 1393, 8, 168, 11, 168, 12, 168, 1394, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 213, 1, 213, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 215, 1, 216, 1, 216, 1, 216, 1, 216, 1, 216, 2, 682, 751, 0, 217, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 69, 174, 0, 176, 70, 178, 71, 180, 72, 182, 73, 184, 0, 186, 74, 188, 75, 190, 76, 192, 77, 194, 0, 196, 
0, 198, 78, 200, 79, 202, 80, 204, 0, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 81, 218, 0, 220, 82, 222, 0, 224, 0, 226, 83, 228, 84, 230, 85, 232, 0, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 0, 246, 86, 248, 87, 250, 88, 252, 89, 254, 0, 256, 0, 258, 0, 260, 0, 262, 0, 264, 0, 266, 90, 268, 0, 270, 91, 272, 92, 274, 93, 276, 0, 278, 0, 280, 94, 282, 95, 284, 0, 286, 96, 288, 0, 290, 97, 292, 98, 294, 99, 296, 0, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 0, 314, 100, 316, 101, 318, 102, 320, 0, 322, 0, 324, 0, 326, 0, 328, 0, 330, 0, 332, 103, 334, 104, 336, 105, 338, 0, 340, 106, 342, 107, 344, 108, 346, 109, 348, 0, 350, 0, 352, 110, 354, 111, 356, 112, 358, 113, 360, 0, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 0, 374, 114, 376, 115, 378, 116, 380, 0, 382, 0, 384, 0, 386, 0, 388, 117, 390, 118, 392, 119, 394, 0, 396, 120, 398, 0, 400, 0, 402, 121, 404, 0, 406, 0, 408, 0, 410, 0, 412, 0, 414, 122, 416, 123, 418, 124, 420, 0, 422, 0, 424, 0, 426, 125, 428, 126, 430, 127, 432, 0, 434, 0, 436, 128, 438, 129, 440, 130, 442, 0, 444, 0, 446, 0, 448, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 2, 0, 74, 74, 106, 106, 1644, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 1, 70, 1, 0, 0, 0, 1, 92, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 
1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 182, 1, 0, 0, 0, 1, 186, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 1, 192, 1, 0, 0, 0, 2, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 2, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 3, 220, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 3, 230, 1, 0, 0, 0, 4, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 240, 1, 0, 0, 0, 4, 246, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 4, 252, 1, 0, 0, 0, 5, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 5, 274, 1, 0, 0, 0, 6, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 282, 1, 0, 0, 0, 6, 286, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 6, 294, 1, 0, 0, 0, 7, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 7, 318, 1, 0, 0, 0, 8, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 8, 336, 1, 0, 0, 0, 9, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 9, 346, 1, 0, 0, 0, 10, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 10, 358, 1, 0, 0, 0, 11, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 11, 378, 1, 0, 0, 0, 12, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 12, 392, 1, 0, 0, 0, 13, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 13, 412, 1, 0, 0, 0, 13, 414, 1, 0, 0, 0, 13, 416, 1, 0, 0, 0, 13, 418, 1, 0, 0, 0, 14, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 14, 424, 1, 0, 0, 0, 14, 426, 1, 0, 0, 0, 14, 428, 1, 0, 0, 0, 14, 430, 1, 0, 0, 0, 15, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 15, 442, 1, 0, 0, 0, 15, 444, 1, 0, 0, 0, 15, 446, 1, 0, 0, 0, 15, 448, 1, 0, 0, 0, 16, 450, 1, 0, 0, 0, 18, 460, 1, 0, 0, 0, 20, 467, 1, 0, 0, 0, 22, 476, 1, 0, 0, 0, 24, 483, 1, 0, 0, 0, 26, 493, 1, 0, 0, 0, 28, 500, 1, 0, 0, 0, 30, 507, 1, 0, 0, 0, 32, 514, 1, 0, 0, 0, 34, 522, 1, 0, 0, 0, 36, 534, 1, 0, 0, 0, 38, 543, 1, 0, 0, 0, 40, 549, 1, 0, 0, 0, 42, 556, 1, 0, 0, 0, 44, 563, 1, 0, 0, 0, 46, 571, 1, 0, 0, 0, 48, 579, 1, 0, 0, 0, 50, 588, 1, 0, 0, 0, 52, 603, 1, 0, 0, 0, 54, 615, 1, 0, 0, 0, 56, 626, 1, 0, 0, 0, 58, 634, 1, 0, 0, 0, 60, 642, 1, 0, 0, 0, 62, 652, 1, 0, 0, 0, 64, 658, 1, 0, 0, 0, 66, 675, 1, 0, 0, 0, 68, 691, 1, 0, 0, 0, 70, 697, 1, 0, 0, 0, 72, 701, 1, 0, 0, 0, 74, 703, 1, 0, 0, 0, 76, 705, 1, 0, 0, 0, 78, 708, 1, 0, 0, 0, 80, 710, 1, 0, 0, 0, 82, 719, 1, 0, 0, 0, 84, 721, 1, 0, 0, 0, 86, 726, 1, 0, 0, 0, 88, 728, 1, 0, 0, 0, 90, 733, 1, 0, 0, 0, 92, 764, 1, 
0, 0, 0, 94, 767, 1, 0, 0, 0, 96, 813, 1, 0, 0, 0, 98, 815, 1, 0, 0, 0, 100, 818, 1, 0, 0, 0, 102, 822, 1, 0, 0, 0, 104, 826, 1, 0, 0, 0, 106, 828, 1, 0, 0, 0, 108, 831, 1, 0, 0, 0, 110, 833, 1, 0, 0, 0, 112, 835, 1, 0, 0, 0, 114, 840, 1, 0, 0, 0, 116, 842, 1, 0, 0, 0, 118, 848, 1, 0, 0, 0, 120, 854, 1, 0, 0, 0, 122, 857, 1, 0, 0, 0, 124, 860, 1, 0, 0, 0, 126, 865, 1, 0, 0, 0, 128, 870, 1, 0, 0, 0, 130, 872, 1, 0, 0, 0, 132, 876, 1, 0, 0, 0, 134, 881, 1, 0, 0, 0, 136, 887, 1, 0, 0, 0, 138, 890, 1, 0, 0, 0, 140, 892, 1, 0, 0, 0, 142, 898, 1, 0, 0, 0, 144, 900, 1, 0, 0, 0, 146, 905, 1, 0, 0, 0, 148, 908, 1, 0, 0, 0, 150, 911, 1, 0, 0, 0, 152, 914, 1, 0, 0, 0, 154, 916, 1, 0, 0, 0, 156, 919, 1, 0, 0, 0, 158, 921, 1, 0, 0, 0, 160, 924, 1, 0, 0, 0, 162, 926, 1, 0, 0, 0, 164, 928, 1, 0, 0, 0, 166, 930, 1, 0, 0, 0, 168, 932, 1, 0, 0, 0, 170, 934, 1, 0, 0, 0, 172, 936, 1, 0, 0, 0, 174, 938, 1, 0, 0, 0, 176, 959, 1, 0, 0, 0, 178, 961, 1, 0, 0, 0, 180, 966, 1, 0, 0, 0, 182, 987, 1, 0, 0, 0, 184, 989, 1, 0, 0, 0, 186, 997, 1, 0, 0, 0, 188, 999, 1, 0, 0, 0, 190, 1003, 1, 0, 0, 0, 192, 1007, 1, 0, 0, 0, 194, 1011, 1, 0, 0, 0, 196, 1016, 1, 0, 0, 0, 198, 1021, 1, 0, 0, 0, 200, 1025, 1, 0, 0, 0, 202, 1029, 1, 0, 0, 0, 204, 1033, 1, 0, 0, 0, 206, 1038, 1, 0, 0, 0, 208, 1042, 1, 0, 0, 0, 210, 1046, 1, 0, 0, 0, 212, 1050, 1, 0, 0, 0, 214, 1054, 1, 0, 0, 0, 216, 1058, 1, 0, 0, 0, 218, 1070, 1, 0, 0, 0, 220, 1073, 1, 0, 0, 0, 222, 1077, 1, 0, 0, 0, 224, 1081, 1, 0, 0, 0, 226, 1085, 1, 0, 0, 0, 228, 1089, 1, 0, 0, 0, 230, 1093, 1, 0, 0, 0, 232, 1097, 1, 0, 0, 0, 234, 1102, 1, 0, 0, 0, 236, 1106, 1, 0, 0, 0, 238, 1110, 1, 0, 0, 0, 240, 1115, 1, 0, 0, 0, 242, 1124, 1, 0, 0, 0, 244, 1145, 1, 0, 0, 0, 246, 1149, 1, 0, 0, 0, 248, 1153, 1, 0, 0, 0, 250, 1157, 1, 0, 0, 0, 252, 1161, 1, 0, 0, 0, 254, 1165, 1, 0, 0, 0, 256, 1170, 1, 0, 0, 0, 258, 1174, 1, 0, 0, 0, 260, 1178, 1, 0, 0, 0, 262, 1182, 1, 0, 0, 0, 264, 1187, 1, 0, 0, 0, 266, 1192, 1, 0, 0, 0, 268, 1195, 1, 0, 0, 0, 270, 1199, 1, 0, 0, 0, 272, 1203, 1, 0, 0, 0, 274, 1207, 1, 0, 0, 0, 276, 1211, 1, 0, 0, 0, 278, 1216, 1, 0, 0, 0, 280, 1221, 1, 0, 0, 0, 282, 1226, 1, 0, 0, 0, 284, 1233, 1, 0, 0, 0, 286, 1242, 1, 0, 0, 0, 288, 1249, 1, 0, 0, 0, 290, 1253, 1, 0, 0, 0, 292, 1257, 1, 0, 0, 0, 294, 1261, 1, 0, 0, 0, 296, 1265, 1, 0, 0, 0, 298, 1271, 1, 0, 0, 0, 300, 1275, 1, 0, 0, 0, 302, 1279, 1, 0, 0, 0, 304, 1283, 1, 0, 0, 0, 306, 1287, 1, 0, 0, 0, 308, 1291, 1, 0, 0, 0, 310, 1295, 1, 0, 0, 0, 312, 1300, 1, 0, 0, 0, 314, 1305, 1, 0, 0, 0, 316, 1309, 1, 0, 0, 0, 318, 1313, 1, 0, 0, 0, 320, 1317, 1, 0, 0, 0, 322, 1322, 1, 0, 0, 0, 324, 1326, 1, 0, 0, 0, 326, 1331, 1, 0, 0, 0, 328, 1336, 1, 0, 0, 0, 330, 1340, 1, 0, 0, 0, 332, 1344, 1, 0, 0, 0, 334, 1348, 1, 0, 0, 0, 336, 1352, 1, 0, 0, 0, 338, 1356, 1, 0, 0, 0, 340, 1361, 1, 0, 0, 0, 342, 1366, 1, 0, 0, 0, 344, 1370, 1, 0, 0, 0, 346, 1374, 1, 0, 0, 0, 348, 1378, 1, 0, 0, 0, 350, 1383, 1, 0, 0, 0, 352, 1392, 1, 0, 0, 0, 354, 1396, 1, 0, 0, 0, 356, 1400, 1, 0, 0, 0, 358, 1404, 1, 0, 0, 0, 360, 1408, 1, 0, 0, 0, 362, 1413, 1, 0, 0, 0, 364, 1417, 1, 0, 0, 0, 366, 1421, 1, 0, 0, 0, 368, 1425, 1, 0, 0, 0, 370, 1430, 1, 0, 0, 0, 372, 1434, 1, 0, 0, 0, 374, 1438, 1, 0, 0, 0, 376, 1442, 1, 0, 0, 0, 378, 1446, 1, 0, 0, 0, 380, 1450, 1, 0, 0, 0, 382, 1456, 1, 0, 0, 0, 384, 1460, 1, 0, 0, 0, 386, 1464, 1, 0, 0, 0, 388, 1468, 1, 0, 0, 0, 390, 1472, 1, 0, 0, 0, 392, 1476, 1, 0, 0, 0, 394, 1480, 1, 0, 0, 0, 396, 1485, 1, 0, 0, 0, 398, 1490, 1, 0, 0, 0, 400, 1494, 1, 0, 0, 0, 402, 1500, 1, 0, 0, 0, 404, 1509, 1, 0, 0, 0, 406, 
1513, 1, 0, 0, 0, 408, 1517, 1, 0, 0, 0, 410, 1521, 1, 0, 0, 0, 412, 1525, 1, 0, 0, 0, 414, 1529, 1, 0, 0, 0, 416, 1533, 1, 0, 0, 0, 418, 1537, 1, 0, 0, 0, 420, 1541, 1, 0, 0, 0, 422, 1546, 1, 0, 0, 0, 424, 1552, 1, 0, 0, 0, 426, 1558, 1, 0, 0, 0, 428, 1562, 1, 0, 0, 0, 430, 1566, 1, 0, 0, 0, 432, 1570, 1, 0, 0, 0, 434, 1576, 1, 0, 0, 0, 436, 1582, 1, 0, 0, 0, 438, 1586, 1, 0, 0, 0, 440, 1590, 1, 0, 0, 0, 442, 1594, 1, 0, 0, 0, 444, 1600, 1, 0, 0, 0, 446, 1606, 1, 0, 0, 0, 448, 1612, 1, 0, 0, 0, 450, 451, 7, 0, 0, 0, 451, 452, 7, 1, 0, 0, 452, 453, 7, 2, 0, 0, 453, 454, 7, 2, 0, 0, 454, 455, 7, 3, 0, 0, 455, 456, 7, 4, 0, 0, 456, 457, 7, 5, 0, 0, 457, 458, 1, 0, 0, 0, 458, 459, 6, 0, 0, 0, 459, 17, 1, 0, 0, 0, 460, 461, 7, 0, 0, 0, 461, 462, 7, 6, 0, 0, 462, 463, 7, 7, 0, 0, 463, 464, 7, 8, 0, 0, 464, 465, 1, 0, 0, 0, 465, 466, 6, 1, 1, 0, 466, 19, 1, 0, 0, 0, 467, 468, 7, 3, 0, 0, 468, 469, 7, 9, 0, 0, 469, 470, 7, 6, 0, 0, 470, 471, 7, 1, 0, 0, 471, 472, 7, 4, 0, 0, 472, 473, 7, 10, 0, 0, 473, 474, 1, 0, 0, 0, 474, 475, 6, 2, 2, 0, 475, 21, 1, 0, 0, 0, 476, 477, 7, 3, 0, 0, 477, 478, 7, 11, 0, 0, 478, 479, 7, 12, 0, 0, 479, 480, 7, 13, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 6, 3, 0, 0, 482, 23, 1, 0, 0, 0, 483, 484, 7, 3, 0, 0, 484, 485, 7, 14, 0, 0, 485, 486, 7, 8, 0, 0, 486, 487, 7, 13, 0, 0, 487, 488, 7, 12, 0, 0, 488, 489, 7, 1, 0, 0, 489, 490, 7, 9, 0, 0, 490, 491, 1, 0, 0, 0, 491, 492, 6, 4, 3, 0, 492, 25, 1, 0, 0, 0, 493, 494, 7, 15, 0, 0, 494, 495, 7, 6, 0, 0, 495, 496, 7, 7, 0, 0, 496, 497, 7, 16, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 6, 5, 4, 0, 499, 27, 1, 0, 0, 0, 500, 501, 7, 17, 0, 0, 501, 502, 7, 6, 0, 0, 502, 503, 7, 7, 0, 0, 503, 504, 7, 18, 0, 0, 504, 505, 1, 0, 0, 0, 505, 506, 6, 6, 0, 0, 506, 29, 1, 0, 0, 0, 507, 508, 7, 18, 0, 0, 508, 509, 7, 3, 0, 0, 509, 510, 7, 3, 0, 0, 510, 511, 7, 8, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 6, 7, 1, 0, 513, 31, 1, 0, 0, 0, 514, 515, 7, 13, 0, 0, 515, 516, 7, 1, 0, 0, 516, 517, 7, 16, 0, 0, 517, 518, 7, 1, 0, 0, 518, 519, 7, 5, 0, 0, 519, 520, 1, 0, 0, 0, 520, 521, 6, 8, 0, 0, 521, 33, 1, 0, 0, 0, 522, 523, 7, 16, 0, 0, 523, 524, 7, 11, 0, 0, 524, 525, 5, 95, 0, 0, 525, 526, 7, 3, 0, 0, 526, 527, 7, 14, 0, 0, 527, 528, 7, 8, 0, 0, 528, 529, 7, 12, 0, 0, 529, 530, 7, 9, 0, 0, 530, 531, 7, 0, 0, 0, 531, 532, 1, 0, 0, 0, 532, 533, 6, 9, 5, 0, 533, 35, 1, 0, 0, 0, 534, 535, 7, 6, 0, 0, 535, 536, 7, 3, 0, 0, 536, 537, 7, 9, 0, 0, 537, 538, 7, 12, 0, 0, 538, 539, 7, 16, 0, 0, 539, 540, 7, 3, 0, 0, 540, 541, 1, 0, 0, 0, 541, 542, 6, 10, 6, 0, 542, 37, 1, 0, 0, 0, 543, 544, 7, 6, 0, 0, 544, 545, 7, 7, 0, 0, 545, 546, 7, 19, 0, 0, 546, 547, 1, 0, 0, 0, 547, 548, 6, 11, 0, 0, 548, 39, 1, 0, 0, 0, 549, 550, 7, 2, 0, 0, 550, 551, 7, 10, 0, 0, 551, 552, 7, 7, 0, 0, 552, 553, 7, 19, 0, 0, 553, 554, 1, 0, 0, 0, 554, 555, 6, 12, 7, 0, 555, 41, 1, 0, 0, 0, 556, 557, 7, 2, 0, 0, 557, 558, 7, 7, 0, 0, 558, 559, 7, 6, 0, 0, 559, 560, 7, 5, 0, 0, 560, 561, 1, 0, 0, 0, 561, 562, 6, 13, 0, 0, 562, 43, 1, 0, 0, 0, 563, 564, 7, 2, 0, 0, 564, 565, 7, 5, 0, 0, 565, 566, 7, 12, 0, 0, 566, 567, 7, 5, 0, 0, 567, 568, 7, 2, 0, 0, 568, 569, 1, 0, 0, 0, 569, 570, 6, 14, 0, 0, 570, 45, 1, 0, 0, 0, 571, 572, 7, 19, 0, 0, 572, 573, 7, 10, 0, 0, 573, 574, 7, 3, 0, 0, 574, 575, 7, 6, 0, 0, 575, 576, 7, 3, 0, 0, 576, 577, 1, 0, 0, 0, 577, 578, 6, 15, 0, 0, 578, 47, 1, 0, 0, 0, 579, 580, 7, 13, 0, 0, 580, 581, 7, 7, 0, 0, 581, 582, 7, 7, 0, 0, 582, 583, 7, 18, 0, 0, 583, 584, 7, 20, 0, 0, 584, 585, 7, 8, 0, 0, 585, 586, 1, 0, 0, 0, 586, 587, 6, 16, 8, 0, 587, 49, 1, 0, 0, 
0, 588, 589, 4, 17, 0, 0, 589, 590, 7, 1, 0, 0, 590, 591, 7, 9, 0, 0, 591, 592, 7, 13, 0, 0, 592, 593, 7, 1, 0, 0, 593, 594, 7, 9, 0, 0, 594, 595, 7, 3, 0, 0, 595, 596, 7, 2, 0, 0, 596, 597, 7, 5, 0, 0, 597, 598, 7, 12, 0, 0, 598, 599, 7, 5, 0, 0, 599, 600, 7, 2, 0, 0, 600, 601, 1, 0, 0, 0, 601, 602, 6, 17, 0, 0, 602, 51, 1, 0, 0, 0, 603, 604, 4, 18, 1, 0, 604, 605, 7, 13, 0, 0, 605, 606, 7, 7, 0, 0, 606, 607, 7, 7, 0, 0, 607, 608, 7, 18, 0, 0, 608, 609, 7, 20, 0, 0, 609, 610, 7, 8, 0, 0, 610, 611, 5, 95, 0, 0, 611, 612, 5, 128020, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 6, 18, 9, 0, 614, 53, 1, 0, 0, 0, 615, 616, 4, 19, 2, 0, 616, 617, 7, 16, 0, 0, 617, 618, 7, 3, 0, 0, 618, 619, 7, 5, 0, 0, 619, 620, 7, 6, 0, 0, 620, 621, 7, 1, 0, 0, 621, 622, 7, 4, 0, 0, 622, 623, 7, 2, 0, 0, 623, 624, 1, 0, 0, 0, 624, 625, 6, 19, 10, 0, 625, 55, 1, 0, 0, 0, 626, 627, 4, 20, 3, 0, 627, 628, 7, 15, 0, 0, 628, 629, 7, 20, 0, 0, 629, 630, 7, 13, 0, 0, 630, 631, 7, 13, 0, 0, 631, 632, 1, 0, 0, 0, 632, 633, 6, 20, 8, 0, 633, 57, 1, 0, 0, 0, 634, 635, 4, 21, 4, 0, 635, 636, 7, 13, 0, 0, 636, 637, 7, 3, 0, 0, 637, 638, 7, 15, 0, 0, 638, 639, 7, 5, 0, 0, 639, 640, 1, 0, 0, 0, 640, 641, 6, 21, 8, 0, 641, 59, 1, 0, 0, 0, 642, 643, 4, 22, 5, 0, 643, 644, 7, 6, 0, 0, 644, 645, 7, 1, 0, 0, 645, 646, 7, 17, 0, 0, 646, 647, 7, 10, 0, 0, 647, 648, 7, 5, 0, 0, 648, 649, 1, 0, 0, 0, 649, 650, 6, 22, 8, 0, 650, 61, 1, 0, 0, 0, 651, 653, 8, 21, 0, 0, 652, 651, 1, 0, 0, 0, 653, 654, 1, 0, 0, 0, 654, 652, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 657, 6, 23, 0, 0, 657, 63, 1, 0, 0, 0, 658, 659, 5, 47, 0, 0, 659, 660, 5, 47, 0, 0, 660, 664, 1, 0, 0, 0, 661, 663, 8, 22, 0, 0, 662, 661, 1, 0, 0, 0, 663, 666, 1, 0, 0, 0, 664, 662, 1, 0, 0, 0, 664, 665, 1, 0, 0, 0, 665, 668, 1, 0, 0, 0, 666, 664, 1, 0, 0, 0, 667, 669, 5, 13, 0, 0, 668, 667, 1, 0, 0, 0, 668, 669, 1, 0, 0, 0, 669, 671, 1, 0, 0, 0, 670, 672, 5, 10, 0, 0, 671, 670, 1, 0, 0, 0, 671, 672, 1, 0, 0, 0, 672, 673, 1, 0, 0, 0, 673, 674, 6, 24, 11, 0, 674, 65, 1, 0, 0, 0, 675, 676, 5, 47, 0, 0, 676, 677, 5, 42, 0, 0, 677, 682, 1, 0, 0, 0, 678, 681, 3, 66, 25, 0, 679, 681, 9, 0, 0, 0, 680, 678, 1, 0, 0, 0, 680, 679, 1, 0, 0, 0, 681, 684, 1, 0, 0, 0, 682, 683, 1, 0, 0, 0, 682, 680, 1, 0, 0, 0, 683, 685, 1, 0, 0, 0, 684, 682, 1, 0, 0, 0, 685, 686, 5, 42, 0, 0, 686, 687, 5, 47, 0, 0, 687, 688, 1, 0, 0, 0, 688, 689, 6, 25, 11, 0, 689, 67, 1, 0, 0, 0, 690, 692, 7, 23, 0, 0, 691, 690, 1, 0, 0, 0, 692, 693, 1, 0, 0, 0, 693, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 696, 6, 26, 11, 0, 696, 69, 1, 0, 0, 0, 697, 698, 5, 124, 0, 0, 698, 699, 1, 0, 0, 0, 699, 700, 6, 27, 12, 0, 700, 71, 1, 0, 0, 0, 701, 702, 7, 24, 0, 0, 702, 73, 1, 0, 0, 0, 703, 704, 7, 25, 0, 0, 704, 75, 1, 0, 0, 0, 705, 706, 5, 92, 0, 0, 706, 707, 7, 26, 0, 0, 707, 77, 1, 0, 0, 0, 708, 709, 8, 27, 0, 0, 709, 79, 1, 0, 0, 0, 710, 712, 7, 3, 0, 0, 711, 713, 7, 28, 0, 0, 712, 711, 1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, 715, 1, 0, 0, 0, 714, 716, 3, 72, 28, 0, 715, 714, 1, 0, 0, 0, 716, 717, 1, 0, 0, 0, 717, 715, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 81, 1, 0, 0, 0, 719, 720, 5, 64, 0, 0, 720, 83, 1, 0, 0, 0, 721, 722, 5, 96, 0, 0, 722, 85, 1, 0, 0, 0, 723, 727, 8, 29, 0, 0, 724, 725, 5, 96, 0, 0, 725, 727, 5, 96, 0, 0, 726, 723, 1, 0, 0, 0, 726, 724, 1, 0, 0, 0, 727, 87, 1, 0, 0, 0, 728, 729, 5, 95, 0, 0, 729, 89, 1, 0, 0, 0, 730, 734, 3, 74, 29, 0, 731, 734, 3, 72, 28, 0, 732, 734, 3, 88, 36, 0, 733, 730, 1, 0, 0, 0, 733, 731, 1, 0, 0, 0, 733, 732, 1, 0, 0, 0, 734, 91, 1, 0, 0, 
0, 735, 740, 5, 34, 0, 0, 736, 739, 3, 76, 30, 0, 737, 739, 3, 78, 31, 0, 738, 736, 1, 0, 0, 0, 738, 737, 1, 0, 0, 0, 739, 742, 1, 0, 0, 0, 740, 738, 1, 0, 0, 0, 740, 741, 1, 0, 0, 0, 741, 743, 1, 0, 0, 0, 742, 740, 1, 0, 0, 0, 743, 765, 5, 34, 0, 0, 744, 745, 5, 34, 0, 0, 745, 746, 5, 34, 0, 0, 746, 747, 5, 34, 0, 0, 747, 751, 1, 0, 0, 0, 748, 750, 8, 22, 0, 0, 749, 748, 1, 0, 0, 0, 750, 753, 1, 0, 0, 0, 751, 752, 1, 0, 0, 0, 751, 749, 1, 0, 0, 0, 752, 754, 1, 0, 0, 0, 753, 751, 1, 0, 0, 0, 754, 755, 5, 34, 0, 0, 755, 756, 5, 34, 0, 0, 756, 757, 5, 34, 0, 0, 757, 759, 1, 0, 0, 0, 758, 760, 5, 34, 0, 0, 759, 758, 1, 0, 0, 0, 759, 760, 1, 0, 0, 0, 760, 762, 1, 0, 0, 0, 761, 763, 5, 34, 0, 0, 762, 761, 1, 0, 0, 0, 762, 763, 1, 0, 0, 0, 763, 765, 1, 0, 0, 0, 764, 735, 1, 0, 0, 0, 764, 744, 1, 0, 0, 0, 765, 93, 1, 0, 0, 0, 766, 768, 3, 72, 28, 0, 767, 766, 1, 0, 0, 0, 768, 769, 1, 0, 0, 0, 769, 767, 1, 0, 0, 0, 769, 770, 1, 0, 0, 0, 770, 95, 1, 0, 0, 0, 771, 773, 3, 72, 28, 0, 772, 771, 1, 0, 0, 0, 773, 774, 1, 0, 0, 0, 774, 772, 1, 0, 0, 0, 774, 775, 1, 0, 0, 0, 775, 776, 1, 0, 0, 0, 776, 780, 3, 114, 49, 0, 777, 779, 3, 72, 28, 0, 778, 777, 1, 0, 0, 0, 779, 782, 1, 0, 0, 0, 780, 778, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 814, 1, 0, 0, 0, 782, 780, 1, 0, 0, 0, 783, 785, 3, 114, 49, 0, 784, 786, 3, 72, 28, 0, 785, 784, 1, 0, 0, 0, 786, 787, 1, 0, 0, 0, 787, 785, 1, 0, 0, 0, 787, 788, 1, 0, 0, 0, 788, 814, 1, 0, 0, 0, 789, 791, 3, 72, 28, 0, 790, 789, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 790, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 801, 1, 0, 0, 0, 794, 798, 3, 114, 49, 0, 795, 797, 3, 72, 28, 0, 796, 795, 1, 0, 0, 0, 797, 800, 1, 0, 0, 0, 798, 796, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 802, 1, 0, 0, 0, 800, 798, 1, 0, 0, 0, 801, 794, 1, 0, 0, 0, 801, 802, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 804, 3, 80, 32, 0, 804, 814, 1, 0, 0, 0, 805, 807, 3, 114, 49, 0, 806, 808, 3, 72, 28, 0, 807, 806, 1, 0, 0, 0, 808, 809, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 811, 1, 0, 0, 0, 811, 812, 3, 80, 32, 0, 812, 814, 1, 0, 0, 0, 813, 772, 1, 0, 0, 0, 813, 783, 1, 0, 0, 0, 813, 790, 1, 0, 0, 0, 813, 805, 1, 0, 0, 0, 814, 97, 1, 0, 0, 0, 815, 816, 7, 30, 0, 0, 816, 817, 7, 31, 0, 0, 817, 99, 1, 0, 0, 0, 818, 819, 7, 12, 0, 0, 819, 820, 7, 9, 0, 0, 820, 821, 7, 0, 0, 0, 821, 101, 1, 0, 0, 0, 822, 823, 7, 12, 0, 0, 823, 824, 7, 2, 0, 0, 824, 825, 7, 4, 0, 0, 825, 103, 1, 0, 0, 0, 826, 827, 5, 61, 0, 0, 827, 105, 1, 0, 0, 0, 828, 829, 5, 58, 0, 0, 829, 830, 5, 58, 0, 0, 830, 107, 1, 0, 0, 0, 831, 832, 5, 58, 0, 0, 832, 109, 1, 0, 0, 0, 833, 834, 5, 44, 0, 0, 834, 111, 1, 0, 0, 0, 835, 836, 7, 0, 0, 0, 836, 837, 7, 3, 0, 0, 837, 838, 7, 2, 0, 0, 838, 839, 7, 4, 0, 0, 839, 113, 1, 0, 0, 0, 840, 841, 5, 46, 0, 0, 841, 115, 1, 0, 0, 0, 842, 843, 7, 15, 0, 0, 843, 844, 7, 12, 0, 0, 844, 845, 7, 13, 0, 0, 845, 846, 7, 2, 0, 0, 846, 847, 7, 3, 0, 0, 847, 117, 1, 0, 0, 0, 848, 849, 7, 15, 0, 0, 849, 850, 7, 1, 0, 0, 850, 851, 7, 6, 0, 0, 851, 852, 7, 2, 0, 0, 852, 853, 7, 5, 0, 0, 853, 119, 1, 0, 0, 0, 854, 855, 7, 1, 0, 0, 855, 856, 7, 9, 0, 0, 856, 121, 1, 0, 0, 0, 857, 858, 7, 1, 0, 0, 858, 859, 7, 2, 0, 0, 859, 123, 1, 0, 0, 0, 860, 861, 7, 13, 0, 0, 861, 862, 7, 12, 0, 0, 862, 863, 7, 2, 0, 0, 863, 864, 7, 5, 0, 0, 864, 125, 1, 0, 0, 0, 865, 866, 7, 13, 0, 0, 866, 867, 7, 1, 0, 0, 867, 868, 7, 18, 0, 0, 868, 869, 7, 3, 0, 0, 869, 127, 1, 0, 0, 0, 870, 871, 5, 40, 0, 0, 871, 129, 1, 0, 0, 0, 872, 873, 7, 9, 0, 0, 873, 874, 7, 7, 0, 0, 874, 875, 7, 5, 0, 0, 875, 131, 1, 0, 0, 0, 876, 877, 7, 9, 
0, 0, 877, 878, 7, 20, 0, 0, 878, 879, 7, 13, 0, 0, 879, 880, 7, 13, 0, 0, 880, 133, 1, 0, 0, 0, 881, 882, 7, 9, 0, 0, 882, 883, 7, 20, 0, 0, 883, 884, 7, 13, 0, 0, 884, 885, 7, 13, 0, 0, 885, 886, 7, 2, 0, 0, 886, 135, 1, 0, 0, 0, 887, 888, 7, 7, 0, 0, 888, 889, 7, 6, 0, 0, 889, 137, 1, 0, 0, 0, 890, 891, 5, 63, 0, 0, 891, 139, 1, 0, 0, 0, 892, 893, 7, 6, 0, 0, 893, 894, 7, 13, 0, 0, 894, 895, 7, 1, 0, 0, 895, 896, 7, 18, 0, 0, 896, 897, 7, 3, 0, 0, 897, 141, 1, 0, 0, 0, 898, 899, 5, 41, 0, 0, 899, 143, 1, 0, 0, 0, 900, 901, 7, 5, 0, 0, 901, 902, 7, 6, 0, 0, 902, 903, 7, 20, 0, 0, 903, 904, 7, 3, 0, 0, 904, 145, 1, 0, 0, 0, 905, 906, 5, 61, 0, 0, 906, 907, 5, 61, 0, 0, 907, 147, 1, 0, 0, 0, 908, 909, 5, 61, 0, 0, 909, 910, 5, 126, 0, 0, 910, 149, 1, 0, 0, 0, 911, 912, 5, 33, 0, 0, 912, 913, 5, 61, 0, 0, 913, 151, 1, 0, 0, 0, 914, 915, 5, 60, 0, 0, 915, 153, 1, 0, 0, 0, 916, 917, 5, 60, 0, 0, 917, 918, 5, 61, 0, 0, 918, 155, 1, 0, 0, 0, 919, 920, 5, 62, 0, 0, 920, 157, 1, 0, 0, 0, 921, 922, 5, 62, 0, 0, 922, 923, 5, 61, 0, 0, 923, 159, 1, 0, 0, 0, 924, 925, 5, 43, 0, 0, 925, 161, 1, 0, 0, 0, 926, 927, 5, 45, 0, 0, 927, 163, 1, 0, 0, 0, 928, 929, 5, 42, 0, 0, 929, 165, 1, 0, 0, 0, 930, 931, 5, 47, 0, 0, 931, 167, 1, 0, 0, 0, 932, 933, 5, 37, 0, 0, 933, 169, 1, 0, 0, 0, 934, 935, 5, 123, 0, 0, 935, 171, 1, 0, 0, 0, 936, 937, 5, 125, 0, 0, 937, 173, 1, 0, 0, 0, 938, 939, 3, 46, 15, 0, 939, 940, 1, 0, 0, 0, 940, 941, 6, 79, 13, 0, 941, 175, 1, 0, 0, 0, 942, 945, 3, 138, 61, 0, 943, 946, 3, 74, 29, 0, 944, 946, 3, 88, 36, 0, 945, 943, 1, 0, 0, 0, 945, 944, 1, 0, 0, 0, 946, 950, 1, 0, 0, 0, 947, 949, 3, 90, 37, 0, 948, 947, 1, 0, 0, 0, 949, 952, 1, 0, 0, 0, 950, 948, 1, 0, 0, 0, 950, 951, 1, 0, 0, 0, 951, 960, 1, 0, 0, 0, 952, 950, 1, 0, 0, 0, 953, 955, 3, 138, 61, 0, 954, 956, 3, 72, 28, 0, 955, 954, 1, 0, 0, 0, 956, 957, 1, 0, 0, 0, 957, 955, 1, 0, 0, 0, 957, 958, 1, 0, 0, 0, 958, 960, 1, 0, 0, 0, 959, 942, 1, 0, 0, 0, 959, 953, 1, 0, 0, 0, 960, 177, 1, 0, 0, 0, 961, 962, 5, 91, 0, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 81, 0, 0, 964, 965, 6, 81, 0, 0, 965, 179, 1, 0, 0, 0, 966, 967, 5, 93, 0, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 82, 12, 0, 969, 970, 6, 82, 12, 0, 970, 181, 1, 0, 0, 0, 971, 975, 3, 74, 29, 0, 972, 974, 3, 90, 37, 0, 973, 972, 1, 0, 0, 0, 974, 977, 1, 0, 0, 0, 975, 973, 1, 0, 0, 0, 975, 976, 1, 0, 0, 0, 976, 988, 1, 0, 0, 0, 977, 975, 1, 0, 0, 0, 978, 981, 3, 88, 36, 0, 979, 981, 3, 82, 33, 0, 980, 978, 1, 0, 0, 0, 980, 979, 1, 0, 0, 0, 981, 983, 1, 0, 0, 0, 982, 984, 3, 90, 37, 0, 983, 982, 1, 0, 0, 0, 984, 985, 1, 0, 0, 0, 985, 983, 1, 0, 0, 0, 985, 986, 1, 0, 0, 0, 986, 988, 1, 0, 0, 0, 987, 971, 1, 0, 0, 0, 987, 980, 1, 0, 0, 0, 988, 183, 1, 0, 0, 0, 989, 991, 3, 84, 34, 0, 990, 992, 3, 86, 35, 0, 991, 990, 1, 0, 0, 0, 992, 993, 1, 0, 0, 0, 993, 991, 1, 0, 0, 0, 993, 994, 1, 0, 0, 0, 994, 995, 1, 0, 0, 0, 995, 996, 3, 84, 34, 0, 996, 185, 1, 0, 0, 0, 997, 998, 3, 184, 84, 0, 998, 187, 1, 0, 0, 0, 999, 1000, 3, 64, 24, 0, 1000, 1001, 1, 0, 0, 0, 1001, 1002, 6, 86, 11, 0, 1002, 189, 1, 0, 0, 0, 1003, 1004, 3, 66, 25, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 6, 87, 11, 0, 1006, 191, 1, 0, 0, 0, 1007, 1008, 3, 68, 26, 0, 1008, 1009, 1, 0, 0, 0, 1009, 1010, 6, 88, 11, 0, 1010, 193, 1, 0, 0, 0, 1011, 1012, 3, 178, 81, 0, 1012, 1013, 1, 0, 0, 0, 1013, 1014, 6, 89, 14, 0, 1014, 1015, 6, 89, 15, 0, 1015, 195, 1, 0, 0, 0, 1016, 1017, 3, 70, 27, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1019, 6, 90, 16, 0, 1019, 1020, 6, 90, 12, 0, 1020, 197, 1, 0, 0, 0, 1021, 1022, 3, 68, 26, 0, 1022, 1023, 1, 
0, 0, 0, 1023, 1024, 6, 91, 11, 0, 1024, 199, 1, 0, 0, 0, 1025, 1026, 3, 64, 24, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 92, 11, 0, 1028, 201, 1, 0, 0, 0, 1029, 1030, 3, 66, 25, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 93, 11, 0, 1032, 203, 1, 0, 0, 0, 1033, 1034, 3, 70, 27, 0, 1034, 1035, 1, 0, 0, 0, 1035, 1036, 6, 94, 16, 0, 1036, 1037, 6, 94, 12, 0, 1037, 205, 1, 0, 0, 0, 1038, 1039, 3, 178, 81, 0, 1039, 1040, 1, 0, 0, 0, 1040, 1041, 6, 95, 14, 0, 1041, 207, 1, 0, 0, 0, 1042, 1043, 3, 180, 82, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 96, 17, 0, 1045, 209, 1, 0, 0, 0, 1046, 1047, 3, 108, 46, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 97, 18, 0, 1049, 211, 1, 0, 0, 0, 1050, 1051, 3, 110, 47, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1053, 6, 98, 19, 0, 1053, 213, 1, 0, 0, 0, 1054, 1055, 3, 104, 44, 0, 1055, 1056, 1, 0, 0, 0, 1056, 1057, 6, 99, 20, 0, 1057, 215, 1, 0, 0, 0, 1058, 1059, 7, 16, 0, 0, 1059, 1060, 7, 3, 0, 0, 1060, 1061, 7, 5, 0, 0, 1061, 1062, 7, 12, 0, 0, 1062, 1063, 7, 0, 0, 0, 1063, 1064, 7, 12, 0, 0, 1064, 1065, 7, 5, 0, 0, 1065, 1066, 7, 12, 0, 0, 1066, 217, 1, 0, 0, 0, 1067, 1071, 8, 32, 0, 0, 1068, 1069, 5, 47, 0, 0, 1069, 1071, 8, 33, 0, 0, 1070, 1067, 1, 0, 0, 0, 1070, 1068, 1, 0, 0, 0, 1071, 219, 1, 0, 0, 0, 1072, 1074, 3, 218, 101, 0, 1073, 1072, 1, 0, 0, 0, 1074, 1075, 1, 0, 0, 0, 1075, 1073, 1, 0, 0, 0, 1075, 1076, 1, 0, 0, 0, 1076, 221, 1, 0, 0, 0, 1077, 1078, 3, 220, 102, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 103, 21, 0, 1080, 223, 1, 0, 0, 0, 1081, 1082, 3, 92, 38, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 104, 22, 0, 1084, 225, 1, 0, 0, 0, 1085, 1086, 3, 64, 24, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 105, 11, 0, 1088, 227, 1, 0, 0, 0, 1089, 1090, 3, 66, 25, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 106, 11, 0, 1092, 229, 1, 0, 0, 0, 1093, 1094, 3, 68, 26, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 107, 11, 0, 1096, 231, 1, 0, 0, 0, 1097, 1098, 3, 70, 27, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 108, 16, 0, 1100, 1101, 6, 108, 12, 0, 1101, 233, 1, 0, 0, 0, 1102, 1103, 3, 114, 49, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 109, 23, 0, 1105, 235, 1, 0, 0, 0, 1106, 1107, 3, 110, 47, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 110, 19, 0, 1109, 237, 1, 0, 0, 0, 1110, 1111, 4, 111, 6, 0, 1111, 1112, 3, 138, 61, 0, 1112, 1113, 1, 0, 0, 0, 1113, 1114, 6, 111, 24, 0, 1114, 239, 1, 0, 0, 0, 1115, 1116, 4, 112, 7, 0, 1116, 1117, 3, 176, 80, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 112, 25, 0, 1119, 241, 1, 0, 0, 0, 1120, 1125, 3, 74, 29, 0, 1121, 1125, 3, 72, 28, 0, 1122, 1125, 3, 88, 36, 0, 1123, 1125, 3, 164, 74, 0, 1124, 1120, 1, 0, 0, 0, 1124, 1121, 1, 0, 0, 0, 1124, 1122, 1, 0, 0, 0, 1124, 1123, 1, 0, 0, 0, 1125, 243, 1, 0, 0, 0, 1126, 1129, 3, 74, 29, 0, 1127, 1129, 3, 164, 74, 0, 1128, 1126, 1, 0, 0, 0, 1128, 1127, 1, 0, 0, 0, 1129, 1133, 1, 0, 0, 0, 1130, 1132, 3, 242, 113, 0, 1131, 1130, 1, 0, 0, 0, 1132, 1135, 1, 0, 0, 0, 1133, 1131, 1, 0, 0, 0, 1133, 1134, 1, 0, 0, 0, 1134, 1146, 1, 0, 0, 0, 1135, 1133, 1, 0, 0, 0, 1136, 1139, 3, 88, 36, 0, 1137, 1139, 3, 82, 33, 0, 1138, 1136, 1, 0, 0, 0, 1138, 1137, 1, 0, 0, 0, 1139, 1141, 1, 0, 0, 0, 1140, 1142, 3, 242, 113, 0, 1141, 1140, 1, 0, 0, 0, 1142, 1143, 1, 0, 0, 0, 1143, 1141, 1, 0, 0, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1146, 1, 0, 0, 0, 1145, 1128, 1, 0, 0, 0, 1145, 1138, 1, 0, 0, 0, 1146, 245, 1, 0, 0, 0, 1147, 1150, 3, 244, 114, 0, 1148, 1150, 3, 184, 84, 0, 1149, 1147, 1, 0, 0, 0, 1149, 1148, 1, 0, 0, 0, 1150, 1151, 1, 0, 0, 0, 1151, 1149, 1, 0, 0, 0, 1151, 1152, 1, 0, 0, 0, 1152, 247, 1, 0, 0, 0, 1153, 1154, 3, 
64, 24, 0, 1154, 1155, 1, 0, 0, 0, 1155, 1156, 6, 116, 11, 0, 1156, 249, 1, 0, 0, 0, 1157, 1158, 3, 66, 25, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1160, 6, 117, 11, 0, 1160, 251, 1, 0, 0, 0, 1161, 1162, 3, 68, 26, 0, 1162, 1163, 1, 0, 0, 0, 1163, 1164, 6, 118, 11, 0, 1164, 253, 1, 0, 0, 0, 1165, 1166, 3, 70, 27, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1168, 6, 119, 16, 0, 1168, 1169, 6, 119, 12, 0, 1169, 255, 1, 0, 0, 0, 1170, 1171, 3, 104, 44, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1173, 6, 120, 20, 0, 1173, 257, 1, 0, 0, 0, 1174, 1175, 3, 110, 47, 0, 1175, 1176, 1, 0, 0, 0, 1176, 1177, 6, 121, 19, 0, 1177, 259, 1, 0, 0, 0, 1178, 1179, 3, 114, 49, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1181, 6, 122, 23, 0, 1181, 261, 1, 0, 0, 0, 1182, 1183, 4, 123, 8, 0, 1183, 1184, 3, 138, 61, 0, 1184, 1185, 1, 0, 0, 0, 1185, 1186, 6, 123, 24, 0, 1186, 263, 1, 0, 0, 0, 1187, 1188, 4, 124, 9, 0, 1188, 1189, 3, 176, 80, 0, 1189, 1190, 1, 0, 0, 0, 1190, 1191, 6, 124, 25, 0, 1191, 265, 1, 0, 0, 0, 1192, 1193, 7, 12, 0, 0, 1193, 1194, 7, 2, 0, 0, 1194, 267, 1, 0, 0, 0, 1195, 1196, 3, 246, 115, 0, 1196, 1197, 1, 0, 0, 0, 1197, 1198, 6, 126, 26, 0, 1198, 269, 1, 0, 0, 0, 1199, 1200, 3, 64, 24, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 127, 11, 0, 1202, 271, 1, 0, 0, 0, 1203, 1204, 3, 66, 25, 0, 1204, 1205, 1, 0, 0, 0, 1205, 1206, 6, 128, 11, 0, 1206, 273, 1, 0, 0, 0, 1207, 1208, 3, 68, 26, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 6, 129, 11, 0, 1210, 275, 1, 0, 0, 0, 1211, 1212, 3, 70, 27, 0, 1212, 1213, 1, 0, 0, 0, 1213, 1214, 6, 130, 16, 0, 1214, 1215, 6, 130, 12, 0, 1215, 277, 1, 0, 0, 0, 1216, 1217, 3, 178, 81, 0, 1217, 1218, 1, 0, 0, 0, 1218, 1219, 6, 131, 14, 0, 1219, 1220, 6, 131, 27, 0, 1220, 279, 1, 0, 0, 0, 1221, 1222, 7, 7, 0, 0, 1222, 1223, 7, 9, 0, 0, 1223, 1224, 1, 0, 0, 0, 1224, 1225, 6, 132, 28, 0, 1225, 281, 1, 0, 0, 0, 1226, 1227, 7, 19, 0, 0, 1227, 1228, 7, 1, 0, 0, 1228, 1229, 7, 5, 0, 0, 1229, 1230, 7, 10, 0, 0, 1230, 1231, 1, 0, 0, 0, 1231, 1232, 6, 133, 28, 0, 1232, 283, 1, 0, 0, 0, 1233, 1234, 8, 34, 0, 0, 1234, 285, 1, 0, 0, 0, 1235, 1237, 3, 284, 134, 0, 1236, 1235, 1, 0, 0, 0, 1237, 1238, 1, 0, 0, 0, 1238, 1236, 1, 0, 0, 0, 1238, 1239, 1, 0, 0, 0, 1239, 1240, 1, 0, 0, 0, 1240, 1241, 3, 108, 46, 0, 1241, 1243, 1, 0, 0, 0, 1242, 1236, 1, 0, 0, 0, 1242, 1243, 1, 0, 0, 0, 1243, 1245, 1, 0, 0, 0, 1244, 1246, 3, 284, 134, 0, 1245, 1244, 1, 0, 0, 0, 1246, 1247, 1, 0, 0, 0, 1247, 1245, 1, 0, 0, 0, 1247, 1248, 1, 0, 0, 0, 1248, 287, 1, 0, 0, 0, 1249, 1250, 3, 286, 135, 0, 1250, 1251, 1, 0, 0, 0, 1251, 1252, 6, 136, 29, 0, 1252, 289, 1, 0, 0, 0, 1253, 1254, 3, 64, 24, 0, 1254, 1255, 1, 0, 0, 0, 1255, 1256, 6, 137, 11, 0, 1256, 291, 1, 0, 0, 0, 1257, 1258, 3, 66, 25, 0, 1258, 1259, 1, 0, 0, 0, 1259, 1260, 6, 138, 11, 0, 1260, 293, 1, 0, 0, 0, 1261, 1262, 3, 68, 26, 0, 1262, 1263, 1, 0, 0, 0, 1263, 1264, 6, 139, 11, 0, 1264, 295, 1, 0, 0, 0, 1265, 1266, 3, 70, 27, 0, 1266, 1267, 1, 0, 0, 0, 1267, 1268, 6, 140, 16, 0, 1268, 1269, 6, 140, 12, 0, 1269, 1270, 6, 140, 12, 0, 1270, 297, 1, 0, 0, 0, 1271, 1272, 3, 104, 44, 0, 1272, 1273, 1, 0, 0, 0, 1273, 1274, 6, 141, 20, 0, 1274, 299, 1, 0, 0, 0, 1275, 1276, 3, 110, 47, 0, 1276, 1277, 1, 0, 0, 0, 1277, 1278, 6, 142, 19, 0, 1278, 301, 1, 0, 0, 0, 1279, 1280, 3, 114, 49, 0, 1280, 1281, 1, 0, 0, 0, 1281, 1282, 6, 143, 23, 0, 1282, 303, 1, 0, 0, 0, 1283, 1284, 3, 282, 133, 0, 1284, 1285, 1, 0, 0, 0, 1285, 1286, 6, 144, 30, 0, 1286, 305, 1, 0, 0, 0, 1287, 1288, 3, 246, 115, 0, 1288, 1289, 1, 0, 0, 0, 1289, 1290, 6, 145, 26, 0, 1290, 307, 1, 0, 0, 0, 1291, 1292, 3, 186, 85, 0, 1292, 
1293, 1, 0, 0, 0, 1293, 1294, 6, 146, 31, 0, 1294, 309, 1, 0, 0, 0, 1295, 1296, 4, 147, 10, 0, 1296, 1297, 3, 138, 61, 0, 1297, 1298, 1, 0, 0, 0, 1298, 1299, 6, 147, 24, 0, 1299, 311, 1, 0, 0, 0, 1300, 1301, 4, 148, 11, 0, 1301, 1302, 3, 176, 80, 0, 1302, 1303, 1, 0, 0, 0, 1303, 1304, 6, 148, 25, 0, 1304, 313, 1, 0, 0, 0, 1305, 1306, 3, 64, 24, 0, 1306, 1307, 1, 0, 0, 0, 1307, 1308, 6, 149, 11, 0, 1308, 315, 1, 0, 0, 0, 1309, 1310, 3, 66, 25, 0, 1310, 1311, 1, 0, 0, 0, 1311, 1312, 6, 150, 11, 0, 1312, 317, 1, 0, 0, 0, 1313, 1314, 3, 68, 26, 0, 1314, 1315, 1, 0, 0, 0, 1315, 1316, 6, 151, 11, 0, 1316, 319, 1, 0, 0, 0, 1317, 1318, 3, 70, 27, 0, 1318, 1319, 1, 0, 0, 0, 1319, 1320, 6, 152, 16, 0, 1320, 1321, 6, 152, 12, 0, 1321, 321, 1, 0, 0, 0, 1322, 1323, 3, 114, 49, 0, 1323, 1324, 1, 0, 0, 0, 1324, 1325, 6, 153, 23, 0, 1325, 323, 1, 0, 0, 0, 1326, 1327, 4, 154, 12, 0, 1327, 1328, 3, 138, 61, 0, 1328, 1329, 1, 0, 0, 0, 1329, 1330, 6, 154, 24, 0, 1330, 325, 1, 0, 0, 0, 1331, 1332, 4, 155, 13, 0, 1332, 1333, 3, 176, 80, 0, 1333, 1334, 1, 0, 0, 0, 1334, 1335, 6, 155, 25, 0, 1335, 327, 1, 0, 0, 0, 1336, 1337, 3, 186, 85, 0, 1337, 1338, 1, 0, 0, 0, 1338, 1339, 6, 156, 31, 0, 1339, 329, 1, 0, 0, 0, 1340, 1341, 3, 182, 83, 0, 1341, 1342, 1, 0, 0, 0, 1342, 1343, 6, 157, 32, 0, 1343, 331, 1, 0, 0, 0, 1344, 1345, 3, 64, 24, 0, 1345, 1346, 1, 0, 0, 0, 1346, 1347, 6, 158, 11, 0, 1347, 333, 1, 0, 0, 0, 1348, 1349, 3, 66, 25, 0, 1349, 1350, 1, 0, 0, 0, 1350, 1351, 6, 159, 11, 0, 1351, 335, 1, 0, 0, 0, 1352, 1353, 3, 68, 26, 0, 1353, 1354, 1, 0, 0, 0, 1354, 1355, 6, 160, 11, 0, 1355, 337, 1, 0, 0, 0, 1356, 1357, 3, 70, 27, 0, 1357, 1358, 1, 0, 0, 0, 1358, 1359, 6, 161, 16, 0, 1359, 1360, 6, 161, 12, 0, 1360, 339, 1, 0, 0, 0, 1361, 1362, 7, 1, 0, 0, 1362, 1363, 7, 9, 0, 0, 1363, 1364, 7, 15, 0, 0, 1364, 1365, 7, 7, 0, 0, 1365, 341, 1, 0, 0, 0, 1366, 1367, 3, 64, 24, 0, 1367, 1368, 1, 0, 0, 0, 1368, 1369, 6, 163, 11, 0, 1369, 343, 1, 0, 0, 0, 1370, 1371, 3, 66, 25, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1373, 6, 164, 11, 0, 1373, 345, 1, 0, 0, 0, 1374, 1375, 3, 68, 26, 0, 1375, 1376, 1, 0, 0, 0, 1376, 1377, 6, 165, 11, 0, 1377, 347, 1, 0, 0, 0, 1378, 1379, 3, 180, 82, 0, 1379, 1380, 1, 0, 0, 0, 1380, 1381, 6, 166, 17, 0, 1381, 1382, 6, 166, 12, 0, 1382, 349, 1, 0, 0, 0, 1383, 1384, 3, 108, 46, 0, 1384, 1385, 1, 0, 0, 0, 1385, 1386, 6, 167, 18, 0, 1386, 351, 1, 0, 0, 0, 1387, 1393, 3, 82, 33, 0, 1388, 1393, 3, 72, 28, 0, 1389, 1393, 3, 114, 49, 0, 1390, 1393, 3, 74, 29, 0, 1391, 1393, 3, 88, 36, 0, 1392, 1387, 1, 0, 0, 0, 1392, 1388, 1, 0, 0, 0, 1392, 1389, 1, 0, 0, 0, 1392, 1390, 1, 0, 0, 0, 1392, 1391, 1, 0, 0, 0, 1393, 1394, 1, 0, 0, 0, 1394, 1392, 1, 0, 0, 0, 1394, 1395, 1, 0, 0, 0, 1395, 353, 1, 0, 0, 0, 1396, 1397, 3, 64, 24, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1399, 6, 169, 11, 0, 1399, 355, 1, 0, 0, 0, 1400, 1401, 3, 66, 25, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 170, 11, 0, 1403, 357, 1, 0, 0, 0, 1404, 1405, 3, 68, 26, 0, 1405, 1406, 1, 0, 0, 0, 1406, 1407, 6, 171, 11, 0, 1407, 359, 1, 0, 0, 0, 1408, 1409, 3, 70, 27, 0, 1409, 1410, 1, 0, 0, 0, 1410, 1411, 6, 172, 16, 0, 1411, 1412, 6, 172, 12, 0, 1412, 361, 1, 0, 0, 0, 1413, 1414, 3, 108, 46, 0, 1414, 1415, 1, 0, 0, 0, 1415, 1416, 6, 173, 18, 0, 1416, 363, 1, 0, 0, 0, 1417, 1418, 3, 110, 47, 0, 1418, 1419, 1, 0, 0, 0, 1419, 1420, 6, 174, 19, 0, 1420, 365, 1, 0, 0, 0, 1421, 1422, 3, 114, 49, 0, 1422, 1423, 1, 0, 0, 0, 1423, 1424, 6, 175, 23, 0, 1424, 367, 1, 0, 0, 0, 1425, 1426, 3, 280, 132, 0, 1426, 1427, 1, 0, 0, 0, 1427, 1428, 6, 176, 33, 0, 1428, 1429, 
6, 176, 34, 0, 1429, 369, 1, 0, 0, 0, 1430, 1431, 3, 220, 102, 0, 1431, 1432, 1, 0, 0, 0, 1432, 1433, 6, 177, 21, 0, 1433, 371, 1, 0, 0, 0, 1434, 1435, 3, 92, 38, 0, 1435, 1436, 1, 0, 0, 0, 1436, 1437, 6, 178, 22, 0, 1437, 373, 1, 0, 0, 0, 1438, 1439, 3, 64, 24, 0, 1439, 1440, 1, 0, 0, 0, 1440, 1441, 6, 179, 11, 0, 1441, 375, 1, 0, 0, 0, 1442, 1443, 3, 66, 25, 0, 1443, 1444, 1, 0, 0, 0, 1444, 1445, 6, 180, 11, 0, 1445, 377, 1, 0, 0, 0, 1446, 1447, 3, 68, 26, 0, 1447, 1448, 1, 0, 0, 0, 1448, 1449, 6, 181, 11, 0, 1449, 379, 1, 0, 0, 0, 1450, 1451, 3, 70, 27, 0, 1451, 1452, 1, 0, 0, 0, 1452, 1453, 6, 182, 16, 0, 1453, 1454, 6, 182, 12, 0, 1454, 1455, 6, 182, 12, 0, 1455, 381, 1, 0, 0, 0, 1456, 1457, 3, 110, 47, 0, 1457, 1458, 1, 0, 0, 0, 1458, 1459, 6, 183, 19, 0, 1459, 383, 1, 0, 0, 0, 1460, 1461, 3, 114, 49, 0, 1461, 1462, 1, 0, 0, 0, 1462, 1463, 6, 184, 23, 0, 1463, 385, 1, 0, 0, 0, 1464, 1465, 3, 246, 115, 0, 1465, 1466, 1, 0, 0, 0, 1466, 1467, 6, 185, 26, 0, 1467, 387, 1, 0, 0, 0, 1468, 1469, 3, 64, 24, 0, 1469, 1470, 1, 0, 0, 0, 1470, 1471, 6, 186, 11, 0, 1471, 389, 1, 0, 0, 0, 1472, 1473, 3, 66, 25, 0, 1473, 1474, 1, 0, 0, 0, 1474, 1475, 6, 187, 11, 0, 1475, 391, 1, 0, 0, 0, 1476, 1477, 3, 68, 26, 0, 1477, 1478, 1, 0, 0, 0, 1478, 1479, 6, 188, 11, 0, 1479, 393, 1, 0, 0, 0, 1480, 1481, 3, 70, 27, 0, 1481, 1482, 1, 0, 0, 0, 1482, 1483, 6, 189, 16, 0, 1483, 1484, 6, 189, 12, 0, 1484, 395, 1, 0, 0, 0, 1485, 1486, 7, 35, 0, 0, 1486, 1487, 7, 7, 0, 0, 1487, 1488, 7, 1, 0, 0, 1488, 1489, 7, 9, 0, 0, 1489, 397, 1, 0, 0, 0, 1490, 1491, 3, 266, 125, 0, 1491, 1492, 1, 0, 0, 0, 1492, 1493, 6, 191, 35, 0, 1493, 399, 1, 0, 0, 0, 1494, 1495, 3, 280, 132, 0, 1495, 1496, 1, 0, 0, 0, 1496, 1497, 6, 192, 33, 0, 1497, 1498, 6, 192, 12, 0, 1498, 1499, 6, 192, 0, 0, 1499, 401, 1, 0, 0, 0, 1500, 1501, 7, 20, 0, 0, 1501, 1502, 7, 2, 0, 0, 1502, 1503, 7, 1, 0, 0, 1503, 1504, 7, 9, 0, 0, 1504, 1505, 7, 17, 0, 0, 1505, 1506, 1, 0, 0, 0, 1506, 1507, 6, 193, 12, 0, 1507, 1508, 6, 193, 0, 0, 1508, 403, 1, 0, 0, 0, 1509, 1510, 3, 220, 102, 0, 1510, 1511, 1, 0, 0, 0, 1511, 1512, 6, 194, 21, 0, 1512, 405, 1, 0, 0, 0, 1513, 1514, 3, 92, 38, 0, 1514, 1515, 1, 0, 0, 0, 1515, 1516, 6, 195, 22, 0, 1516, 407, 1, 0, 0, 0, 1517, 1518, 3, 108, 46, 0, 1518, 1519, 1, 0, 0, 0, 1519, 1520, 6, 196, 18, 0, 1520, 409, 1, 0, 0, 0, 1521, 1522, 3, 182, 83, 0, 1522, 1523, 1, 0, 0, 0, 1523, 1524, 6, 197, 32, 0, 1524, 411, 1, 0, 0, 0, 1525, 1526, 3, 186, 85, 0, 1526, 1527, 1, 0, 0, 0, 1527, 1528, 6, 198, 31, 0, 1528, 413, 1, 0, 0, 0, 1529, 1530, 3, 64, 24, 0, 1530, 1531, 1, 0, 0, 0, 1531, 1532, 6, 199, 11, 0, 1532, 415, 1, 0, 0, 0, 1533, 1534, 3, 66, 25, 0, 1534, 1535, 1, 0, 0, 0, 1535, 1536, 6, 200, 11, 0, 1536, 417, 1, 0, 0, 0, 1537, 1538, 3, 68, 26, 0, 1538, 1539, 1, 0, 0, 0, 1539, 1540, 6, 201, 11, 0, 1540, 419, 1, 0, 0, 0, 1541, 1542, 3, 70, 27, 0, 1542, 1543, 1, 0, 0, 0, 1543, 1544, 6, 202, 16, 0, 1544, 1545, 6, 202, 12, 0, 1545, 421, 1, 0, 0, 0, 1546, 1547, 3, 220, 102, 0, 1547, 1548, 1, 0, 0, 0, 1548, 1549, 6, 203, 21, 0, 1549, 1550, 6, 203, 12, 0, 1550, 1551, 6, 203, 36, 0, 1551, 423, 1, 0, 0, 0, 1552, 1553, 3, 92, 38, 0, 1553, 1554, 1, 0, 0, 0, 1554, 1555, 6, 204, 22, 0, 1555, 1556, 6, 204, 12, 0, 1556, 1557, 6, 204, 36, 0, 1557, 425, 1, 0, 0, 0, 1558, 1559, 3, 64, 24, 0, 1559, 1560, 1, 0, 0, 0, 1560, 1561, 6, 205, 11, 0, 1561, 427, 1, 0, 0, 0, 1562, 1563, 3, 66, 25, 0, 1563, 1564, 1, 0, 0, 0, 1564, 1565, 6, 206, 11, 0, 1565, 429, 1, 0, 0, 0, 1566, 1567, 3, 68, 26, 0, 1567, 1568, 1, 0, 0, 0, 1568, 1569, 6, 207, 11, 0, 1569, 431, 
1, 0, 0, 0, 1570, 1571, 3, 108, 46, 0, 1571, 1572, 1, 0, 0, 0, 1572, 1573, 6, 208, 18, 0, 1573, 1574, 6, 208, 12, 0, 1574, 1575, 6, 208, 10, 0, 1575, 433, 1, 0, 0, 0, 1576, 1577, 3, 110, 47, 0, 1577, 1578, 1, 0, 0, 0, 1578, 1579, 6, 209, 19, 0, 1579, 1580, 6, 209, 12, 0, 1580, 1581, 6, 209, 10, 0, 1581, 435, 1, 0, 0, 0, 1582, 1583, 3, 64, 24, 0, 1583, 1584, 1, 0, 0, 0, 1584, 1585, 6, 210, 11, 0, 1585, 437, 1, 0, 0, 0, 1586, 1587, 3, 66, 25, 0, 1587, 1588, 1, 0, 0, 0, 1588, 1589, 6, 211, 11, 0, 1589, 439, 1, 0, 0, 0, 1590, 1591, 3, 68, 26, 0, 1591, 1592, 1, 0, 0, 0, 1592, 1593, 6, 212, 11, 0, 1593, 441, 1, 0, 0, 0, 1594, 1595, 3, 186, 85, 0, 1595, 1596, 1, 0, 0, 0, 1596, 1597, 6, 213, 12, 0, 1597, 1598, 6, 213, 0, 0, 1598, 1599, 6, 213, 31, 0, 1599, 443, 1, 0, 0, 0, 1600, 1601, 3, 182, 83, 0, 1601, 1602, 1, 0, 0, 0, 1602, 1603, 6, 214, 12, 0, 1603, 1604, 6, 214, 0, 0, 1604, 1605, 6, 214, 32, 0, 1605, 445, 1, 0, 0, 0, 1606, 1607, 3, 98, 41, 0, 1607, 1608, 1, 0, 0, 0, 1608, 1609, 6, 215, 12, 0, 1609, 1610, 6, 215, 0, 0, 1610, 1611, 6, 215, 37, 0, 1611, 447, 1, 0, 0, 0, 1612, 1613, 3, 70, 27, 0, 1613, 1614, 1, 0, 0, 0, 1614, 1615, 6, 216, 16, 0, 1615, 1616, 6, 216, 12, 0, 1616, 449, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 654, 664, 668, 671, 680, 682, 693, 712, 717, 726, 733, 738, 740, 751, 759, 762, 764, 769, 774, 780, 787, 792, 798, 801, 809, 813, 945, 950, 957, 959, 975, 980, 985, 987, 993, 1070, 1075, 1124, 1128, 1133, 1138, 1143, 1145, 1149, 1151, 1238, 1242, 1247, 1392, 1394, 38, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 13, 0, 5, 11, 0, 5, 14, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 71, 0, 5, 0, 0, 7, 28, 0, 7, 72, 0, 7, 37, 0, 7, 38, 0, 7, 35, 0, 7, 82, 0, 7, 29, 0, 7, 40, 0, 7, 52, 0, 7, 70, 0, 7, 86, 0, 5, 10, 0, 5, 7, 0, 7, 96, 0, 7, 95, 0, 7, 74, 0, 7, 73, 0, 7, 94, 0, 5, 12, 0, 7, 90, 0, 5, 15, 0, 7, 32, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 28358a0f614e6..aada45659c53d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -27,31 +27,31 @@ public class EsqlBaseLexer extends LexerConfig { public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20, - DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24, - UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29, - QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, - ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42, - FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, NOT=49, NULL=50, NULLS=51, - OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, - LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, - LEFT_BRACES=69, RIGHT_BRACES=70, NAMED_OR_POSITIONAL_PARAM=71, OPENING_BRACKET=72, - CLOSING_BRACKET=73, UNQUOTED_IDENTIFIER=74, QUOTED_IDENTIFIER=75, EXPR_LINE_COMMENT=76, - EXPR_MULTILINE_COMMENT=77, EXPR_WS=78, EXPLAIN_WS=79, EXPLAIN_LINE_COMMENT=80, - EXPLAIN_MULTILINE_COMMENT=81, METADATA=82, UNQUOTED_SOURCE=83, FROM_LINE_COMMENT=84, - 
FROM_MULTILINE_COMMENT=85, FROM_WS=86, ID_PATTERN=87, PROJECT_LINE_COMMENT=88, - PROJECT_MULTILINE_COMMENT=89, PROJECT_WS=90, AS=91, RENAME_LINE_COMMENT=92, - RENAME_MULTILINE_COMMENT=93, RENAME_WS=94, ON=95, WITH=96, ENRICH_POLICY_NAME=97, - ENRICH_LINE_COMMENT=98, ENRICH_MULTILINE_COMMENT=99, ENRICH_WS=100, ENRICH_FIELD_LINE_COMMENT=101, - ENRICH_FIELD_MULTILINE_COMMENT=102, ENRICH_FIELD_WS=103, MVEXPAND_LINE_COMMENT=104, - MVEXPAND_MULTILINE_COMMENT=105, MVEXPAND_WS=106, INFO=107, SHOW_LINE_COMMENT=108, - SHOW_MULTILINE_COMMENT=109, SHOW_WS=110, SETTING=111, SETTING_LINE_COMMENT=112, - SETTTING_MULTILINE_COMMENT=113, SETTING_WS=114, LOOKUP_LINE_COMMENT=115, - LOOKUP_MULTILINE_COMMENT=116, LOOKUP_WS=117, LOOKUP_FIELD_LINE_COMMENT=118, - LOOKUP_FIELD_MULTILINE_COMMENT=119, LOOKUP_FIELD_WS=120, USING=121, JOIN_LINE_COMMENT=122, - JOIN_MULTILINE_COMMENT=123, JOIN_WS=124, METRICS_LINE_COMMENT=125, METRICS_MULTILINE_COMMENT=126, - METRICS_WS=127, CLOSING_METRICS_LINE_COMMENT=128, CLOSING_METRICS_MULTILINE_COMMENT=129, - CLOSING_METRICS_WS=130; + WHERE=16, JOIN_LOOKUP=17, DEV_INLINESTATS=18, DEV_LOOKUP=19, DEV_METRICS=20, + DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, UNKNOWN_CMD=24, + LINE_COMMENT=25, MULTILINE_COMMENT=26, WS=27, PIPE=28, QUOTED_STRING=29, + INTEGER_LITERAL=30, DECIMAL_LITERAL=31, BY=32, AND=33, ASC=34, ASSIGN=35, + CAST_OP=36, COLON=37, COMMA=38, DESC=39, DOT=40, FALSE=41, FIRST=42, IN=43, + IS=44, LAST=45, LIKE=46, LP=47, NOT=48, NULL=49, NULLS=50, OR=51, PARAM=52, + RLIKE=53, RP=54, TRUE=55, EQ=56, CIEQ=57, NEQ=58, LT=59, LTE=60, GT=61, + GTE=62, PLUS=63, MINUS=64, ASTERISK=65, SLASH=66, PERCENT=67, LEFT_BRACES=68, + RIGHT_BRACES=69, NAMED_OR_POSITIONAL_PARAM=70, OPENING_BRACKET=71, CLOSING_BRACKET=72, + UNQUOTED_IDENTIFIER=73, QUOTED_IDENTIFIER=74, EXPR_LINE_COMMENT=75, EXPR_MULTILINE_COMMENT=76, + EXPR_WS=77, EXPLAIN_WS=78, EXPLAIN_LINE_COMMENT=79, EXPLAIN_MULTILINE_COMMENT=80, + METADATA=81, UNQUOTED_SOURCE=82, FROM_LINE_COMMENT=83, FROM_MULTILINE_COMMENT=84, + FROM_WS=85, ID_PATTERN=86, PROJECT_LINE_COMMENT=87, PROJECT_MULTILINE_COMMENT=88, + PROJECT_WS=89, AS=90, RENAME_LINE_COMMENT=91, RENAME_MULTILINE_COMMENT=92, + RENAME_WS=93, ON=94, WITH=95, ENRICH_POLICY_NAME=96, ENRICH_LINE_COMMENT=97, + ENRICH_MULTILINE_COMMENT=98, ENRICH_WS=99, ENRICH_FIELD_LINE_COMMENT=100, + ENRICH_FIELD_MULTILINE_COMMENT=101, ENRICH_FIELD_WS=102, MVEXPAND_LINE_COMMENT=103, + MVEXPAND_MULTILINE_COMMENT=104, MVEXPAND_WS=105, INFO=106, SHOW_LINE_COMMENT=107, + SHOW_MULTILINE_COMMENT=108, SHOW_WS=109, SETTING=110, SETTING_LINE_COMMENT=111, + SETTTING_MULTILINE_COMMENT=112, SETTING_WS=113, LOOKUP_LINE_COMMENT=114, + LOOKUP_MULTILINE_COMMENT=115, LOOKUP_WS=116, LOOKUP_FIELD_LINE_COMMENT=117, + LOOKUP_FIELD_MULTILINE_COMMENT=118, LOOKUP_FIELD_WS=119, JOIN=120, USING=121, + JOIN_LINE_COMMENT=122, JOIN_MULTILINE_COMMENT=123, JOIN_WS=124, METRICS_LINE_COMMENT=125, + METRICS_MULTILINE_COMMENT=126, METRICS_WS=127, CLOSING_METRICS_LINE_COMMENT=128, + CLOSING_METRICS_MULTILINE_COMMENT=129, CLOSING_METRICS_WS=130; public static final int EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, @@ -71,24 +71,24 @@ private static String[] makeRuleNames() { return new String[] { "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", - "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", 
"DEV_JOIN_FULL", - "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", "UNKNOWN_CMD", - "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER", - "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", - "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", - "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", - "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "LEFT_BRACES", "RIGHT_BRACES", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", - "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", - "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", - "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", - "EXPLAIN_MULTILINE_COMMENT", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", - "FROM_COLON", "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", - "UNQUOTED_SOURCE", "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", - "PROJECT_PARAM", "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", + "JOIN_LOOKUP", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN_FULL", + "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", + "WS", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", + "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", + "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", + "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COLON", "COMMA", "DESC", "DOT", + "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", + "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", + "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "LEFT_BRACES", + "RIGHT_BRACES", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", + "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", + "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_OPENING_BRACKET", + "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", + "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE", + "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "PROJECT_PARAM", + "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", @@ -108,7 +108,7 @@ private static String[] makeRuleNames() { "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA", "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT", - "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "JOIN_PIPE", "JOIN_JOIN", + "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "JOIN_PIPE", "JOIN", "JOIN_AS", "JOIN_ON", 
"USING", "JOIN_UNQUOTED_SOURCE", "JOIN_QUOTED_SOURCE", "JOIN_COLON", "JOIN_UNQUOTED_IDENTIFER", "JOIN_QUOTED_IDENTIFIER", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_PIPE", "METRICS_UNQUOTED_SOURCE", @@ -124,7 +124,7 @@ private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + "'sort'", "'stats'", "'where'", "'lookup'", null, null, null, null, null, null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", @@ -134,7 +134,7 @@ private static String[] makeLiteralNames() { null, null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, null, "'info'", null, null, null, null, null, null, null, null, null, - null, null, null, null, "'USING'" + null, null, null, null, "'join'", "'USING'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -142,13 +142,13 @@ private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", - "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", - "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", - "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", - "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "WHERE", "JOIN_LOOKUP", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", + "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "UNKNOWN_CMD", "LINE_COMMENT", + "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COLON", + "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", + "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", + "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "LEFT_BRACES", "RIGHT_BRACES", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", @@ -163,8 +163,8 @@ private static String[] makeSymbolicNames() { "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", - "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", + "JOIN", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", + "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } @@ -230,37 +230,33 @@ public 
EsqlBaseLexer(CharStream input) { @Override public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { - case 16: - return DEV_INLINESTATS_sempred((RuleContext)_localctx, predIndex); case 17: - return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex); + return DEV_INLINESTATS_sempred((RuleContext)_localctx, predIndex); case 18: - return DEV_METRICS_sempred((RuleContext)_localctx, predIndex); + return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex); case 19: - return DEV_JOIN_sempred((RuleContext)_localctx, predIndex); + return DEV_METRICS_sempred((RuleContext)_localctx, predIndex); case 20: return DEV_JOIN_FULL_sempred((RuleContext)_localctx, predIndex); case 21: return DEV_JOIN_LEFT_sempred((RuleContext)_localctx, predIndex); case 22: return DEV_JOIN_RIGHT_sempred((RuleContext)_localctx, predIndex); - case 23: - return DEV_JOIN_LOOKUP_sempred((RuleContext)_localctx, predIndex); - case 112: + case 111: return PROJECT_PARAM_sempred((RuleContext)_localctx, predIndex); - case 113: + case 112: return PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 124: + case 123: return RENAME_PARAM_sempred((RuleContext)_localctx, predIndex); - case 125: + case 124: return RENAME_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 148: + case 147: return ENRICH_FIELD_PARAM_sempred((RuleContext)_localctx, predIndex); - case 149: + case 148: return ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 155: + case 154: return MVEXPAND_PARAM_sempred((RuleContext)_localctx, predIndex); - case 156: + case 155: return MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); } return true; @@ -286,100 +282,86 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { } return true; } - private boolean DEV_JOIN_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 3: - return this.isDevVersion(); - } - return true; - } private boolean DEV_JOIN_FULL_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 4: + case 3: return this.isDevVersion(); } return true; } private boolean DEV_JOIN_LEFT_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 5: + case 4: return this.isDevVersion(); } return true; } private boolean DEV_JOIN_RIGHT_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 6: - return this.isDevVersion(); - } - return true; - } - private boolean DEV_JOIN_LOOKUP_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 7: + case 5: return this.isDevVersion(); } return true; } private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 8: + case 6: return this.isDevVersion(); } return true; } private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 9: + case 7: return this.isDevVersion(); } return true; } private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 10: + case 8: return this.isDevVersion(); } return true; } private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 11: + case 9: return this.isDevVersion(); } return true; } private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 12: + case 10: return this.isDevVersion(); } 
return true; } private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 13: + case 11: return this.isDevVersion(); } return true; } private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 14: + case 12: return this.isDevVersion(); } return true; } private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 15: + case 13: return this.isDevVersion(); } return true; } public static final String _serializedATN = - "\u0004\u0000\u0082\u065b\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\u0004\u0000\u0082\u0651\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ @@ -442,207 +424,205 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u00ce\u0002\u00cf\u0007\u00cf\u0002\u00d0\u0007\u00d0\u0002\u00d1\u0007"+ "\u00d1\u0002\u00d2\u0007\u00d2\u0002\u00d3\u0007\u00d3\u0002\u00d4\u0007"+ "\u00d4\u0002\u00d5\u0007\u00d5\u0002\u00d6\u0007\u00d6\u0002\u00d7\u0007"+ - "\u00d7\u0002\u00d8\u0007\u00d8\u0002\u00d9\u0007\u00d9\u0001\u0000\u0001"+ + "\u00d7\u0002\u00d8\u0007\u00d8\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b"+ - "\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001"+ + 
"\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001"+ "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001"+ "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ - "\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001"+ + "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ + "\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0001"+ "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ - "\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0004\u0018\u0298\b\u0018\u000b"+ - "\u0018\f\u0018\u0299\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0001\u0019\u0005\u0019\u02a2\b\u0019\n\u0019\f\u0019\u02a5\t\u0019"+ - "\u0001\u0019\u0003\u0019\u02a8\b\u0019\u0001\u0019\u0003\u0019\u02ab\b"+ - "\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0005\u001a\u02b4\b\u001a\n\u001a\f\u001a\u02b7\t\u001a"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b"+ - "\u0004\u001b\u02bf\b\u001b\u000b\u001b\f\u001b\u02c0\u0001\u001b\u0001"+ - "\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001d\u0001"+ - "\u001d\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ - " \u0001 \u0001!\u0001!\u0003!\u02d4\b!\u0001!\u0004!\u02d7\b!\u000b!\f"+ - "!\u02d8\u0001\"\u0001\"\u0001#\u0001#\u0001$\u0001$\u0001$\u0003$\u02e2"+ - "\b$\u0001%\u0001%\u0001&\u0001&\u0001&\u0003&\u02e9\b&\u0001\'\u0001\'"+ - "\u0001\'\u0005\'\u02ee\b\'\n\'\f\'\u02f1\t\'\u0001\'\u0001\'\u0001\'\u0001"+ - "\'\u0001\'\u0001\'\u0005\'\u02f9\b\'\n\'\f\'\u02fc\t\'\u0001\'\u0001\'"+ - "\u0001\'\u0001\'\u0001\'\u0003\'\u0303\b\'\u0001\'\u0003\'\u0306\b\'\u0003"+ - 
"\'\u0308\b\'\u0001(\u0004(\u030b\b(\u000b(\f(\u030c\u0001)\u0004)\u0310"+ - "\b)\u000b)\f)\u0311\u0001)\u0001)\u0005)\u0316\b)\n)\f)\u0319\t)\u0001"+ - ")\u0001)\u0004)\u031d\b)\u000b)\f)\u031e\u0001)\u0004)\u0322\b)\u000b"+ - ")\f)\u0323\u0001)\u0001)\u0005)\u0328\b)\n)\f)\u032b\t)\u0003)\u032d\b"+ - ")\u0001)\u0001)\u0001)\u0001)\u0004)\u0333\b)\u000b)\f)\u0334\u0001)\u0001"+ - ")\u0003)\u0339\b)\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001"+ - ",\u0001,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001"+ - "/\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u0001"+ - "3\u00013\u00013\u00013\u00013\u00013\u00014\u00014\u00014\u00014\u0001"+ - "4\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u0001"+ - "7\u00017\u00017\u00018\u00018\u00018\u00018\u00018\u00019\u00019\u0001"+ - ":\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0001;\u0001<\u0001"+ - "<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001"+ - "?\u0001?\u0001?\u0001?\u0001?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001"+ - "A\u0001A\u0001A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001"+ - "D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001"+ - "H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001"+ - "M\u0001M\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001"+ - "Q\u0001Q\u0001Q\u0003Q\u03bd\bQ\u0001Q\u0005Q\u03c0\bQ\nQ\fQ\u03c3\tQ"+ - "\u0001Q\u0001Q\u0004Q\u03c7\bQ\u000bQ\fQ\u03c8\u0003Q\u03cb\bQ\u0001R"+ - "\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001S\u0001"+ - "T\u0001T\u0005T\u03d9\bT\nT\fT\u03dc\tT\u0001T\u0001T\u0003T\u03e0\bT"+ - "\u0001T\u0004T\u03e3\bT\u000bT\fT\u03e4\u0003T\u03e7\bT\u0001U\u0001U"+ - "\u0004U\u03eb\bU\u000bU\fU\u03ec\u0001U\u0001U\u0001V\u0001V\u0001W\u0001"+ - "W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001"+ - "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001"+ - "[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001"+ - "^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001"+ - "`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001"+ - "c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001"+ - "e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0003"+ - "f\u043a\bf\u0001g\u0004g\u043d\bg\u000bg\fg\u043e\u0001h\u0001h\u0001"+ - "h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001"+ - "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ - "m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001"+ - "o\u0001p\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001"+ - "q\u0001r\u0001r\u0001r\u0001r\u0003r\u0470\br\u0001s\u0001s\u0003s\u0474"+ - "\bs\u0001s\u0005s\u0477\bs\ns\fs\u047a\ts\u0001s\u0001s\u0003s\u047e\b"+ - "s\u0001s\u0004s\u0481\bs\u000bs\fs\u0482\u0003s\u0485\bs\u0001t\u0001"+ - "t\u0004t\u0489\bt\u000bt\ft\u048a\u0001u\u0001u\u0001u\u0001u\u0001v\u0001"+ - "v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001"+ - "x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001"+ - "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001"+ - "}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001"+ - "\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001"+ - "\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001"+ - 
"\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001"+ - "\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ + "\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0004\u0017\u028d"+ + "\b\u0017\u000b\u0017\f\u0017\u028e\u0001\u0017\u0001\u0017\u0001\u0018"+ + "\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u0297\b\u0018\n\u0018"+ + "\f\u0018\u029a\t\u0018\u0001\u0018\u0003\u0018\u029d\b\u0018\u0001\u0018"+ + "\u0003\u0018\u02a0\b\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019"+ + "\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u02a9\b\u0019\n\u0019"+ + "\f\u0019\u02ac\t\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ + "\u0001\u0019\u0001\u001a\u0004\u001a\u02b4\b\u001a\u000b\u001a\f\u001a"+ + "\u02b5\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001"+ + "\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0003 \u02c9"+ + "\b \u0001 \u0004 \u02cc\b \u000b \f \u02cd\u0001!\u0001!\u0001\"\u0001"+ + "\"\u0001#\u0001#\u0001#\u0003#\u02d7\b#\u0001$\u0001$\u0001%\u0001%\u0001"+ + "%\u0003%\u02de\b%\u0001&\u0001&\u0001&\u0005&\u02e3\b&\n&\f&\u02e6\t&"+ + "\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005&\u02ee\b&\n&\f&\u02f1"+ + "\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u02f8\b&\u0001&\u0003&\u02fb"+ + "\b&\u0003&\u02fd\b&\u0001\'\u0004\'\u0300\b\'\u000b\'\f\'\u0301\u0001"+ + "(\u0004(\u0305\b(\u000b(\f(\u0306\u0001(\u0001(\u0005(\u030b\b(\n(\f("+ + "\u030e\t(\u0001(\u0001(\u0004(\u0312\b(\u000b(\f(\u0313\u0001(\u0004("+ + "\u0317\b(\u000b(\f(\u0318\u0001(\u0001(\u0005(\u031d\b(\n(\f(\u0320\t"+ + "(\u0003(\u0322\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u0328\b(\u000b(\f"+ + "(\u0329\u0001(\u0001(\u0003(\u032e\b(\u0001)\u0001)\u0001)\u0001*\u0001"+ + "*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001-\u0001"+ + "-\u0001-\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00010\u00010\u0001"+ + "0\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u00012\u00013\u0001"+ + "3\u00013\u00013\u00013\u00013\u00014\u00014\u00014\u00015\u00015\u0001"+ + "5\u00016\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u0001"+ + "7\u00018\u00018\u00019\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001"+ + ":\u0001:\u0001;\u0001;\u0001;\u0001;\u0001;\u0001;\u0001<\u0001<\u0001"+ + "<\u0001=\u0001=\u0001>\u0001>\u0001>\u0001>\u0001>\u0001>\u0001?\u0001"+ + "?\u0001@\u0001@\u0001@\u0001@\u0001@\u0001A\u0001A\u0001A\u0001B\u0001"+ + "B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001E\u0001E\u0001E\u0001"+ + "F\u0001F\u0001G\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001"+ + "J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001N\u0001N\u0001O\u0001"+ + "O\u0001O\u0001O\u0001P\u0001P\u0001P\u0003P\u03b2\bP\u0001P\u0005P\u03b5"+ + "\bP\nP\fP\u03b8\tP\u0001P\u0001P\u0004P\u03bc\bP\u000bP\fP\u03bd\u0003"+ + "P\u03c0\bP\u0001Q\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001"+ + "R\u0001R\u0001S\u0001S\u0005S\u03ce\bS\nS\fS\u03d1\tS\u0001S\u0001S\u0003"+ + "S\u03d5\bS\u0001S\u0004S\u03d8\bS\u000bS\fS\u03d9\u0003S\u03dc\bS\u0001"+ + "T\u0001T\u0004T\u03e0\bT\u000bT\fT\u03e1\u0001T\u0001T\u0001U\u0001U\u0001"+ + "V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001"+ + "X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001"+ + "Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001"+ + 
"]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001^\u0001_\u0001"+ + "_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001"+ + "a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001"+ + "d\u0001d\u0001d\u0001d\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001"+ + "e\u0003e\u042f\be\u0001f\u0004f\u0432\bf\u000bf\ff\u0433\u0001g\u0001"+ + "g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001"+ + "i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001l\u0001"+ + "l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001"+ + "n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001"+ + "p\u0001p\u0001q\u0001q\u0001q\u0001q\u0003q\u0465\bq\u0001r\u0001r\u0003"+ + "r\u0469\br\u0001r\u0005r\u046c\br\nr\fr\u046f\tr\u0001r\u0001r\u0003r"+ + "\u0473\br\u0001r\u0004r\u0476\br\u000br\fr\u0477\u0003r\u047a\br\u0001"+ + "s\u0001s\u0004s\u047e\bs\u000bs\fs\u047f\u0001t\u0001t\u0001t\u0001t\u0001"+ + "u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001"+ + "w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001"+ + "y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001{\u0001"+ + "|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001~\u0001~\u0001"+ + "~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001"+ + "\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081\u0001"+ + "\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ + "\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001"+ + "\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001"+ "\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001"+ - "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001"+ - "\u0087\u0001\u0087\u0001\u0088\u0004\u0088\u04e0\b\u0088\u000b\u0088\f"+ - "\u0088\u04e1\u0001\u0088\u0001\u0088\u0003\u0088\u04e6\b\u0088\u0001\u0088"+ - "\u0004\u0088\u04e9\b\u0088\u000b\u0088\f\u0088\u04ea\u0001\u0089\u0001"+ - "\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001"+ - "\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001"+ - "\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001"+ - "\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ - "\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001"+ - "\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ - "\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001"+ - "\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001"+ - "\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ - "\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001"+ - "\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001"+ - "\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001"+ - "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001"+ - "\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ - "\u009c\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001"+ - "\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001"+ - "\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001"+ - "\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2\u0001"+ - "\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001"+ 
- "\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001"+ - "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ - "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ - "\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001"+ - "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0004\u00a9\u057c\b\u00a9\u000b"+ - "\u00a9\f\u00a9\u057d\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ - "\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001"+ - "\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001"+ - "\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001"+ - "\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001"+ - "\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ - "\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001"+ - "\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001"+ - "\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001"+ - "\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001"+ - "\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001"+ - "\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001"+ - "\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001"+ - "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001"+ - "\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001"+ - "\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001"+ - "\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001"+ - "\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ - "\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ - "\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001"+ - "\u00c4\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001"+ - "\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7\u0001"+ - "\u00c7\u0001\u00c7\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001"+ - "\u00c9\u0001\u00c9\u0001\u00c9\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001"+ - "\u00ca\u0001\u00ca\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001"+ - "\u00cb\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001"+ - "\u00cc\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001"+ - "\u00cd\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00cf\u0001"+ - "\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001"+ - "\u00d0\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001"+ - "\u00d1\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001"+ - "\u00d2\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001"+ - "\u00d4\u0001\u00d4\u0001\u00d4\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001"+ - "\u00d5\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001"+ - "\u00d6\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001"+ - "\u00d7\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001"+ - "\u00d8\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0001\u00d9\u0002"+ - "\u02b5\u02fa\u0000\u00da\u0010\u0001\u0012\u0002\u0014\u0003\u0016\u0004"+ - "\u0018\u0005\u001a\u0006\u001c\u0007\u001e\b \t\"\n$\u000b&\f(\r*\u000e"+ - ",\u000f.\u00100\u00112\u00124\u00136\u00148\u0015:\u0016<\u0017>\u0018"+ - 
"@\u0019B\u001aD\u001bF\u001cH\u001dJ\u0000L\u0000N\u0000P\u0000R\u0000"+ - "T\u0000V\u0000X\u0000Z\u0000\\\u0000^\u001e`\u001fb d!f\"h#j$l%n&p\'r"+ - "(t)v*x+z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a4\u008c5\u008e6"+ - "\u00907\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e>\u00a0?\u00a2"+ - "@\u00a4A\u00a6B\u00a8C\u00aaD\u00acE\u00aeF\u00b0\u0000\u00b2G\u00b4H"+ - "\u00b6I\u00b8J\u00ba\u0000\u00bcK\u00beL\u00c0M\u00c2N\u00c4\u0000\u00c6"+ - "\u0000\u00c8O\u00caP\u00ccQ\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4"+ - "\u0000\u00d6\u0000\u00d8\u0000\u00daR\u00dc\u0000\u00deS\u00e0\u0000\u00e2"+ - "\u0000\u00e4T\u00e6U\u00e8V\u00ea\u0000\u00ec\u0000\u00ee\u0000\u00f0"+ - "\u0000\u00f2\u0000\u00f4\u0000\u00f6\u0000\u00f8W\u00faX\u00fcY\u00fe"+ - "Z\u0100\u0000\u0102\u0000\u0104\u0000\u0106\u0000\u0108\u0000\u010a\u0000"+ - "\u010c[\u010e\u0000\u0110\\\u0112]\u0114^\u0116\u0000\u0118\u0000\u011a"+ - "_\u011c`\u011e\u0000\u0120a\u0122\u0000\u0124b\u0126c\u0128d\u012a\u0000"+ - "\u012c\u0000\u012e\u0000\u0130\u0000\u0132\u0000\u0134\u0000\u0136\u0000"+ - "\u0138\u0000\u013a\u0000\u013ce\u013ef\u0140g\u0142\u0000\u0144\u0000"+ - "\u0146\u0000\u0148\u0000\u014a\u0000\u014c\u0000\u014eh\u0150i\u0152j"+ - "\u0154\u0000\u0156k\u0158l\u015am\u015cn\u015e\u0000\u0160\u0000\u0162"+ - "o\u0164p\u0166q\u0168r\u016a\u0000\u016c\u0000\u016e\u0000\u0170\u0000"+ - "\u0172\u0000\u0174\u0000\u0176\u0000\u0178s\u017at\u017cu\u017e\u0000"+ - "\u0180\u0000\u0182\u0000\u0184\u0000\u0186v\u0188w\u018ax\u018c\u0000"+ - "\u018e\u0000\u0190\u0000\u0192\u0000\u0194y\u0196\u0000\u0198\u0000\u019a"+ - "\u0000\u019c\u0000\u019e\u0000\u01a0z\u01a2{\u01a4|\u01a6\u0000\u01a8"+ - "\u0000\u01aa\u0000\u01ac}\u01ae~\u01b0\u007f\u01b2\u0000\u01b4\u0000\u01b6"+ - "\u0080\u01b8\u0081\u01ba\u0082\u01bc\u0000\u01be\u0000\u01c0\u0000\u01c2"+ - "\u0000\u0010\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b"+ - "\f\r\u000e\u000f$\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000SSss\u0002"+ - "\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002\u0000"+ - "OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002"+ - "\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002\u0000"+ - "MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0002"+ - "\u0000JJjj\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000"+ - "\t\n\r\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004"+ - "\u0000\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002"+ - "\u0000YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b"+ - "\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u0676\u0000\u0010\u0001\u0000\u0000"+ + "\u0086\u0001\u0087\u0004\u0087\u04d5\b\u0087\u000b\u0087\f\u0087\u04d6"+ + "\u0001\u0087\u0001\u0087\u0003\u0087\u04db\b\u0087\u0001\u0087\u0004\u0087"+ + "\u04de\b\u0087\u000b\u0087\f\u0087\u04df\u0001\u0088\u0001\u0088\u0001"+ + "\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001"+ + "\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001"+ + "\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001"+ + "\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001"+ + "\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001"+ + "\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001"+ + "\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001"+ + "\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001"+ + 
"\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001"+ + "\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001"+ + "\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001"+ + "\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001"+ + "\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009a\u0001"+ + "\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001"+ + "\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d\u0001"+ + "\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ + "\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0\u0001"+ + "\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001"+ + "\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001"+ + "\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001"+ + "\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001"+ + "\u00a5\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001"+ + "\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001\u00a8\u0001"+ + "\u00a8\u0001\u00a8\u0001\u00a8\u0004\u00a8\u0571\b\u00a8\u000b\u00a8\f"+ + "\u00a8\u0572\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa"+ + "\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab"+ + "\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac"+ + "\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae"+ + "\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af"+ + "\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b1"+ + "\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2"+ + "\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4"+ + "\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5"+ + "\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6"+ + "\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8"+ + "\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9\u0001\u00b9"+ + "\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00bb"+ + "\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc\u0001\u00bc\u0001\u00bc"+ + "\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd"+ + "\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00bf"+ + "\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001\u00c0\u0001\u00c0"+ + "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1"+ + "\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1"+ + "\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3\u0001\u00c3"+ + "\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4"+ + "\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c6\u0001\u00c6"+ + "\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c7"+ + "\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c9\u0001\u00c9"+ + "\u0001\u00c9\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00ca"+ + "\u0001\u00ca\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb"+ + "\u0001\u00cb\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc"+ + "\u0001\u00cc\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00ce"+ + "\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00cf\u0001\u00cf\u0001\u00cf"+ + 
"\u0001\u00cf\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d0"+ + "\u0001\u00d0\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1"+ + "\u0001\u00d1\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d3"+ + "\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001\u00d4\u0001\u00d4"+ + "\u0001\u00d4\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001\u00d5"+ + "\u0001\u00d5\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6"+ + "\u0001\u00d6\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d7\u0001\u00d7"+ + "\u0001\u00d7\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d8\u0001\u00d8"+ + "\u0002\u02aa\u02ef\u0000\u00d9\u0010\u0001\u0012\u0002\u0014\u0003\u0016"+ + "\u0004\u0018\u0005\u001a\u0006\u001c\u0007\u001e\b \t\"\n$\u000b&\f(\r"+ + "*\u000e,\u000f.\u00100\u00112\u00124\u00136\u00148\u0015:\u0016<\u0017"+ + ">\u0018@\u0019B\u001aD\u001bF\u001cH\u0000J\u0000L\u0000N\u0000P\u0000"+ + "R\u0000T\u0000V\u0000X\u0000Z\u0000\\\u001d^\u001e`\u001fb d!f\"h#j$l"+ + "%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a4\u008c5"+ + "\u008e6\u00907\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e>\u00a0"+ + "?\u00a2@\u00a4A\u00a6B\u00a8C\u00aaD\u00acE\u00ae\u0000\u00b0F\u00b2G"+ + "\u00b4H\u00b6I\u00b8\u0000\u00baJ\u00bcK\u00beL\u00c0M\u00c2\u0000\u00c4"+ + "\u0000\u00c6N\u00c8O\u00caP\u00cc\u0000\u00ce\u0000\u00d0\u0000\u00d2"+ + "\u0000\u00d4\u0000\u00d6\u0000\u00d8Q\u00da\u0000\u00dcR\u00de\u0000\u00e0"+ + "\u0000\u00e2S\u00e4T\u00e6U\u00e8\u0000\u00ea\u0000\u00ec\u0000\u00ee"+ + "\u0000\u00f0\u0000\u00f2\u0000\u00f4\u0000\u00f6V\u00f8W\u00faX\u00fc"+ + "Y\u00fe\u0000\u0100\u0000\u0102\u0000\u0104\u0000\u0106\u0000\u0108\u0000"+ + "\u010aZ\u010c\u0000\u010e[\u0110\\\u0112]\u0114\u0000\u0116\u0000\u0118"+ + "^\u011a_\u011c\u0000\u011e`\u0120\u0000\u0122a\u0124b\u0126c\u0128\u0000"+ + "\u012a\u0000\u012c\u0000\u012e\u0000\u0130\u0000\u0132\u0000\u0134\u0000"+ + "\u0136\u0000\u0138\u0000\u013ad\u013ce\u013ef\u0140\u0000\u0142\u0000"+ + "\u0144\u0000\u0146\u0000\u0148\u0000\u014a\u0000\u014cg\u014eh\u0150i"+ + "\u0152\u0000\u0154j\u0156k\u0158l\u015am\u015c\u0000\u015e\u0000\u0160"+ + "n\u0162o\u0164p\u0166q\u0168\u0000\u016a\u0000\u016c\u0000\u016e\u0000"+ + "\u0170\u0000\u0172\u0000\u0174\u0000\u0176r\u0178s\u017at\u017c\u0000"+ + "\u017e\u0000\u0180\u0000\u0182\u0000\u0184u\u0186v\u0188w\u018a\u0000"+ + "\u018cx\u018e\u0000\u0190\u0000\u0192y\u0194\u0000\u0196\u0000\u0198\u0000"+ + "\u019a\u0000\u019c\u0000\u019ez\u01a0{\u01a2|\u01a4\u0000\u01a6\u0000"+ + "\u01a8\u0000\u01aa}\u01ac~\u01ae\u007f\u01b0\u0000\u01b2\u0000\u01b4\u0080"+ + "\u01b6\u0081\u01b8\u0082\u01ba\u0000\u01bc\u0000\u01be\u0000\u01c0\u0000"+ + "\u0010\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r"+ + "\u000e\u000f$\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000SSss\u0002\u0000"+ + "EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002"+ + "\u0000PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002\u0000"+ + "AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002"+ + "\u0000GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0006\u0000"+ + "\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u0000"+ + "09\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\""+ + "\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b"+ + "\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r"+ + " \"#,,//::<<>?\\\\||\u0002\u0000JJjj\u066c\u0000\u0010\u0001\u0000\u0000"+ 
"\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000"+ "\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000"+ "\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000"+ @@ -654,792 +634,787 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "4\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000\u00008\u0001"+ "\u0000\u0000\u0000\u0000:\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000"+ "\u0000\u0000>\u0001\u0000\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000"+ - "B\u0001\u0000\u0000\u0000\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001"+ - "\u0000\u0000\u0000\u0001H\u0001\u0000\u0000\u0000\u0001^\u0001\u0000\u0000"+ - "\u0000\u0001`\u0001\u0000\u0000\u0000\u0001b\u0001\u0000\u0000\u0000\u0001"+ - "d\u0001\u0000\u0000\u0000\u0001f\u0001\u0000\u0000\u0000\u0001h\u0001"+ - "\u0000\u0000\u0000\u0001j\u0001\u0000\u0000\u0000\u0001l\u0001\u0000\u0000"+ - "\u0000\u0001n\u0001\u0000\u0000\u0000\u0001p\u0001\u0000\u0000\u0000\u0001"+ - "r\u0001\u0000\u0000\u0000\u0001t\u0001\u0000\u0000\u0000\u0001v\u0001"+ - "\u0000\u0000\u0000\u0001x\u0001\u0000\u0000\u0000\u0001z\u0001\u0000\u0000"+ - "\u0000\u0001|\u0001\u0000\u0000\u0000\u0001~\u0001\u0000\u0000\u0000\u0001"+ - "\u0080\u0001\u0000\u0000\u0000\u0001\u0082\u0001\u0000\u0000\u0000\u0001"+ - "\u0084\u0001\u0000\u0000\u0000\u0001\u0086\u0001\u0000\u0000\u0000\u0001"+ - "\u0088\u0001\u0000\u0000\u0000\u0001\u008a\u0001\u0000\u0000\u0000\u0001"+ - "\u008c\u0001\u0000\u0000\u0000\u0001\u008e\u0001\u0000\u0000\u0000\u0001"+ - "\u0090\u0001\u0000\u0000\u0000\u0001\u0092\u0001\u0000\u0000\u0000\u0001"+ - "\u0094\u0001\u0000\u0000\u0000\u0001\u0096\u0001\u0000\u0000\u0000\u0001"+ - "\u0098\u0001\u0000\u0000\u0000\u0001\u009a\u0001\u0000\u0000\u0000\u0001"+ - "\u009c\u0001\u0000\u0000\u0000\u0001\u009e\u0001\u0000\u0000\u0000\u0001"+ - "\u00a0\u0001\u0000\u0000\u0000\u0001\u00a2\u0001\u0000\u0000\u0000\u0001"+ - "\u00a4\u0001\u0000\u0000\u0000\u0001\u00a6\u0001\u0000\u0000\u0000\u0001"+ - "\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa\u0001\u0000\u0000\u0000\u0001"+ - "\u00ac\u0001\u0000\u0000\u0000\u0001\u00ae\u0001\u0000\u0000\u0000\u0001"+ - "\u00b0\u0001\u0000\u0000\u0000\u0001\u00b2\u0001\u0000\u0000\u0000\u0001"+ - "\u00b4\u0001\u0000\u0000\u0000\u0001\u00b6\u0001\u0000\u0000\u0000\u0001"+ - "\u00b8\u0001\u0000\u0000\u0000\u0001\u00bc\u0001\u0000\u0000\u0000\u0001"+ - "\u00be\u0001\u0000\u0000\u0000\u0001\u00c0\u0001\u0000\u0000\u0000\u0001"+ - "\u00c2\u0001\u0000\u0000\u0000\u0002\u00c4\u0001\u0000\u0000\u0000\u0002"+ - "\u00c6\u0001\u0000\u0000\u0000\u0002\u00c8\u0001\u0000\u0000\u0000\u0002"+ - "\u00ca\u0001\u0000\u0000\u0000\u0002\u00cc\u0001\u0000\u0000\u0000\u0003"+ - "\u00ce\u0001\u0000\u0000\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003"+ - "\u00d2\u0001\u0000\u0000\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003"+ - "\u00d6\u0001\u0000\u0000\u0000\u0003\u00d8\u0001\u0000\u0000\u0000\u0003"+ - "\u00da\u0001\u0000\u0000\u0000\u0003\u00de\u0001\u0000\u0000\u0000\u0003"+ - "\u00e0\u0001\u0000\u0000\u0000\u0003\u00e2\u0001\u0000\u0000\u0000\u0003"+ - "\u00e4\u0001\u0000\u0000\u0000\u0003\u00e6\u0001\u0000\u0000\u0000\u0003"+ - "\u00e8\u0001\u0000\u0000\u0000\u0004\u00ea\u0001\u0000\u0000\u0000\u0004"+ - "\u00ec\u0001\u0000\u0000\u0000\u0004\u00ee\u0001\u0000\u0000\u0000\u0004"+ - "\u00f0\u0001\u0000\u0000\u0000\u0004\u00f2\u0001\u0000\u0000\u0000\u0004"+ - "\u00f8\u0001\u0000\u0000\u0000\u0004\u00fa\u0001\u0000\u0000\u0000\u0004"+ - 
"\u00fc\u0001\u0000\u0000\u0000\u0004\u00fe\u0001\u0000\u0000\u0000\u0005"+ - "\u0100\u0001\u0000\u0000\u0000\u0005\u0102\u0001\u0000\u0000\u0000\u0005"+ - "\u0104\u0001\u0000\u0000\u0000\u0005\u0106\u0001\u0000\u0000\u0000\u0005"+ - "\u0108\u0001\u0000\u0000\u0000\u0005\u010a\u0001\u0000\u0000\u0000\u0005"+ - "\u010c\u0001\u0000\u0000\u0000\u0005\u010e\u0001\u0000\u0000\u0000\u0005"+ - "\u0110\u0001\u0000\u0000\u0000\u0005\u0112\u0001\u0000\u0000\u0000\u0005"+ - "\u0114\u0001\u0000\u0000\u0000\u0006\u0116\u0001\u0000\u0000\u0000\u0006"+ - "\u0118\u0001\u0000\u0000\u0000\u0006\u011a\u0001\u0000\u0000\u0000\u0006"+ - "\u011c\u0001\u0000\u0000\u0000\u0006\u0120\u0001\u0000\u0000\u0000\u0006"+ - "\u0122\u0001\u0000\u0000\u0000\u0006\u0124\u0001\u0000\u0000\u0000\u0006"+ - "\u0126\u0001\u0000\u0000\u0000\u0006\u0128\u0001\u0000\u0000\u0000\u0007"+ - "\u012a\u0001\u0000\u0000\u0000\u0007\u012c\u0001\u0000\u0000\u0000\u0007"+ - "\u012e\u0001\u0000\u0000\u0000\u0007\u0130\u0001\u0000\u0000\u0000\u0007"+ - "\u0132\u0001\u0000\u0000\u0000\u0007\u0134\u0001\u0000\u0000\u0000\u0007"+ - "\u0136\u0001\u0000\u0000\u0000\u0007\u0138\u0001\u0000\u0000\u0000\u0007"+ - "\u013a\u0001\u0000\u0000\u0000\u0007\u013c\u0001\u0000\u0000\u0000\u0007"+ - "\u013e\u0001\u0000\u0000\u0000\u0007\u0140\u0001\u0000\u0000\u0000\b\u0142"+ - "\u0001\u0000\u0000\u0000\b\u0144\u0001\u0000\u0000\u0000\b\u0146\u0001"+ - "\u0000\u0000\u0000\b\u0148\u0001\u0000\u0000\u0000\b\u014a\u0001\u0000"+ - "\u0000\u0000\b\u014c\u0001\u0000\u0000\u0000\b\u014e\u0001\u0000\u0000"+ - "\u0000\b\u0150\u0001\u0000\u0000\u0000\b\u0152\u0001\u0000\u0000\u0000"+ - "\t\u0154\u0001\u0000\u0000\u0000\t\u0156\u0001\u0000\u0000\u0000\t\u0158"+ - "\u0001\u0000\u0000\u0000\t\u015a\u0001\u0000\u0000\u0000\t\u015c\u0001"+ - "\u0000\u0000\u0000\n\u015e\u0001\u0000\u0000\u0000\n\u0160\u0001\u0000"+ - "\u0000\u0000\n\u0162\u0001\u0000\u0000\u0000\n\u0164\u0001\u0000\u0000"+ - "\u0000\n\u0166\u0001\u0000\u0000\u0000\n\u0168\u0001\u0000\u0000\u0000"+ - "\u000b\u016a\u0001\u0000\u0000\u0000\u000b\u016c\u0001\u0000\u0000\u0000"+ - "\u000b\u016e\u0001\u0000\u0000\u0000\u000b\u0170\u0001\u0000\u0000\u0000"+ - "\u000b\u0172\u0001\u0000\u0000\u0000\u000b\u0174\u0001\u0000\u0000\u0000"+ - "\u000b\u0176\u0001\u0000\u0000\u0000\u000b\u0178\u0001\u0000\u0000\u0000"+ - "\u000b\u017a\u0001\u0000\u0000\u0000\u000b\u017c\u0001\u0000\u0000\u0000"+ + "B\u0001\u0000\u0000\u0000\u0000D\u0001\u0000\u0000\u0000\u0001F\u0001"+ + "\u0000\u0000\u0000\u0001\\\u0001\u0000\u0000\u0000\u0001^\u0001\u0000"+ + "\u0000\u0000\u0001`\u0001\u0000\u0000\u0000\u0001b\u0001\u0000\u0000\u0000"+ + "\u0001d\u0001\u0000\u0000\u0000\u0001f\u0001\u0000\u0000\u0000\u0001h"+ + "\u0001\u0000\u0000\u0000\u0001j\u0001\u0000\u0000\u0000\u0001l\u0001\u0000"+ + "\u0000\u0000\u0001n\u0001\u0000\u0000\u0000\u0001p\u0001\u0000\u0000\u0000"+ + "\u0001r\u0001\u0000\u0000\u0000\u0001t\u0001\u0000\u0000\u0000\u0001v"+ + "\u0001\u0000\u0000\u0000\u0001x\u0001\u0000\u0000\u0000\u0001z\u0001\u0000"+ + "\u0000\u0000\u0001|\u0001\u0000\u0000\u0000\u0001~\u0001\u0000\u0000\u0000"+ + "\u0001\u0080\u0001\u0000\u0000\u0000\u0001\u0082\u0001\u0000\u0000\u0000"+ + "\u0001\u0084\u0001\u0000\u0000\u0000\u0001\u0086\u0001\u0000\u0000\u0000"+ + "\u0001\u0088\u0001\u0000\u0000\u0000\u0001\u008a\u0001\u0000\u0000\u0000"+ + "\u0001\u008c\u0001\u0000\u0000\u0000\u0001\u008e\u0001\u0000\u0000\u0000"+ + "\u0001\u0090\u0001\u0000\u0000\u0000\u0001\u0092\u0001\u0000\u0000\u0000"+ + 
"\u0001\u0094\u0001\u0000\u0000\u0000\u0001\u0096\u0001\u0000\u0000\u0000"+ + "\u0001\u0098\u0001\u0000\u0000\u0000\u0001\u009a\u0001\u0000\u0000\u0000"+ + "\u0001\u009c\u0001\u0000\u0000\u0000\u0001\u009e\u0001\u0000\u0000\u0000"+ + "\u0001\u00a0\u0001\u0000\u0000\u0000\u0001\u00a2\u0001\u0000\u0000\u0000"+ + "\u0001\u00a4\u0001\u0000\u0000\u0000\u0001\u00a6\u0001\u0000\u0000\u0000"+ + "\u0001\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa\u0001\u0000\u0000\u0000"+ + "\u0001\u00ac\u0001\u0000\u0000\u0000\u0001\u00ae\u0001\u0000\u0000\u0000"+ + "\u0001\u00b0\u0001\u0000\u0000\u0000\u0001\u00b2\u0001\u0000\u0000\u0000"+ + "\u0001\u00b4\u0001\u0000\u0000\u0000\u0001\u00b6\u0001\u0000\u0000\u0000"+ + "\u0001\u00ba\u0001\u0000\u0000\u0000\u0001\u00bc\u0001\u0000\u0000\u0000"+ + "\u0001\u00be\u0001\u0000\u0000\u0000\u0001\u00c0\u0001\u0000\u0000\u0000"+ + "\u0002\u00c2\u0001\u0000\u0000\u0000\u0002\u00c4\u0001\u0000\u0000\u0000"+ + "\u0002\u00c6\u0001\u0000\u0000\u0000\u0002\u00c8\u0001\u0000\u0000\u0000"+ + "\u0002\u00ca\u0001\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000"+ + "\u0003\u00ce\u0001\u0000\u0000\u0000\u0003\u00d0\u0001\u0000\u0000\u0000"+ + "\u0003\u00d2\u0001\u0000\u0000\u0000\u0003\u00d4\u0001\u0000\u0000\u0000"+ + "\u0003\u00d6\u0001\u0000\u0000\u0000\u0003\u00d8\u0001\u0000\u0000\u0000"+ + "\u0003\u00dc\u0001\u0000\u0000\u0000\u0003\u00de\u0001\u0000\u0000\u0000"+ + "\u0003\u00e0\u0001\u0000\u0000\u0000\u0003\u00e2\u0001\u0000\u0000\u0000"+ + "\u0003\u00e4\u0001\u0000\u0000\u0000\u0003\u00e6\u0001\u0000\u0000\u0000"+ + "\u0004\u00e8\u0001\u0000\u0000\u0000\u0004\u00ea\u0001\u0000\u0000\u0000"+ + "\u0004\u00ec\u0001\u0000\u0000\u0000\u0004\u00ee\u0001\u0000\u0000\u0000"+ + "\u0004\u00f0\u0001\u0000\u0000\u0000\u0004\u00f6\u0001\u0000\u0000\u0000"+ + "\u0004\u00f8\u0001\u0000\u0000\u0000\u0004\u00fa\u0001\u0000\u0000\u0000"+ + "\u0004\u00fc\u0001\u0000\u0000\u0000\u0005\u00fe\u0001\u0000\u0000\u0000"+ + "\u0005\u0100\u0001\u0000\u0000\u0000\u0005\u0102\u0001\u0000\u0000\u0000"+ + "\u0005\u0104\u0001\u0000\u0000\u0000\u0005\u0106\u0001\u0000\u0000\u0000"+ + "\u0005\u0108\u0001\u0000\u0000\u0000\u0005\u010a\u0001\u0000\u0000\u0000"+ + "\u0005\u010c\u0001\u0000\u0000\u0000\u0005\u010e\u0001\u0000\u0000\u0000"+ + "\u0005\u0110\u0001\u0000\u0000\u0000\u0005\u0112\u0001\u0000\u0000\u0000"+ + "\u0006\u0114\u0001\u0000\u0000\u0000\u0006\u0116\u0001\u0000\u0000\u0000"+ + "\u0006\u0118\u0001\u0000\u0000\u0000\u0006\u011a\u0001\u0000\u0000\u0000"+ + "\u0006\u011e\u0001\u0000\u0000\u0000\u0006\u0120\u0001\u0000\u0000\u0000"+ + "\u0006\u0122\u0001\u0000\u0000\u0000\u0006\u0124\u0001\u0000\u0000\u0000"+ + "\u0006\u0126\u0001\u0000\u0000\u0000\u0007\u0128\u0001\u0000\u0000\u0000"+ + "\u0007\u012a\u0001\u0000\u0000\u0000\u0007\u012c\u0001\u0000\u0000\u0000"+ + "\u0007\u012e\u0001\u0000\u0000\u0000\u0007\u0130\u0001\u0000\u0000\u0000"+ + "\u0007\u0132\u0001\u0000\u0000\u0000\u0007\u0134\u0001\u0000\u0000\u0000"+ + "\u0007\u0136\u0001\u0000\u0000\u0000\u0007\u0138\u0001\u0000\u0000\u0000"+ + "\u0007\u013a\u0001\u0000\u0000\u0000\u0007\u013c\u0001\u0000\u0000\u0000"+ + "\u0007\u013e\u0001\u0000\u0000\u0000\b\u0140\u0001\u0000\u0000\u0000\b"+ + "\u0142\u0001\u0000\u0000\u0000\b\u0144\u0001\u0000\u0000\u0000\b\u0146"+ + "\u0001\u0000\u0000\u0000\b\u0148\u0001\u0000\u0000\u0000\b\u014a\u0001"+ + "\u0000\u0000\u0000\b\u014c\u0001\u0000\u0000\u0000\b\u014e\u0001\u0000"+ + "\u0000\u0000\b\u0150\u0001\u0000\u0000\u0000\t\u0152\u0001\u0000\u0000"+ + 
"\u0000\t\u0154\u0001\u0000\u0000\u0000\t\u0156\u0001\u0000\u0000\u0000"+ + "\t\u0158\u0001\u0000\u0000\u0000\t\u015a\u0001\u0000\u0000\u0000\n\u015c"+ + "\u0001\u0000\u0000\u0000\n\u015e\u0001\u0000\u0000\u0000\n\u0160\u0001"+ + "\u0000\u0000\u0000\n\u0162\u0001\u0000\u0000\u0000\n\u0164\u0001\u0000"+ + "\u0000\u0000\n\u0166\u0001\u0000\u0000\u0000\u000b\u0168\u0001\u0000\u0000"+ + "\u0000\u000b\u016a\u0001\u0000\u0000\u0000\u000b\u016c\u0001\u0000\u0000"+ + "\u0000\u000b\u016e\u0001\u0000\u0000\u0000\u000b\u0170\u0001\u0000\u0000"+ + "\u0000\u000b\u0172\u0001\u0000\u0000\u0000\u000b\u0174\u0001\u0000\u0000"+ + "\u0000\u000b\u0176\u0001\u0000\u0000\u0000\u000b\u0178\u0001\u0000\u0000"+ + "\u0000\u000b\u017a\u0001\u0000\u0000\u0000\f\u017c\u0001\u0000\u0000\u0000"+ "\f\u017e\u0001\u0000\u0000\u0000\f\u0180\u0001\u0000\u0000\u0000\f\u0182"+ "\u0001\u0000\u0000\u0000\f\u0184\u0001\u0000\u0000\u0000\f\u0186\u0001"+ - "\u0000\u0000\u0000\f\u0188\u0001\u0000\u0000\u0000\f\u018a\u0001\u0000"+ + "\u0000\u0000\u0000\f\u0188\u0001\u0000\u0000\u0000\r\u018a\u0001\u0000"+ "\u0000\u0000\r\u018c\u0001\u0000\u0000\u0000\r\u018e\u0001\u0000\u0000"+ "\u0000\r\u0190\u0001\u0000\u0000\u0000\r\u0192\u0001\u0000\u0000\u0000"+ "\r\u0194\u0001\u0000\u0000\u0000\r\u0196\u0001\u0000\u0000\u0000\r\u0198"+ "\u0001\u0000\u0000\u0000\r\u019a\u0001\u0000\u0000\u0000\r\u019c\u0001"+ "\u0000\u0000\u0000\r\u019e\u0001\u0000\u0000\u0000\r\u01a0\u0001\u0000"+ - "\u0000\u0000\r\u01a2\u0001\u0000\u0000\u0000\r\u01a4\u0001\u0000\u0000"+ + "\u0000\u0000\r\u01a2\u0001\u0000\u0000\u0000\u000e\u01a4\u0001\u0000\u0000"+ "\u0000\u000e\u01a6\u0001\u0000\u0000\u0000\u000e\u01a8\u0001\u0000\u0000"+ "\u0000\u000e\u01aa\u0001\u0000\u0000\u0000\u000e\u01ac\u0001\u0000\u0000"+ - "\u0000\u000e\u01ae\u0001\u0000\u0000\u0000\u000e\u01b0\u0001\u0000\u0000"+ + "\u0000\u000e\u01ae\u0001\u0000\u0000\u0000\u000f\u01b0\u0001\u0000\u0000"+ "\u0000\u000f\u01b2\u0001\u0000\u0000\u0000\u000f\u01b4\u0001\u0000\u0000"+ "\u0000\u000f\u01b6\u0001\u0000\u0000\u0000\u000f\u01b8\u0001\u0000\u0000"+ "\u0000\u000f\u01ba\u0001\u0000\u0000\u0000\u000f\u01bc\u0001\u0000\u0000"+ "\u0000\u000f\u01be\u0001\u0000\u0000\u0000\u000f\u01c0\u0001\u0000\u0000"+ - "\u0000\u000f\u01c2\u0001\u0000\u0000\u0000\u0010\u01c4\u0001\u0000\u0000"+ - "\u0000\u0012\u01ce\u0001\u0000\u0000\u0000\u0014\u01d5\u0001\u0000\u0000"+ - "\u0000\u0016\u01de\u0001\u0000\u0000\u0000\u0018\u01e5\u0001\u0000\u0000"+ - "\u0000\u001a\u01ef\u0001\u0000\u0000\u0000\u001c\u01f6\u0001\u0000\u0000"+ - "\u0000\u001e\u01fd\u0001\u0000\u0000\u0000 \u0204\u0001\u0000\u0000\u0000"+ - "\"\u020c\u0001\u0000\u0000\u0000$\u0218\u0001\u0000\u0000\u0000&\u0221"+ - "\u0001\u0000\u0000\u0000(\u0227\u0001\u0000\u0000\u0000*\u022e\u0001\u0000"+ - "\u0000\u0000,\u0235\u0001\u0000\u0000\u0000.\u023d\u0001\u0000\u0000\u0000"+ - "0\u0245\u0001\u0000\u0000\u00002\u0254\u0001\u0000\u0000\u00004\u0260"+ - "\u0001\u0000\u0000\u00006\u026b\u0001\u0000\u0000\u00008\u0273\u0001\u0000"+ - "\u0000\u0000:\u027b\u0001\u0000\u0000\u0000<\u0283\u0001\u0000\u0000\u0000"+ - ">\u028c\u0001\u0000\u0000\u0000@\u0297\u0001\u0000\u0000\u0000B\u029d"+ - "\u0001\u0000\u0000\u0000D\u02ae\u0001\u0000\u0000\u0000F\u02be\u0001\u0000"+ - "\u0000\u0000H\u02c4\u0001\u0000\u0000\u0000J\u02c8\u0001\u0000\u0000\u0000"+ - "L\u02ca\u0001\u0000\u0000\u0000N\u02cc\u0001\u0000\u0000\u0000P\u02cf"+ - "\u0001\u0000\u0000\u0000R\u02d1\u0001\u0000\u0000\u0000T\u02da\u0001\u0000"+ - "\u0000\u0000V\u02dc\u0001\u0000\u0000\u0000X\u02e1\u0001\u0000\u0000\u0000"+ - 
"Z\u02e3\u0001\u0000\u0000\u0000\\\u02e8\u0001\u0000\u0000\u0000^\u0307"+ - "\u0001\u0000\u0000\u0000`\u030a\u0001\u0000\u0000\u0000b\u0338\u0001\u0000"+ - "\u0000\u0000d\u033a\u0001\u0000\u0000\u0000f\u033d\u0001\u0000\u0000\u0000"+ - "h\u0341\u0001\u0000\u0000\u0000j\u0345\u0001\u0000\u0000\u0000l\u0347"+ - "\u0001\u0000\u0000\u0000n\u034a\u0001\u0000\u0000\u0000p\u034c\u0001\u0000"+ - "\u0000\u0000r\u034e\u0001\u0000\u0000\u0000t\u0353\u0001\u0000\u0000\u0000"+ - "v\u0355\u0001\u0000\u0000\u0000x\u035b\u0001\u0000\u0000\u0000z\u0361"+ - "\u0001\u0000\u0000\u0000|\u0364\u0001\u0000\u0000\u0000~\u0367\u0001\u0000"+ - "\u0000\u0000\u0080\u036c\u0001\u0000\u0000\u0000\u0082\u0371\u0001\u0000"+ - "\u0000\u0000\u0084\u0373\u0001\u0000\u0000\u0000\u0086\u0377\u0001\u0000"+ - "\u0000\u0000\u0088\u037c\u0001\u0000\u0000\u0000\u008a\u0382\u0001\u0000"+ - "\u0000\u0000\u008c\u0385\u0001\u0000\u0000\u0000\u008e\u0387\u0001\u0000"+ - "\u0000\u0000\u0090\u038d\u0001\u0000\u0000\u0000\u0092\u038f\u0001\u0000"+ - "\u0000\u0000\u0094\u0394\u0001\u0000\u0000\u0000\u0096\u0397\u0001\u0000"+ - "\u0000\u0000\u0098\u039a\u0001\u0000\u0000\u0000\u009a\u039d\u0001\u0000"+ - "\u0000\u0000\u009c\u039f\u0001\u0000\u0000\u0000\u009e\u03a2\u0001\u0000"+ - "\u0000\u0000\u00a0\u03a4\u0001\u0000\u0000\u0000\u00a2\u03a7\u0001\u0000"+ - "\u0000\u0000\u00a4\u03a9\u0001\u0000\u0000\u0000\u00a6\u03ab\u0001\u0000"+ - "\u0000\u0000\u00a8\u03ad\u0001\u0000\u0000\u0000\u00aa\u03af\u0001\u0000"+ - "\u0000\u0000\u00ac\u03b1\u0001\u0000\u0000\u0000\u00ae\u03b3\u0001\u0000"+ - "\u0000\u0000\u00b0\u03b5\u0001\u0000\u0000\u0000\u00b2\u03ca\u0001\u0000"+ - "\u0000\u0000\u00b4\u03cc\u0001\u0000\u0000\u0000\u00b6\u03d1\u0001\u0000"+ - "\u0000\u0000\u00b8\u03e6\u0001\u0000\u0000\u0000\u00ba\u03e8\u0001\u0000"+ - "\u0000\u0000\u00bc\u03f0\u0001\u0000\u0000\u0000\u00be\u03f2\u0001\u0000"+ - "\u0000\u0000\u00c0\u03f6\u0001\u0000\u0000\u0000\u00c2\u03fa\u0001\u0000"+ - "\u0000\u0000\u00c4\u03fe\u0001\u0000\u0000\u0000\u00c6\u0403\u0001\u0000"+ - "\u0000\u0000\u00c8\u0408\u0001\u0000\u0000\u0000\u00ca\u040c\u0001\u0000"+ - "\u0000\u0000\u00cc\u0410\u0001\u0000\u0000\u0000\u00ce\u0414\u0001\u0000"+ - "\u0000\u0000\u00d0\u0419\u0001\u0000\u0000\u0000\u00d2\u041d\u0001\u0000"+ - "\u0000\u0000\u00d4\u0421\u0001\u0000\u0000\u0000\u00d6\u0425\u0001\u0000"+ - "\u0000\u0000\u00d8\u0429\u0001\u0000\u0000\u0000\u00da\u042d\u0001\u0000"+ - "\u0000\u0000\u00dc\u0439\u0001\u0000\u0000\u0000\u00de\u043c\u0001\u0000"+ - "\u0000\u0000\u00e0\u0440\u0001\u0000\u0000\u0000\u00e2\u0444\u0001\u0000"+ - "\u0000\u0000\u00e4\u0448\u0001\u0000\u0000\u0000\u00e6\u044c\u0001\u0000"+ - "\u0000\u0000\u00e8\u0450\u0001\u0000\u0000\u0000\u00ea\u0454\u0001\u0000"+ - "\u0000\u0000\u00ec\u0459\u0001\u0000\u0000\u0000\u00ee\u045d\u0001\u0000"+ - "\u0000\u0000\u00f0\u0461\u0001\u0000\u0000\u0000\u00f2\u0466\u0001\u0000"+ - "\u0000\u0000\u00f4\u046f\u0001\u0000\u0000\u0000\u00f6\u0484\u0001\u0000"+ - "\u0000\u0000\u00f8\u0488\u0001\u0000\u0000\u0000\u00fa\u048c\u0001\u0000"+ - "\u0000\u0000\u00fc\u0490\u0001\u0000\u0000\u0000\u00fe\u0494\u0001\u0000"+ - "\u0000\u0000\u0100\u0498\u0001\u0000\u0000\u0000\u0102\u049d\u0001\u0000"+ - "\u0000\u0000\u0104\u04a1\u0001\u0000\u0000\u0000\u0106\u04a5\u0001\u0000"+ - "\u0000\u0000\u0108\u04a9\u0001\u0000\u0000\u0000\u010a\u04ae\u0001\u0000"+ - "\u0000\u0000\u010c\u04b3\u0001\u0000\u0000\u0000\u010e\u04b6\u0001\u0000"+ - "\u0000\u0000\u0110\u04ba\u0001\u0000\u0000\u0000\u0112\u04be\u0001\u0000"+ - 
"\u0000\u0000\u0114\u04c2\u0001\u0000\u0000\u0000\u0116\u04c6\u0001\u0000"+ - "\u0000\u0000\u0118\u04cb\u0001\u0000\u0000\u0000\u011a\u04d0\u0001\u0000"+ - "\u0000\u0000\u011c\u04d5\u0001\u0000\u0000\u0000\u011e\u04dc\u0001\u0000"+ - "\u0000\u0000\u0120\u04e5\u0001\u0000\u0000\u0000\u0122\u04ec\u0001\u0000"+ - "\u0000\u0000\u0124\u04f0\u0001\u0000\u0000\u0000\u0126\u04f4\u0001\u0000"+ - "\u0000\u0000\u0128\u04f8\u0001\u0000\u0000\u0000\u012a\u04fc\u0001\u0000"+ - "\u0000\u0000\u012c\u0502\u0001\u0000\u0000\u0000\u012e\u0506\u0001\u0000"+ - "\u0000\u0000\u0130\u050a\u0001\u0000\u0000\u0000\u0132\u050e\u0001\u0000"+ - "\u0000\u0000\u0134\u0512\u0001\u0000\u0000\u0000\u0136\u0516\u0001\u0000"+ - "\u0000\u0000\u0138\u051a\u0001\u0000\u0000\u0000\u013a\u051f\u0001\u0000"+ - "\u0000\u0000\u013c\u0524\u0001\u0000\u0000\u0000\u013e\u0528\u0001\u0000"+ - "\u0000\u0000\u0140\u052c\u0001\u0000\u0000\u0000\u0142\u0530\u0001\u0000"+ - "\u0000\u0000\u0144\u0535\u0001\u0000\u0000\u0000\u0146\u0539\u0001\u0000"+ - "\u0000\u0000\u0148\u053e\u0001\u0000\u0000\u0000\u014a\u0543\u0001\u0000"+ - "\u0000\u0000\u014c\u0547\u0001\u0000\u0000\u0000\u014e\u054b\u0001\u0000"+ - "\u0000\u0000\u0150\u054f\u0001\u0000\u0000\u0000\u0152\u0553\u0001\u0000"+ - "\u0000\u0000\u0154\u0557\u0001\u0000\u0000\u0000\u0156\u055c\u0001\u0000"+ - "\u0000\u0000\u0158\u0561\u0001\u0000\u0000\u0000\u015a\u0565\u0001\u0000"+ - "\u0000\u0000\u015c\u0569\u0001\u0000\u0000\u0000\u015e\u056d\u0001\u0000"+ - "\u0000\u0000\u0160\u0572\u0001\u0000\u0000\u0000\u0162\u057b\u0001\u0000"+ - "\u0000\u0000\u0164\u057f\u0001\u0000\u0000\u0000\u0166\u0583\u0001\u0000"+ - "\u0000\u0000\u0168\u0587\u0001\u0000\u0000\u0000\u016a\u058b\u0001\u0000"+ - "\u0000\u0000\u016c\u0590\u0001\u0000\u0000\u0000\u016e\u0594\u0001\u0000"+ - "\u0000\u0000\u0170\u0598\u0001\u0000\u0000\u0000\u0172\u059c\u0001\u0000"+ - "\u0000\u0000\u0174\u05a1\u0001\u0000\u0000\u0000\u0176\u05a5\u0001\u0000"+ - "\u0000\u0000\u0178\u05a9\u0001\u0000\u0000\u0000\u017a\u05ad\u0001\u0000"+ - "\u0000\u0000\u017c\u05b1\u0001\u0000\u0000\u0000\u017e\u05b5\u0001\u0000"+ - "\u0000\u0000\u0180\u05bb\u0001\u0000\u0000\u0000\u0182\u05bf\u0001\u0000"+ - "\u0000\u0000\u0184\u05c3\u0001\u0000\u0000\u0000\u0186\u05c7\u0001\u0000"+ - "\u0000\u0000\u0188\u05cb\u0001\u0000\u0000\u0000\u018a\u05cf\u0001\u0000"+ - "\u0000\u0000\u018c\u05d3\u0001\u0000\u0000\u0000\u018e\u05d8\u0001\u0000"+ - "\u0000\u0000\u0190\u05dc\u0001\u0000\u0000\u0000\u0192\u05e0\u0001\u0000"+ - "\u0000\u0000\u0194\u05e6\u0001\u0000\u0000\u0000\u0196\u05ef\u0001\u0000"+ - "\u0000\u0000\u0198\u05f3\u0001\u0000\u0000\u0000\u019a\u05f7\u0001\u0000"+ - "\u0000\u0000\u019c\u05fb\u0001\u0000\u0000\u0000\u019e\u05ff\u0001\u0000"+ - "\u0000\u0000\u01a0\u0603\u0001\u0000\u0000\u0000\u01a2\u0607\u0001\u0000"+ - "\u0000\u0000\u01a4\u060b\u0001\u0000\u0000\u0000\u01a6\u060f\u0001\u0000"+ - "\u0000\u0000\u01a8\u0614\u0001\u0000\u0000\u0000\u01aa\u061a\u0001\u0000"+ - "\u0000\u0000\u01ac\u0620\u0001\u0000\u0000\u0000\u01ae\u0624\u0001\u0000"+ - "\u0000\u0000\u01b0\u0628\u0001\u0000\u0000\u0000\u01b2\u062c\u0001\u0000"+ - "\u0000\u0000\u01b4\u0632\u0001\u0000\u0000\u0000\u01b6\u0638\u0001\u0000"+ - "\u0000\u0000\u01b8\u063c\u0001\u0000\u0000\u0000\u01ba\u0640\u0001\u0000"+ - "\u0000\u0000\u01bc\u0644\u0001\u0000\u0000\u0000\u01be\u064a\u0001\u0000"+ - "\u0000\u0000\u01c0\u0650\u0001\u0000\u0000\u0000\u01c2\u0656\u0001\u0000"+ - "\u0000\u0000\u01c4\u01c5\u0007\u0000\u0000\u0000\u01c5\u01c6\u0007\u0001"+ - 
"\u0000\u0000\u01c6\u01c7\u0007\u0002\u0000\u0000\u01c7\u01c8\u0007\u0002"+ - "\u0000\u0000\u01c8\u01c9\u0007\u0003\u0000\u0000\u01c9\u01ca\u0007\u0004"+ - "\u0000\u0000\u01ca\u01cb\u0007\u0005\u0000\u0000\u01cb\u01cc\u0001\u0000"+ - "\u0000\u0000\u01cc\u01cd\u0006\u0000\u0000\u0000\u01cd\u0011\u0001\u0000"+ - "\u0000\u0000\u01ce\u01cf\u0007\u0000\u0000\u0000\u01cf\u01d0\u0007\u0006"+ - "\u0000\u0000\u01d0\u01d1\u0007\u0007\u0000\u0000\u01d1\u01d2\u0007\b\u0000"+ - "\u0000\u01d2\u01d3\u0001\u0000\u0000\u0000\u01d3\u01d4\u0006\u0001\u0001"+ - "\u0000\u01d4\u0013\u0001\u0000\u0000\u0000\u01d5\u01d6\u0007\u0003\u0000"+ - "\u0000\u01d6\u01d7\u0007\t\u0000\u0000\u01d7\u01d8\u0007\u0006\u0000\u0000"+ - "\u01d8\u01d9\u0007\u0001\u0000\u0000\u01d9\u01da\u0007\u0004\u0000\u0000"+ - "\u01da\u01db\u0007\n\u0000\u0000\u01db\u01dc\u0001\u0000\u0000\u0000\u01dc"+ - "\u01dd\u0006\u0002\u0002\u0000\u01dd\u0015\u0001\u0000\u0000\u0000\u01de"+ - "\u01df\u0007\u0003\u0000\u0000\u01df\u01e0\u0007\u000b\u0000\u0000\u01e0"+ - "\u01e1\u0007\f\u0000\u0000\u01e1\u01e2\u0007\r\u0000\u0000\u01e2\u01e3"+ - "\u0001\u0000\u0000\u0000\u01e3\u01e4\u0006\u0003\u0000\u0000\u01e4\u0017"+ - "\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0003\u0000\u0000\u01e6\u01e7"+ - "\u0007\u000e\u0000\u0000\u01e7\u01e8\u0007\b\u0000\u0000\u01e8\u01e9\u0007"+ - "\r\u0000\u0000\u01e9\u01ea\u0007\f\u0000\u0000\u01ea\u01eb\u0007\u0001"+ - "\u0000\u0000\u01eb\u01ec\u0007\t\u0000\u0000\u01ec\u01ed\u0001\u0000\u0000"+ - "\u0000\u01ed\u01ee\u0006\u0004\u0003\u0000\u01ee\u0019\u0001\u0000\u0000"+ - "\u0000\u01ef\u01f0\u0007\u000f\u0000\u0000\u01f0\u01f1\u0007\u0006\u0000"+ - "\u0000\u01f1\u01f2\u0007\u0007\u0000\u0000\u01f2\u01f3\u0007\u0010\u0000"+ - "\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000\u01f4\u01f5\u0006\u0005\u0004"+ - "\u0000\u01f5\u001b\u0001\u0000\u0000\u0000\u01f6\u01f7\u0007\u0011\u0000"+ - "\u0000\u01f7\u01f8\u0007\u0006\u0000\u0000\u01f8\u01f9\u0007\u0007\u0000"+ - "\u0000\u01f9\u01fa\u0007\u0012\u0000\u0000\u01fa\u01fb\u0001\u0000\u0000"+ - "\u0000\u01fb\u01fc\u0006\u0006\u0000\u0000\u01fc\u001d\u0001\u0000\u0000"+ - "\u0000\u01fd\u01fe\u0007\u0012\u0000\u0000\u01fe\u01ff\u0007\u0003\u0000"+ - "\u0000\u01ff\u0200\u0007\u0003\u0000\u0000\u0200\u0201\u0007\b\u0000\u0000"+ - "\u0201\u0202\u0001\u0000\u0000\u0000\u0202\u0203\u0006\u0007\u0001\u0000"+ - "\u0203\u001f\u0001\u0000\u0000\u0000\u0204\u0205\u0007\r\u0000\u0000\u0205"+ - "\u0206\u0007\u0001\u0000\u0000\u0206\u0207\u0007\u0010\u0000\u0000\u0207"+ - "\u0208\u0007\u0001\u0000\u0000\u0208\u0209\u0007\u0005\u0000\u0000\u0209"+ - "\u020a\u0001\u0000\u0000\u0000\u020a\u020b\u0006\b\u0000\u0000\u020b!"+ - "\u0001\u0000\u0000\u0000\u020c\u020d\u0007\u0010\u0000\u0000\u020d\u020e"+ - "\u0007\u000b\u0000\u0000\u020e\u020f\u0005_\u0000\u0000\u020f\u0210\u0007"+ - "\u0003\u0000\u0000\u0210\u0211\u0007\u000e\u0000\u0000\u0211\u0212\u0007"+ - "\b\u0000\u0000\u0212\u0213\u0007\f\u0000\u0000\u0213\u0214\u0007\t\u0000"+ - "\u0000\u0214\u0215\u0007\u0000\u0000\u0000\u0215\u0216\u0001\u0000\u0000"+ - "\u0000\u0216\u0217\u0006\t\u0005\u0000\u0217#\u0001\u0000\u0000\u0000"+ - "\u0218\u0219\u0007\u0006\u0000\u0000\u0219\u021a\u0007\u0003\u0000\u0000"+ - "\u021a\u021b\u0007\t\u0000\u0000\u021b\u021c\u0007\f\u0000\u0000\u021c"+ - "\u021d\u0007\u0010\u0000\u0000\u021d\u021e\u0007\u0003\u0000\u0000\u021e"+ - "\u021f\u0001\u0000\u0000\u0000\u021f\u0220\u0006\n\u0006\u0000\u0220%"+ - "\u0001\u0000\u0000\u0000\u0221\u0222\u0007\u0006\u0000\u0000\u0222\u0223"+ - 
"\u0007\u0007\u0000\u0000\u0223\u0224\u0007\u0013\u0000\u0000\u0224\u0225"+ - "\u0001\u0000\u0000\u0000\u0225\u0226\u0006\u000b\u0000\u0000\u0226\'\u0001"+ - "\u0000\u0000\u0000\u0227\u0228\u0007\u0002\u0000\u0000\u0228\u0229\u0007"+ - "\n\u0000\u0000\u0229\u022a\u0007\u0007\u0000\u0000\u022a\u022b\u0007\u0013"+ - "\u0000\u0000\u022b\u022c\u0001\u0000\u0000\u0000\u022c\u022d\u0006\f\u0007"+ - "\u0000\u022d)\u0001\u0000\u0000\u0000\u022e\u022f\u0007\u0002\u0000\u0000"+ - "\u022f\u0230\u0007\u0007\u0000\u0000\u0230\u0231\u0007\u0006\u0000\u0000"+ - "\u0231\u0232\u0007\u0005\u0000\u0000\u0232\u0233\u0001\u0000\u0000\u0000"+ - "\u0233\u0234\u0006\r\u0000\u0000\u0234+\u0001\u0000\u0000\u0000\u0235"+ - "\u0236\u0007\u0002\u0000\u0000\u0236\u0237\u0007\u0005\u0000\u0000\u0237"+ - "\u0238\u0007\f\u0000\u0000\u0238\u0239\u0007\u0005\u0000\u0000\u0239\u023a"+ - "\u0007\u0002\u0000\u0000\u023a\u023b\u0001\u0000\u0000\u0000\u023b\u023c"+ - "\u0006\u000e\u0000\u0000\u023c-\u0001\u0000\u0000\u0000\u023d\u023e\u0007"+ - "\u0013\u0000\u0000\u023e\u023f\u0007\n\u0000\u0000\u023f\u0240\u0007\u0003"+ - "\u0000\u0000\u0240\u0241\u0007\u0006\u0000\u0000\u0241\u0242\u0007\u0003"+ - "\u0000\u0000\u0242\u0243\u0001\u0000\u0000\u0000\u0243\u0244\u0006\u000f"+ - "\u0000\u0000\u0244/\u0001\u0000\u0000\u0000\u0245\u0246\u0004\u0010\u0000"+ - "\u0000\u0246\u0247\u0007\u0001\u0000\u0000\u0247\u0248\u0007\t\u0000\u0000"+ - "\u0248\u0249\u0007\r\u0000\u0000\u0249\u024a\u0007\u0001\u0000\u0000\u024a"+ - "\u024b\u0007\t\u0000\u0000\u024b\u024c\u0007\u0003\u0000\u0000\u024c\u024d"+ - "\u0007\u0002\u0000\u0000\u024d\u024e\u0007\u0005\u0000\u0000\u024e\u024f"+ - "\u0007\f\u0000\u0000\u024f\u0250\u0007\u0005\u0000\u0000\u0250\u0251\u0007"+ - "\u0002\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0253\u0006"+ - "\u0010\u0000\u0000\u02531\u0001\u0000\u0000\u0000\u0254\u0255\u0004\u0011"+ - "\u0001\u0000\u0255\u0256\u0007\r\u0000\u0000\u0256\u0257\u0007\u0007\u0000"+ - "\u0000\u0257\u0258\u0007\u0007\u0000\u0000\u0258\u0259\u0007\u0012\u0000"+ - "\u0000\u0259\u025a\u0007\u0014\u0000\u0000\u025a\u025b\u0007\b\u0000\u0000"+ - "\u025b\u025c\u0005_\u0000\u0000\u025c\u025d\u0005\u8001\uf414\u0000\u0000"+ - "\u025d\u025e\u0001\u0000\u0000\u0000\u025e\u025f\u0006\u0011\b\u0000\u025f"+ - "3\u0001\u0000\u0000\u0000\u0260\u0261\u0004\u0012\u0002\u0000\u0261\u0262"+ - "\u0007\u0010\u0000\u0000\u0262\u0263\u0007\u0003\u0000\u0000\u0263\u0264"+ - "\u0007\u0005\u0000\u0000\u0264\u0265\u0007\u0006\u0000\u0000\u0265\u0266"+ - "\u0007\u0001\u0000\u0000\u0266\u0267\u0007\u0004\u0000\u0000\u0267\u0268"+ - "\u0007\u0002\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u026a"+ - "\u0006\u0012\t\u0000\u026a5\u0001\u0000\u0000\u0000\u026b\u026c\u0004"+ - "\u0013\u0003\u0000\u026c\u026d\u0007\u0015\u0000\u0000\u026d\u026e\u0007"+ - "\u0007\u0000\u0000\u026e\u026f\u0007\u0001\u0000\u0000\u026f\u0270\u0007"+ - "\t\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000\u0271\u0272\u0006\u0013"+ - "\n\u0000\u02727\u0001\u0000\u0000\u0000\u0273\u0274\u0004\u0014\u0004"+ - "\u0000\u0274\u0275\u0007\u000f\u0000\u0000\u0275\u0276\u0007\u0014\u0000"+ - "\u0000\u0276\u0277\u0007\r\u0000\u0000\u0277\u0278\u0007\r\u0000\u0000"+ - "\u0278\u0279\u0001\u0000\u0000\u0000\u0279\u027a\u0006\u0014\n\u0000\u027a"+ - "9\u0001\u0000\u0000\u0000\u027b\u027c\u0004\u0015\u0005\u0000\u027c\u027d"+ - "\u0007\r\u0000\u0000\u027d\u027e\u0007\u0003\u0000\u0000\u027e\u027f\u0007"+ - "\u000f\u0000\u0000\u027f\u0280\u0007\u0005\u0000\u0000\u0280\u0281\u0001"+ - 
"\u0000\u0000\u0000\u0281\u0282\u0006\u0015\n\u0000\u0282;\u0001\u0000"+ - "\u0000\u0000\u0283\u0284\u0004\u0016\u0006\u0000\u0284\u0285\u0007\u0006"+ - "\u0000\u0000\u0285\u0286\u0007\u0001\u0000\u0000\u0286\u0287\u0007\u0011"+ - "\u0000\u0000\u0287\u0288\u0007\n\u0000\u0000\u0288\u0289\u0007\u0005\u0000"+ - "\u0000\u0289\u028a\u0001\u0000\u0000\u0000\u028a\u028b\u0006\u0016\n\u0000"+ - "\u028b=\u0001\u0000\u0000\u0000\u028c\u028d\u0004\u0017\u0007\u0000\u028d"+ - "\u028e\u0007\r\u0000\u0000\u028e\u028f\u0007\u0007\u0000\u0000\u028f\u0290"+ - "\u0007\u0007\u0000\u0000\u0290\u0291\u0007\u0012\u0000\u0000\u0291\u0292"+ - "\u0007\u0014\u0000\u0000\u0292\u0293\u0007\b\u0000\u0000\u0293\u0294\u0001"+ - "\u0000\u0000\u0000\u0294\u0295\u0006\u0017\n\u0000\u0295?\u0001\u0000"+ - "\u0000\u0000\u0296\u0298\b\u0016\u0000\u0000\u0297\u0296\u0001\u0000\u0000"+ - "\u0000\u0298\u0299\u0001\u0000\u0000\u0000\u0299\u0297\u0001\u0000\u0000"+ - "\u0000\u0299\u029a\u0001\u0000\u0000\u0000\u029a\u029b\u0001\u0000\u0000"+ - "\u0000\u029b\u029c\u0006\u0018\u0000\u0000\u029cA\u0001\u0000\u0000\u0000"+ - "\u029d\u029e\u0005/\u0000\u0000\u029e\u029f\u0005/\u0000\u0000\u029f\u02a3"+ - "\u0001\u0000\u0000\u0000\u02a0\u02a2\b\u0017\u0000\u0000\u02a1\u02a0\u0001"+ - "\u0000\u0000\u0000\u02a2\u02a5\u0001\u0000\u0000\u0000\u02a3\u02a1\u0001"+ - "\u0000\u0000\u0000\u02a3\u02a4\u0001\u0000\u0000\u0000\u02a4\u02a7\u0001"+ - "\u0000\u0000\u0000\u02a5\u02a3\u0001\u0000\u0000\u0000\u02a6\u02a8\u0005"+ - "\r\u0000\u0000\u02a7\u02a6\u0001\u0000\u0000\u0000\u02a7\u02a8\u0001\u0000"+ - "\u0000\u0000\u02a8\u02aa\u0001\u0000\u0000\u0000\u02a9\u02ab\u0005\n\u0000"+ - "\u0000\u02aa\u02a9\u0001\u0000\u0000\u0000\u02aa\u02ab\u0001\u0000\u0000"+ - "\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ac\u02ad\u0006\u0019\u000b"+ - "\u0000\u02adC\u0001\u0000\u0000\u0000\u02ae\u02af\u0005/\u0000\u0000\u02af"+ - "\u02b0\u0005*\u0000\u0000\u02b0\u02b5\u0001\u0000\u0000\u0000\u02b1\u02b4"+ - "\u0003D\u001a\u0000\u02b2\u02b4\t\u0000\u0000\u0000\u02b3\u02b1\u0001"+ - "\u0000\u0000\u0000\u02b3\u02b2\u0001\u0000\u0000\u0000\u02b4\u02b7\u0001"+ - "\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000\u0000\u02b5\u02b3\u0001"+ - "\u0000\u0000\u0000\u02b6\u02b8\u0001\u0000\u0000\u0000\u02b7\u02b5\u0001"+ - "\u0000\u0000\u0000\u02b8\u02b9\u0005*\u0000\u0000\u02b9\u02ba\u0005/\u0000"+ - "\u0000\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb\u02bc\u0006\u001a\u000b"+ - "\u0000\u02bcE\u0001\u0000\u0000\u0000\u02bd\u02bf\u0007\u0018\u0000\u0000"+ - "\u02be\u02bd\u0001\u0000\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000\u0000"+ - "\u02c0\u02be\u0001\u0000\u0000\u0000\u02c0\u02c1\u0001\u0000\u0000\u0000"+ - "\u02c1\u02c2\u0001\u0000\u0000\u0000\u02c2\u02c3\u0006\u001b\u000b\u0000"+ - "\u02c3G\u0001\u0000\u0000\u0000\u02c4\u02c5\u0005|\u0000\u0000\u02c5\u02c6"+ - "\u0001\u0000\u0000\u0000\u02c6\u02c7\u0006\u001c\f\u0000\u02c7I\u0001"+ - "\u0000\u0000\u0000\u02c8\u02c9\u0007\u0019\u0000\u0000\u02c9K\u0001\u0000"+ - "\u0000\u0000\u02ca\u02cb\u0007\u001a\u0000\u0000\u02cbM\u0001\u0000\u0000"+ - "\u0000\u02cc\u02cd\u0005\\\u0000\u0000\u02cd\u02ce\u0007\u001b\u0000\u0000"+ - "\u02ceO\u0001\u0000\u0000\u0000\u02cf\u02d0\b\u001c\u0000\u0000\u02d0"+ - "Q\u0001\u0000\u0000\u0000\u02d1\u02d3\u0007\u0003\u0000\u0000\u02d2\u02d4"+ - "\u0007\u001d\u0000\u0000\u02d3\u02d2\u0001\u0000\u0000\u0000\u02d3\u02d4"+ - "\u0001\u0000\u0000\u0000\u02d4\u02d6\u0001\u0000\u0000\u0000\u02d5\u02d7"+ - "\u0003J\u001d\u0000\u02d6\u02d5\u0001\u0000\u0000\u0000\u02d7\u02d8\u0001"+ - 
"\u0000\u0000\u0000\u02d8\u02d6\u0001\u0000\u0000\u0000\u02d8\u02d9\u0001"+ - "\u0000\u0000\u0000\u02d9S\u0001\u0000\u0000\u0000\u02da\u02db\u0005@\u0000"+ - "\u0000\u02dbU\u0001\u0000\u0000\u0000\u02dc\u02dd\u0005`\u0000\u0000\u02dd"+ - "W\u0001\u0000\u0000\u0000\u02de\u02e2\b\u001e\u0000\u0000\u02df\u02e0"+ - "\u0005`\u0000\u0000\u02e0\u02e2\u0005`\u0000\u0000\u02e1\u02de\u0001\u0000"+ - "\u0000\u0000\u02e1\u02df\u0001\u0000\u0000\u0000\u02e2Y\u0001\u0000\u0000"+ - "\u0000\u02e3\u02e4\u0005_\u0000\u0000\u02e4[\u0001\u0000\u0000\u0000\u02e5"+ - "\u02e9\u0003L\u001e\u0000\u02e6\u02e9\u0003J\u001d\u0000\u02e7\u02e9\u0003"+ - "Z%\u0000\u02e8\u02e5\u0001\u0000\u0000\u0000\u02e8\u02e6\u0001\u0000\u0000"+ - "\u0000\u02e8\u02e7\u0001\u0000\u0000\u0000\u02e9]\u0001\u0000\u0000\u0000"+ - "\u02ea\u02ef\u0005\"\u0000\u0000\u02eb\u02ee\u0003N\u001f\u0000\u02ec"+ - "\u02ee\u0003P \u0000\u02ed\u02eb\u0001\u0000\u0000\u0000\u02ed\u02ec\u0001"+ - "\u0000\u0000\u0000\u02ee\u02f1\u0001\u0000\u0000\u0000\u02ef\u02ed\u0001"+ - "\u0000\u0000\u0000\u02ef\u02f0\u0001\u0000\u0000\u0000\u02f0\u02f2\u0001"+ - "\u0000\u0000\u0000\u02f1\u02ef\u0001\u0000\u0000\u0000\u02f2\u0308\u0005"+ + "\u0000\u0010\u01c2\u0001\u0000\u0000\u0000\u0012\u01cc\u0001\u0000\u0000"+ + "\u0000\u0014\u01d3\u0001\u0000\u0000\u0000\u0016\u01dc\u0001\u0000\u0000"+ + "\u0000\u0018\u01e3\u0001\u0000\u0000\u0000\u001a\u01ed\u0001\u0000\u0000"+ + "\u0000\u001c\u01f4\u0001\u0000\u0000\u0000\u001e\u01fb\u0001\u0000\u0000"+ + "\u0000 \u0202\u0001\u0000\u0000\u0000\"\u020a\u0001\u0000\u0000\u0000"+ + "$\u0216\u0001\u0000\u0000\u0000&\u021f\u0001\u0000\u0000\u0000(\u0225"+ + "\u0001\u0000\u0000\u0000*\u022c\u0001\u0000\u0000\u0000,\u0233\u0001\u0000"+ + "\u0000\u0000.\u023b\u0001\u0000\u0000\u00000\u0243\u0001\u0000\u0000\u0000"+ + "2\u024c\u0001\u0000\u0000\u00004\u025b\u0001\u0000\u0000\u00006\u0267"+ + "\u0001\u0000\u0000\u00008\u0272\u0001\u0000\u0000\u0000:\u027a\u0001\u0000"+ + "\u0000\u0000<\u0282\u0001\u0000\u0000\u0000>\u028c\u0001\u0000\u0000\u0000"+ + "@\u0292\u0001\u0000\u0000\u0000B\u02a3\u0001\u0000\u0000\u0000D\u02b3"+ + "\u0001\u0000\u0000\u0000F\u02b9\u0001\u0000\u0000\u0000H\u02bd\u0001\u0000"+ + "\u0000\u0000J\u02bf\u0001\u0000\u0000\u0000L\u02c1\u0001\u0000\u0000\u0000"+ + "N\u02c4\u0001\u0000\u0000\u0000P\u02c6\u0001\u0000\u0000\u0000R\u02cf"+ + "\u0001\u0000\u0000\u0000T\u02d1\u0001\u0000\u0000\u0000V\u02d6\u0001\u0000"+ + "\u0000\u0000X\u02d8\u0001\u0000\u0000\u0000Z\u02dd\u0001\u0000\u0000\u0000"+ + "\\\u02fc\u0001\u0000\u0000\u0000^\u02ff\u0001\u0000\u0000\u0000`\u032d"+ + "\u0001\u0000\u0000\u0000b\u032f\u0001\u0000\u0000\u0000d\u0332\u0001\u0000"+ + "\u0000\u0000f\u0336\u0001\u0000\u0000\u0000h\u033a\u0001\u0000\u0000\u0000"+ + "j\u033c\u0001\u0000\u0000\u0000l\u033f\u0001\u0000\u0000\u0000n\u0341"+ + "\u0001\u0000\u0000\u0000p\u0343\u0001\u0000\u0000\u0000r\u0348\u0001\u0000"+ + "\u0000\u0000t\u034a\u0001\u0000\u0000\u0000v\u0350\u0001\u0000\u0000\u0000"+ + "x\u0356\u0001\u0000\u0000\u0000z\u0359\u0001\u0000\u0000\u0000|\u035c"+ + "\u0001\u0000\u0000\u0000~\u0361\u0001\u0000\u0000\u0000\u0080\u0366\u0001"+ + "\u0000\u0000\u0000\u0082\u0368\u0001\u0000\u0000\u0000\u0084\u036c\u0001"+ + "\u0000\u0000\u0000\u0086\u0371\u0001\u0000\u0000\u0000\u0088\u0377\u0001"+ + "\u0000\u0000\u0000\u008a\u037a\u0001\u0000\u0000\u0000\u008c\u037c\u0001"+ + "\u0000\u0000\u0000\u008e\u0382\u0001\u0000\u0000\u0000\u0090\u0384\u0001"+ + "\u0000\u0000\u0000\u0092\u0389\u0001\u0000\u0000\u0000\u0094\u038c\u0001"+ + 
"\u0000\u0000\u0000\u0096\u038f\u0001\u0000\u0000\u0000\u0098\u0392\u0001"+ + "\u0000\u0000\u0000\u009a\u0394\u0001\u0000\u0000\u0000\u009c\u0397\u0001"+ + "\u0000\u0000\u0000\u009e\u0399\u0001\u0000\u0000\u0000\u00a0\u039c\u0001"+ + "\u0000\u0000\u0000\u00a2\u039e\u0001\u0000\u0000\u0000\u00a4\u03a0\u0001"+ + "\u0000\u0000\u0000\u00a6\u03a2\u0001\u0000\u0000\u0000\u00a8\u03a4\u0001"+ + "\u0000\u0000\u0000\u00aa\u03a6\u0001\u0000\u0000\u0000\u00ac\u03a8\u0001"+ + "\u0000\u0000\u0000\u00ae\u03aa\u0001\u0000\u0000\u0000\u00b0\u03bf\u0001"+ + "\u0000\u0000\u0000\u00b2\u03c1\u0001\u0000\u0000\u0000\u00b4\u03c6\u0001"+ + "\u0000\u0000\u0000\u00b6\u03db\u0001\u0000\u0000\u0000\u00b8\u03dd\u0001"+ + "\u0000\u0000\u0000\u00ba\u03e5\u0001\u0000\u0000\u0000\u00bc\u03e7\u0001"+ + "\u0000\u0000\u0000\u00be\u03eb\u0001\u0000\u0000\u0000\u00c0\u03ef\u0001"+ + "\u0000\u0000\u0000\u00c2\u03f3\u0001\u0000\u0000\u0000\u00c4\u03f8\u0001"+ + "\u0000\u0000\u0000\u00c6\u03fd\u0001\u0000\u0000\u0000\u00c8\u0401\u0001"+ + "\u0000\u0000\u0000\u00ca\u0405\u0001\u0000\u0000\u0000\u00cc\u0409\u0001"+ + "\u0000\u0000\u0000\u00ce\u040e\u0001\u0000\u0000\u0000\u00d0\u0412\u0001"+ + "\u0000\u0000\u0000\u00d2\u0416\u0001\u0000\u0000\u0000\u00d4\u041a\u0001"+ + "\u0000\u0000\u0000\u00d6\u041e\u0001\u0000\u0000\u0000\u00d8\u0422\u0001"+ + "\u0000\u0000\u0000\u00da\u042e\u0001\u0000\u0000\u0000\u00dc\u0431\u0001"+ + "\u0000\u0000\u0000\u00de\u0435\u0001\u0000\u0000\u0000\u00e0\u0439\u0001"+ + "\u0000\u0000\u0000\u00e2\u043d\u0001\u0000\u0000\u0000\u00e4\u0441\u0001"+ + "\u0000\u0000\u0000\u00e6\u0445\u0001\u0000\u0000\u0000\u00e8\u0449\u0001"+ + "\u0000\u0000\u0000\u00ea\u044e\u0001\u0000\u0000\u0000\u00ec\u0452\u0001"+ + "\u0000\u0000\u0000\u00ee\u0456\u0001\u0000\u0000\u0000\u00f0\u045b\u0001"+ + "\u0000\u0000\u0000\u00f2\u0464\u0001\u0000\u0000\u0000\u00f4\u0479\u0001"+ + "\u0000\u0000\u0000\u00f6\u047d\u0001\u0000\u0000\u0000\u00f8\u0481\u0001"+ + "\u0000\u0000\u0000\u00fa\u0485\u0001\u0000\u0000\u0000\u00fc\u0489\u0001"+ + "\u0000\u0000\u0000\u00fe\u048d\u0001\u0000\u0000\u0000\u0100\u0492\u0001"+ + "\u0000\u0000\u0000\u0102\u0496\u0001\u0000\u0000\u0000\u0104\u049a\u0001"+ + "\u0000\u0000\u0000\u0106\u049e\u0001\u0000\u0000\u0000\u0108\u04a3\u0001"+ + "\u0000\u0000\u0000\u010a\u04a8\u0001\u0000\u0000\u0000\u010c\u04ab\u0001"+ + "\u0000\u0000\u0000\u010e\u04af\u0001\u0000\u0000\u0000\u0110\u04b3\u0001"+ + "\u0000\u0000\u0000\u0112\u04b7\u0001\u0000\u0000\u0000\u0114\u04bb\u0001"+ + "\u0000\u0000\u0000\u0116\u04c0\u0001\u0000\u0000\u0000\u0118\u04c5\u0001"+ + "\u0000\u0000\u0000\u011a\u04ca\u0001\u0000\u0000\u0000\u011c\u04d1\u0001"+ + "\u0000\u0000\u0000\u011e\u04da\u0001\u0000\u0000\u0000\u0120\u04e1\u0001"+ + "\u0000\u0000\u0000\u0122\u04e5\u0001\u0000\u0000\u0000\u0124\u04e9\u0001"+ + "\u0000\u0000\u0000\u0126\u04ed\u0001\u0000\u0000\u0000\u0128\u04f1\u0001"+ + "\u0000\u0000\u0000\u012a\u04f7\u0001\u0000\u0000\u0000\u012c\u04fb\u0001"+ + "\u0000\u0000\u0000\u012e\u04ff\u0001\u0000\u0000\u0000\u0130\u0503\u0001"+ + "\u0000\u0000\u0000\u0132\u0507\u0001\u0000\u0000\u0000\u0134\u050b\u0001"+ + "\u0000\u0000\u0000\u0136\u050f\u0001\u0000\u0000\u0000\u0138\u0514\u0001"+ + "\u0000\u0000\u0000\u013a\u0519\u0001\u0000\u0000\u0000\u013c\u051d\u0001"+ + "\u0000\u0000\u0000\u013e\u0521\u0001\u0000\u0000\u0000\u0140\u0525\u0001"+ + "\u0000\u0000\u0000\u0142\u052a\u0001\u0000\u0000\u0000\u0144\u052e\u0001"+ + "\u0000\u0000\u0000\u0146\u0533\u0001\u0000\u0000\u0000\u0148\u0538\u0001"+ + 
"\u0000\u0000\u0000\u014a\u053c\u0001\u0000\u0000\u0000\u014c\u0540\u0001"+ + "\u0000\u0000\u0000\u014e\u0544\u0001\u0000\u0000\u0000\u0150\u0548\u0001"+ + "\u0000\u0000\u0000\u0152\u054c\u0001\u0000\u0000\u0000\u0154\u0551\u0001"+ + "\u0000\u0000\u0000\u0156\u0556\u0001\u0000\u0000\u0000\u0158\u055a\u0001"+ + "\u0000\u0000\u0000\u015a\u055e\u0001\u0000\u0000\u0000\u015c\u0562\u0001"+ + "\u0000\u0000\u0000\u015e\u0567\u0001\u0000\u0000\u0000\u0160\u0570\u0001"+ + "\u0000\u0000\u0000\u0162\u0574\u0001\u0000\u0000\u0000\u0164\u0578\u0001"+ + "\u0000\u0000\u0000\u0166\u057c\u0001\u0000\u0000\u0000\u0168\u0580\u0001"+ + "\u0000\u0000\u0000\u016a\u0585\u0001\u0000\u0000\u0000\u016c\u0589\u0001"+ + "\u0000\u0000\u0000\u016e\u058d\u0001\u0000\u0000\u0000\u0170\u0591\u0001"+ + "\u0000\u0000\u0000\u0172\u0596\u0001\u0000\u0000\u0000\u0174\u059a\u0001"+ + "\u0000\u0000\u0000\u0176\u059e\u0001\u0000\u0000\u0000\u0178\u05a2\u0001"+ + "\u0000\u0000\u0000\u017a\u05a6\u0001\u0000\u0000\u0000\u017c\u05aa\u0001"+ + "\u0000\u0000\u0000\u017e\u05b0\u0001\u0000\u0000\u0000\u0180\u05b4\u0001"+ + "\u0000\u0000\u0000\u0182\u05b8\u0001\u0000\u0000\u0000\u0184\u05bc\u0001"+ + "\u0000\u0000\u0000\u0186\u05c0\u0001\u0000\u0000\u0000\u0188\u05c4\u0001"+ + "\u0000\u0000\u0000\u018a\u05c8\u0001\u0000\u0000\u0000\u018c\u05cd\u0001"+ + "\u0000\u0000\u0000\u018e\u05d2\u0001\u0000\u0000\u0000\u0190\u05d6\u0001"+ + "\u0000\u0000\u0000\u0192\u05dc\u0001\u0000\u0000\u0000\u0194\u05e5\u0001"+ + "\u0000\u0000\u0000\u0196\u05e9\u0001\u0000\u0000\u0000\u0198\u05ed\u0001"+ + "\u0000\u0000\u0000\u019a\u05f1\u0001\u0000\u0000\u0000\u019c\u05f5\u0001"+ + "\u0000\u0000\u0000\u019e\u05f9\u0001\u0000\u0000\u0000\u01a0\u05fd\u0001"+ + "\u0000\u0000\u0000\u01a2\u0601\u0001\u0000\u0000\u0000\u01a4\u0605\u0001"+ + "\u0000\u0000\u0000\u01a6\u060a\u0001\u0000\u0000\u0000\u01a8\u0610\u0001"+ + "\u0000\u0000\u0000\u01aa\u0616\u0001\u0000\u0000\u0000\u01ac\u061a\u0001"+ + "\u0000\u0000\u0000\u01ae\u061e\u0001\u0000\u0000\u0000\u01b0\u0622\u0001"+ + "\u0000\u0000\u0000\u01b2\u0628\u0001\u0000\u0000\u0000\u01b4\u062e\u0001"+ + "\u0000\u0000\u0000\u01b6\u0632\u0001\u0000\u0000\u0000\u01b8\u0636\u0001"+ + "\u0000\u0000\u0000\u01ba\u063a\u0001\u0000\u0000\u0000\u01bc\u0640\u0001"+ + "\u0000\u0000\u0000\u01be\u0646\u0001\u0000\u0000\u0000\u01c0\u064c\u0001"+ + "\u0000\u0000\u0000\u01c2\u01c3\u0007\u0000\u0000\u0000\u01c3\u01c4\u0007"+ + "\u0001\u0000\u0000\u01c4\u01c5\u0007\u0002\u0000\u0000\u01c5\u01c6\u0007"+ + "\u0002\u0000\u0000\u01c6\u01c7\u0007\u0003\u0000\u0000\u01c7\u01c8\u0007"+ + "\u0004\u0000\u0000\u01c8\u01c9\u0007\u0005\u0000\u0000\u01c9\u01ca\u0001"+ + "\u0000\u0000\u0000\u01ca\u01cb\u0006\u0000\u0000\u0000\u01cb\u0011\u0001"+ + "\u0000\u0000\u0000\u01cc\u01cd\u0007\u0000\u0000\u0000\u01cd\u01ce\u0007"+ + "\u0006\u0000\u0000\u01ce\u01cf\u0007\u0007\u0000\u0000\u01cf\u01d0\u0007"+ + "\b\u0000\u0000\u01d0\u01d1\u0001\u0000\u0000\u0000\u01d1\u01d2\u0006\u0001"+ + "\u0001\u0000\u01d2\u0013\u0001\u0000\u0000\u0000\u01d3\u01d4\u0007\u0003"+ + "\u0000\u0000\u01d4\u01d5\u0007\t\u0000\u0000\u01d5\u01d6\u0007\u0006\u0000"+ + "\u0000\u01d6\u01d7\u0007\u0001\u0000\u0000\u01d7\u01d8\u0007\u0004\u0000"+ + "\u0000\u01d8\u01d9\u0007\n\u0000\u0000\u01d9\u01da\u0001\u0000\u0000\u0000"+ + "\u01da\u01db\u0006\u0002\u0002\u0000\u01db\u0015\u0001\u0000\u0000\u0000"+ + "\u01dc\u01dd\u0007\u0003\u0000\u0000\u01dd\u01de\u0007\u000b\u0000\u0000"+ + "\u01de\u01df\u0007\f\u0000\u0000\u01df\u01e0\u0007\r\u0000\u0000\u01e0"+ + 
"\u01e1\u0001\u0000\u0000\u0000\u01e1\u01e2\u0006\u0003\u0000\u0000\u01e2"+ + "\u0017\u0001\u0000\u0000\u0000\u01e3\u01e4\u0007\u0003\u0000\u0000\u01e4"+ + "\u01e5\u0007\u000e\u0000\u0000\u01e5\u01e6\u0007\b\u0000\u0000\u01e6\u01e7"+ + "\u0007\r\u0000\u0000\u01e7\u01e8\u0007\f\u0000\u0000\u01e8\u01e9\u0007"+ + "\u0001\u0000\u0000\u01e9\u01ea\u0007\t\u0000\u0000\u01ea\u01eb\u0001\u0000"+ + "\u0000\u0000\u01eb\u01ec\u0006\u0004\u0003\u0000\u01ec\u0019\u0001\u0000"+ + "\u0000\u0000\u01ed\u01ee\u0007\u000f\u0000\u0000\u01ee\u01ef\u0007\u0006"+ + "\u0000\u0000\u01ef\u01f0\u0007\u0007\u0000\u0000\u01f0\u01f1\u0007\u0010"+ + "\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2\u01f3\u0006\u0005"+ + "\u0004\u0000\u01f3\u001b\u0001\u0000\u0000\u0000\u01f4\u01f5\u0007\u0011"+ + "\u0000\u0000\u01f5\u01f6\u0007\u0006\u0000\u0000\u01f6\u01f7\u0007\u0007"+ + "\u0000\u0000\u01f7\u01f8\u0007\u0012\u0000\u0000\u01f8\u01f9\u0001\u0000"+ + "\u0000\u0000\u01f9\u01fa\u0006\u0006\u0000\u0000\u01fa\u001d\u0001\u0000"+ + "\u0000\u0000\u01fb\u01fc\u0007\u0012\u0000\u0000\u01fc\u01fd\u0007\u0003"+ + "\u0000\u0000\u01fd\u01fe\u0007\u0003\u0000\u0000\u01fe\u01ff\u0007\b\u0000"+ + "\u0000\u01ff\u0200\u0001\u0000\u0000\u0000\u0200\u0201\u0006\u0007\u0001"+ + "\u0000\u0201\u001f\u0001\u0000\u0000\u0000\u0202\u0203\u0007\r\u0000\u0000"+ + "\u0203\u0204\u0007\u0001\u0000\u0000\u0204\u0205\u0007\u0010\u0000\u0000"+ + "\u0205\u0206\u0007\u0001\u0000\u0000\u0206\u0207\u0007\u0005\u0000\u0000"+ + "\u0207\u0208\u0001\u0000\u0000\u0000\u0208\u0209\u0006\b\u0000\u0000\u0209"+ + "!\u0001\u0000\u0000\u0000\u020a\u020b\u0007\u0010\u0000\u0000\u020b\u020c"+ + "\u0007\u000b\u0000\u0000\u020c\u020d\u0005_\u0000\u0000\u020d\u020e\u0007"+ + "\u0003\u0000\u0000\u020e\u020f\u0007\u000e\u0000\u0000\u020f\u0210\u0007"+ + "\b\u0000\u0000\u0210\u0211\u0007\f\u0000\u0000\u0211\u0212\u0007\t\u0000"+ + "\u0000\u0212\u0213\u0007\u0000\u0000\u0000\u0213\u0214\u0001\u0000\u0000"+ + "\u0000\u0214\u0215\u0006\t\u0005\u0000\u0215#\u0001\u0000\u0000\u0000"+ + "\u0216\u0217\u0007\u0006\u0000\u0000\u0217\u0218\u0007\u0003\u0000\u0000"+ + "\u0218\u0219\u0007\t\u0000\u0000\u0219\u021a\u0007\f\u0000\u0000\u021a"+ + "\u021b\u0007\u0010\u0000\u0000\u021b\u021c\u0007\u0003\u0000\u0000\u021c"+ + "\u021d\u0001\u0000\u0000\u0000\u021d\u021e\u0006\n\u0006\u0000\u021e%"+ + "\u0001\u0000\u0000\u0000\u021f\u0220\u0007\u0006\u0000\u0000\u0220\u0221"+ + "\u0007\u0007\u0000\u0000\u0221\u0222\u0007\u0013\u0000\u0000\u0222\u0223"+ + "\u0001\u0000\u0000\u0000\u0223\u0224\u0006\u000b\u0000\u0000\u0224\'\u0001"+ + "\u0000\u0000\u0000\u0225\u0226\u0007\u0002\u0000\u0000\u0226\u0227\u0007"+ + "\n\u0000\u0000\u0227\u0228\u0007\u0007\u0000\u0000\u0228\u0229\u0007\u0013"+ + "\u0000\u0000\u0229\u022a\u0001\u0000\u0000\u0000\u022a\u022b\u0006\f\u0007"+ + "\u0000\u022b)\u0001\u0000\u0000\u0000\u022c\u022d\u0007\u0002\u0000\u0000"+ + "\u022d\u022e\u0007\u0007\u0000\u0000\u022e\u022f\u0007\u0006\u0000\u0000"+ + "\u022f\u0230\u0007\u0005\u0000\u0000\u0230\u0231\u0001\u0000\u0000\u0000"+ + "\u0231\u0232\u0006\r\u0000\u0000\u0232+\u0001\u0000\u0000\u0000\u0233"+ + "\u0234\u0007\u0002\u0000\u0000\u0234\u0235\u0007\u0005\u0000\u0000\u0235"+ + "\u0236\u0007\f\u0000\u0000\u0236\u0237\u0007\u0005\u0000\u0000\u0237\u0238"+ + "\u0007\u0002\u0000\u0000\u0238\u0239\u0001\u0000\u0000\u0000\u0239\u023a"+ + "\u0006\u000e\u0000\u0000\u023a-\u0001\u0000\u0000\u0000\u023b\u023c\u0007"+ + "\u0013\u0000\u0000\u023c\u023d\u0007\n\u0000\u0000\u023d\u023e\u0007\u0003"+ + 
"\u0000\u0000\u023e\u023f\u0007\u0006\u0000\u0000\u023f\u0240\u0007\u0003"+ + "\u0000\u0000\u0240\u0241\u0001\u0000\u0000\u0000\u0241\u0242\u0006\u000f"+ + "\u0000\u0000\u0242/\u0001\u0000\u0000\u0000\u0243\u0244\u0007\r\u0000"+ + "\u0000\u0244\u0245\u0007\u0007\u0000\u0000\u0245\u0246\u0007\u0007\u0000"+ + "\u0000\u0246\u0247\u0007\u0012\u0000\u0000\u0247\u0248\u0007\u0014\u0000"+ + "\u0000\u0248\u0249\u0007\b\u0000\u0000\u0249\u024a\u0001\u0000\u0000\u0000"+ + "\u024a\u024b\u0006\u0010\b\u0000\u024b1\u0001\u0000\u0000\u0000\u024c"+ + "\u024d\u0004\u0011\u0000\u0000\u024d\u024e\u0007\u0001\u0000\u0000\u024e"+ + "\u024f\u0007\t\u0000\u0000\u024f\u0250\u0007\r\u0000\u0000\u0250\u0251"+ + "\u0007\u0001\u0000\u0000\u0251\u0252\u0007\t\u0000\u0000\u0252\u0253\u0007"+ + "\u0003\u0000\u0000\u0253\u0254\u0007\u0002\u0000\u0000\u0254\u0255\u0007"+ + "\u0005\u0000\u0000\u0255\u0256\u0007\f\u0000\u0000\u0256\u0257\u0007\u0005"+ + "\u0000\u0000\u0257\u0258\u0007\u0002\u0000\u0000\u0258\u0259\u0001\u0000"+ + "\u0000\u0000\u0259\u025a\u0006\u0011\u0000\u0000\u025a3\u0001\u0000\u0000"+ + "\u0000\u025b\u025c\u0004\u0012\u0001\u0000\u025c\u025d\u0007\r\u0000\u0000"+ + "\u025d\u025e\u0007\u0007\u0000\u0000\u025e\u025f\u0007\u0007\u0000\u0000"+ + "\u025f\u0260\u0007\u0012\u0000\u0000\u0260\u0261\u0007\u0014\u0000\u0000"+ + "\u0261\u0262\u0007\b\u0000\u0000\u0262\u0263\u0005_\u0000\u0000\u0263"+ + "\u0264\u0005\u8001\uf414\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000"+ + "\u0265\u0266\u0006\u0012\t\u0000\u02665\u0001\u0000\u0000\u0000\u0267"+ + "\u0268\u0004\u0013\u0002\u0000\u0268\u0269\u0007\u0010\u0000\u0000\u0269"+ + "\u026a\u0007\u0003\u0000\u0000\u026a\u026b\u0007\u0005\u0000\u0000\u026b"+ + "\u026c\u0007\u0006\u0000\u0000\u026c\u026d\u0007\u0001\u0000\u0000\u026d"+ + "\u026e\u0007\u0004\u0000\u0000\u026e\u026f\u0007\u0002\u0000\u0000\u026f"+ + "\u0270\u0001\u0000\u0000\u0000\u0270\u0271\u0006\u0013\n\u0000\u02717"+ + "\u0001\u0000\u0000\u0000\u0272\u0273\u0004\u0014\u0003\u0000\u0273\u0274"+ + "\u0007\u000f\u0000\u0000\u0274\u0275\u0007\u0014\u0000\u0000\u0275\u0276"+ + "\u0007\r\u0000\u0000\u0276\u0277\u0007\r\u0000\u0000\u0277\u0278\u0001"+ + "\u0000\u0000\u0000\u0278\u0279\u0006\u0014\b\u0000\u02799\u0001\u0000"+ + "\u0000\u0000\u027a\u027b\u0004\u0015\u0004\u0000\u027b\u027c\u0007\r\u0000"+ + "\u0000\u027c\u027d\u0007\u0003\u0000\u0000\u027d\u027e\u0007\u000f\u0000"+ + "\u0000\u027e\u027f\u0007\u0005\u0000\u0000\u027f\u0280\u0001\u0000\u0000"+ + "\u0000\u0280\u0281\u0006\u0015\b\u0000\u0281;\u0001\u0000\u0000\u0000"+ + "\u0282\u0283\u0004\u0016\u0005\u0000\u0283\u0284\u0007\u0006\u0000\u0000"+ + "\u0284\u0285\u0007\u0001\u0000\u0000\u0285\u0286\u0007\u0011\u0000\u0000"+ + "\u0286\u0287\u0007\n\u0000\u0000\u0287\u0288\u0007\u0005\u0000\u0000\u0288"+ + "\u0289\u0001\u0000\u0000\u0000\u0289\u028a\u0006\u0016\b\u0000\u028a="+ + "\u0001\u0000\u0000\u0000\u028b\u028d\b\u0015\u0000\u0000\u028c\u028b\u0001"+ + "\u0000\u0000\u0000\u028d\u028e\u0001\u0000\u0000\u0000\u028e\u028c\u0001"+ + "\u0000\u0000\u0000\u028e\u028f\u0001\u0000\u0000\u0000\u028f\u0290\u0001"+ + "\u0000\u0000\u0000\u0290\u0291\u0006\u0017\u0000\u0000\u0291?\u0001\u0000"+ + "\u0000\u0000\u0292\u0293\u0005/\u0000\u0000\u0293\u0294\u0005/\u0000\u0000"+ + "\u0294\u0298\u0001\u0000\u0000\u0000\u0295\u0297\b\u0016\u0000\u0000\u0296"+ + "\u0295\u0001\u0000\u0000\u0000\u0297\u029a\u0001\u0000\u0000\u0000\u0298"+ + "\u0296\u0001\u0000\u0000\u0000\u0298\u0299\u0001\u0000\u0000\u0000\u0299"+ + 
"\u029c\u0001\u0000\u0000\u0000\u029a\u0298\u0001\u0000\u0000\u0000\u029b"+ + "\u029d\u0005\r\u0000\u0000\u029c\u029b\u0001\u0000\u0000\u0000\u029c\u029d"+ + "\u0001\u0000\u0000\u0000\u029d\u029f\u0001\u0000\u0000\u0000\u029e\u02a0"+ + "\u0005\n\u0000\u0000\u029f\u029e\u0001\u0000\u0000\u0000\u029f\u02a0\u0001"+ + "\u0000\u0000\u0000\u02a0\u02a1\u0001\u0000\u0000\u0000\u02a1\u02a2\u0006"+ + "\u0018\u000b\u0000\u02a2A\u0001\u0000\u0000\u0000\u02a3\u02a4\u0005/\u0000"+ + "\u0000\u02a4\u02a5\u0005*\u0000\u0000\u02a5\u02aa\u0001\u0000\u0000\u0000"+ + "\u02a6\u02a9\u0003B\u0019\u0000\u02a7\u02a9\t\u0000\u0000\u0000\u02a8"+ + "\u02a6\u0001\u0000\u0000\u0000\u02a8\u02a7\u0001\u0000\u0000\u0000\u02a9"+ + "\u02ac\u0001\u0000\u0000\u0000\u02aa\u02ab\u0001\u0000\u0000\u0000\u02aa"+ + "\u02a8\u0001\u0000\u0000\u0000\u02ab\u02ad\u0001\u0000\u0000\u0000\u02ac"+ + "\u02aa\u0001\u0000\u0000\u0000\u02ad\u02ae\u0005*\u0000\u0000\u02ae\u02af"+ + "\u0005/\u0000\u0000\u02af\u02b0\u0001\u0000\u0000\u0000\u02b0\u02b1\u0006"+ + "\u0019\u000b\u0000\u02b1C\u0001\u0000\u0000\u0000\u02b2\u02b4\u0007\u0017"+ + "\u0000\u0000\u02b3\u02b2\u0001\u0000\u0000\u0000\u02b4\u02b5\u0001\u0000"+ + "\u0000\u0000\u02b5\u02b3\u0001\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000"+ + "\u0000\u0000\u02b6\u02b7\u0001\u0000\u0000\u0000\u02b7\u02b8\u0006\u001a"+ + "\u000b\u0000\u02b8E\u0001\u0000\u0000\u0000\u02b9\u02ba\u0005|\u0000\u0000"+ + "\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb\u02bc\u0006\u001b\f\u0000\u02bc"+ + "G\u0001\u0000\u0000\u0000\u02bd\u02be\u0007\u0018\u0000\u0000\u02beI\u0001"+ + "\u0000\u0000\u0000\u02bf\u02c0\u0007\u0019\u0000\u0000\u02c0K\u0001\u0000"+ + "\u0000\u0000\u02c1\u02c2\u0005\\\u0000\u0000\u02c2\u02c3\u0007\u001a\u0000"+ + "\u0000\u02c3M\u0001\u0000\u0000\u0000\u02c4\u02c5\b\u001b\u0000\u0000"+ + "\u02c5O\u0001\u0000\u0000\u0000\u02c6\u02c8\u0007\u0003\u0000\u0000\u02c7"+ + "\u02c9\u0007\u001c\u0000\u0000\u02c8\u02c7\u0001\u0000\u0000\u0000\u02c8"+ + "\u02c9\u0001\u0000\u0000\u0000\u02c9\u02cb\u0001\u0000\u0000\u0000\u02ca"+ + "\u02cc\u0003H\u001c\u0000\u02cb\u02ca\u0001\u0000\u0000\u0000\u02cc\u02cd"+ + "\u0001\u0000\u0000\u0000\u02cd\u02cb\u0001\u0000\u0000\u0000\u02cd\u02ce"+ + "\u0001\u0000\u0000\u0000\u02ceQ\u0001\u0000\u0000\u0000\u02cf\u02d0\u0005"+ + "@\u0000\u0000\u02d0S\u0001\u0000\u0000\u0000\u02d1\u02d2\u0005`\u0000"+ + "\u0000\u02d2U\u0001\u0000\u0000\u0000\u02d3\u02d7\b\u001d\u0000\u0000"+ + "\u02d4\u02d5\u0005`\u0000\u0000\u02d5\u02d7\u0005`\u0000\u0000\u02d6\u02d3"+ + "\u0001\u0000\u0000\u0000\u02d6\u02d4\u0001\u0000\u0000\u0000\u02d7W\u0001"+ + "\u0000\u0000\u0000\u02d8\u02d9\u0005_\u0000\u0000\u02d9Y\u0001\u0000\u0000"+ + "\u0000\u02da\u02de\u0003J\u001d\u0000\u02db\u02de\u0003H\u001c\u0000\u02dc"+ + "\u02de\u0003X$\u0000\u02dd\u02da\u0001\u0000\u0000\u0000\u02dd\u02db\u0001"+ + "\u0000\u0000\u0000\u02dd\u02dc\u0001\u0000\u0000\u0000\u02de[\u0001\u0000"+ + "\u0000\u0000\u02df\u02e4\u0005\"\u0000\u0000\u02e0\u02e3\u0003L\u001e"+ + "\u0000\u02e1\u02e3\u0003N\u001f\u0000\u02e2\u02e0\u0001\u0000\u0000\u0000"+ + "\u02e2\u02e1\u0001\u0000\u0000\u0000\u02e3\u02e6\u0001\u0000\u0000\u0000"+ + "\u02e4\u02e2\u0001\u0000\u0000\u0000\u02e4\u02e5\u0001\u0000\u0000\u0000"+ + "\u02e5\u02e7\u0001\u0000\u0000\u0000\u02e6\u02e4\u0001\u0000\u0000\u0000"+ + "\u02e7\u02fd\u0005\"\u0000\u0000\u02e8\u02e9\u0005\"\u0000\u0000\u02e9"+ + "\u02ea\u0005\"\u0000\u0000\u02ea\u02eb\u0005\"\u0000\u0000\u02eb\u02ef"+ + "\u0001\u0000\u0000\u0000\u02ec\u02ee\b\u0016\u0000\u0000\u02ed\u02ec\u0001"+ + 
"\u0000\u0000\u0000\u02ee\u02f1\u0001\u0000\u0000\u0000\u02ef\u02f0\u0001"+ + "\u0000\u0000\u0000\u02ef\u02ed\u0001\u0000\u0000\u0000\u02f0\u02f2\u0001"+ + "\u0000\u0000\u0000\u02f1\u02ef\u0001\u0000\u0000\u0000\u02f2\u02f3\u0005"+ "\"\u0000\u0000\u02f3\u02f4\u0005\"\u0000\u0000\u02f4\u02f5\u0005\"\u0000"+ - "\u0000\u02f5\u02f6\u0005\"\u0000\u0000\u02f6\u02fa\u0001\u0000\u0000\u0000"+ - "\u02f7\u02f9\b\u0017\u0000\u0000\u02f8\u02f7\u0001\u0000\u0000\u0000\u02f9"+ - "\u02fc\u0001\u0000\u0000\u0000\u02fa\u02fb\u0001\u0000\u0000\u0000\u02fa"+ - "\u02f8\u0001\u0000\u0000\u0000\u02fb\u02fd\u0001\u0000\u0000\u0000\u02fc"+ - "\u02fa\u0001\u0000\u0000\u0000\u02fd\u02fe\u0005\"\u0000\u0000\u02fe\u02ff"+ - "\u0005\"\u0000\u0000\u02ff\u0300\u0005\"\u0000\u0000\u0300\u0302\u0001"+ - "\u0000\u0000\u0000\u0301\u0303\u0005\"\u0000\u0000\u0302\u0301\u0001\u0000"+ - "\u0000\u0000\u0302\u0303\u0001\u0000\u0000\u0000\u0303\u0305\u0001\u0000"+ - "\u0000\u0000\u0304\u0306\u0005\"\u0000\u0000\u0305\u0304\u0001\u0000\u0000"+ - "\u0000\u0305\u0306\u0001\u0000\u0000\u0000\u0306\u0308\u0001\u0000\u0000"+ - "\u0000\u0307\u02ea\u0001\u0000\u0000\u0000\u0307\u02f3\u0001\u0000\u0000"+ - "\u0000\u0308_\u0001\u0000\u0000\u0000\u0309\u030b\u0003J\u001d\u0000\u030a"+ - "\u0309\u0001\u0000\u0000\u0000\u030b\u030c\u0001\u0000\u0000\u0000\u030c"+ - "\u030a\u0001\u0000\u0000\u0000\u030c\u030d\u0001\u0000\u0000\u0000\u030d"+ - "a\u0001\u0000\u0000\u0000\u030e\u0310\u0003J\u001d\u0000\u030f\u030e\u0001"+ - "\u0000\u0000\u0000\u0310\u0311\u0001\u0000\u0000\u0000\u0311\u030f\u0001"+ - "\u0000\u0000\u0000\u0311\u0312\u0001\u0000\u0000\u0000\u0312\u0313\u0001"+ - "\u0000\u0000\u0000\u0313\u0317\u0003t2\u0000\u0314\u0316\u0003J\u001d"+ - "\u0000\u0315\u0314\u0001\u0000\u0000\u0000\u0316\u0319\u0001\u0000\u0000"+ - "\u0000\u0317\u0315\u0001\u0000\u0000\u0000\u0317\u0318\u0001\u0000\u0000"+ - "\u0000\u0318\u0339\u0001\u0000\u0000\u0000\u0319\u0317\u0001\u0000\u0000"+ - "\u0000\u031a\u031c\u0003t2\u0000\u031b\u031d\u0003J\u001d\u0000\u031c"+ - "\u031b\u0001\u0000\u0000\u0000\u031d\u031e\u0001\u0000\u0000\u0000\u031e"+ - "\u031c\u0001\u0000\u0000\u0000\u031e\u031f\u0001\u0000\u0000\u0000\u031f"+ - "\u0339\u0001\u0000\u0000\u0000\u0320\u0322\u0003J\u001d\u0000\u0321\u0320"+ - "\u0001\u0000\u0000\u0000\u0322\u0323\u0001\u0000\u0000\u0000\u0323\u0321"+ - "\u0001\u0000\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u032c"+ - "\u0001\u0000\u0000\u0000\u0325\u0329\u0003t2\u0000\u0326\u0328\u0003J"+ - "\u001d\u0000\u0327\u0326\u0001\u0000\u0000\u0000\u0328\u032b\u0001\u0000"+ - "\u0000\u0000\u0329\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001\u0000"+ - "\u0000\u0000\u032a\u032d\u0001\u0000\u0000\u0000\u032b\u0329\u0001\u0000"+ - "\u0000\u0000\u032c\u0325\u0001\u0000\u0000\u0000\u032c\u032d\u0001\u0000"+ - "\u0000\u0000\u032d\u032e\u0001\u0000\u0000\u0000\u032e\u032f\u0003R!\u0000"+ - "\u032f\u0339\u0001\u0000\u0000\u0000\u0330\u0332\u0003t2\u0000\u0331\u0333"+ - "\u0003J\u001d\u0000\u0332\u0331\u0001\u0000\u0000\u0000\u0333\u0334\u0001"+ - "\u0000\u0000\u0000\u0334\u0332\u0001\u0000\u0000\u0000\u0334\u0335\u0001"+ - "\u0000\u0000\u0000\u0335\u0336\u0001\u0000\u0000\u0000\u0336\u0337\u0003"+ - "R!\u0000\u0337\u0339\u0001\u0000\u0000\u0000\u0338\u030f\u0001\u0000\u0000"+ - "\u0000\u0338\u031a\u0001\u0000\u0000\u0000\u0338\u0321\u0001\u0000\u0000"+ - "\u0000\u0338\u0330\u0001\u0000\u0000\u0000\u0339c\u0001\u0000\u0000\u0000"+ - "\u033a\u033b\u0007\u001f\u0000\u0000\u033b\u033c\u0007 \u0000\u0000\u033c"+ - 
"e\u0001\u0000\u0000\u0000\u033d\u033e\u0007\f\u0000\u0000\u033e\u033f"+ - "\u0007\t\u0000\u0000\u033f\u0340\u0007\u0000\u0000\u0000\u0340g\u0001"+ - "\u0000\u0000\u0000\u0341\u0342\u0007\f\u0000\u0000\u0342\u0343\u0007\u0002"+ - "\u0000\u0000\u0343\u0344\u0007\u0004\u0000\u0000\u0344i\u0001\u0000\u0000"+ - "\u0000\u0345\u0346\u0005=\u0000\u0000\u0346k\u0001\u0000\u0000\u0000\u0347"+ - "\u0348\u0005:\u0000\u0000\u0348\u0349\u0005:\u0000\u0000\u0349m\u0001"+ - "\u0000\u0000\u0000\u034a\u034b\u0005:\u0000\u0000\u034bo\u0001\u0000\u0000"+ - "\u0000\u034c\u034d\u0005,\u0000\u0000\u034dq\u0001\u0000\u0000\u0000\u034e"+ - "\u034f\u0007\u0000\u0000\u0000\u034f\u0350\u0007\u0003\u0000\u0000\u0350"+ - "\u0351\u0007\u0002\u0000\u0000\u0351\u0352\u0007\u0004\u0000\u0000\u0352"+ - "s\u0001\u0000\u0000\u0000\u0353\u0354\u0005.\u0000\u0000\u0354u\u0001"+ - "\u0000\u0000\u0000\u0355\u0356\u0007\u000f\u0000\u0000\u0356\u0357\u0007"+ - "\f\u0000\u0000\u0357\u0358\u0007\r\u0000\u0000\u0358\u0359\u0007\u0002"+ - "\u0000\u0000\u0359\u035a\u0007\u0003\u0000\u0000\u035aw\u0001\u0000\u0000"+ - "\u0000\u035b\u035c\u0007\u000f\u0000\u0000\u035c\u035d\u0007\u0001\u0000"+ - "\u0000\u035d\u035e\u0007\u0006\u0000\u0000\u035e\u035f\u0007\u0002\u0000"+ - "\u0000\u035f\u0360\u0007\u0005\u0000\u0000\u0360y\u0001\u0000\u0000\u0000"+ - "\u0361\u0362\u0007\u0001\u0000\u0000\u0362\u0363\u0007\t\u0000\u0000\u0363"+ - "{\u0001\u0000\u0000\u0000\u0364\u0365\u0007\u0001\u0000\u0000\u0365\u0366"+ - "\u0007\u0002\u0000\u0000\u0366}\u0001\u0000\u0000\u0000\u0367\u0368\u0007"+ - "\r\u0000\u0000\u0368\u0369\u0007\f\u0000\u0000\u0369\u036a\u0007\u0002"+ - "\u0000\u0000\u036a\u036b\u0007\u0005\u0000\u0000\u036b\u007f\u0001\u0000"+ - "\u0000\u0000\u036c\u036d\u0007\r\u0000\u0000\u036d\u036e\u0007\u0001\u0000"+ - "\u0000\u036e\u036f\u0007\u0012\u0000\u0000\u036f\u0370\u0007\u0003\u0000"+ - "\u0000\u0370\u0081\u0001\u0000\u0000\u0000\u0371\u0372\u0005(\u0000\u0000"+ - "\u0372\u0083\u0001\u0000\u0000\u0000\u0373\u0374\u0007\t\u0000\u0000\u0374"+ - "\u0375\u0007\u0007\u0000\u0000\u0375\u0376\u0007\u0005\u0000\u0000\u0376"+ - "\u0085\u0001\u0000\u0000\u0000\u0377\u0378\u0007\t\u0000\u0000\u0378\u0379"+ - "\u0007\u0014\u0000\u0000\u0379\u037a\u0007\r\u0000\u0000\u037a\u037b\u0007"+ - "\r\u0000\u0000\u037b\u0087\u0001\u0000\u0000\u0000\u037c\u037d\u0007\t"+ - "\u0000\u0000\u037d\u037e\u0007\u0014\u0000\u0000\u037e\u037f\u0007\r\u0000"+ - "\u0000\u037f\u0380\u0007\r\u0000\u0000\u0380\u0381\u0007\u0002\u0000\u0000"+ - "\u0381\u0089\u0001\u0000\u0000\u0000\u0382\u0383\u0007\u0007\u0000\u0000"+ - "\u0383\u0384\u0007\u0006\u0000\u0000\u0384\u008b\u0001\u0000\u0000\u0000"+ - "\u0385\u0386\u0005?\u0000\u0000\u0386\u008d\u0001\u0000\u0000\u0000\u0387"+ - "\u0388\u0007\u0006\u0000\u0000\u0388\u0389\u0007\r\u0000\u0000\u0389\u038a"+ - "\u0007\u0001\u0000\u0000\u038a\u038b\u0007\u0012\u0000\u0000\u038b\u038c"+ - "\u0007\u0003\u0000\u0000\u038c\u008f\u0001\u0000\u0000\u0000\u038d\u038e"+ - "\u0005)\u0000\u0000\u038e\u0091\u0001\u0000\u0000\u0000\u038f\u0390\u0007"+ - "\u0005\u0000\u0000\u0390\u0391\u0007\u0006\u0000\u0000\u0391\u0392\u0007"+ - "\u0014\u0000\u0000\u0392\u0393\u0007\u0003\u0000\u0000\u0393\u0093\u0001"+ - "\u0000\u0000\u0000\u0394\u0395\u0005=\u0000\u0000\u0395\u0396\u0005=\u0000"+ - "\u0000\u0396\u0095\u0001\u0000\u0000\u0000\u0397\u0398\u0005=\u0000\u0000"+ - "\u0398\u0399\u0005~\u0000\u0000\u0399\u0097\u0001\u0000\u0000\u0000\u039a"+ - "\u039b\u0005!\u0000\u0000\u039b\u039c\u0005=\u0000\u0000\u039c\u0099\u0001"+ - 
"\u0000\u0000\u0000\u039d\u039e\u0005<\u0000\u0000\u039e\u009b\u0001\u0000"+ - "\u0000\u0000\u039f\u03a0\u0005<\u0000\u0000\u03a0\u03a1\u0005=\u0000\u0000"+ - "\u03a1\u009d\u0001\u0000\u0000\u0000\u03a2\u03a3\u0005>\u0000\u0000\u03a3"+ - "\u009f\u0001\u0000\u0000\u0000\u03a4\u03a5\u0005>\u0000\u0000\u03a5\u03a6"+ - "\u0005=\u0000\u0000\u03a6\u00a1\u0001\u0000\u0000\u0000\u03a7\u03a8\u0005"+ - "+\u0000\u0000\u03a8\u00a3\u0001\u0000\u0000\u0000\u03a9\u03aa\u0005-\u0000"+ - "\u0000\u03aa\u00a5\u0001\u0000\u0000\u0000\u03ab\u03ac\u0005*\u0000\u0000"+ - "\u03ac\u00a7\u0001\u0000\u0000\u0000\u03ad\u03ae\u0005/\u0000\u0000\u03ae"+ - "\u00a9\u0001\u0000\u0000\u0000\u03af\u03b0\u0005%\u0000\u0000\u03b0\u00ab"+ - "\u0001\u0000\u0000\u0000\u03b1\u03b2\u0005{\u0000\u0000\u03b2\u00ad\u0001"+ - "\u0000\u0000\u0000\u03b3\u03b4\u0005}\u0000\u0000\u03b4\u00af\u0001\u0000"+ - "\u0000\u0000\u03b5\u03b6\u0003.\u000f\u0000\u03b6\u03b7\u0001\u0000\u0000"+ - "\u0000\u03b7\u03b8\u0006P\r\u0000\u03b8\u00b1\u0001\u0000\u0000\u0000"+ - "\u03b9\u03bc\u0003\u008c>\u0000\u03ba\u03bd\u0003L\u001e\u0000\u03bb\u03bd"+ - "\u0003Z%\u0000\u03bc\u03ba\u0001\u0000\u0000\u0000\u03bc\u03bb\u0001\u0000"+ - "\u0000\u0000\u03bd\u03c1\u0001\u0000\u0000\u0000\u03be\u03c0\u0003\\&"+ - "\u0000\u03bf\u03be\u0001\u0000\u0000\u0000\u03c0\u03c3\u0001\u0000\u0000"+ - "\u0000\u03c1\u03bf\u0001\u0000\u0000\u0000\u03c1\u03c2\u0001\u0000\u0000"+ - "\u0000\u03c2\u03cb\u0001\u0000\u0000\u0000\u03c3\u03c1\u0001\u0000\u0000"+ - "\u0000\u03c4\u03c6\u0003\u008c>\u0000\u03c5\u03c7\u0003J\u001d\u0000\u03c6"+ - "\u03c5\u0001\u0000\u0000\u0000\u03c7\u03c8\u0001\u0000\u0000\u0000\u03c8"+ - "\u03c6\u0001\u0000\u0000\u0000\u03c8\u03c9\u0001\u0000\u0000\u0000\u03c9"+ - "\u03cb\u0001\u0000\u0000\u0000\u03ca\u03b9\u0001\u0000\u0000\u0000\u03ca"+ - "\u03c4\u0001\u0000\u0000\u0000\u03cb\u00b3\u0001\u0000\u0000\u0000\u03cc"+ - "\u03cd\u0005[\u0000\u0000\u03cd\u03ce\u0001\u0000\u0000\u0000\u03ce\u03cf"+ - "\u0006R\u0000\u0000\u03cf\u03d0\u0006R\u0000\u0000\u03d0\u00b5\u0001\u0000"+ - "\u0000\u0000\u03d1\u03d2\u0005]\u0000\u0000\u03d2\u03d3\u0001\u0000\u0000"+ - "\u0000\u03d3\u03d4\u0006S\f\u0000\u03d4\u03d5\u0006S\f\u0000\u03d5\u00b7"+ - "\u0001\u0000\u0000\u0000\u03d6\u03da\u0003L\u001e\u0000\u03d7\u03d9\u0003"+ - "\\&\u0000\u03d8\u03d7\u0001\u0000\u0000\u0000\u03d9\u03dc\u0001\u0000"+ - "\u0000\u0000\u03da\u03d8\u0001\u0000\u0000\u0000\u03da\u03db\u0001\u0000"+ - "\u0000\u0000\u03db\u03e7\u0001\u0000\u0000\u0000\u03dc\u03da\u0001\u0000"+ - "\u0000\u0000\u03dd\u03e0\u0003Z%\u0000\u03de\u03e0\u0003T\"\u0000\u03df"+ - "\u03dd\u0001\u0000\u0000\u0000\u03df\u03de\u0001\u0000\u0000\u0000\u03e0"+ - "\u03e2\u0001\u0000\u0000\u0000\u03e1\u03e3\u0003\\&\u0000\u03e2\u03e1"+ - "\u0001\u0000\u0000\u0000\u03e3\u03e4\u0001\u0000\u0000\u0000\u03e4\u03e2"+ - "\u0001\u0000\u0000\u0000\u03e4\u03e5\u0001\u0000\u0000\u0000\u03e5\u03e7"+ - "\u0001\u0000\u0000\u0000\u03e6\u03d6\u0001\u0000\u0000\u0000\u03e6\u03df"+ - "\u0001\u0000\u0000\u0000\u03e7\u00b9\u0001\u0000\u0000\u0000\u03e8\u03ea"+ - "\u0003V#\u0000\u03e9\u03eb\u0003X$\u0000\u03ea\u03e9\u0001\u0000\u0000"+ - "\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec\u03ea\u0001\u0000\u0000"+ - "\u0000\u03ec\u03ed\u0001\u0000\u0000\u0000\u03ed\u03ee\u0001\u0000\u0000"+ - "\u0000\u03ee\u03ef\u0003V#\u0000\u03ef\u00bb\u0001\u0000\u0000\u0000\u03f0"+ - "\u03f1\u0003\u00baU\u0000\u03f1\u00bd\u0001\u0000\u0000\u0000\u03f2\u03f3"+ - "\u0003B\u0019\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006"+ - 
"W\u000b\u0000\u03f5\u00bf\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003D\u001a"+ - "\u0000\u03f7\u03f8\u0001\u0000\u0000\u0000\u03f8\u03f9\u0006X\u000b\u0000"+ - "\u03f9\u00c1\u0001\u0000\u0000\u0000\u03fa\u03fb\u0003F\u001b\u0000\u03fb"+ - "\u03fc\u0001\u0000\u0000\u0000\u03fc\u03fd\u0006Y\u000b\u0000\u03fd\u00c3"+ - "\u0001\u0000\u0000\u0000\u03fe\u03ff\u0003\u00b4R\u0000\u03ff\u0400\u0001"+ - "\u0000\u0000\u0000\u0400\u0401\u0006Z\u000e\u0000\u0401\u0402\u0006Z\u000f"+ - "\u0000\u0402\u00c5\u0001\u0000\u0000\u0000\u0403\u0404\u0003H\u001c\u0000"+ - "\u0404\u0405\u0001\u0000\u0000\u0000\u0405\u0406\u0006[\u0010\u0000\u0406"+ - "\u0407\u0006[\f\u0000\u0407\u00c7\u0001\u0000\u0000\u0000\u0408\u0409"+ - "\u0003F\u001b\u0000\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u040b\u0006"+ - "\\\u000b\u0000\u040b\u00c9\u0001\u0000\u0000\u0000\u040c\u040d\u0003B"+ - "\u0019\u0000\u040d\u040e\u0001\u0000\u0000\u0000\u040e\u040f\u0006]\u000b"+ - "\u0000\u040f\u00cb\u0001\u0000\u0000\u0000\u0410\u0411\u0003D\u001a\u0000"+ - "\u0411\u0412\u0001\u0000\u0000\u0000\u0412\u0413\u0006^\u000b\u0000\u0413"+ - "\u00cd\u0001\u0000\u0000\u0000\u0414\u0415\u0003H\u001c\u0000\u0415\u0416"+ - "\u0001\u0000\u0000\u0000\u0416\u0417\u0006_\u0010\u0000\u0417\u0418\u0006"+ - "_\f\u0000\u0418\u00cf\u0001\u0000\u0000\u0000\u0419\u041a\u0003\u00b4"+ - "R\u0000\u041a\u041b\u0001\u0000\u0000\u0000\u041b\u041c\u0006`\u000e\u0000"+ - "\u041c\u00d1\u0001\u0000\u0000\u0000\u041d\u041e\u0003\u00b6S\u0000\u041e"+ - "\u041f\u0001\u0000\u0000\u0000\u041f\u0420\u0006a\u0011\u0000\u0420\u00d3"+ - "\u0001\u0000\u0000\u0000\u0421\u0422\u0003n/\u0000\u0422\u0423\u0001\u0000"+ - "\u0000\u0000\u0423\u0424\u0006b\u0012\u0000\u0424\u00d5\u0001\u0000\u0000"+ - "\u0000\u0425\u0426\u0003p0\u0000\u0426\u0427\u0001\u0000\u0000\u0000\u0427"+ - "\u0428\u0006c\u0013\u0000\u0428\u00d7\u0001\u0000\u0000\u0000\u0429\u042a"+ - "\u0003j-\u0000\u042a\u042b\u0001\u0000\u0000\u0000\u042b\u042c\u0006d"+ - "\u0014\u0000\u042c\u00d9\u0001\u0000\u0000\u0000\u042d\u042e\u0007\u0010"+ - "\u0000\u0000\u042e\u042f\u0007\u0003\u0000\u0000\u042f\u0430\u0007\u0005"+ - "\u0000\u0000\u0430\u0431\u0007\f\u0000\u0000\u0431\u0432\u0007\u0000\u0000"+ - "\u0000\u0432\u0433\u0007\f\u0000\u0000\u0433\u0434\u0007\u0005\u0000\u0000"+ - "\u0434\u0435\u0007\f\u0000\u0000\u0435\u00db\u0001\u0000\u0000\u0000\u0436"+ - "\u043a\b!\u0000\u0000\u0437\u0438\u0005/\u0000\u0000\u0438\u043a\b\"\u0000"+ - "\u0000\u0439\u0436\u0001\u0000\u0000\u0000\u0439\u0437\u0001\u0000\u0000"+ - "\u0000\u043a\u00dd\u0001\u0000\u0000\u0000\u043b\u043d\u0003\u00dcf\u0000"+ - "\u043c\u043b\u0001\u0000\u0000\u0000\u043d\u043e\u0001\u0000\u0000\u0000"+ - "\u043e\u043c\u0001\u0000\u0000\u0000\u043e\u043f\u0001\u0000\u0000\u0000"+ - "\u043f\u00df\u0001\u0000\u0000\u0000\u0440\u0441\u0003\u00deg\u0000\u0441"+ - "\u0442\u0001\u0000\u0000\u0000\u0442\u0443\u0006h\u0015\u0000\u0443\u00e1"+ - "\u0001\u0000\u0000\u0000\u0444\u0445\u0003^\'\u0000\u0445\u0446\u0001"+ - "\u0000\u0000\u0000\u0446\u0447\u0006i\u0016\u0000\u0447\u00e3\u0001\u0000"+ - "\u0000\u0000\u0448\u0449\u0003B\u0019\u0000\u0449\u044a\u0001\u0000\u0000"+ - "\u0000\u044a\u044b\u0006j\u000b\u0000\u044b\u00e5\u0001\u0000\u0000\u0000"+ - "\u044c\u044d\u0003D\u001a\u0000\u044d\u044e\u0001\u0000\u0000\u0000\u044e"+ - "\u044f\u0006k\u000b\u0000\u044f\u00e7\u0001\u0000\u0000\u0000\u0450\u0451"+ - "\u0003F\u001b\u0000\u0451\u0452\u0001\u0000\u0000\u0000\u0452\u0453\u0006"+ - "l\u000b\u0000\u0453\u00e9\u0001\u0000\u0000\u0000\u0454\u0455\u0003H\u001c"+ - 
"\u0000\u0455\u0456\u0001\u0000\u0000\u0000\u0456\u0457\u0006m\u0010\u0000"+ - "\u0457\u0458\u0006m\f\u0000\u0458\u00eb\u0001\u0000\u0000\u0000\u0459"+ - "\u045a\u0003t2\u0000\u045a\u045b\u0001\u0000\u0000\u0000\u045b\u045c\u0006"+ - "n\u0017\u0000\u045c\u00ed\u0001\u0000\u0000\u0000\u045d\u045e\u0003p0"+ - "\u0000\u045e\u045f\u0001\u0000\u0000\u0000\u045f\u0460\u0006o\u0013\u0000"+ - "\u0460\u00ef\u0001\u0000\u0000\u0000\u0461\u0462\u0004p\b\u0000\u0462"+ - "\u0463\u0003\u008c>\u0000\u0463\u0464\u0001\u0000\u0000\u0000\u0464\u0465"+ - "\u0006p\u0018\u0000\u0465\u00f1\u0001\u0000\u0000\u0000\u0466\u0467\u0004"+ - "q\t\u0000\u0467\u0468\u0003\u00b2Q\u0000\u0468\u0469\u0001\u0000\u0000"+ - "\u0000\u0469\u046a\u0006q\u0019\u0000\u046a\u00f3\u0001\u0000\u0000\u0000"+ - "\u046b\u0470\u0003L\u001e\u0000\u046c\u0470\u0003J\u001d\u0000\u046d\u0470"+ - "\u0003Z%\u0000\u046e\u0470\u0003\u00a6K\u0000\u046f\u046b\u0001\u0000"+ - "\u0000\u0000\u046f\u046c\u0001\u0000\u0000\u0000\u046f\u046d\u0001\u0000"+ - "\u0000\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470\u00f5\u0001\u0000"+ - "\u0000\u0000\u0471\u0474\u0003L\u001e\u0000\u0472\u0474\u0003\u00a6K\u0000"+ - "\u0473\u0471\u0001\u0000\u0000\u0000\u0473\u0472\u0001\u0000\u0000\u0000"+ - "\u0474\u0478\u0001\u0000\u0000\u0000\u0475\u0477\u0003\u00f4r\u0000\u0476"+ - "\u0475\u0001\u0000\u0000\u0000\u0477\u047a\u0001\u0000\u0000\u0000\u0478"+ - "\u0476\u0001\u0000\u0000\u0000\u0478\u0479\u0001\u0000\u0000\u0000\u0479"+ - "\u0485\u0001\u0000\u0000\u0000\u047a\u0478\u0001\u0000\u0000\u0000\u047b"+ - "\u047e\u0003Z%\u0000\u047c\u047e\u0003T\"\u0000\u047d\u047b\u0001\u0000"+ - "\u0000\u0000\u047d\u047c\u0001\u0000\u0000\u0000\u047e\u0480\u0001\u0000"+ - "\u0000\u0000\u047f\u0481\u0003\u00f4r\u0000\u0480\u047f\u0001\u0000\u0000"+ - "\u0000\u0481\u0482\u0001\u0000\u0000\u0000\u0482\u0480\u0001\u0000\u0000"+ - "\u0000\u0482\u0483\u0001\u0000\u0000\u0000\u0483\u0485\u0001\u0000\u0000"+ - "\u0000\u0484\u0473\u0001\u0000\u0000\u0000\u0484\u047d\u0001\u0000\u0000"+ - "\u0000\u0485\u00f7\u0001\u0000\u0000\u0000\u0486\u0489\u0003\u00f6s\u0000"+ - "\u0487\u0489\u0003\u00baU\u0000\u0488\u0486\u0001\u0000\u0000\u0000\u0488"+ - "\u0487\u0001\u0000\u0000\u0000\u0489\u048a\u0001\u0000\u0000\u0000\u048a"+ - "\u0488\u0001\u0000\u0000\u0000\u048a\u048b\u0001\u0000\u0000\u0000\u048b"+ - "\u00f9\u0001\u0000\u0000\u0000\u048c\u048d\u0003B\u0019\u0000\u048d\u048e"+ - "\u0001\u0000\u0000\u0000\u048e\u048f\u0006u\u000b\u0000\u048f\u00fb\u0001"+ - "\u0000\u0000\u0000\u0490\u0491\u0003D\u001a\u0000\u0491\u0492\u0001\u0000"+ - "\u0000\u0000\u0492\u0493\u0006v\u000b\u0000\u0493\u00fd\u0001\u0000\u0000"+ - "\u0000\u0494\u0495\u0003F\u001b\u0000\u0495\u0496\u0001\u0000\u0000\u0000"+ - "\u0496\u0497\u0006w\u000b\u0000\u0497\u00ff\u0001\u0000\u0000\u0000\u0498"+ - "\u0499\u0003H\u001c\u0000\u0499\u049a\u0001\u0000\u0000\u0000\u049a\u049b"+ - "\u0006x\u0010\u0000\u049b\u049c\u0006x\f\u0000\u049c\u0101\u0001\u0000"+ - "\u0000\u0000\u049d\u049e\u0003j-\u0000\u049e\u049f\u0001\u0000\u0000\u0000"+ - "\u049f\u04a0\u0006y\u0014\u0000\u04a0\u0103\u0001\u0000\u0000\u0000\u04a1"+ - "\u04a2\u0003p0\u0000\u04a2\u04a3\u0001\u0000\u0000\u0000\u04a3\u04a4\u0006"+ - "z\u0013\u0000\u04a4\u0105\u0001\u0000\u0000\u0000\u04a5\u04a6\u0003t2"+ - "\u0000\u04a6\u04a7\u0001\u0000\u0000\u0000\u04a7\u04a8\u0006{\u0017\u0000"+ - "\u04a8\u0107\u0001\u0000\u0000\u0000\u04a9\u04aa\u0004|\n\u0000\u04aa"+ - "\u04ab\u0003\u008c>\u0000\u04ab\u04ac\u0001\u0000\u0000\u0000\u04ac\u04ad"+ - 
"\u0006|\u0018\u0000\u04ad\u0109\u0001\u0000\u0000\u0000\u04ae\u04af\u0004"+ - "}\u000b\u0000\u04af\u04b0\u0003\u00b2Q\u0000\u04b0\u04b1\u0001\u0000\u0000"+ - "\u0000\u04b1\u04b2\u0006}\u0019\u0000\u04b2\u010b\u0001\u0000\u0000\u0000"+ - "\u04b3\u04b4\u0007\f\u0000\u0000\u04b4\u04b5\u0007\u0002\u0000\u0000\u04b5"+ - "\u010d\u0001\u0000\u0000\u0000\u04b6\u04b7\u0003\u00f8t\u0000\u04b7\u04b8"+ - "\u0001\u0000\u0000\u0000\u04b8\u04b9\u0006\u007f\u001a\u0000\u04b9\u010f"+ - "\u0001\u0000\u0000\u0000\u04ba\u04bb\u0003B\u0019\u0000\u04bb\u04bc\u0001"+ - "\u0000\u0000\u0000\u04bc\u04bd\u0006\u0080\u000b\u0000\u04bd\u0111\u0001"+ - "\u0000\u0000\u0000\u04be\u04bf\u0003D\u001a\u0000\u04bf\u04c0\u0001\u0000"+ - "\u0000\u0000\u04c0\u04c1\u0006\u0081\u000b\u0000\u04c1\u0113\u0001\u0000"+ - "\u0000\u0000\u04c2\u04c3\u0003F\u001b\u0000\u04c3\u04c4\u0001\u0000\u0000"+ - "\u0000\u04c4\u04c5\u0006\u0082\u000b\u0000\u04c5\u0115\u0001\u0000\u0000"+ - "\u0000\u04c6\u04c7\u0003H\u001c\u0000\u04c7\u04c8\u0001\u0000\u0000\u0000"+ - "\u04c8\u04c9\u0006\u0083\u0010\u0000\u04c9\u04ca\u0006\u0083\f\u0000\u04ca"+ - "\u0117\u0001\u0000\u0000\u0000\u04cb\u04cc\u0003\u00b4R\u0000\u04cc\u04cd"+ - "\u0001\u0000\u0000\u0000\u04cd\u04ce\u0006\u0084\u000e\u0000\u04ce\u04cf"+ - "\u0006\u0084\u001b\u0000\u04cf\u0119\u0001\u0000\u0000\u0000\u04d0\u04d1"+ - "\u0007\u0007\u0000\u0000\u04d1\u04d2\u0007\t\u0000\u0000\u04d2\u04d3\u0001"+ - "\u0000\u0000\u0000\u04d3\u04d4\u0006\u0085\u001c\u0000\u04d4\u011b\u0001"+ - "\u0000\u0000\u0000\u04d5\u04d6\u0007\u0013\u0000\u0000\u04d6\u04d7\u0007"+ - "\u0001\u0000\u0000\u04d7\u04d8\u0007\u0005\u0000\u0000\u04d8\u04d9\u0007"+ - "\n\u0000\u0000\u04d9\u04da\u0001\u0000\u0000\u0000\u04da\u04db\u0006\u0086"+ - "\u001c\u0000\u04db\u011d\u0001\u0000\u0000\u0000\u04dc\u04dd\b#\u0000"+ - "\u0000\u04dd\u011f\u0001\u0000\u0000\u0000\u04de\u04e0\u0003\u011e\u0087"+ - "\u0000\u04df\u04de\u0001\u0000\u0000\u0000\u04e0\u04e1\u0001\u0000\u0000"+ - "\u0000\u04e1\u04df\u0001\u0000\u0000\u0000\u04e1\u04e2\u0001\u0000\u0000"+ - "\u0000\u04e2\u04e3\u0001\u0000\u0000\u0000\u04e3\u04e4\u0003n/\u0000\u04e4"+ - "\u04e6\u0001\u0000\u0000\u0000\u04e5\u04df\u0001\u0000\u0000\u0000\u04e5"+ - "\u04e6\u0001\u0000\u0000\u0000\u04e6\u04e8\u0001\u0000\u0000\u0000\u04e7"+ - "\u04e9\u0003\u011e\u0087\u0000\u04e8\u04e7\u0001\u0000\u0000\u0000\u04e9"+ - "\u04ea\u0001\u0000\u0000\u0000\u04ea\u04e8\u0001\u0000\u0000\u0000\u04ea"+ - "\u04eb\u0001\u0000\u0000\u0000\u04eb\u0121\u0001\u0000\u0000\u0000\u04ec"+ - "\u04ed\u0003\u0120\u0088\u0000\u04ed\u04ee\u0001\u0000\u0000\u0000\u04ee"+ - "\u04ef\u0006\u0089\u001d\u0000\u04ef\u0123\u0001\u0000\u0000\u0000\u04f0"+ - "\u04f1\u0003B\u0019\u0000\u04f1\u04f2\u0001\u0000\u0000\u0000\u04f2\u04f3"+ - "\u0006\u008a\u000b\u0000\u04f3\u0125\u0001\u0000\u0000\u0000\u04f4\u04f5"+ - "\u0003D\u001a\u0000\u04f5\u04f6\u0001\u0000\u0000\u0000\u04f6\u04f7\u0006"+ - "\u008b\u000b\u0000\u04f7\u0127\u0001\u0000\u0000\u0000\u04f8\u04f9\u0003"+ - "F\u001b\u0000\u04f9\u04fa\u0001\u0000\u0000\u0000\u04fa\u04fb\u0006\u008c"+ - "\u000b\u0000\u04fb\u0129\u0001\u0000\u0000\u0000\u04fc\u04fd\u0003H\u001c"+ - "\u0000\u04fd\u04fe\u0001\u0000\u0000\u0000\u04fe\u04ff\u0006\u008d\u0010"+ - "\u0000\u04ff\u0500\u0006\u008d\f\u0000\u0500\u0501\u0006\u008d\f\u0000"+ - "\u0501\u012b\u0001\u0000\u0000\u0000\u0502\u0503\u0003j-\u0000\u0503\u0504"+ - "\u0001\u0000\u0000\u0000\u0504\u0505\u0006\u008e\u0014\u0000\u0505\u012d"+ - "\u0001\u0000\u0000\u0000\u0506\u0507\u0003p0\u0000\u0507\u0508\u0001\u0000"+ - 
"\u0000\u0000\u0508\u0509\u0006\u008f\u0013\u0000\u0509\u012f\u0001\u0000"+ - "\u0000\u0000\u050a\u050b\u0003t2\u0000\u050b\u050c\u0001\u0000\u0000\u0000"+ - "\u050c\u050d\u0006\u0090\u0017\u0000\u050d\u0131\u0001\u0000\u0000\u0000"+ - "\u050e\u050f\u0003\u011c\u0086\u0000\u050f\u0510\u0001\u0000\u0000\u0000"+ - "\u0510\u0511\u0006\u0091\u001e\u0000\u0511\u0133\u0001\u0000\u0000\u0000"+ - "\u0512\u0513\u0003\u00f8t\u0000\u0513\u0514\u0001\u0000\u0000\u0000\u0514"+ - "\u0515\u0006\u0092\u001a\u0000\u0515\u0135\u0001\u0000\u0000\u0000\u0516"+ - "\u0517\u0003\u00bcV\u0000\u0517\u0518\u0001\u0000\u0000\u0000\u0518\u0519"+ - "\u0006\u0093\u001f\u0000\u0519\u0137\u0001\u0000\u0000\u0000\u051a\u051b"+ - "\u0004\u0094\f\u0000\u051b\u051c\u0003\u008c>\u0000\u051c\u051d\u0001"+ - "\u0000\u0000\u0000\u051d\u051e\u0006\u0094\u0018\u0000\u051e\u0139\u0001"+ - "\u0000\u0000\u0000\u051f\u0520\u0004\u0095\r\u0000\u0520\u0521\u0003\u00b2"+ - "Q\u0000\u0521\u0522\u0001\u0000\u0000\u0000\u0522\u0523\u0006\u0095\u0019"+ - "\u0000\u0523\u013b\u0001\u0000\u0000\u0000\u0524\u0525\u0003B\u0019\u0000"+ - "\u0525\u0526\u0001\u0000\u0000\u0000\u0526\u0527\u0006\u0096\u000b\u0000"+ - "\u0527\u013d\u0001\u0000\u0000\u0000\u0528\u0529\u0003D\u001a\u0000\u0529"+ - "\u052a\u0001\u0000\u0000\u0000\u052a\u052b\u0006\u0097\u000b\u0000\u052b"+ - "\u013f\u0001\u0000\u0000\u0000\u052c\u052d\u0003F\u001b\u0000\u052d\u052e"+ - "\u0001\u0000\u0000\u0000\u052e\u052f\u0006\u0098\u000b\u0000\u052f\u0141"+ - "\u0001\u0000\u0000\u0000\u0530\u0531\u0003H\u001c\u0000\u0531\u0532\u0001"+ - "\u0000\u0000\u0000\u0532\u0533\u0006\u0099\u0010\u0000\u0533\u0534\u0006"+ - "\u0099\f\u0000\u0534\u0143\u0001\u0000\u0000\u0000\u0535\u0536\u0003t"+ - "2\u0000\u0536\u0537\u0001\u0000\u0000\u0000\u0537\u0538\u0006\u009a\u0017"+ - "\u0000\u0538\u0145\u0001\u0000\u0000\u0000\u0539\u053a\u0004\u009b\u000e"+ - "\u0000\u053a\u053b\u0003\u008c>\u0000\u053b\u053c\u0001\u0000\u0000\u0000"+ - "\u053c\u053d\u0006\u009b\u0018\u0000\u053d\u0147\u0001\u0000\u0000\u0000"+ - "\u053e\u053f\u0004\u009c\u000f\u0000\u053f\u0540\u0003\u00b2Q\u0000\u0540"+ - "\u0541\u0001\u0000\u0000\u0000\u0541\u0542\u0006\u009c\u0019\u0000\u0542"+ - "\u0149\u0001\u0000\u0000\u0000\u0543\u0544\u0003\u00bcV\u0000\u0544\u0545"+ - "\u0001\u0000\u0000\u0000\u0545\u0546\u0006\u009d\u001f\u0000\u0546\u014b"+ - "\u0001\u0000\u0000\u0000\u0547\u0548\u0003\u00b8T\u0000\u0548\u0549\u0001"+ - "\u0000\u0000\u0000\u0549\u054a\u0006\u009e \u0000\u054a\u014d\u0001\u0000"+ - "\u0000\u0000\u054b\u054c\u0003B\u0019\u0000\u054c\u054d\u0001\u0000\u0000"+ - "\u0000\u054d\u054e\u0006\u009f\u000b\u0000\u054e\u014f\u0001\u0000\u0000"+ - "\u0000\u054f\u0550\u0003D\u001a\u0000\u0550\u0551\u0001\u0000\u0000\u0000"+ - "\u0551\u0552\u0006\u00a0\u000b\u0000\u0552\u0151\u0001\u0000\u0000\u0000"+ - "\u0553\u0554\u0003F\u001b\u0000\u0554\u0555\u0001\u0000\u0000\u0000\u0555"+ - "\u0556\u0006\u00a1\u000b\u0000\u0556\u0153\u0001\u0000\u0000\u0000\u0557"+ - "\u0558\u0003H\u001c\u0000\u0558\u0559\u0001\u0000\u0000\u0000\u0559\u055a"+ - "\u0006\u00a2\u0010\u0000\u055a\u055b\u0006\u00a2\f\u0000\u055b\u0155\u0001"+ - "\u0000\u0000\u0000\u055c\u055d\u0007\u0001\u0000\u0000\u055d\u055e\u0007"+ - "\t\u0000\u0000\u055e\u055f\u0007\u000f\u0000\u0000\u055f\u0560\u0007\u0007"+ - "\u0000\u0000\u0560\u0157\u0001\u0000\u0000\u0000\u0561\u0562\u0003B\u0019"+ - "\u0000\u0562\u0563\u0001\u0000\u0000\u0000\u0563\u0564\u0006\u00a4\u000b"+ - "\u0000\u0564\u0159\u0001\u0000\u0000\u0000\u0565\u0566\u0003D\u001a\u0000"+ - 
"\u0566\u0567\u0001\u0000\u0000\u0000\u0567\u0568\u0006\u00a5\u000b\u0000"+ - "\u0568\u015b\u0001\u0000\u0000\u0000\u0569\u056a\u0003F\u001b\u0000\u056a"+ - "\u056b\u0001\u0000\u0000\u0000\u056b\u056c\u0006\u00a6\u000b\u0000\u056c"+ - "\u015d\u0001\u0000\u0000\u0000\u056d\u056e\u0003\u00b6S\u0000\u056e\u056f"+ - "\u0001\u0000\u0000\u0000\u056f\u0570\u0006\u00a7\u0011\u0000\u0570\u0571"+ - "\u0006\u00a7\f\u0000\u0571\u015f\u0001\u0000\u0000\u0000\u0572\u0573\u0003"+ - "n/\u0000\u0573\u0574\u0001\u0000\u0000\u0000\u0574\u0575\u0006\u00a8\u0012"+ - "\u0000\u0575\u0161\u0001\u0000\u0000\u0000\u0576\u057c\u0003T\"\u0000"+ - "\u0577\u057c\u0003J\u001d\u0000\u0578\u057c\u0003t2\u0000\u0579\u057c"+ - "\u0003L\u001e\u0000\u057a\u057c\u0003Z%\u0000\u057b\u0576\u0001\u0000"+ - "\u0000\u0000\u057b\u0577\u0001\u0000\u0000\u0000\u057b\u0578\u0001\u0000"+ - "\u0000\u0000\u057b\u0579\u0001\u0000\u0000\u0000\u057b\u057a\u0001\u0000"+ - "\u0000\u0000\u057c\u057d\u0001\u0000\u0000\u0000\u057d\u057b\u0001\u0000"+ - "\u0000\u0000\u057d\u057e\u0001\u0000\u0000\u0000\u057e\u0163\u0001\u0000"+ - "\u0000\u0000\u057f\u0580\u0003B\u0019\u0000\u0580\u0581\u0001\u0000\u0000"+ - "\u0000\u0581\u0582\u0006\u00aa\u000b\u0000\u0582\u0165\u0001\u0000\u0000"+ - "\u0000\u0583\u0584\u0003D\u001a\u0000\u0584\u0585\u0001\u0000\u0000\u0000"+ - "\u0585\u0586\u0006\u00ab\u000b\u0000\u0586\u0167\u0001\u0000\u0000\u0000"+ - "\u0587\u0588\u0003F\u001b\u0000\u0588\u0589\u0001\u0000\u0000\u0000\u0589"+ - "\u058a\u0006\u00ac\u000b\u0000\u058a\u0169\u0001\u0000\u0000\u0000\u058b"+ - "\u058c\u0003H\u001c\u0000\u058c\u058d\u0001\u0000\u0000\u0000\u058d\u058e"+ - "\u0006\u00ad\u0010\u0000\u058e\u058f\u0006\u00ad\f\u0000\u058f\u016b\u0001"+ - "\u0000\u0000\u0000\u0590\u0591\u0003n/\u0000\u0591\u0592\u0001\u0000\u0000"+ - "\u0000\u0592\u0593\u0006\u00ae\u0012\u0000\u0593\u016d\u0001\u0000\u0000"+ - "\u0000\u0594\u0595\u0003p0\u0000\u0595\u0596\u0001\u0000\u0000\u0000\u0596"+ - "\u0597\u0006\u00af\u0013\u0000\u0597\u016f\u0001\u0000\u0000\u0000\u0598"+ - "\u0599\u0003t2\u0000\u0599\u059a\u0001\u0000\u0000\u0000\u059a\u059b\u0006"+ - "\u00b0\u0017\u0000\u059b\u0171\u0001\u0000\u0000\u0000\u059c\u059d\u0003"+ - "\u011a\u0085\u0000\u059d\u059e\u0001\u0000\u0000\u0000\u059e\u059f\u0006"+ - "\u00b1!\u0000\u059f\u05a0\u0006\u00b1\"\u0000\u05a0\u0173\u0001\u0000"+ - "\u0000\u0000\u05a1\u05a2\u0003\u00deg\u0000\u05a2\u05a3\u0001\u0000\u0000"+ - "\u0000\u05a3\u05a4\u0006\u00b2\u0015\u0000\u05a4\u0175\u0001\u0000\u0000"+ - "\u0000\u05a5\u05a6\u0003^\'\u0000\u05a6\u05a7\u0001\u0000\u0000\u0000"+ - "\u05a7\u05a8\u0006\u00b3\u0016\u0000\u05a8\u0177\u0001\u0000\u0000\u0000"+ - "\u05a9\u05aa\u0003B\u0019\u0000\u05aa\u05ab\u0001\u0000\u0000\u0000\u05ab"+ - "\u05ac\u0006\u00b4\u000b\u0000\u05ac\u0179\u0001\u0000\u0000\u0000\u05ad"+ - "\u05ae\u0003D\u001a\u0000\u05ae\u05af\u0001\u0000\u0000\u0000\u05af\u05b0"+ - "\u0006\u00b5\u000b\u0000\u05b0\u017b\u0001\u0000\u0000\u0000\u05b1\u05b2"+ - "\u0003F\u001b\u0000\u05b2\u05b3\u0001\u0000\u0000\u0000\u05b3\u05b4\u0006"+ - "\u00b6\u000b\u0000\u05b4\u017d\u0001\u0000\u0000\u0000\u05b5\u05b6\u0003"+ - "H\u001c\u0000\u05b6\u05b7\u0001\u0000\u0000\u0000\u05b7\u05b8\u0006\u00b7"+ - "\u0010\u0000\u05b8\u05b9\u0006\u00b7\f\u0000\u05b9\u05ba\u0006\u00b7\f"+ - "\u0000\u05ba\u017f\u0001\u0000\u0000\u0000\u05bb\u05bc\u0003p0\u0000\u05bc"+ - "\u05bd\u0001\u0000\u0000\u0000\u05bd\u05be\u0006\u00b8\u0013\u0000\u05be"+ - "\u0181\u0001\u0000\u0000\u0000\u05bf\u05c0\u0003t2\u0000\u05c0\u05c1\u0001"+ - 
"\u0000\u0000\u0000\u05c1\u05c2\u0006\u00b9\u0017\u0000\u05c2\u0183\u0001"+ - "\u0000\u0000\u0000\u05c3\u05c4\u0003\u00f8t\u0000\u05c4\u05c5\u0001\u0000"+ - "\u0000\u0000\u05c5\u05c6\u0006\u00ba\u001a\u0000\u05c6\u0185\u0001\u0000"+ - "\u0000\u0000\u05c7\u05c8\u0003B\u0019\u0000\u05c8\u05c9\u0001\u0000\u0000"+ - "\u0000\u05c9\u05ca\u0006\u00bb\u000b\u0000\u05ca\u0187\u0001\u0000\u0000"+ - "\u0000\u05cb\u05cc\u0003D\u001a\u0000\u05cc\u05cd\u0001\u0000\u0000\u0000"+ - "\u05cd\u05ce\u0006\u00bc\u000b\u0000\u05ce\u0189\u0001\u0000\u0000\u0000"+ - "\u05cf\u05d0\u0003F\u001b\u0000\u05d0\u05d1\u0001\u0000\u0000\u0000\u05d1"+ - "\u05d2\u0006\u00bd\u000b\u0000\u05d2\u018b\u0001\u0000\u0000\u0000\u05d3"+ - "\u05d4\u0003H\u001c\u0000\u05d4\u05d5\u0001\u0000\u0000\u0000\u05d5\u05d6"+ - "\u0006\u00be\u0010\u0000\u05d6\u05d7\u0006\u00be\f\u0000\u05d7\u018d\u0001"+ - "\u0000\u0000\u0000\u05d8\u05d9\u00036\u0013\u0000\u05d9\u05da\u0001\u0000"+ - "\u0000\u0000\u05da\u05db\u0006\u00bf#\u0000\u05db\u018f\u0001\u0000\u0000"+ - "\u0000\u05dc\u05dd\u0003\u010c~\u0000\u05dd\u05de\u0001\u0000\u0000\u0000"+ - "\u05de\u05df\u0006\u00c0$\u0000\u05df\u0191\u0001\u0000\u0000\u0000\u05e0"+ - "\u05e1\u0003\u011a\u0085\u0000\u05e1\u05e2\u0001\u0000\u0000\u0000\u05e2"+ - "\u05e3\u0006\u00c1!\u0000\u05e3\u05e4\u0006\u00c1\f\u0000\u05e4\u05e5"+ - "\u0006\u00c1\u0000\u0000\u05e5\u0193\u0001\u0000\u0000\u0000\u05e6\u05e7"+ - "\u0007\u0014\u0000\u0000\u05e7\u05e8\u0007\u0002\u0000\u0000\u05e8\u05e9"+ - "\u0007\u0001\u0000\u0000\u05e9\u05ea\u0007\t\u0000\u0000\u05ea\u05eb\u0007"+ - "\u0011\u0000\u0000\u05eb\u05ec\u0001\u0000\u0000\u0000\u05ec\u05ed\u0006"+ - "\u00c2\f\u0000\u05ed\u05ee\u0006\u00c2\u0000\u0000\u05ee\u0195\u0001\u0000"+ - "\u0000\u0000\u05ef\u05f0\u0003\u00deg\u0000\u05f0\u05f1\u0001\u0000\u0000"+ - "\u0000\u05f1\u05f2\u0006\u00c3\u0015\u0000\u05f2\u0197\u0001\u0000\u0000"+ - "\u0000\u05f3\u05f4\u0003^\'\u0000\u05f4\u05f5\u0001\u0000\u0000\u0000"+ - "\u05f5\u05f6\u0006\u00c4\u0016\u0000\u05f6\u0199\u0001\u0000\u0000\u0000"+ - "\u05f7\u05f8\u0003n/\u0000\u05f8\u05f9\u0001\u0000\u0000\u0000\u05f9\u05fa"+ - "\u0006\u00c5\u0012\u0000\u05fa\u019b\u0001\u0000\u0000\u0000\u05fb\u05fc"+ - "\u0003\u00b8T\u0000\u05fc\u05fd\u0001\u0000\u0000\u0000\u05fd\u05fe\u0006"+ - "\u00c6 \u0000\u05fe\u019d\u0001\u0000\u0000\u0000\u05ff\u0600\u0003\u00bc"+ - "V\u0000\u0600\u0601\u0001\u0000\u0000\u0000\u0601\u0602\u0006\u00c7\u001f"+ - "\u0000\u0602\u019f\u0001\u0000\u0000\u0000\u0603\u0604\u0003B\u0019\u0000"+ - "\u0604\u0605\u0001\u0000\u0000\u0000\u0605\u0606\u0006\u00c8\u000b\u0000"+ - "\u0606\u01a1\u0001\u0000\u0000\u0000\u0607\u0608\u0003D\u001a\u0000\u0608"+ - "\u0609\u0001\u0000\u0000\u0000\u0609\u060a\u0006\u00c9\u000b\u0000\u060a"+ - "\u01a3\u0001\u0000\u0000\u0000\u060b\u060c\u0003F\u001b\u0000\u060c\u060d"+ - "\u0001\u0000\u0000\u0000\u060d\u060e\u0006\u00ca\u000b\u0000\u060e\u01a5"+ - "\u0001\u0000\u0000\u0000\u060f\u0610\u0003H\u001c\u0000\u0610\u0611\u0001"+ - "\u0000\u0000\u0000\u0611\u0612\u0006\u00cb\u0010\u0000\u0612\u0613\u0006"+ - "\u00cb\f\u0000\u0613\u01a7\u0001\u0000\u0000\u0000\u0614\u0615\u0003\u00de"+ - "g\u0000\u0615\u0616\u0001\u0000\u0000\u0000\u0616\u0617\u0006\u00cc\u0015"+ - "\u0000\u0617\u0618\u0006\u00cc\f\u0000\u0618\u0619\u0006\u00cc%\u0000"+ - "\u0619\u01a9\u0001\u0000\u0000\u0000\u061a\u061b\u0003^\'\u0000\u061b"+ - "\u061c\u0001\u0000\u0000\u0000\u061c\u061d\u0006\u00cd\u0016\u0000\u061d"+ - "\u061e\u0006\u00cd\f\u0000\u061e\u061f\u0006\u00cd%\u0000\u061f\u01ab"+ - 
"\u0001\u0000\u0000\u0000\u0620\u0621\u0003B\u0019\u0000\u0621\u0622\u0001"+ - "\u0000\u0000\u0000\u0622\u0623\u0006\u00ce\u000b\u0000\u0623\u01ad\u0001"+ - "\u0000\u0000\u0000\u0624\u0625\u0003D\u001a\u0000\u0625\u0626\u0001\u0000"+ - "\u0000\u0000\u0626\u0627\u0006\u00cf\u000b\u0000\u0627\u01af\u0001\u0000"+ - "\u0000\u0000\u0628\u0629\u0003F\u001b\u0000\u0629\u062a\u0001\u0000\u0000"+ - "\u0000\u062a\u062b\u0006\u00d0\u000b\u0000\u062b\u01b1\u0001\u0000\u0000"+ - "\u0000\u062c\u062d\u0003n/\u0000\u062d\u062e\u0001\u0000\u0000\u0000\u062e"+ - "\u062f\u0006\u00d1\u0012\u0000\u062f\u0630\u0006\u00d1\f\u0000\u0630\u0631"+ - "\u0006\u00d1\t\u0000\u0631\u01b3\u0001\u0000\u0000\u0000\u0632\u0633\u0003"+ - "p0\u0000\u0633\u0634\u0001\u0000\u0000\u0000\u0634\u0635\u0006\u00d2\u0013"+ - "\u0000\u0635\u0636\u0006\u00d2\f\u0000\u0636\u0637\u0006\u00d2\t\u0000"+ - "\u0637\u01b5\u0001\u0000\u0000\u0000\u0638\u0639\u0003B\u0019\u0000\u0639"+ - "\u063a\u0001\u0000\u0000\u0000\u063a\u063b\u0006\u00d3\u000b\u0000\u063b"+ - "\u01b7\u0001\u0000\u0000\u0000\u063c\u063d\u0003D\u001a\u0000\u063d\u063e"+ - "\u0001\u0000\u0000\u0000\u063e\u063f\u0006\u00d4\u000b\u0000\u063f\u01b9"+ - "\u0001\u0000\u0000\u0000\u0640\u0641\u0003F\u001b\u0000\u0641\u0642\u0001"+ - "\u0000\u0000\u0000\u0642\u0643\u0006\u00d5\u000b\u0000\u0643\u01bb\u0001"+ - "\u0000\u0000\u0000\u0644\u0645\u0003\u00bcV\u0000\u0645\u0646\u0001\u0000"+ - "\u0000\u0000\u0646\u0647\u0006\u00d6\f\u0000\u0647\u0648\u0006\u00d6\u0000"+ - "\u0000\u0648\u0649\u0006\u00d6\u001f\u0000\u0649\u01bd\u0001\u0000\u0000"+ - "\u0000\u064a\u064b\u0003\u00b8T\u0000\u064b\u064c\u0001\u0000\u0000\u0000"+ - "\u064c\u064d\u0006\u00d7\f\u0000\u064d\u064e\u0006\u00d7\u0000\u0000\u064e"+ - "\u064f\u0006\u00d7 \u0000\u064f\u01bf\u0001\u0000\u0000\u0000\u0650\u0651"+ - "\u0003d*\u0000\u0651\u0652\u0001\u0000\u0000\u0000\u0652\u0653\u0006\u00d8"+ - "\f\u0000\u0653\u0654\u0006\u00d8\u0000\u0000\u0654\u0655\u0006\u00d8&"+ - "\u0000\u0655\u01c1\u0001\u0000\u0000\u0000\u0656\u0657\u0003H\u001c\u0000"+ - "\u0657\u0658\u0001\u0000\u0000\u0000\u0658\u0659\u0006\u00d9\u0010\u0000"+ - "\u0659\u065a\u0006\u00d9\f\u0000\u065a\u01c3\u0001\u0000\u0000\u0000B"+ - "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e"+ - "\u000f\u0299\u02a3\u02a7\u02aa\u02b3\u02b5\u02c0\u02d3\u02d8\u02e1\u02e8"+ - "\u02ed\u02ef\u02fa\u0302\u0305\u0307\u030c\u0311\u0317\u031e\u0323\u0329"+ - "\u032c\u0334\u0338\u03bc\u03c1\u03c8\u03ca\u03da\u03df\u03e4\u03e6\u03ec"+ - "\u0439\u043e\u046f\u0473\u0478\u047d\u0482\u0484\u0488\u048a\u04e1\u04e5"+ - "\u04ea\u057b\u057d\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006\u0000"+ - "\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005"+ - "\t\u0000\u0005\u000b\u0000\u0005\u000e\u0000\u0005\r\u0000\u0000\u0001"+ - "\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007H\u0000\u0005\u0000\u0000"+ - "\u0007\u001d\u0000\u0007I\u0000\u0007&\u0000\u0007\'\u0000\u0007$\u0000"+ - "\u0007S\u0000\u0007\u001e\u0000\u0007)\u0000\u00075\u0000\u0007G\u0000"+ - "\u0007W\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007a\u0000\u0007`\u0000"+ - "\u0007K\u0000\u0007J\u0000\u0007_\u0000\u0005\f\u0000\u0007\u0014\u0000"+ - "\u0007[\u0000\u0005\u000f\u0000\u0007!\u0000"; + "\u0000\u02f5\u02f7\u0001\u0000\u0000\u0000\u02f6\u02f8\u0005\"\u0000\u0000"+ + "\u02f7\u02f6\u0001\u0000\u0000\u0000\u02f7\u02f8\u0001\u0000\u0000\u0000"+ + "\u02f8\u02fa\u0001\u0000\u0000\u0000\u02f9\u02fb\u0005\"\u0000\u0000\u02fa"+ + 
"\u02f9\u0001\u0000\u0000\u0000\u02fa\u02fb\u0001\u0000\u0000\u0000\u02fb"+ + "\u02fd\u0001\u0000\u0000\u0000\u02fc\u02df\u0001\u0000\u0000\u0000\u02fc"+ + "\u02e8\u0001\u0000\u0000\u0000\u02fd]\u0001\u0000\u0000\u0000\u02fe\u0300"+ + "\u0003H\u001c\u0000\u02ff\u02fe\u0001\u0000\u0000\u0000\u0300\u0301\u0001"+ + "\u0000\u0000\u0000\u0301\u02ff\u0001\u0000\u0000\u0000\u0301\u0302\u0001"+ + "\u0000\u0000\u0000\u0302_\u0001\u0000\u0000\u0000\u0303\u0305\u0003H\u001c"+ + "\u0000\u0304\u0303\u0001\u0000\u0000\u0000\u0305\u0306\u0001\u0000\u0000"+ + "\u0000\u0306\u0304\u0001\u0000\u0000\u0000\u0306\u0307\u0001\u0000\u0000"+ + "\u0000\u0307\u0308\u0001\u0000\u0000\u0000\u0308\u030c\u0003r1\u0000\u0309"+ + "\u030b\u0003H\u001c\u0000\u030a\u0309\u0001\u0000\u0000\u0000\u030b\u030e"+ + "\u0001\u0000\u0000\u0000\u030c\u030a\u0001\u0000\u0000\u0000\u030c\u030d"+ + "\u0001\u0000\u0000\u0000\u030d\u032e\u0001\u0000\u0000\u0000\u030e\u030c"+ + "\u0001\u0000\u0000\u0000\u030f\u0311\u0003r1\u0000\u0310\u0312\u0003H"+ + "\u001c\u0000\u0311\u0310\u0001\u0000\u0000\u0000\u0312\u0313\u0001\u0000"+ + "\u0000\u0000\u0313\u0311\u0001\u0000\u0000\u0000\u0313\u0314\u0001\u0000"+ + "\u0000\u0000\u0314\u032e\u0001\u0000\u0000\u0000\u0315\u0317\u0003H\u001c"+ + "\u0000\u0316\u0315\u0001\u0000\u0000\u0000\u0317\u0318\u0001\u0000\u0000"+ + "\u0000\u0318\u0316\u0001\u0000\u0000\u0000\u0318\u0319\u0001\u0000\u0000"+ + "\u0000\u0319\u0321\u0001\u0000\u0000\u0000\u031a\u031e\u0003r1\u0000\u031b"+ + "\u031d\u0003H\u001c\u0000\u031c\u031b\u0001\u0000\u0000\u0000\u031d\u0320"+ + "\u0001\u0000\u0000\u0000\u031e\u031c\u0001\u0000\u0000\u0000\u031e\u031f"+ + "\u0001\u0000\u0000\u0000\u031f\u0322\u0001\u0000\u0000\u0000\u0320\u031e"+ + "\u0001\u0000\u0000\u0000\u0321\u031a\u0001\u0000\u0000\u0000\u0321\u0322"+ + "\u0001\u0000\u0000\u0000\u0322\u0323\u0001\u0000\u0000\u0000\u0323\u0324"+ + "\u0003P \u0000\u0324\u032e\u0001\u0000\u0000\u0000\u0325\u0327\u0003r"+ + "1\u0000\u0326\u0328\u0003H\u001c\u0000\u0327\u0326\u0001\u0000\u0000\u0000"+ + "\u0328\u0329\u0001\u0000\u0000\u0000\u0329\u0327\u0001\u0000\u0000\u0000"+ + "\u0329\u032a\u0001\u0000\u0000\u0000\u032a\u032b\u0001\u0000\u0000\u0000"+ + "\u032b\u032c\u0003P \u0000\u032c\u032e\u0001\u0000\u0000\u0000\u032d\u0304"+ + "\u0001\u0000\u0000\u0000\u032d\u030f\u0001\u0000\u0000\u0000\u032d\u0316"+ + "\u0001\u0000\u0000\u0000\u032d\u0325\u0001\u0000\u0000\u0000\u032ea\u0001"+ + "\u0000\u0000\u0000\u032f\u0330\u0007\u001e\u0000\u0000\u0330\u0331\u0007"+ + "\u001f\u0000\u0000\u0331c\u0001\u0000\u0000\u0000\u0332\u0333\u0007\f"+ + "\u0000\u0000\u0333\u0334\u0007\t\u0000\u0000\u0334\u0335\u0007\u0000\u0000"+ + "\u0000\u0335e\u0001\u0000\u0000\u0000\u0336\u0337\u0007\f\u0000\u0000"+ + "\u0337\u0338\u0007\u0002\u0000\u0000\u0338\u0339\u0007\u0004\u0000\u0000"+ + "\u0339g\u0001\u0000\u0000\u0000\u033a\u033b\u0005=\u0000\u0000\u033bi"+ + "\u0001\u0000\u0000\u0000\u033c\u033d\u0005:\u0000\u0000\u033d\u033e\u0005"+ + ":\u0000\u0000\u033ek\u0001\u0000\u0000\u0000\u033f\u0340\u0005:\u0000"+ + "\u0000\u0340m\u0001\u0000\u0000\u0000\u0341\u0342\u0005,\u0000\u0000\u0342"+ + "o\u0001\u0000\u0000\u0000\u0343\u0344\u0007\u0000\u0000\u0000\u0344\u0345"+ + "\u0007\u0003\u0000\u0000\u0345\u0346\u0007\u0002\u0000\u0000\u0346\u0347"+ + "\u0007\u0004\u0000\u0000\u0347q\u0001\u0000\u0000\u0000\u0348\u0349\u0005"+ + ".\u0000\u0000\u0349s\u0001\u0000\u0000\u0000\u034a\u034b\u0007\u000f\u0000"+ + "\u0000\u034b\u034c\u0007\f\u0000\u0000\u034c\u034d\u0007\r\u0000\u0000"+ + 
"\u034d\u034e\u0007\u0002\u0000\u0000\u034e\u034f\u0007\u0003\u0000\u0000"+ + "\u034fu\u0001\u0000\u0000\u0000\u0350\u0351\u0007\u000f\u0000\u0000\u0351"+ + "\u0352\u0007\u0001\u0000\u0000\u0352\u0353\u0007\u0006\u0000\u0000\u0353"+ + "\u0354\u0007\u0002\u0000\u0000\u0354\u0355\u0007\u0005\u0000\u0000\u0355"+ + "w\u0001\u0000\u0000\u0000\u0356\u0357\u0007\u0001\u0000\u0000\u0357\u0358"+ + "\u0007\t\u0000\u0000\u0358y\u0001\u0000\u0000\u0000\u0359\u035a\u0007"+ + "\u0001\u0000\u0000\u035a\u035b\u0007\u0002\u0000\u0000\u035b{\u0001\u0000"+ + "\u0000\u0000\u035c\u035d\u0007\r\u0000\u0000\u035d\u035e\u0007\f\u0000"+ + "\u0000\u035e\u035f\u0007\u0002\u0000\u0000\u035f\u0360\u0007\u0005\u0000"+ + "\u0000\u0360}\u0001\u0000\u0000\u0000\u0361\u0362\u0007\r\u0000\u0000"+ + "\u0362\u0363\u0007\u0001\u0000\u0000\u0363\u0364\u0007\u0012\u0000\u0000"+ + "\u0364\u0365\u0007\u0003\u0000\u0000\u0365\u007f\u0001\u0000\u0000\u0000"+ + "\u0366\u0367\u0005(\u0000\u0000\u0367\u0081\u0001\u0000\u0000\u0000\u0368"+ + "\u0369\u0007\t\u0000\u0000\u0369\u036a\u0007\u0007\u0000\u0000\u036a\u036b"+ + "\u0007\u0005\u0000\u0000\u036b\u0083\u0001\u0000\u0000\u0000\u036c\u036d"+ + "\u0007\t\u0000\u0000\u036d\u036e\u0007\u0014\u0000\u0000\u036e\u036f\u0007"+ + "\r\u0000\u0000\u036f\u0370\u0007\r\u0000\u0000\u0370\u0085\u0001\u0000"+ + "\u0000\u0000\u0371\u0372\u0007\t\u0000\u0000\u0372\u0373\u0007\u0014\u0000"+ + "\u0000\u0373\u0374\u0007\r\u0000\u0000\u0374\u0375\u0007\r\u0000\u0000"+ + "\u0375\u0376\u0007\u0002\u0000\u0000\u0376\u0087\u0001\u0000\u0000\u0000"+ + "\u0377\u0378\u0007\u0007\u0000\u0000\u0378\u0379\u0007\u0006\u0000\u0000"+ + "\u0379\u0089\u0001\u0000\u0000\u0000\u037a\u037b\u0005?\u0000\u0000\u037b"+ + "\u008b\u0001\u0000\u0000\u0000\u037c\u037d\u0007\u0006\u0000\u0000\u037d"+ + "\u037e\u0007\r\u0000\u0000\u037e\u037f\u0007\u0001\u0000\u0000\u037f\u0380"+ + "\u0007\u0012\u0000\u0000\u0380\u0381\u0007\u0003\u0000\u0000\u0381\u008d"+ + "\u0001\u0000\u0000\u0000\u0382\u0383\u0005)\u0000\u0000\u0383\u008f\u0001"+ + "\u0000\u0000\u0000\u0384\u0385\u0007\u0005\u0000\u0000\u0385\u0386\u0007"+ + "\u0006\u0000\u0000\u0386\u0387\u0007\u0014\u0000\u0000\u0387\u0388\u0007"+ + "\u0003\u0000\u0000\u0388\u0091\u0001\u0000\u0000\u0000\u0389\u038a\u0005"+ + "=\u0000\u0000\u038a\u038b\u0005=\u0000\u0000\u038b\u0093\u0001\u0000\u0000"+ + "\u0000\u038c\u038d\u0005=\u0000\u0000\u038d\u038e\u0005~\u0000\u0000\u038e"+ + "\u0095\u0001\u0000\u0000\u0000\u038f\u0390\u0005!\u0000\u0000\u0390\u0391"+ + "\u0005=\u0000\u0000\u0391\u0097\u0001\u0000\u0000\u0000\u0392\u0393\u0005"+ + "<\u0000\u0000\u0393\u0099\u0001\u0000\u0000\u0000\u0394\u0395\u0005<\u0000"+ + "\u0000\u0395\u0396\u0005=\u0000\u0000\u0396\u009b\u0001\u0000\u0000\u0000"+ + "\u0397\u0398\u0005>\u0000\u0000\u0398\u009d\u0001\u0000\u0000\u0000\u0399"+ + "\u039a\u0005>\u0000\u0000\u039a\u039b\u0005=\u0000\u0000\u039b\u009f\u0001"+ + "\u0000\u0000\u0000\u039c\u039d\u0005+\u0000\u0000\u039d\u00a1\u0001\u0000"+ + "\u0000\u0000\u039e\u039f\u0005-\u0000\u0000\u039f\u00a3\u0001\u0000\u0000"+ + "\u0000\u03a0\u03a1\u0005*\u0000\u0000\u03a1\u00a5\u0001\u0000\u0000\u0000"+ + "\u03a2\u03a3\u0005/\u0000\u0000\u03a3\u00a7\u0001\u0000\u0000\u0000\u03a4"+ + "\u03a5\u0005%\u0000\u0000\u03a5\u00a9\u0001\u0000\u0000\u0000\u03a6\u03a7"+ + "\u0005{\u0000\u0000\u03a7\u00ab\u0001\u0000\u0000\u0000\u03a8\u03a9\u0005"+ + "}\u0000\u0000\u03a9\u00ad\u0001\u0000\u0000\u0000\u03aa\u03ab\u0003.\u000f"+ + "\u0000\u03ab\u03ac\u0001\u0000\u0000\u0000\u03ac\u03ad\u0006O\r\u0000"+ + 
"\u03ad\u00af\u0001\u0000\u0000\u0000\u03ae\u03b1\u0003\u008a=\u0000\u03af"+ + "\u03b2\u0003J\u001d\u0000\u03b0\u03b2\u0003X$\u0000\u03b1\u03af\u0001"+ + "\u0000\u0000\u0000\u03b1\u03b0\u0001\u0000\u0000\u0000\u03b2\u03b6\u0001"+ + "\u0000\u0000\u0000\u03b3\u03b5\u0003Z%\u0000\u03b4\u03b3\u0001\u0000\u0000"+ + "\u0000\u03b5\u03b8\u0001\u0000\u0000\u0000\u03b6\u03b4\u0001\u0000\u0000"+ + "\u0000\u03b6\u03b7\u0001\u0000\u0000\u0000\u03b7\u03c0\u0001\u0000\u0000"+ + "\u0000\u03b8\u03b6\u0001\u0000\u0000\u0000\u03b9\u03bb\u0003\u008a=\u0000"+ + "\u03ba\u03bc\u0003H\u001c\u0000\u03bb\u03ba\u0001\u0000\u0000\u0000\u03bc"+ + "\u03bd\u0001\u0000\u0000\u0000\u03bd\u03bb\u0001\u0000\u0000\u0000\u03bd"+ + "\u03be\u0001\u0000\u0000\u0000\u03be\u03c0\u0001\u0000\u0000\u0000\u03bf"+ + "\u03ae\u0001\u0000\u0000\u0000\u03bf\u03b9\u0001\u0000\u0000\u0000\u03c0"+ + "\u00b1\u0001\u0000\u0000\u0000\u03c1\u03c2\u0005[\u0000\u0000\u03c2\u03c3"+ + "\u0001\u0000\u0000\u0000\u03c3\u03c4\u0006Q\u0000\u0000\u03c4\u03c5\u0006"+ + "Q\u0000\u0000\u03c5\u00b3\u0001\u0000\u0000\u0000\u03c6\u03c7\u0005]\u0000"+ + "\u0000\u03c7\u03c8\u0001\u0000\u0000\u0000\u03c8\u03c9\u0006R\f\u0000"+ + "\u03c9\u03ca\u0006R\f\u0000\u03ca\u00b5\u0001\u0000\u0000\u0000\u03cb"+ + "\u03cf\u0003J\u001d\u0000\u03cc\u03ce\u0003Z%\u0000\u03cd\u03cc\u0001"+ + "\u0000\u0000\u0000\u03ce\u03d1\u0001\u0000\u0000\u0000\u03cf\u03cd\u0001"+ + "\u0000\u0000\u0000\u03cf\u03d0\u0001\u0000\u0000\u0000\u03d0\u03dc\u0001"+ + "\u0000\u0000\u0000\u03d1\u03cf\u0001\u0000\u0000\u0000\u03d2\u03d5\u0003"+ + "X$\u0000\u03d3\u03d5\u0003R!\u0000\u03d4\u03d2\u0001\u0000\u0000\u0000"+ + "\u03d4\u03d3\u0001\u0000\u0000\u0000\u03d5\u03d7\u0001\u0000\u0000\u0000"+ + "\u03d6\u03d8\u0003Z%\u0000\u03d7\u03d6\u0001\u0000\u0000\u0000\u03d8\u03d9"+ + "\u0001\u0000\u0000\u0000\u03d9\u03d7\u0001\u0000\u0000\u0000\u03d9\u03da"+ + "\u0001\u0000\u0000\u0000\u03da\u03dc\u0001\u0000\u0000\u0000\u03db\u03cb"+ + "\u0001\u0000\u0000\u0000\u03db\u03d4\u0001\u0000\u0000\u0000\u03dc\u00b7"+ + "\u0001\u0000\u0000\u0000\u03dd\u03df\u0003T\"\u0000\u03de\u03e0\u0003"+ + "V#\u0000\u03df\u03de\u0001\u0000\u0000\u0000\u03e0\u03e1\u0001\u0000\u0000"+ + "\u0000\u03e1\u03df\u0001\u0000\u0000\u0000\u03e1\u03e2\u0001\u0000\u0000"+ + "\u0000\u03e2\u03e3\u0001\u0000\u0000\u0000\u03e3\u03e4\u0003T\"\u0000"+ + "\u03e4\u00b9\u0001\u0000\u0000\u0000\u03e5\u03e6\u0003\u00b8T\u0000\u03e6"+ + "\u00bb\u0001\u0000\u0000\u0000\u03e7\u03e8\u0003@\u0018\u0000\u03e8\u03e9"+ + "\u0001\u0000\u0000\u0000\u03e9\u03ea\u0006V\u000b\u0000\u03ea\u00bd\u0001"+ + "\u0000\u0000\u0000\u03eb\u03ec\u0003B\u0019\u0000\u03ec\u03ed\u0001\u0000"+ + "\u0000\u0000\u03ed\u03ee\u0006W\u000b\u0000\u03ee\u00bf\u0001\u0000\u0000"+ + "\u0000\u03ef\u03f0\u0003D\u001a\u0000\u03f0\u03f1\u0001\u0000\u0000\u0000"+ + "\u03f1\u03f2\u0006X\u000b\u0000\u03f2\u00c1\u0001\u0000\u0000\u0000\u03f3"+ + "\u03f4\u0003\u00b2Q\u0000\u03f4\u03f5\u0001\u0000\u0000\u0000\u03f5\u03f6"+ + "\u0006Y\u000e\u0000\u03f6\u03f7\u0006Y\u000f\u0000\u03f7\u00c3\u0001\u0000"+ + "\u0000\u0000\u03f8\u03f9\u0003F\u001b\u0000\u03f9\u03fa\u0001\u0000\u0000"+ + "\u0000\u03fa\u03fb\u0006Z\u0010\u0000\u03fb\u03fc\u0006Z\f\u0000\u03fc"+ + "\u00c5\u0001\u0000\u0000\u0000\u03fd\u03fe\u0003D\u001a\u0000\u03fe\u03ff"+ + "\u0001\u0000\u0000\u0000\u03ff\u0400\u0006[\u000b\u0000\u0400\u00c7\u0001"+ + "\u0000\u0000\u0000\u0401\u0402\u0003@\u0018\u0000\u0402\u0403\u0001\u0000"+ + "\u0000\u0000\u0403\u0404\u0006\\\u000b\u0000\u0404\u00c9\u0001\u0000\u0000"+ + 
"\u0000\u0405\u0406\u0003B\u0019\u0000\u0406\u0407\u0001\u0000\u0000\u0000"+ + "\u0407\u0408\u0006]\u000b\u0000\u0408\u00cb\u0001\u0000\u0000\u0000\u0409"+ + "\u040a\u0003F\u001b\u0000\u040a\u040b\u0001\u0000\u0000\u0000\u040b\u040c"+ + "\u0006^\u0010\u0000\u040c\u040d\u0006^\f\u0000\u040d\u00cd\u0001\u0000"+ + "\u0000\u0000\u040e\u040f\u0003\u00b2Q\u0000\u040f\u0410\u0001\u0000\u0000"+ + "\u0000\u0410\u0411\u0006_\u000e\u0000\u0411\u00cf\u0001\u0000\u0000\u0000"+ + "\u0412\u0413\u0003\u00b4R\u0000\u0413\u0414\u0001\u0000\u0000\u0000\u0414"+ + "\u0415\u0006`\u0011\u0000\u0415\u00d1\u0001\u0000\u0000\u0000\u0416\u0417"+ + "\u0003l.\u0000\u0417\u0418\u0001\u0000\u0000\u0000\u0418\u0419\u0006a"+ + "\u0012\u0000\u0419\u00d3\u0001\u0000\u0000\u0000\u041a\u041b\u0003n/\u0000"+ + "\u041b\u041c\u0001\u0000\u0000\u0000\u041c\u041d\u0006b\u0013\u0000\u041d"+ + "\u00d5\u0001\u0000\u0000\u0000\u041e\u041f\u0003h,\u0000\u041f\u0420\u0001"+ + "\u0000\u0000\u0000\u0420\u0421\u0006c\u0014\u0000\u0421\u00d7\u0001\u0000"+ + "\u0000\u0000\u0422\u0423\u0007\u0010\u0000\u0000\u0423\u0424\u0007\u0003"+ + "\u0000\u0000\u0424\u0425\u0007\u0005\u0000\u0000\u0425\u0426\u0007\f\u0000"+ + "\u0000\u0426\u0427\u0007\u0000\u0000\u0000\u0427\u0428\u0007\f\u0000\u0000"+ + "\u0428\u0429\u0007\u0005\u0000\u0000\u0429\u042a\u0007\f\u0000\u0000\u042a"+ + "\u00d9\u0001\u0000\u0000\u0000\u042b\u042f\b \u0000\u0000\u042c\u042d"+ + "\u0005/\u0000\u0000\u042d\u042f\b!\u0000\u0000\u042e\u042b\u0001\u0000"+ + "\u0000\u0000\u042e\u042c\u0001\u0000\u0000\u0000\u042f\u00db\u0001\u0000"+ + "\u0000\u0000\u0430\u0432\u0003\u00dae\u0000\u0431\u0430\u0001\u0000\u0000"+ + "\u0000\u0432\u0433\u0001\u0000\u0000\u0000\u0433\u0431\u0001\u0000\u0000"+ + "\u0000\u0433\u0434\u0001\u0000\u0000\u0000\u0434\u00dd\u0001\u0000\u0000"+ + "\u0000\u0435\u0436\u0003\u00dcf\u0000\u0436\u0437\u0001\u0000\u0000\u0000"+ + "\u0437\u0438\u0006g\u0015\u0000\u0438\u00df\u0001\u0000\u0000\u0000\u0439"+ + "\u043a\u0003\\&\u0000\u043a\u043b\u0001\u0000\u0000\u0000\u043b\u043c"+ + "\u0006h\u0016\u0000\u043c\u00e1\u0001\u0000\u0000\u0000\u043d\u043e\u0003"+ + "@\u0018\u0000\u043e\u043f\u0001\u0000\u0000\u0000\u043f\u0440\u0006i\u000b"+ + "\u0000\u0440\u00e3\u0001\u0000\u0000\u0000\u0441\u0442\u0003B\u0019\u0000"+ + "\u0442\u0443\u0001\u0000\u0000\u0000\u0443\u0444\u0006j\u000b\u0000\u0444"+ + "\u00e5\u0001\u0000\u0000\u0000\u0445\u0446\u0003D\u001a\u0000\u0446\u0447"+ + "\u0001\u0000\u0000\u0000\u0447\u0448\u0006k\u000b\u0000\u0448\u00e7\u0001"+ + "\u0000\u0000\u0000\u0449\u044a\u0003F\u001b\u0000\u044a\u044b\u0001\u0000"+ + "\u0000\u0000\u044b\u044c\u0006l\u0010\u0000\u044c\u044d\u0006l\f\u0000"+ + "\u044d\u00e9\u0001\u0000\u0000\u0000\u044e\u044f\u0003r1\u0000\u044f\u0450"+ + "\u0001\u0000\u0000\u0000\u0450\u0451\u0006m\u0017\u0000\u0451\u00eb\u0001"+ + "\u0000\u0000\u0000\u0452\u0453\u0003n/\u0000\u0453\u0454\u0001\u0000\u0000"+ + "\u0000\u0454\u0455\u0006n\u0013\u0000\u0455\u00ed\u0001\u0000\u0000\u0000"+ + "\u0456\u0457\u0004o\u0006\u0000\u0457\u0458\u0003\u008a=\u0000\u0458\u0459"+ + "\u0001\u0000\u0000\u0000\u0459\u045a\u0006o\u0018\u0000\u045a\u00ef\u0001"+ + "\u0000\u0000\u0000\u045b\u045c\u0004p\u0007\u0000\u045c\u045d\u0003\u00b0"+ + "P\u0000\u045d\u045e\u0001\u0000\u0000\u0000\u045e\u045f\u0006p\u0019\u0000"+ + "\u045f\u00f1\u0001\u0000\u0000\u0000\u0460\u0465\u0003J\u001d\u0000\u0461"+ + "\u0465\u0003H\u001c\u0000\u0462\u0465\u0003X$\u0000\u0463\u0465\u0003"+ + "\u00a4J\u0000\u0464\u0460\u0001\u0000\u0000\u0000\u0464\u0461\u0001\u0000"+ + 
"\u0000\u0000\u0464\u0462\u0001\u0000\u0000\u0000\u0464\u0463\u0001\u0000"+ + "\u0000\u0000\u0465\u00f3\u0001\u0000\u0000\u0000\u0466\u0469\u0003J\u001d"+ + "\u0000\u0467\u0469\u0003\u00a4J\u0000\u0468\u0466\u0001\u0000\u0000\u0000"+ + "\u0468\u0467\u0001\u0000\u0000\u0000\u0469\u046d\u0001\u0000\u0000\u0000"+ + "\u046a\u046c\u0003\u00f2q\u0000\u046b\u046a\u0001\u0000\u0000\u0000\u046c"+ + "\u046f\u0001\u0000\u0000\u0000\u046d\u046b\u0001\u0000\u0000\u0000\u046d"+ + "\u046e\u0001\u0000\u0000\u0000\u046e\u047a\u0001\u0000\u0000\u0000\u046f"+ + "\u046d\u0001\u0000\u0000\u0000\u0470\u0473\u0003X$\u0000\u0471\u0473\u0003"+ + "R!\u0000\u0472\u0470\u0001\u0000\u0000\u0000\u0472\u0471\u0001\u0000\u0000"+ + "\u0000\u0473\u0475\u0001\u0000\u0000\u0000\u0474\u0476\u0003\u00f2q\u0000"+ + "\u0475\u0474\u0001\u0000\u0000\u0000\u0476\u0477\u0001\u0000\u0000\u0000"+ + "\u0477\u0475\u0001\u0000\u0000\u0000\u0477\u0478\u0001\u0000\u0000\u0000"+ + "\u0478\u047a\u0001\u0000\u0000\u0000\u0479\u0468\u0001\u0000\u0000\u0000"+ + "\u0479\u0472\u0001\u0000\u0000\u0000\u047a\u00f5\u0001\u0000\u0000\u0000"+ + "\u047b\u047e\u0003\u00f4r\u0000\u047c\u047e\u0003\u00b8T\u0000\u047d\u047b"+ + "\u0001\u0000\u0000\u0000\u047d\u047c\u0001\u0000\u0000\u0000\u047e\u047f"+ + "\u0001\u0000\u0000\u0000\u047f\u047d\u0001\u0000\u0000\u0000\u047f\u0480"+ + "\u0001\u0000\u0000\u0000\u0480\u00f7\u0001\u0000\u0000\u0000\u0481\u0482"+ + "\u0003@\u0018\u0000\u0482\u0483\u0001\u0000\u0000\u0000\u0483\u0484\u0006"+ + "t\u000b\u0000\u0484\u00f9\u0001\u0000\u0000\u0000\u0485\u0486\u0003B\u0019"+ + "\u0000\u0486\u0487\u0001\u0000\u0000\u0000\u0487\u0488\u0006u\u000b\u0000"+ + "\u0488\u00fb\u0001\u0000\u0000\u0000\u0489\u048a\u0003D\u001a\u0000\u048a"+ + "\u048b\u0001\u0000\u0000\u0000\u048b\u048c\u0006v\u000b\u0000\u048c\u00fd"+ + "\u0001\u0000\u0000\u0000\u048d\u048e\u0003F\u001b\u0000\u048e\u048f\u0001"+ + "\u0000\u0000\u0000\u048f\u0490\u0006w\u0010\u0000\u0490\u0491\u0006w\f"+ + "\u0000\u0491\u00ff\u0001\u0000\u0000\u0000\u0492\u0493\u0003h,\u0000\u0493"+ + "\u0494\u0001\u0000\u0000\u0000\u0494\u0495\u0006x\u0014\u0000\u0495\u0101"+ + "\u0001\u0000\u0000\u0000\u0496\u0497\u0003n/\u0000\u0497\u0498\u0001\u0000"+ + "\u0000\u0000\u0498\u0499\u0006y\u0013\u0000\u0499\u0103\u0001\u0000\u0000"+ + "\u0000\u049a\u049b\u0003r1\u0000\u049b\u049c\u0001\u0000\u0000\u0000\u049c"+ + "\u049d\u0006z\u0017\u0000\u049d\u0105\u0001\u0000\u0000\u0000\u049e\u049f"+ + "\u0004{\b\u0000\u049f\u04a0\u0003\u008a=\u0000\u04a0\u04a1\u0001\u0000"+ + "\u0000\u0000\u04a1\u04a2\u0006{\u0018\u0000\u04a2\u0107\u0001\u0000\u0000"+ + "\u0000\u04a3\u04a4\u0004|\t\u0000\u04a4\u04a5\u0003\u00b0P\u0000\u04a5"+ + "\u04a6\u0001\u0000\u0000\u0000\u04a6\u04a7\u0006|\u0019\u0000\u04a7\u0109"+ + "\u0001\u0000\u0000\u0000\u04a8\u04a9\u0007\f\u0000\u0000\u04a9\u04aa\u0007"+ + "\u0002\u0000\u0000\u04aa\u010b\u0001\u0000\u0000\u0000\u04ab\u04ac\u0003"+ + "\u00f6s\u0000\u04ac\u04ad\u0001\u0000\u0000\u0000\u04ad\u04ae\u0006~\u001a"+ + "\u0000\u04ae\u010d\u0001\u0000\u0000\u0000\u04af\u04b0\u0003@\u0018\u0000"+ + "\u04b0\u04b1\u0001\u0000\u0000\u0000\u04b1\u04b2\u0006\u007f\u000b\u0000"+ + "\u04b2\u010f\u0001\u0000\u0000\u0000\u04b3\u04b4\u0003B\u0019\u0000\u04b4"+ + "\u04b5\u0001\u0000\u0000\u0000\u04b5\u04b6\u0006\u0080\u000b\u0000\u04b6"+ + "\u0111\u0001\u0000\u0000\u0000\u04b7\u04b8\u0003D\u001a\u0000\u04b8\u04b9"+ + "\u0001\u0000\u0000\u0000\u04b9\u04ba\u0006\u0081\u000b\u0000\u04ba\u0113"+ + "\u0001\u0000\u0000\u0000\u04bb\u04bc\u0003F\u001b\u0000\u04bc\u04bd\u0001"+ + 
"\u0000\u0000\u0000\u04bd\u04be\u0006\u0082\u0010\u0000\u04be\u04bf\u0006"+ + "\u0082\f\u0000\u04bf\u0115\u0001\u0000\u0000\u0000\u04c0\u04c1\u0003\u00b2"+ + "Q\u0000\u04c1\u04c2\u0001\u0000\u0000\u0000\u04c2\u04c3\u0006\u0083\u000e"+ + "\u0000\u04c3\u04c4\u0006\u0083\u001b\u0000\u04c4\u0117\u0001\u0000\u0000"+ + "\u0000\u04c5\u04c6\u0007\u0007\u0000\u0000\u04c6\u04c7\u0007\t\u0000\u0000"+ + "\u04c7\u04c8\u0001\u0000\u0000\u0000\u04c8\u04c9\u0006\u0084\u001c\u0000"+ + "\u04c9\u0119\u0001\u0000\u0000\u0000\u04ca\u04cb\u0007\u0013\u0000\u0000"+ + "\u04cb\u04cc\u0007\u0001\u0000\u0000\u04cc\u04cd\u0007\u0005\u0000\u0000"+ + "\u04cd\u04ce\u0007\n\u0000\u0000\u04ce\u04cf\u0001\u0000\u0000\u0000\u04cf"+ + "\u04d0\u0006\u0085\u001c\u0000\u04d0\u011b\u0001\u0000\u0000\u0000\u04d1"+ + "\u04d2\b\"\u0000\u0000\u04d2\u011d\u0001\u0000\u0000\u0000\u04d3\u04d5"+ + "\u0003\u011c\u0086\u0000\u04d4\u04d3\u0001\u0000\u0000\u0000\u04d5\u04d6"+ + "\u0001\u0000\u0000\u0000\u04d6\u04d4\u0001\u0000\u0000\u0000\u04d6\u04d7"+ + "\u0001\u0000\u0000\u0000\u04d7\u04d8\u0001\u0000\u0000\u0000\u04d8\u04d9"+ + "\u0003l.\u0000\u04d9\u04db\u0001\u0000\u0000\u0000\u04da\u04d4\u0001\u0000"+ + "\u0000\u0000\u04da\u04db\u0001\u0000\u0000\u0000\u04db\u04dd\u0001\u0000"+ + "\u0000\u0000\u04dc\u04de\u0003\u011c\u0086\u0000\u04dd\u04dc\u0001\u0000"+ + "\u0000\u0000\u04de\u04df\u0001\u0000\u0000\u0000\u04df\u04dd\u0001\u0000"+ + "\u0000\u0000\u04df\u04e0\u0001\u0000\u0000\u0000\u04e0\u011f\u0001\u0000"+ + "\u0000\u0000\u04e1\u04e2\u0003\u011e\u0087\u0000\u04e2\u04e3\u0001\u0000"+ + "\u0000\u0000\u04e3\u04e4\u0006\u0088\u001d\u0000\u04e4\u0121\u0001\u0000"+ + "\u0000\u0000\u04e5\u04e6\u0003@\u0018\u0000\u04e6\u04e7\u0001\u0000\u0000"+ + "\u0000\u04e7\u04e8\u0006\u0089\u000b\u0000\u04e8\u0123\u0001\u0000\u0000"+ + "\u0000\u04e9\u04ea\u0003B\u0019\u0000\u04ea\u04eb\u0001\u0000\u0000\u0000"+ + "\u04eb\u04ec\u0006\u008a\u000b\u0000\u04ec\u0125\u0001\u0000\u0000\u0000"+ + "\u04ed\u04ee\u0003D\u001a\u0000\u04ee\u04ef\u0001\u0000\u0000\u0000\u04ef"+ + "\u04f0\u0006\u008b\u000b\u0000\u04f0\u0127\u0001\u0000\u0000\u0000\u04f1"+ + "\u04f2\u0003F\u001b\u0000\u04f2\u04f3\u0001\u0000\u0000\u0000\u04f3\u04f4"+ + "\u0006\u008c\u0010\u0000\u04f4\u04f5\u0006\u008c\f\u0000\u04f5\u04f6\u0006"+ + "\u008c\f\u0000\u04f6\u0129\u0001\u0000\u0000\u0000\u04f7\u04f8\u0003h"+ + ",\u0000\u04f8\u04f9\u0001\u0000\u0000\u0000\u04f9\u04fa\u0006\u008d\u0014"+ + "\u0000\u04fa\u012b\u0001\u0000\u0000\u0000\u04fb\u04fc\u0003n/\u0000\u04fc"+ + "\u04fd\u0001\u0000\u0000\u0000\u04fd\u04fe\u0006\u008e\u0013\u0000\u04fe"+ + "\u012d\u0001\u0000\u0000\u0000\u04ff\u0500\u0003r1\u0000\u0500\u0501\u0001"+ + "\u0000\u0000\u0000\u0501\u0502\u0006\u008f\u0017\u0000\u0502\u012f\u0001"+ + "\u0000\u0000\u0000\u0503\u0504\u0003\u011a\u0085\u0000\u0504\u0505\u0001"+ + "\u0000\u0000\u0000\u0505\u0506\u0006\u0090\u001e\u0000\u0506\u0131\u0001"+ + "\u0000\u0000\u0000\u0507\u0508\u0003\u00f6s\u0000\u0508\u0509\u0001\u0000"+ + "\u0000\u0000\u0509\u050a\u0006\u0091\u001a\u0000\u050a\u0133\u0001\u0000"+ + "\u0000\u0000\u050b\u050c\u0003\u00baU\u0000\u050c\u050d\u0001\u0000\u0000"+ + "\u0000\u050d\u050e\u0006\u0092\u001f\u0000\u050e\u0135\u0001\u0000\u0000"+ + "\u0000\u050f\u0510\u0004\u0093\n\u0000\u0510\u0511\u0003\u008a=\u0000"+ + "\u0511\u0512\u0001\u0000\u0000\u0000\u0512\u0513\u0006\u0093\u0018\u0000"+ + "\u0513\u0137\u0001\u0000\u0000\u0000\u0514\u0515\u0004\u0094\u000b\u0000"+ + "\u0515\u0516\u0003\u00b0P\u0000\u0516\u0517\u0001\u0000\u0000\u0000\u0517"+ + 
"\u0518\u0006\u0094\u0019\u0000\u0518\u0139\u0001\u0000\u0000\u0000\u0519"+ + "\u051a\u0003@\u0018\u0000\u051a\u051b\u0001\u0000\u0000\u0000\u051b\u051c"+ + "\u0006\u0095\u000b\u0000\u051c\u013b\u0001\u0000\u0000\u0000\u051d\u051e"+ + "\u0003B\u0019\u0000\u051e\u051f\u0001\u0000\u0000\u0000\u051f\u0520\u0006"+ + "\u0096\u000b\u0000\u0520\u013d\u0001\u0000\u0000\u0000\u0521\u0522\u0003"+ + "D\u001a\u0000\u0522\u0523\u0001\u0000\u0000\u0000\u0523\u0524\u0006\u0097"+ + "\u000b\u0000\u0524\u013f\u0001\u0000\u0000\u0000\u0525\u0526\u0003F\u001b"+ + "\u0000\u0526\u0527\u0001\u0000\u0000\u0000\u0527\u0528\u0006\u0098\u0010"+ + "\u0000\u0528\u0529\u0006\u0098\f\u0000\u0529\u0141\u0001\u0000\u0000\u0000"+ + "\u052a\u052b\u0003r1\u0000\u052b\u052c\u0001\u0000\u0000\u0000\u052c\u052d"+ + "\u0006\u0099\u0017\u0000\u052d\u0143\u0001\u0000\u0000\u0000\u052e\u052f"+ + "\u0004\u009a\f\u0000\u052f\u0530\u0003\u008a=\u0000\u0530\u0531\u0001"+ + "\u0000\u0000\u0000\u0531\u0532\u0006\u009a\u0018\u0000\u0532\u0145\u0001"+ + "\u0000\u0000\u0000\u0533\u0534\u0004\u009b\r\u0000\u0534\u0535\u0003\u00b0"+ + "P\u0000\u0535\u0536\u0001\u0000\u0000\u0000\u0536\u0537\u0006\u009b\u0019"+ + "\u0000\u0537\u0147\u0001\u0000\u0000\u0000\u0538\u0539\u0003\u00baU\u0000"+ + "\u0539\u053a\u0001\u0000\u0000\u0000\u053a\u053b\u0006\u009c\u001f\u0000"+ + "\u053b\u0149\u0001\u0000\u0000\u0000\u053c\u053d\u0003\u00b6S\u0000\u053d"+ + "\u053e\u0001\u0000\u0000\u0000\u053e\u053f\u0006\u009d \u0000\u053f\u014b"+ + "\u0001\u0000\u0000\u0000\u0540\u0541\u0003@\u0018\u0000\u0541\u0542\u0001"+ + "\u0000\u0000\u0000\u0542\u0543\u0006\u009e\u000b\u0000\u0543\u014d\u0001"+ + "\u0000\u0000\u0000\u0544\u0545\u0003B\u0019\u0000\u0545\u0546\u0001\u0000"+ + "\u0000\u0000\u0546\u0547\u0006\u009f\u000b\u0000\u0547\u014f\u0001\u0000"+ + "\u0000\u0000\u0548\u0549\u0003D\u001a\u0000\u0549\u054a\u0001\u0000\u0000"+ + "\u0000\u054a\u054b\u0006\u00a0\u000b\u0000\u054b\u0151\u0001\u0000\u0000"+ + "\u0000\u054c\u054d\u0003F\u001b\u0000\u054d\u054e\u0001\u0000\u0000\u0000"+ + "\u054e\u054f\u0006\u00a1\u0010\u0000\u054f\u0550\u0006\u00a1\f\u0000\u0550"+ + "\u0153\u0001\u0000\u0000\u0000\u0551\u0552\u0007\u0001\u0000\u0000\u0552"+ + "\u0553\u0007\t\u0000\u0000\u0553\u0554\u0007\u000f\u0000\u0000\u0554\u0555"+ + "\u0007\u0007\u0000\u0000\u0555\u0155\u0001\u0000\u0000\u0000\u0556\u0557"+ + "\u0003@\u0018\u0000\u0557\u0558\u0001\u0000\u0000\u0000\u0558\u0559\u0006"+ + "\u00a3\u000b\u0000\u0559\u0157\u0001\u0000\u0000\u0000\u055a\u055b\u0003"+ + "B\u0019\u0000\u055b\u055c\u0001\u0000\u0000\u0000\u055c\u055d\u0006\u00a4"+ + "\u000b\u0000\u055d\u0159\u0001\u0000\u0000\u0000\u055e\u055f\u0003D\u001a"+ + "\u0000\u055f\u0560\u0001\u0000\u0000\u0000\u0560\u0561\u0006\u00a5\u000b"+ + "\u0000\u0561\u015b\u0001\u0000\u0000\u0000\u0562\u0563\u0003\u00b4R\u0000"+ + "\u0563\u0564\u0001\u0000\u0000\u0000\u0564\u0565\u0006\u00a6\u0011\u0000"+ + "\u0565\u0566\u0006\u00a6\f\u0000\u0566\u015d\u0001\u0000\u0000\u0000\u0567"+ + "\u0568\u0003l.\u0000\u0568\u0569\u0001\u0000\u0000\u0000\u0569\u056a\u0006"+ + "\u00a7\u0012\u0000\u056a\u015f\u0001\u0000\u0000\u0000\u056b\u0571\u0003"+ + "R!\u0000\u056c\u0571\u0003H\u001c\u0000\u056d\u0571\u0003r1\u0000\u056e"+ + "\u0571\u0003J\u001d\u0000\u056f\u0571\u0003X$\u0000\u0570\u056b\u0001"+ + "\u0000\u0000\u0000\u0570\u056c\u0001\u0000\u0000\u0000\u0570\u056d\u0001"+ + "\u0000\u0000\u0000\u0570\u056e\u0001\u0000\u0000\u0000\u0570\u056f\u0001"+ + "\u0000\u0000\u0000\u0571\u0572\u0001\u0000\u0000\u0000\u0572\u0570\u0001"+ + 
"\u0000\u0000\u0000\u0572\u0573\u0001\u0000\u0000\u0000\u0573\u0161\u0001"+ + "\u0000\u0000\u0000\u0574\u0575\u0003@\u0018\u0000\u0575\u0576\u0001\u0000"+ + "\u0000\u0000\u0576\u0577\u0006\u00a9\u000b\u0000\u0577\u0163\u0001\u0000"+ + "\u0000\u0000\u0578\u0579\u0003B\u0019\u0000\u0579\u057a\u0001\u0000\u0000"+ + "\u0000\u057a\u057b\u0006\u00aa\u000b\u0000\u057b\u0165\u0001\u0000\u0000"+ + "\u0000\u057c\u057d\u0003D\u001a\u0000\u057d\u057e\u0001\u0000\u0000\u0000"+ + "\u057e\u057f\u0006\u00ab\u000b\u0000\u057f\u0167\u0001\u0000\u0000\u0000"+ + "\u0580\u0581\u0003F\u001b\u0000\u0581\u0582\u0001\u0000\u0000\u0000\u0582"+ + "\u0583\u0006\u00ac\u0010\u0000\u0583\u0584\u0006\u00ac\f\u0000\u0584\u0169"+ + "\u0001\u0000\u0000\u0000\u0585\u0586\u0003l.\u0000\u0586\u0587\u0001\u0000"+ + "\u0000\u0000\u0587\u0588\u0006\u00ad\u0012\u0000\u0588\u016b\u0001\u0000"+ + "\u0000\u0000\u0589\u058a\u0003n/\u0000\u058a\u058b\u0001\u0000\u0000\u0000"+ + "\u058b\u058c\u0006\u00ae\u0013\u0000\u058c\u016d\u0001\u0000\u0000\u0000"+ + "\u058d\u058e\u0003r1\u0000\u058e\u058f\u0001\u0000\u0000\u0000\u058f\u0590"+ + "\u0006\u00af\u0017\u0000\u0590\u016f\u0001\u0000\u0000\u0000\u0591\u0592"+ + "\u0003\u0118\u0084\u0000\u0592\u0593\u0001\u0000\u0000\u0000\u0593\u0594"+ + "\u0006\u00b0!\u0000\u0594\u0595\u0006\u00b0\"\u0000\u0595\u0171\u0001"+ + "\u0000\u0000\u0000\u0596\u0597\u0003\u00dcf\u0000\u0597\u0598\u0001\u0000"+ + "\u0000\u0000\u0598\u0599\u0006\u00b1\u0015\u0000\u0599\u0173\u0001\u0000"+ + "\u0000\u0000\u059a\u059b\u0003\\&\u0000\u059b\u059c\u0001\u0000\u0000"+ + "\u0000\u059c\u059d\u0006\u00b2\u0016\u0000\u059d\u0175\u0001\u0000\u0000"+ + "\u0000\u059e\u059f\u0003@\u0018\u0000\u059f\u05a0\u0001\u0000\u0000\u0000"+ + "\u05a0\u05a1\u0006\u00b3\u000b\u0000\u05a1\u0177\u0001\u0000\u0000\u0000"+ + "\u05a2\u05a3\u0003B\u0019\u0000\u05a3\u05a4\u0001\u0000\u0000\u0000\u05a4"+ + "\u05a5\u0006\u00b4\u000b\u0000\u05a5\u0179\u0001\u0000\u0000\u0000\u05a6"+ + "\u05a7\u0003D\u001a\u0000\u05a7\u05a8\u0001\u0000\u0000\u0000\u05a8\u05a9"+ + "\u0006\u00b5\u000b\u0000\u05a9\u017b\u0001\u0000\u0000\u0000\u05aa\u05ab"+ + "\u0003F\u001b\u0000\u05ab\u05ac\u0001\u0000\u0000\u0000\u05ac\u05ad\u0006"+ + "\u00b6\u0010\u0000\u05ad\u05ae\u0006\u00b6\f\u0000\u05ae\u05af\u0006\u00b6"+ + "\f\u0000\u05af\u017d\u0001\u0000\u0000\u0000\u05b0\u05b1\u0003n/\u0000"+ + "\u05b1\u05b2\u0001\u0000\u0000\u0000\u05b2\u05b3\u0006\u00b7\u0013\u0000"+ + "\u05b3\u017f\u0001\u0000\u0000\u0000\u05b4\u05b5\u0003r1\u0000\u05b5\u05b6"+ + "\u0001\u0000\u0000\u0000\u05b6\u05b7\u0006\u00b8\u0017\u0000\u05b7\u0181"+ + "\u0001\u0000\u0000\u0000\u05b8\u05b9\u0003\u00f6s\u0000\u05b9\u05ba\u0001"+ + "\u0000\u0000\u0000\u05ba\u05bb\u0006\u00b9\u001a\u0000\u05bb\u0183\u0001"+ + "\u0000\u0000\u0000\u05bc\u05bd\u0003@\u0018\u0000\u05bd\u05be\u0001\u0000"+ + "\u0000\u0000\u05be\u05bf\u0006\u00ba\u000b\u0000\u05bf\u0185\u0001\u0000"+ + "\u0000\u0000\u05c0\u05c1\u0003B\u0019\u0000\u05c1\u05c2\u0001\u0000\u0000"+ + "\u0000\u05c2\u05c3\u0006\u00bb\u000b\u0000\u05c3\u0187\u0001\u0000\u0000"+ + "\u0000\u05c4\u05c5\u0003D\u001a\u0000\u05c5\u05c6\u0001\u0000\u0000\u0000"+ + "\u05c6\u05c7\u0006\u00bc\u000b\u0000\u05c7\u0189\u0001\u0000\u0000\u0000"+ + "\u05c8\u05c9\u0003F\u001b\u0000\u05c9\u05ca\u0001\u0000\u0000\u0000\u05ca"+ + "\u05cb\u0006\u00bd\u0010\u0000\u05cb\u05cc\u0006\u00bd\f\u0000\u05cc\u018b"+ + "\u0001\u0000\u0000\u0000\u05cd\u05ce\u0007#\u0000\u0000\u05ce\u05cf\u0007"+ + "\u0007\u0000\u0000\u05cf\u05d0\u0007\u0001\u0000\u0000\u05d0\u05d1\u0007"+ + 
"\t\u0000\u0000\u05d1\u018d\u0001\u0000\u0000\u0000\u05d2\u05d3\u0003\u010a"+ + "}\u0000\u05d3\u05d4\u0001\u0000\u0000\u0000\u05d4\u05d5\u0006\u00bf#\u0000"+ + "\u05d5\u018f\u0001\u0000\u0000\u0000\u05d6\u05d7\u0003\u0118\u0084\u0000"+ + "\u05d7\u05d8\u0001\u0000\u0000\u0000\u05d8\u05d9\u0006\u00c0!\u0000\u05d9"+ + "\u05da\u0006\u00c0\f\u0000\u05da\u05db\u0006\u00c0\u0000\u0000\u05db\u0191"+ + "\u0001\u0000\u0000\u0000\u05dc\u05dd\u0007\u0014\u0000\u0000\u05dd\u05de"+ + "\u0007\u0002\u0000\u0000\u05de\u05df\u0007\u0001\u0000\u0000\u05df\u05e0"+ + "\u0007\t\u0000\u0000\u05e0\u05e1\u0007\u0011\u0000\u0000\u05e1\u05e2\u0001"+ + "\u0000\u0000\u0000\u05e2\u05e3\u0006\u00c1\f\u0000\u05e3\u05e4\u0006\u00c1"+ + "\u0000\u0000\u05e4\u0193\u0001\u0000\u0000\u0000\u05e5\u05e6\u0003\u00dc"+ + "f\u0000\u05e6\u05e7\u0001\u0000\u0000\u0000\u05e7\u05e8\u0006\u00c2\u0015"+ + "\u0000\u05e8\u0195\u0001\u0000\u0000\u0000\u05e9\u05ea\u0003\\&\u0000"+ + "\u05ea\u05eb\u0001\u0000\u0000\u0000\u05eb\u05ec\u0006\u00c3\u0016\u0000"+ + "\u05ec\u0197\u0001\u0000\u0000\u0000\u05ed\u05ee\u0003l.\u0000\u05ee\u05ef"+ + "\u0001\u0000\u0000\u0000\u05ef\u05f0\u0006\u00c4\u0012\u0000\u05f0\u0199"+ + "\u0001\u0000\u0000\u0000\u05f1\u05f2\u0003\u00b6S\u0000\u05f2\u05f3\u0001"+ + "\u0000\u0000\u0000\u05f3\u05f4\u0006\u00c5 \u0000\u05f4\u019b\u0001\u0000"+ + "\u0000\u0000\u05f5\u05f6\u0003\u00baU\u0000\u05f6\u05f7\u0001\u0000\u0000"+ + "\u0000\u05f7\u05f8\u0006\u00c6\u001f\u0000\u05f8\u019d\u0001\u0000\u0000"+ + "\u0000\u05f9\u05fa\u0003@\u0018\u0000\u05fa\u05fb\u0001\u0000\u0000\u0000"+ + "\u05fb\u05fc\u0006\u00c7\u000b\u0000\u05fc\u019f\u0001\u0000\u0000\u0000"+ + "\u05fd\u05fe\u0003B\u0019\u0000\u05fe\u05ff\u0001\u0000\u0000\u0000\u05ff"+ + "\u0600\u0006\u00c8\u000b\u0000\u0600\u01a1\u0001\u0000\u0000\u0000\u0601"+ + "\u0602\u0003D\u001a\u0000\u0602\u0603\u0001\u0000\u0000\u0000\u0603\u0604"+ + "\u0006\u00c9\u000b\u0000\u0604\u01a3\u0001\u0000\u0000\u0000\u0605\u0606"+ + "\u0003F\u001b\u0000\u0606\u0607\u0001\u0000\u0000\u0000\u0607\u0608\u0006"+ + "\u00ca\u0010\u0000\u0608\u0609\u0006\u00ca\f\u0000\u0609\u01a5\u0001\u0000"+ + "\u0000\u0000\u060a\u060b\u0003\u00dcf\u0000\u060b\u060c\u0001\u0000\u0000"+ + "\u0000\u060c\u060d\u0006\u00cb\u0015\u0000\u060d\u060e\u0006\u00cb\f\u0000"+ + "\u060e\u060f\u0006\u00cb$\u0000\u060f\u01a7\u0001\u0000\u0000\u0000\u0610"+ + "\u0611\u0003\\&\u0000\u0611\u0612\u0001\u0000\u0000\u0000\u0612\u0613"+ + "\u0006\u00cc\u0016\u0000\u0613\u0614\u0006\u00cc\f\u0000\u0614\u0615\u0006"+ + "\u00cc$\u0000\u0615\u01a9\u0001\u0000\u0000\u0000\u0616\u0617\u0003@\u0018"+ + "\u0000\u0617\u0618\u0001\u0000\u0000\u0000\u0618\u0619\u0006\u00cd\u000b"+ + "\u0000\u0619\u01ab\u0001\u0000\u0000\u0000\u061a\u061b\u0003B\u0019\u0000"+ + "\u061b\u061c\u0001\u0000\u0000\u0000\u061c\u061d\u0006\u00ce\u000b\u0000"+ + "\u061d\u01ad\u0001\u0000\u0000\u0000\u061e\u061f\u0003D\u001a\u0000\u061f"+ + "\u0620\u0001\u0000\u0000\u0000\u0620\u0621\u0006\u00cf\u000b\u0000\u0621"+ + "\u01af\u0001\u0000\u0000\u0000\u0622\u0623\u0003l.\u0000\u0623\u0624\u0001"+ + "\u0000\u0000\u0000\u0624\u0625\u0006\u00d0\u0012\u0000\u0625\u0626\u0006"+ + "\u00d0\f\u0000\u0626\u0627\u0006\u00d0\n\u0000\u0627\u01b1\u0001\u0000"+ + "\u0000\u0000\u0628\u0629\u0003n/\u0000\u0629\u062a\u0001\u0000\u0000\u0000"+ + "\u062a\u062b\u0006\u00d1\u0013\u0000\u062b\u062c\u0006\u00d1\f\u0000\u062c"+ + "\u062d\u0006\u00d1\n\u0000\u062d\u01b3\u0001\u0000\u0000\u0000\u062e\u062f"+ + "\u0003@\u0018\u0000\u062f\u0630\u0001\u0000\u0000\u0000\u0630\u0631\u0006"+ + 
"\u00d2\u000b\u0000\u0631\u01b5\u0001\u0000\u0000\u0000\u0632\u0633\u0003"+ + "B\u0019\u0000\u0633\u0634\u0001\u0000\u0000\u0000\u0634\u0635\u0006\u00d3"+ + "\u000b\u0000\u0635\u01b7\u0001\u0000\u0000\u0000\u0636\u0637\u0003D\u001a"+ + "\u0000\u0637\u0638\u0001\u0000\u0000\u0000\u0638\u0639\u0006\u00d4\u000b"+ + "\u0000\u0639\u01b9\u0001\u0000\u0000\u0000\u063a\u063b\u0003\u00baU\u0000"+ + "\u063b\u063c\u0001\u0000\u0000\u0000\u063c\u063d\u0006\u00d5\f\u0000\u063d"+ + "\u063e\u0006\u00d5\u0000\u0000\u063e\u063f\u0006\u00d5\u001f\u0000\u063f"+ + "\u01bb\u0001\u0000\u0000\u0000\u0640\u0641\u0003\u00b6S\u0000\u0641\u0642"+ + "\u0001\u0000\u0000\u0000\u0642\u0643\u0006\u00d6\f\u0000\u0643\u0644\u0006"+ + "\u00d6\u0000\u0000\u0644\u0645\u0006\u00d6 \u0000\u0645\u01bd\u0001\u0000"+ + "\u0000\u0000\u0646\u0647\u0003b)\u0000\u0647\u0648\u0001\u0000\u0000\u0000"+ + "\u0648\u0649\u0006\u00d7\f\u0000\u0649\u064a\u0006\u00d7\u0000\u0000\u064a"+ + "\u064b\u0006\u00d7%\u0000\u064b\u01bf\u0001\u0000\u0000\u0000\u064c\u064d"+ + "\u0003F\u001b\u0000\u064d\u064e\u0001\u0000\u0000\u0000\u064e\u064f\u0006"+ + "\u00d8\u0010\u0000\u064f\u0650\u0006\u00d8\f\u0000\u0650\u01c1\u0001\u0000"+ + "\u0000\u0000B\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b"+ + "\f\r\u000e\u000f\u028e\u0298\u029c\u029f\u02a8\u02aa\u02b5\u02c8\u02cd"+ + "\u02d6\u02dd\u02e2\u02e4\u02ef\u02f7\u02fa\u02fc\u0301\u0306\u030c\u0313"+ + "\u0318\u031e\u0321\u0329\u032d\u03b1\u03b6\u03bd\u03bf\u03cf\u03d4\u03d9"+ + "\u03db\u03e1\u042e\u0433\u0464\u0468\u046d\u0472\u0477\u0479\u047d\u047f"+ + "\u04d6\u04da\u04df\u0570\u0572&\u0005\u0001\u0000\u0005\u0004\u0000\u0005"+ + "\u0006\u0000\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005"+ + "\u0000\u0005\t\u0000\u0005\r\u0000\u0005\u000b\u0000\u0005\u000e\u0000"+ + "\u0000\u0001\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007G\u0000\u0005"+ + "\u0000\u0000\u0007\u001c\u0000\u0007H\u0000\u0007%\u0000\u0007&\u0000"+ + "\u0007#\u0000\u0007R\u0000\u0007\u001d\u0000\u0007(\u0000\u00074\u0000"+ + "\u0007F\u0000\u0007V\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007`\u0000"+ + "\u0007_\u0000\u0007J\u0000\u0007I\u0000\u0007^\u0000\u0005\f\u0000\u0007"+ + "Z\u0000\u0005\u000f\u0000\u0007 \u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index a75d7e985c1d0..25a538f836472 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -16,8 +16,7 @@ null 'sort' 'stats' 'where' -null -null +'lookup' null null null @@ -120,6 +119,7 @@ null null null null +'join' 'USING' null null @@ -149,14 +149,13 @@ SHOW SORT STATS WHERE +JOIN_LOOKUP DEV_INLINESTATS DEV_LOOKUP DEV_METRICS -DEV_JOIN DEV_JOIN_FULL DEV_JOIN_LEFT DEV_JOIN_RIGHT -DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT @@ -253,6 +252,7 @@ LOOKUP_WS LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS +JOIN USING JOIN_LINE_COMMENT JOIN_MULTILINE_COMMENT @@ -334,4 +334,4 @@ joinPredicate atn: -[4, 1, 130, 650, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 
16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 207, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 215, 8, 5, 10, 5, 12, 5, 218, 9, 5, 1, 6, 1, 6, 3, 6, 222, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 229, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 234, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 239, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 249, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 255, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 263, 8, 9, 10, 9, 12, 9, 266, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 276, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 281, 8, 10, 10, 10, 12, 10, 284, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 292, 8, 11, 10, 11, 12, 11, 295, 9, 11, 1, 11, 1, 11, 3, 11, 299, 8, 11, 3, 11, 301, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 311, 8, 13, 10, 13, 12, 13, 314, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 5, 17, 330, 8, 17, 10, 17, 12, 17, 333, 9, 17, 1, 18, 1, 18, 1, 18, 3, 18, 338, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 346, 8, 19, 10, 19, 12, 19, 349, 9, 19, 1, 19, 3, 19, 352, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 357, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 369, 8, 23, 10, 23, 12, 23, 372, 9, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 378, 8, 24, 10, 24, 12, 24, 381, 9, 24, 1, 24, 3, 24, 384, 8, 24, 1, 24, 1, 24, 3, 24, 388, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 395, 8, 26, 1, 26, 1, 26, 3, 26, 399, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 404, 8, 27, 10, 27, 12, 27, 407, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 412, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 417, 8, 29, 10, 29, 12, 29, 420, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 425, 8, 30, 10, 30, 12, 30, 428, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 433, 8, 31, 10, 31, 12, 31, 436, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 443, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 458, 8, 34, 10, 34, 12, 34, 461, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 469, 8, 34, 10, 34, 12, 34, 472, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 480, 8, 34, 10, 34, 12, 34, 483, 9, 34, 1, 34, 1, 34, 3, 34, 487, 8, 34, 1, 35, 1, 35, 3, 35, 491, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 496, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 505, 8, 38, 10, 38, 12, 38, 508, 9, 
38, 1, 39, 1, 39, 3, 39, 512, 8, 39, 1, 39, 1, 39, 3, 39, 516, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 528, 8, 42, 10, 42, 12, 42, 531, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 541, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 553, 8, 47, 10, 47, 12, 47, 556, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 566, 8, 50, 1, 51, 3, 51, 569, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 574, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 596, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 602, 8, 58, 10, 58, 12, 58, 605, 9, 58, 3, 58, 607, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 612, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 625, 8, 61, 1, 62, 3, 62, 628, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 637, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 643, 8, 64, 10, 64, 12, 64, 646, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 83, 83, 1, 0, 74, 75, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 677, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 206, 1, 0, 0, 0, 12, 233, 1, 0, 0, 0, 14, 235, 1, 0, 0, 0, 16, 248, 1, 0, 0, 0, 18, 254, 1, 0, 0, 0, 20, 275, 1, 0, 0, 0, 22, 285, 1, 0, 0, 0, 24, 304, 1, 0, 0, 0, 26, 306, 1, 0, 0, 0, 28, 317, 1, 0, 0, 0, 30, 321, 1, 0, 0, 0, 32, 323, 1, 0, 0, 0, 34, 326, 1, 0, 0, 0, 36, 337, 1, 0, 0, 0, 38, 341, 1, 0, 0, 0, 40, 356, 1, 0, 0, 0, 42, 360, 1, 0, 0, 0, 44, 362, 1, 0, 0, 0, 46, 364, 1, 0, 0, 0, 48, 373, 1, 0, 0, 0, 50, 389, 1, 0, 0, 0, 52, 392, 1, 0, 0, 0, 54, 400, 1, 0, 0, 0, 56, 408, 1, 0, 0, 0, 58, 413, 1, 0, 0, 0, 60, 421, 1, 0, 0, 0, 62, 429, 1, 0, 0, 0, 64, 437, 1, 0, 0, 0, 66, 442, 1, 0, 0, 0, 68, 486, 1, 0, 0, 0, 70, 490, 1, 0, 0, 0, 72, 495, 1, 0, 0, 0, 74, 497, 1, 0, 0, 0, 76, 500, 1, 0, 0, 0, 78, 509, 1, 0, 0, 0, 80, 517, 1, 0, 0, 0, 82, 520, 1, 0, 0, 0, 84, 523, 1, 0, 0, 0, 86, 532, 1, 0, 0, 0, 88, 536, 1, 0, 0, 0, 90, 542, 1, 0, 0, 0, 92, 546, 1, 0, 0, 0, 94, 549, 1, 0, 0, 0, 96, 557, 1, 0, 0, 0, 98, 561, 1, 0, 0, 0, 100, 565, 1, 0, 0, 0, 102, 568, 1, 0, 0, 0, 104, 573, 1, 0, 0, 0, 106, 577, 1, 0, 0, 0, 108, 579, 1, 0, 0, 0, 110, 581, 1, 0, 0, 0, 112, 584, 1, 0, 0, 0, 114, 588, 1, 0, 0, 0, 116, 591, 1, 0, 0, 0, 118, 611, 1, 0, 0, 0, 120, 615, 1, 0, 0, 0, 122, 620, 1, 0, 0, 0, 124, 627, 1, 0, 0, 0, 126, 633, 1, 0, 0, 0, 128, 638, 1, 0, 0, 0, 130, 647, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 29, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 38, 19, 0, 148, 153, 3, 32, 16, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 
0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 207, 3, 10, 5, 8, 180, 207, 3, 16, 8, 0, 181, 207, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 207, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 207, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 177, 1, 0, 0, 0, 206, 180, 1, 0, 0, 0, 206, 181, 1, 0, 0, 0, 206, 182, 1, 0, 0, 0, 206, 198, 1, 0, 0, 0, 206, 205, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 5, 0, 0, 209, 210, 5, 34, 0, 0, 210, 215, 3, 10, 5, 6, 211, 212, 10, 4, 0, 0, 212, 213, 5, 52, 0, 0, 213, 215, 3, 10, 5, 5, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 221, 3, 16, 8, 0, 220, 222, 5, 49, 0, 0, 221, 220, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 5, 47, 0, 0, 224, 225, 3, 106, 53, 0, 225, 234, 1, 0, 0, 0, 226, 228, 3, 16, 8, 0, 227, 229, 5, 49, 0, 0, 228, 227, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 5, 54, 0, 0, 231, 232, 3, 106, 53, 0, 232, 234, 1, 0, 0, 0, 233, 219, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 234, 13, 1, 0, 0, 0, 235, 238, 3, 58, 29, 0, 236, 237, 5, 37, 0, 0, 237, 239, 3, 30, 15, 0, 238, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 241, 5, 38, 0, 0, 241, 242, 3, 68, 34, 0, 242, 15, 1, 0, 0, 0, 243, 249, 3, 18, 9, 0, 244, 245, 3, 18, 9, 0, 245, 246, 3, 108, 54, 0, 246, 247, 3, 18, 9, 0, 247, 249, 1, 0, 0, 0, 248, 243, 1, 0, 0, 0, 248, 244, 1, 0, 0, 0, 249, 17, 1, 0, 0, 0, 250, 251, 6, 9, -1, 0, 251, 255, 3, 20, 10, 0, 252, 253, 7, 0, 0, 0, 253, 255, 3, 18, 9, 3, 254, 250, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 264, 1, 0, 0, 0, 256, 257, 10, 2, 0, 0, 257, 258, 7, 1, 0, 0, 258, 263, 3, 18, 9, 3, 259, 260, 10, 1, 0, 0, 260, 261, 7, 0, 0, 0, 261, 263, 3, 18, 9, 2, 262, 256, 1, 0, 0, 0, 262, 259, 1, 0, 0, 0, 263, 266, 1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 19, 1, 0, 0, 0, 266, 264, 1, 0, 0, 0, 267, 268, 6, 10, -1, 0, 268, 276, 3, 68, 34, 0, 269, 276, 3, 58, 29, 0, 270, 276, 3, 22, 11, 0, 271, 272, 5, 48, 0, 0, 272, 273, 3, 10, 5, 0, 273, 274, 5, 55, 0, 0, 274, 276, 1, 0, 0, 0, 275, 267, 1, 0, 0, 0, 275, 269, 1, 0, 0, 0, 275, 270, 1, 0, 0, 0, 275, 
271, 1, 0, 0, 0, 276, 282, 1, 0, 0, 0, 277, 278, 10, 1, 0, 0, 278, 279, 5, 37, 0, 0, 279, 281, 3, 30, 15, 0, 280, 277, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 282, 283, 1, 0, 0, 0, 283, 21, 1, 0, 0, 0, 284, 282, 1, 0, 0, 0, 285, 286, 3, 24, 12, 0, 286, 300, 5, 48, 0, 0, 287, 301, 5, 66, 0, 0, 288, 293, 3, 10, 5, 0, 289, 290, 5, 39, 0, 0, 290, 292, 3, 10, 5, 0, 291, 289, 1, 0, 0, 0, 292, 295, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 298, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 296, 297, 5, 39, 0, 0, 297, 299, 3, 26, 13, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 301, 1, 0, 0, 0, 300, 287, 1, 0, 0, 0, 300, 288, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 303, 5, 55, 0, 0, 303, 23, 1, 0, 0, 0, 304, 305, 3, 72, 36, 0, 305, 25, 1, 0, 0, 0, 306, 307, 5, 69, 0, 0, 307, 312, 3, 28, 14, 0, 308, 309, 5, 39, 0, 0, 309, 311, 3, 28, 14, 0, 310, 308, 1, 0, 0, 0, 311, 314, 1, 0, 0, 0, 312, 310, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 313, 315, 1, 0, 0, 0, 314, 312, 1, 0, 0, 0, 315, 316, 5, 70, 0, 0, 316, 27, 1, 0, 0, 0, 317, 318, 3, 106, 53, 0, 318, 319, 5, 38, 0, 0, 319, 320, 3, 68, 34, 0, 320, 29, 1, 0, 0, 0, 321, 322, 3, 64, 32, 0, 322, 31, 1, 0, 0, 0, 323, 324, 5, 12, 0, 0, 324, 325, 3, 34, 17, 0, 325, 33, 1, 0, 0, 0, 326, 331, 3, 36, 18, 0, 327, 328, 5, 39, 0, 0, 328, 330, 3, 36, 18, 0, 329, 327, 1, 0, 0, 0, 330, 333, 1, 0, 0, 0, 331, 329, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 35, 1, 0, 0, 0, 333, 331, 1, 0, 0, 0, 334, 335, 3, 58, 29, 0, 335, 336, 5, 36, 0, 0, 336, 338, 1, 0, 0, 0, 337, 334, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 340, 3, 10, 5, 0, 340, 37, 1, 0, 0, 0, 341, 342, 5, 6, 0, 0, 342, 347, 3, 40, 20, 0, 343, 344, 5, 39, 0, 0, 344, 346, 3, 40, 20, 0, 345, 343, 1, 0, 0, 0, 346, 349, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 347, 348, 1, 0, 0, 0, 348, 351, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 350, 352, 3, 46, 23, 0, 351, 350, 1, 0, 0, 0, 351, 352, 1, 0, 0, 0, 352, 39, 1, 0, 0, 0, 353, 354, 3, 42, 21, 0, 354, 355, 5, 38, 0, 0, 355, 357, 1, 0, 0, 0, 356, 353, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 359, 3, 44, 22, 0, 359, 41, 1, 0, 0, 0, 360, 361, 5, 83, 0, 0, 361, 43, 1, 0, 0, 0, 362, 363, 7, 2, 0, 0, 363, 45, 1, 0, 0, 0, 364, 365, 5, 82, 0, 0, 365, 370, 5, 83, 0, 0, 366, 367, 5, 39, 0, 0, 367, 369, 5, 83, 0, 0, 368, 366, 1, 0, 0, 0, 369, 372, 1, 0, 0, 0, 370, 368, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 47, 1, 0, 0, 0, 372, 370, 1, 0, 0, 0, 373, 374, 5, 19, 0, 0, 374, 379, 3, 40, 20, 0, 375, 376, 5, 39, 0, 0, 376, 378, 3, 40, 20, 0, 377, 375, 1, 0, 0, 0, 378, 381, 1, 0, 0, 0, 379, 377, 1, 0, 0, 0, 379, 380, 1, 0, 0, 0, 380, 383, 1, 0, 0, 0, 381, 379, 1, 0, 0, 0, 382, 384, 3, 54, 27, 0, 383, 382, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 387, 1, 0, 0, 0, 385, 386, 5, 33, 0, 0, 386, 388, 3, 34, 17, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 49, 1, 0, 0, 0, 389, 390, 5, 4, 0, 0, 390, 391, 3, 34, 17, 0, 391, 51, 1, 0, 0, 0, 392, 394, 5, 15, 0, 0, 393, 395, 3, 54, 27, 0, 394, 393, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 398, 1, 0, 0, 0, 396, 397, 5, 33, 0, 0, 397, 399, 3, 34, 17, 0, 398, 396, 1, 0, 0, 0, 398, 399, 1, 0, 0, 0, 399, 53, 1, 0, 0, 0, 400, 405, 3, 56, 28, 0, 401, 402, 5, 39, 0, 0, 402, 404, 3, 56, 28, 0, 403, 401, 1, 0, 0, 0, 404, 407, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 405, 406, 1, 0, 0, 0, 406, 55, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 408, 411, 3, 36, 18, 0, 409, 410, 5, 16, 0, 0, 410, 412, 3, 10, 5, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 57, 1, 0, 0, 0, 413, 418, 3, 72, 36, 0, 414, 415, 
5, 41, 0, 0, 415, 417, 3, 72, 36, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 59, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 426, 3, 66, 33, 0, 422, 423, 5, 41, 0, 0, 423, 425, 3, 66, 33, 0, 424, 422, 1, 0, 0, 0, 425, 428, 1, 0, 0, 0, 426, 424, 1, 0, 0, 0, 426, 427, 1, 0, 0, 0, 427, 61, 1, 0, 0, 0, 428, 426, 1, 0, 0, 0, 429, 434, 3, 60, 30, 0, 430, 431, 5, 39, 0, 0, 431, 433, 3, 60, 30, 0, 432, 430, 1, 0, 0, 0, 433, 436, 1, 0, 0, 0, 434, 432, 1, 0, 0, 0, 434, 435, 1, 0, 0, 0, 435, 63, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 437, 438, 7, 3, 0, 0, 438, 65, 1, 0, 0, 0, 439, 443, 5, 87, 0, 0, 440, 441, 4, 33, 10, 0, 441, 443, 3, 70, 35, 0, 442, 439, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 443, 67, 1, 0, 0, 0, 444, 487, 5, 50, 0, 0, 445, 446, 3, 104, 52, 0, 446, 447, 5, 74, 0, 0, 447, 487, 1, 0, 0, 0, 448, 487, 3, 102, 51, 0, 449, 487, 3, 104, 52, 0, 450, 487, 3, 98, 49, 0, 451, 487, 3, 70, 35, 0, 452, 487, 3, 106, 53, 0, 453, 454, 5, 72, 0, 0, 454, 459, 3, 100, 50, 0, 455, 456, 5, 39, 0, 0, 456, 458, 3, 100, 50, 0, 457, 455, 1, 0, 0, 0, 458, 461, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 462, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 462, 463, 5, 73, 0, 0, 463, 487, 1, 0, 0, 0, 464, 465, 5, 72, 0, 0, 465, 470, 3, 98, 49, 0, 466, 467, 5, 39, 0, 0, 467, 469, 3, 98, 49, 0, 468, 466, 1, 0, 0, 0, 469, 472, 1, 0, 0, 0, 470, 468, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 473, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 473, 474, 5, 73, 0, 0, 474, 487, 1, 0, 0, 0, 475, 476, 5, 72, 0, 0, 476, 481, 3, 106, 53, 0, 477, 478, 5, 39, 0, 0, 478, 480, 3, 106, 53, 0, 479, 477, 1, 0, 0, 0, 480, 483, 1, 0, 0, 0, 481, 479, 1, 0, 0, 0, 481, 482, 1, 0, 0, 0, 482, 484, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 484, 485, 5, 73, 0, 0, 485, 487, 1, 0, 0, 0, 486, 444, 1, 0, 0, 0, 486, 445, 1, 0, 0, 0, 486, 448, 1, 0, 0, 0, 486, 449, 1, 0, 0, 0, 486, 450, 1, 0, 0, 0, 486, 451, 1, 0, 0, 0, 486, 452, 1, 0, 0, 0, 486, 453, 1, 0, 0, 0, 486, 464, 1, 0, 0, 0, 486, 475, 1, 0, 0, 0, 487, 69, 1, 0, 0, 0, 488, 491, 5, 53, 0, 0, 489, 491, 5, 71, 0, 0, 490, 488, 1, 0, 0, 0, 490, 489, 1, 0, 0, 0, 491, 71, 1, 0, 0, 0, 492, 496, 3, 64, 32, 0, 493, 494, 4, 36, 11, 0, 494, 496, 3, 70, 35, 0, 495, 492, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 73, 1, 0, 0, 0, 497, 498, 5, 9, 0, 0, 498, 499, 5, 31, 0, 0, 499, 75, 1, 0, 0, 0, 500, 501, 5, 14, 0, 0, 501, 506, 3, 78, 39, 0, 502, 503, 5, 39, 0, 0, 503, 505, 3, 78, 39, 0, 504, 502, 1, 0, 0, 0, 505, 508, 1, 0, 0, 0, 506, 504, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 77, 1, 0, 0, 0, 508, 506, 1, 0, 0, 0, 509, 511, 3, 10, 5, 0, 510, 512, 7, 4, 0, 0, 511, 510, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 512, 515, 1, 0, 0, 0, 513, 514, 5, 51, 0, 0, 514, 516, 7, 5, 0, 0, 515, 513, 1, 0, 0, 0, 515, 516, 1, 0, 0, 0, 516, 79, 1, 0, 0, 0, 517, 518, 5, 8, 0, 0, 518, 519, 3, 62, 31, 0, 519, 81, 1, 0, 0, 0, 520, 521, 5, 2, 0, 0, 521, 522, 3, 62, 31, 0, 522, 83, 1, 0, 0, 0, 523, 524, 5, 11, 0, 0, 524, 529, 3, 86, 43, 0, 525, 526, 5, 39, 0, 0, 526, 528, 3, 86, 43, 0, 527, 525, 1, 0, 0, 0, 528, 531, 1, 0, 0, 0, 529, 527, 1, 0, 0, 0, 529, 530, 1, 0, 0, 0, 530, 85, 1, 0, 0, 0, 531, 529, 1, 0, 0, 0, 532, 533, 3, 60, 30, 0, 533, 534, 5, 91, 0, 0, 534, 535, 3, 60, 30, 0, 535, 87, 1, 0, 0, 0, 536, 537, 5, 1, 0, 0, 537, 538, 3, 20, 10, 0, 538, 540, 3, 106, 53, 0, 539, 541, 3, 94, 47, 0, 540, 539, 1, 0, 0, 0, 540, 541, 1, 0, 0, 0, 541, 89, 1, 0, 0, 0, 542, 543, 5, 7, 0, 0, 543, 544, 3, 20, 10, 0, 544, 545, 3, 106, 53, 0, 545, 91, 1, 0, 0, 0, 546, 547, 5, 10, 0, 0, 547, 548, 3, 58, 29, 0, 548, 93, 1, 0, 0, 
0, 549, 554, 3, 96, 48, 0, 550, 551, 5, 39, 0, 0, 551, 553, 3, 96, 48, 0, 552, 550, 1, 0, 0, 0, 553, 556, 1, 0, 0, 0, 554, 552, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 95, 1, 0, 0, 0, 556, 554, 1, 0, 0, 0, 557, 558, 3, 64, 32, 0, 558, 559, 5, 36, 0, 0, 559, 560, 3, 68, 34, 0, 560, 97, 1, 0, 0, 0, 561, 562, 7, 6, 0, 0, 562, 99, 1, 0, 0, 0, 563, 566, 3, 102, 51, 0, 564, 566, 3, 104, 52, 0, 565, 563, 1, 0, 0, 0, 565, 564, 1, 0, 0, 0, 566, 101, 1, 0, 0, 0, 567, 569, 7, 0, 0, 0, 568, 567, 1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 571, 5, 32, 0, 0, 571, 103, 1, 0, 0, 0, 572, 574, 7, 0, 0, 0, 573, 572, 1, 0, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 1, 0, 0, 0, 575, 576, 5, 31, 0, 0, 576, 105, 1, 0, 0, 0, 577, 578, 5, 30, 0, 0, 578, 107, 1, 0, 0, 0, 579, 580, 7, 7, 0, 0, 580, 109, 1, 0, 0, 0, 581, 582, 5, 5, 0, 0, 582, 583, 3, 112, 56, 0, 583, 111, 1, 0, 0, 0, 584, 585, 5, 72, 0, 0, 585, 586, 3, 2, 1, 0, 586, 587, 5, 73, 0, 0, 587, 113, 1, 0, 0, 0, 588, 589, 5, 13, 0, 0, 589, 590, 5, 107, 0, 0, 590, 115, 1, 0, 0, 0, 591, 592, 5, 3, 0, 0, 592, 595, 5, 97, 0, 0, 593, 594, 5, 95, 0, 0, 594, 596, 3, 60, 30, 0, 595, 593, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 606, 1, 0, 0, 0, 597, 598, 5, 96, 0, 0, 598, 603, 3, 118, 59, 0, 599, 600, 5, 39, 0, 0, 600, 602, 3, 118, 59, 0, 601, 599, 1, 0, 0, 0, 602, 605, 1, 0, 0, 0, 603, 601, 1, 0, 0, 0, 603, 604, 1, 0, 0, 0, 604, 607, 1, 0, 0, 0, 605, 603, 1, 0, 0, 0, 606, 597, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 117, 1, 0, 0, 0, 608, 609, 3, 60, 30, 0, 609, 610, 5, 36, 0, 0, 610, 612, 1, 0, 0, 0, 611, 608, 1, 0, 0, 0, 611, 612, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 3, 60, 30, 0, 614, 119, 1, 0, 0, 0, 615, 616, 5, 18, 0, 0, 616, 617, 3, 40, 20, 0, 617, 618, 5, 95, 0, 0, 618, 619, 3, 62, 31, 0, 619, 121, 1, 0, 0, 0, 620, 621, 5, 17, 0, 0, 621, 624, 3, 54, 27, 0, 622, 623, 5, 33, 0, 0, 623, 625, 3, 34, 17, 0, 624, 622, 1, 0, 0, 0, 624, 625, 1, 0, 0, 0, 625, 123, 1, 0, 0, 0, 626, 628, 7, 8, 0, 0, 627, 626, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 630, 5, 20, 0, 0, 630, 631, 3, 126, 63, 0, 631, 632, 3, 128, 64, 0, 632, 125, 1, 0, 0, 0, 633, 636, 3, 40, 20, 0, 634, 635, 5, 91, 0, 0, 635, 637, 3, 64, 32, 0, 636, 634, 1, 0, 0, 0, 636, 637, 1, 0, 0, 0, 637, 127, 1, 0, 0, 0, 638, 639, 5, 95, 0, 0, 639, 644, 3, 130, 65, 0, 640, 641, 5, 39, 0, 0, 641, 643, 3, 130, 65, 0, 642, 640, 1, 0, 0, 0, 643, 646, 1, 0, 0, 0, 644, 642, 1, 0, 0, 0, 644, 645, 1, 0, 0, 0, 645, 129, 1, 0, 0, 0, 646, 644, 1, 0, 0, 0, 647, 648, 3, 16, 8, 0, 648, 131, 1, 0, 0, 0, 63, 143, 152, 172, 184, 193, 201, 206, 214, 216, 221, 228, 233, 238, 248, 254, 262, 264, 275, 282, 293, 298, 300, 312, 331, 337, 347, 351, 356, 370, 379, 383, 387, 394, 398, 405, 411, 418, 426, 434, 442, 459, 470, 481, 486, 490, 495, 506, 511, 515, 529, 540, 554, 565, 568, 573, 595, 603, 606, 611, 624, 627, 636, 644] \ No newline at end of file +[4, 1, 130, 644, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 
47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 172, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 184, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 191, 8, 5, 10, 5, 12, 5, 194, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 201, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 206, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 214, 8, 5, 10, 5, 12, 5, 217, 9, 5, 1, 6, 1, 6, 3, 6, 221, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 228, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 233, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 238, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 248, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 254, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 262, 8, 9, 10, 9, 12, 9, 265, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 275, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 280, 8, 10, 10, 10, 12, 10, 283, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 291, 8, 11, 10, 11, 12, 11, 294, 9, 11, 1, 11, 1, 11, 3, 11, 298, 8, 11, 3, 11, 300, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 310, 8, 13, 10, 13, 12, 13, 313, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 5, 17, 329, 8, 17, 10, 17, 12, 17, 332, 9, 17, 1, 18, 1, 18, 1, 18, 3, 18, 337, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 345, 8, 19, 10, 19, 12, 19, 348, 9, 19, 1, 19, 3, 19, 351, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 356, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 368, 8, 23, 10, 23, 12, 23, 371, 9, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 377, 8, 24, 10, 24, 12, 24, 380, 9, 24, 1, 24, 3, 24, 383, 8, 24, 1, 24, 1, 24, 3, 24, 387, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 394, 8, 26, 1, 26, 1, 26, 3, 26, 398, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 403, 8, 27, 10, 27, 12, 27, 406, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 411, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 416, 8, 29, 10, 29, 12, 29, 419, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 424, 8, 30, 10, 30, 12, 30, 427, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 432, 8, 31, 10, 31, 12, 31, 435, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 442, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 457, 8, 34, 10, 34, 12, 34, 460, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 468, 8, 34, 10, 34, 12, 34, 471, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 479, 8, 34, 10, 34, 12, 34, 482, 9, 34, 1, 34, 1, 34, 3, 34, 486, 8, 34, 1, 35, 1, 35, 3, 35, 490, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 495, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 504, 8, 38, 10, 38, 12, 38, 507, 9, 38, 1, 39, 1, 39, 3, 39, 511, 8, 39, 1, 39, 1, 39, 3, 39, 515, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 527, 8, 42, 10, 42, 12, 42, 530, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 540, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 552, 8, 47, 10, 47, 12, 47, 555, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 565, 
8, 50, 1, 51, 3, 51, 568, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 573, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 595, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 601, 8, 58, 10, 58, 12, 58, 604, 9, 58, 3, 58, 606, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 611, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 624, 8, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 637, 8, 64, 10, 64, 12, 64, 640, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 63, 64, 1, 0, 65, 67, 2, 0, 29, 29, 82, 82, 1, 0, 73, 74, 2, 0, 34, 34, 39, 39, 2, 0, 42, 42, 45, 45, 2, 0, 41, 41, 55, 55, 2, 0, 56, 56, 58, 62, 2, 0, 17, 17, 22, 23, 669, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 171, 1, 0, 0, 0, 8, 173, 1, 0, 0, 0, 10, 205, 1, 0, 0, 0, 12, 232, 1, 0, 0, 0, 14, 234, 1, 0, 0, 0, 16, 247, 1, 0, 0, 0, 18, 253, 1, 0, 0, 0, 20, 274, 1, 0, 0, 0, 22, 284, 1, 0, 0, 0, 24, 303, 1, 0, 0, 0, 26, 305, 1, 0, 0, 0, 28, 316, 1, 0, 0, 0, 30, 320, 1, 0, 0, 0, 32, 322, 1, 0, 0, 0, 34, 325, 1, 0, 0, 0, 36, 336, 1, 0, 0, 0, 38, 340, 1, 0, 0, 0, 40, 355, 1, 0, 0, 0, 42, 359, 1, 0, 0, 0, 44, 361, 1, 0, 0, 0, 46, 363, 1, 0, 0, 0, 48, 372, 1, 0, 0, 0, 50, 388, 1, 0, 0, 0, 52, 391, 1, 0, 0, 0, 54, 399, 1, 0, 0, 0, 56, 407, 1, 0, 0, 0, 58, 412, 1, 0, 0, 0, 60, 420, 1, 0, 0, 0, 62, 428, 1, 0, 0, 0, 64, 436, 1, 0, 0, 0, 66, 441, 1, 0, 0, 0, 68, 485, 1, 0, 0, 0, 70, 489, 1, 0, 0, 0, 72, 494, 1, 0, 0, 0, 74, 496, 1, 0, 0, 0, 76, 499, 1, 0, 0, 0, 78, 508, 1, 0, 0, 0, 80, 516, 1, 0, 0, 0, 82, 519, 1, 0, 0, 0, 84, 522, 1, 0, 0, 0, 86, 531, 1, 0, 0, 0, 88, 535, 1, 0, 0, 0, 90, 541, 1, 0, 0, 0, 92, 545, 1, 0, 0, 0, 94, 548, 1, 0, 0, 0, 96, 556, 1, 0, 0, 0, 98, 560, 1, 0, 0, 0, 100, 564, 1, 0, 0, 0, 102, 567, 1, 0, 0, 0, 104, 572, 1, 0, 0, 0, 106, 576, 1, 0, 0, 0, 108, 578, 1, 0, 0, 0, 110, 580, 1, 0, 0, 0, 112, 583, 1, 0, 0, 0, 114, 587, 1, 0, 0, 0, 116, 590, 1, 0, 0, 0, 118, 610, 1, 0, 0, 0, 120, 614, 1, 0, 0, 0, 122, 619, 1, 0, 0, 0, 124, 625, 1, 0, 0, 0, 126, 630, 1, 0, 0, 0, 128, 632, 1, 0, 0, 0, 130, 641, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 28, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 38, 19, 0, 148, 153, 3, 32, 16, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 172, 3, 50, 25, 0, 155, 172, 3, 8, 4, 0, 156, 172, 3, 80, 40, 0, 157, 172, 3, 74, 37, 0, 158, 172, 3, 52, 26, 0, 159, 172, 3, 76, 38, 0, 160, 172, 3, 82, 41, 0, 161, 172, 3, 84, 42, 0, 162, 172, 3, 88, 44, 0, 163, 172, 3, 90, 45, 0, 164, 172, 3, 116, 58, 0, 165, 172, 3, 92, 46, 0, 166, 172, 3, 124, 62, 0, 167, 168, 4, 3, 2, 0, 168, 172, 3, 122, 61, 0, 169, 170, 4, 3, 3, 0, 170, 172, 3, 120, 60, 0, 171, 154, 1, 0, 0, 0, 171, 155, 1, 0, 0, 0, 171, 156, 1, 
0, 0, 0, 171, 157, 1, 0, 0, 0, 171, 158, 1, 0, 0, 0, 171, 159, 1, 0, 0, 0, 171, 160, 1, 0, 0, 0, 171, 161, 1, 0, 0, 0, 171, 162, 1, 0, 0, 0, 171, 163, 1, 0, 0, 0, 171, 164, 1, 0, 0, 0, 171, 165, 1, 0, 0, 0, 171, 166, 1, 0, 0, 0, 171, 167, 1, 0, 0, 0, 171, 169, 1, 0, 0, 0, 172, 7, 1, 0, 0, 0, 173, 174, 5, 16, 0, 0, 174, 175, 3, 10, 5, 0, 175, 9, 1, 0, 0, 0, 176, 177, 6, 5, -1, 0, 177, 178, 5, 48, 0, 0, 178, 206, 3, 10, 5, 8, 179, 206, 3, 16, 8, 0, 180, 206, 3, 12, 6, 0, 181, 183, 3, 16, 8, 0, 182, 184, 5, 48, 0, 0, 183, 182, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 5, 43, 0, 0, 186, 187, 5, 47, 0, 0, 187, 192, 3, 16, 8, 0, 188, 189, 5, 38, 0, 0, 189, 191, 3, 16, 8, 0, 190, 188, 1, 0, 0, 0, 191, 194, 1, 0, 0, 0, 192, 190, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 195, 1, 0, 0, 0, 194, 192, 1, 0, 0, 0, 195, 196, 5, 54, 0, 0, 196, 206, 1, 0, 0, 0, 197, 198, 3, 16, 8, 0, 198, 200, 5, 44, 0, 0, 199, 201, 5, 48, 0, 0, 200, 199, 1, 0, 0, 0, 200, 201, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 5, 49, 0, 0, 203, 206, 1, 0, 0, 0, 204, 206, 3, 14, 7, 0, 205, 176, 1, 0, 0, 0, 205, 179, 1, 0, 0, 0, 205, 180, 1, 0, 0, 0, 205, 181, 1, 0, 0, 0, 205, 197, 1, 0, 0, 0, 205, 204, 1, 0, 0, 0, 206, 215, 1, 0, 0, 0, 207, 208, 10, 5, 0, 0, 208, 209, 5, 33, 0, 0, 209, 214, 3, 10, 5, 6, 210, 211, 10, 4, 0, 0, 211, 212, 5, 51, 0, 0, 212, 214, 3, 10, 5, 5, 213, 207, 1, 0, 0, 0, 213, 210, 1, 0, 0, 0, 214, 217, 1, 0, 0, 0, 215, 213, 1, 0, 0, 0, 215, 216, 1, 0, 0, 0, 216, 11, 1, 0, 0, 0, 217, 215, 1, 0, 0, 0, 218, 220, 3, 16, 8, 0, 219, 221, 5, 48, 0, 0, 220, 219, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 5, 46, 0, 0, 223, 224, 3, 106, 53, 0, 224, 233, 1, 0, 0, 0, 225, 227, 3, 16, 8, 0, 226, 228, 5, 48, 0, 0, 227, 226, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 5, 53, 0, 0, 230, 231, 3, 106, 53, 0, 231, 233, 1, 0, 0, 0, 232, 218, 1, 0, 0, 0, 232, 225, 1, 0, 0, 0, 233, 13, 1, 0, 0, 0, 234, 237, 3, 58, 29, 0, 235, 236, 5, 36, 0, 0, 236, 238, 3, 30, 15, 0, 237, 235, 1, 0, 0, 0, 237, 238, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 5, 37, 0, 0, 240, 241, 3, 68, 34, 0, 241, 15, 1, 0, 0, 0, 242, 248, 3, 18, 9, 0, 243, 244, 3, 18, 9, 0, 244, 245, 3, 108, 54, 0, 245, 246, 3, 18, 9, 0, 246, 248, 1, 0, 0, 0, 247, 242, 1, 0, 0, 0, 247, 243, 1, 0, 0, 0, 248, 17, 1, 0, 0, 0, 249, 250, 6, 9, -1, 0, 250, 254, 3, 20, 10, 0, 251, 252, 7, 0, 0, 0, 252, 254, 3, 18, 9, 3, 253, 249, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 263, 1, 0, 0, 0, 255, 256, 10, 2, 0, 0, 256, 257, 7, 1, 0, 0, 257, 262, 3, 18, 9, 3, 258, 259, 10, 1, 0, 0, 259, 260, 7, 0, 0, 0, 260, 262, 3, 18, 9, 2, 261, 255, 1, 0, 0, 0, 261, 258, 1, 0, 0, 0, 262, 265, 1, 0, 0, 0, 263, 261, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 19, 1, 0, 0, 0, 265, 263, 1, 0, 0, 0, 266, 267, 6, 10, -1, 0, 267, 275, 3, 68, 34, 0, 268, 275, 3, 58, 29, 0, 269, 275, 3, 22, 11, 0, 270, 271, 5, 47, 0, 0, 271, 272, 3, 10, 5, 0, 272, 273, 5, 54, 0, 0, 273, 275, 1, 0, 0, 0, 274, 266, 1, 0, 0, 0, 274, 268, 1, 0, 0, 0, 274, 269, 1, 0, 0, 0, 274, 270, 1, 0, 0, 0, 275, 281, 1, 0, 0, 0, 276, 277, 10, 1, 0, 0, 277, 278, 5, 36, 0, 0, 278, 280, 3, 30, 15, 0, 279, 276, 1, 0, 0, 0, 280, 283, 1, 0, 0, 0, 281, 279, 1, 0, 0, 0, 281, 282, 1, 0, 0, 0, 282, 21, 1, 0, 0, 0, 283, 281, 1, 0, 0, 0, 284, 285, 3, 24, 12, 0, 285, 299, 5, 47, 0, 0, 286, 300, 5, 65, 0, 0, 287, 292, 3, 10, 5, 0, 288, 289, 5, 38, 0, 0, 289, 291, 3, 10, 5, 0, 290, 288, 1, 0, 0, 0, 291, 294, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 297, 1, 0, 0, 0, 294, 292, 1, 0, 0, 
0, 295, 296, 5, 38, 0, 0, 296, 298, 3, 26, 13, 0, 297, 295, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 300, 1, 0, 0, 0, 299, 286, 1, 0, 0, 0, 299, 287, 1, 0, 0, 0, 299, 300, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 302, 5, 54, 0, 0, 302, 23, 1, 0, 0, 0, 303, 304, 3, 72, 36, 0, 304, 25, 1, 0, 0, 0, 305, 306, 5, 68, 0, 0, 306, 311, 3, 28, 14, 0, 307, 308, 5, 38, 0, 0, 308, 310, 3, 28, 14, 0, 309, 307, 1, 0, 0, 0, 310, 313, 1, 0, 0, 0, 311, 309, 1, 0, 0, 0, 311, 312, 1, 0, 0, 0, 312, 314, 1, 0, 0, 0, 313, 311, 1, 0, 0, 0, 314, 315, 5, 69, 0, 0, 315, 27, 1, 0, 0, 0, 316, 317, 3, 106, 53, 0, 317, 318, 5, 37, 0, 0, 318, 319, 3, 68, 34, 0, 319, 29, 1, 0, 0, 0, 320, 321, 3, 64, 32, 0, 321, 31, 1, 0, 0, 0, 322, 323, 5, 12, 0, 0, 323, 324, 3, 34, 17, 0, 324, 33, 1, 0, 0, 0, 325, 330, 3, 36, 18, 0, 326, 327, 5, 38, 0, 0, 327, 329, 3, 36, 18, 0, 328, 326, 1, 0, 0, 0, 329, 332, 1, 0, 0, 0, 330, 328, 1, 0, 0, 0, 330, 331, 1, 0, 0, 0, 331, 35, 1, 0, 0, 0, 332, 330, 1, 0, 0, 0, 333, 334, 3, 58, 29, 0, 334, 335, 5, 35, 0, 0, 335, 337, 1, 0, 0, 0, 336, 333, 1, 0, 0, 0, 336, 337, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 339, 3, 10, 5, 0, 339, 37, 1, 0, 0, 0, 340, 341, 5, 6, 0, 0, 341, 346, 3, 40, 20, 0, 342, 343, 5, 38, 0, 0, 343, 345, 3, 40, 20, 0, 344, 342, 1, 0, 0, 0, 345, 348, 1, 0, 0, 0, 346, 344, 1, 0, 0, 0, 346, 347, 1, 0, 0, 0, 347, 350, 1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 349, 351, 3, 46, 23, 0, 350, 349, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 39, 1, 0, 0, 0, 352, 353, 3, 42, 21, 0, 353, 354, 5, 37, 0, 0, 354, 356, 1, 0, 0, 0, 355, 352, 1, 0, 0, 0, 355, 356, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 358, 3, 44, 22, 0, 358, 41, 1, 0, 0, 0, 359, 360, 5, 82, 0, 0, 360, 43, 1, 0, 0, 0, 361, 362, 7, 2, 0, 0, 362, 45, 1, 0, 0, 0, 363, 364, 5, 81, 0, 0, 364, 369, 5, 82, 0, 0, 365, 366, 5, 38, 0, 0, 366, 368, 5, 82, 0, 0, 367, 365, 1, 0, 0, 0, 368, 371, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 47, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 372, 373, 5, 20, 0, 0, 373, 378, 3, 40, 20, 0, 374, 375, 5, 38, 0, 0, 375, 377, 3, 40, 20, 0, 376, 374, 1, 0, 0, 0, 377, 380, 1, 0, 0, 0, 378, 376, 1, 0, 0, 0, 378, 379, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 381, 383, 3, 54, 27, 0, 382, 381, 1, 0, 0, 0, 382, 383, 1, 0, 0, 0, 383, 386, 1, 0, 0, 0, 384, 385, 5, 32, 0, 0, 385, 387, 3, 34, 17, 0, 386, 384, 1, 0, 0, 0, 386, 387, 1, 0, 0, 0, 387, 49, 1, 0, 0, 0, 388, 389, 5, 4, 0, 0, 389, 390, 3, 34, 17, 0, 390, 51, 1, 0, 0, 0, 391, 393, 5, 15, 0, 0, 392, 394, 3, 54, 27, 0, 393, 392, 1, 0, 0, 0, 393, 394, 1, 0, 0, 0, 394, 397, 1, 0, 0, 0, 395, 396, 5, 32, 0, 0, 396, 398, 3, 34, 17, 0, 397, 395, 1, 0, 0, 0, 397, 398, 1, 0, 0, 0, 398, 53, 1, 0, 0, 0, 399, 404, 3, 56, 28, 0, 400, 401, 5, 38, 0, 0, 401, 403, 3, 56, 28, 0, 402, 400, 1, 0, 0, 0, 403, 406, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 404, 405, 1, 0, 0, 0, 405, 55, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 407, 410, 3, 36, 18, 0, 408, 409, 5, 16, 0, 0, 409, 411, 3, 10, 5, 0, 410, 408, 1, 0, 0, 0, 410, 411, 1, 0, 0, 0, 411, 57, 1, 0, 0, 0, 412, 417, 3, 72, 36, 0, 413, 414, 5, 40, 0, 0, 414, 416, 3, 72, 36, 0, 415, 413, 1, 0, 0, 0, 416, 419, 1, 0, 0, 0, 417, 415, 1, 0, 0, 0, 417, 418, 1, 0, 0, 0, 418, 59, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 420, 425, 3, 66, 33, 0, 421, 422, 5, 40, 0, 0, 422, 424, 3, 66, 33, 0, 423, 421, 1, 0, 0, 0, 424, 427, 1, 0, 0, 0, 425, 423, 1, 0, 0, 0, 425, 426, 1, 0, 0, 0, 426, 61, 1, 0, 0, 0, 427, 425, 1, 0, 0, 0, 428, 433, 3, 60, 30, 0, 429, 430, 5, 38, 0, 0, 430, 432, 3, 60, 30, 0, 431, 429, 1, 0, 0, 0, 432, 435, 1, 0, 0, 0, 433, 431, 1, 0, 0, 0, 
433, 434, 1, 0, 0, 0, 434, 63, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 436, 437, 7, 3, 0, 0, 437, 65, 1, 0, 0, 0, 438, 442, 5, 86, 0, 0, 439, 440, 4, 33, 9, 0, 440, 442, 3, 70, 35, 0, 441, 438, 1, 0, 0, 0, 441, 439, 1, 0, 0, 0, 442, 67, 1, 0, 0, 0, 443, 486, 5, 49, 0, 0, 444, 445, 3, 104, 52, 0, 445, 446, 5, 73, 0, 0, 446, 486, 1, 0, 0, 0, 447, 486, 3, 102, 51, 0, 448, 486, 3, 104, 52, 0, 449, 486, 3, 98, 49, 0, 450, 486, 3, 70, 35, 0, 451, 486, 3, 106, 53, 0, 452, 453, 5, 71, 0, 0, 453, 458, 3, 100, 50, 0, 454, 455, 5, 38, 0, 0, 455, 457, 3, 100, 50, 0, 456, 454, 1, 0, 0, 0, 457, 460, 1, 0, 0, 0, 458, 456, 1, 0, 0, 0, 458, 459, 1, 0, 0, 0, 459, 461, 1, 0, 0, 0, 460, 458, 1, 0, 0, 0, 461, 462, 5, 72, 0, 0, 462, 486, 1, 0, 0, 0, 463, 464, 5, 71, 0, 0, 464, 469, 3, 98, 49, 0, 465, 466, 5, 38, 0, 0, 466, 468, 3, 98, 49, 0, 467, 465, 1, 0, 0, 0, 468, 471, 1, 0, 0, 0, 469, 467, 1, 0, 0, 0, 469, 470, 1, 0, 0, 0, 470, 472, 1, 0, 0, 0, 471, 469, 1, 0, 0, 0, 472, 473, 5, 72, 0, 0, 473, 486, 1, 0, 0, 0, 474, 475, 5, 71, 0, 0, 475, 480, 3, 106, 53, 0, 476, 477, 5, 38, 0, 0, 477, 479, 3, 106, 53, 0, 478, 476, 1, 0, 0, 0, 479, 482, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 480, 481, 1, 0, 0, 0, 481, 483, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 483, 484, 5, 72, 0, 0, 484, 486, 1, 0, 0, 0, 485, 443, 1, 0, 0, 0, 485, 444, 1, 0, 0, 0, 485, 447, 1, 0, 0, 0, 485, 448, 1, 0, 0, 0, 485, 449, 1, 0, 0, 0, 485, 450, 1, 0, 0, 0, 485, 451, 1, 0, 0, 0, 485, 452, 1, 0, 0, 0, 485, 463, 1, 0, 0, 0, 485, 474, 1, 0, 0, 0, 486, 69, 1, 0, 0, 0, 487, 490, 5, 52, 0, 0, 488, 490, 5, 70, 0, 0, 489, 487, 1, 0, 0, 0, 489, 488, 1, 0, 0, 0, 490, 71, 1, 0, 0, 0, 491, 495, 3, 64, 32, 0, 492, 493, 4, 36, 10, 0, 493, 495, 3, 70, 35, 0, 494, 491, 1, 0, 0, 0, 494, 492, 1, 0, 0, 0, 495, 73, 1, 0, 0, 0, 496, 497, 5, 9, 0, 0, 497, 498, 5, 30, 0, 0, 498, 75, 1, 0, 0, 0, 499, 500, 5, 14, 0, 0, 500, 505, 3, 78, 39, 0, 501, 502, 5, 38, 0, 0, 502, 504, 3, 78, 39, 0, 503, 501, 1, 0, 0, 0, 504, 507, 1, 0, 0, 0, 505, 503, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 77, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 508, 510, 3, 10, 5, 0, 509, 511, 7, 4, 0, 0, 510, 509, 1, 0, 0, 0, 510, 511, 1, 0, 0, 0, 511, 514, 1, 0, 0, 0, 512, 513, 5, 50, 0, 0, 513, 515, 7, 5, 0, 0, 514, 512, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 79, 1, 0, 0, 0, 516, 517, 5, 8, 0, 0, 517, 518, 3, 62, 31, 0, 518, 81, 1, 0, 0, 0, 519, 520, 5, 2, 0, 0, 520, 521, 3, 62, 31, 0, 521, 83, 1, 0, 0, 0, 522, 523, 5, 11, 0, 0, 523, 528, 3, 86, 43, 0, 524, 525, 5, 38, 0, 0, 525, 527, 3, 86, 43, 0, 526, 524, 1, 0, 0, 0, 527, 530, 1, 0, 0, 0, 528, 526, 1, 0, 0, 0, 528, 529, 1, 0, 0, 0, 529, 85, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 531, 532, 3, 60, 30, 0, 532, 533, 5, 90, 0, 0, 533, 534, 3, 60, 30, 0, 534, 87, 1, 0, 0, 0, 535, 536, 5, 1, 0, 0, 536, 537, 3, 20, 10, 0, 537, 539, 3, 106, 53, 0, 538, 540, 3, 94, 47, 0, 539, 538, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 89, 1, 0, 0, 0, 541, 542, 5, 7, 0, 0, 542, 543, 3, 20, 10, 0, 543, 544, 3, 106, 53, 0, 544, 91, 1, 0, 0, 0, 545, 546, 5, 10, 0, 0, 546, 547, 3, 58, 29, 0, 547, 93, 1, 0, 0, 0, 548, 553, 3, 96, 48, 0, 549, 550, 5, 38, 0, 0, 550, 552, 3, 96, 48, 0, 551, 549, 1, 0, 0, 0, 552, 555, 1, 0, 0, 0, 553, 551, 1, 0, 0, 0, 553, 554, 1, 0, 0, 0, 554, 95, 1, 0, 0, 0, 555, 553, 1, 0, 0, 0, 556, 557, 3, 64, 32, 0, 557, 558, 5, 35, 0, 0, 558, 559, 3, 68, 34, 0, 559, 97, 1, 0, 0, 0, 560, 561, 7, 6, 0, 0, 561, 99, 1, 0, 0, 0, 562, 565, 3, 102, 51, 0, 563, 565, 3, 104, 52, 0, 564, 562, 1, 0, 0, 0, 564, 563, 1, 0, 0, 0, 565, 101, 1, 0, 0, 0, 566, 568, 7, 0, 0, 0, 567, 566, 1, 0, 0, 0, 567, 568, 
1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 570, 5, 31, 0, 0, 570, 103, 1, 0, 0, 0, 571, 573, 7, 0, 0, 0, 572, 571, 1, 0, 0, 0, 572, 573, 1, 0, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 5, 30, 0, 0, 575, 105, 1, 0, 0, 0, 576, 577, 5, 29, 0, 0, 577, 107, 1, 0, 0, 0, 578, 579, 7, 7, 0, 0, 579, 109, 1, 0, 0, 0, 580, 581, 5, 5, 0, 0, 581, 582, 3, 112, 56, 0, 582, 111, 1, 0, 0, 0, 583, 584, 5, 71, 0, 0, 584, 585, 3, 2, 1, 0, 585, 586, 5, 72, 0, 0, 586, 113, 1, 0, 0, 0, 587, 588, 5, 13, 0, 0, 588, 589, 5, 106, 0, 0, 589, 115, 1, 0, 0, 0, 590, 591, 5, 3, 0, 0, 591, 594, 5, 96, 0, 0, 592, 593, 5, 94, 0, 0, 593, 595, 3, 60, 30, 0, 594, 592, 1, 0, 0, 0, 594, 595, 1, 0, 0, 0, 595, 605, 1, 0, 0, 0, 596, 597, 5, 95, 0, 0, 597, 602, 3, 118, 59, 0, 598, 599, 5, 38, 0, 0, 599, 601, 3, 118, 59, 0, 600, 598, 1, 0, 0, 0, 601, 604, 1, 0, 0, 0, 602, 600, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 596, 1, 0, 0, 0, 605, 606, 1, 0, 0, 0, 606, 117, 1, 0, 0, 0, 607, 608, 3, 60, 30, 0, 608, 609, 5, 35, 0, 0, 609, 611, 1, 0, 0, 0, 610, 607, 1, 0, 0, 0, 610, 611, 1, 0, 0, 0, 611, 612, 1, 0, 0, 0, 612, 613, 3, 60, 30, 0, 613, 119, 1, 0, 0, 0, 614, 615, 5, 19, 0, 0, 615, 616, 3, 40, 20, 0, 616, 617, 5, 94, 0, 0, 617, 618, 3, 62, 31, 0, 618, 121, 1, 0, 0, 0, 619, 620, 5, 18, 0, 0, 620, 623, 3, 54, 27, 0, 621, 622, 5, 32, 0, 0, 622, 624, 3, 34, 17, 0, 623, 621, 1, 0, 0, 0, 623, 624, 1, 0, 0, 0, 624, 123, 1, 0, 0, 0, 625, 626, 7, 8, 0, 0, 626, 627, 5, 120, 0, 0, 627, 628, 3, 126, 63, 0, 628, 629, 3, 128, 64, 0, 629, 125, 1, 0, 0, 0, 630, 631, 3, 40, 20, 0, 631, 127, 1, 0, 0, 0, 632, 633, 5, 94, 0, 0, 633, 638, 3, 130, 65, 0, 634, 635, 5, 38, 0, 0, 635, 637, 3, 130, 65, 0, 636, 634, 1, 0, 0, 0, 637, 640, 1, 0, 0, 0, 638, 636, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 129, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 641, 642, 3, 16, 8, 0, 642, 131, 1, 0, 0, 0, 61, 143, 152, 171, 183, 192, 200, 205, 213, 215, 220, 227, 232, 237, 247, 253, 261, 263, 274, 281, 292, 297, 299, 311, 330, 336, 346, 350, 355, 369, 378, 382, 386, 393, 397, 404, 410, 417, 425, 433, 441, 458, 469, 480, 485, 489, 494, 505, 510, 514, 528, 539, 553, 564, 567, 572, 594, 602, 605, 610, 623, 638] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 1e0a636d67182..b28f8ed4e1fad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -27,31 +27,31 @@ public class EsqlBaseParser extends ParserConfig { public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20, - DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24, - UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29, - QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, - ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42, - FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, NOT=49, NULL=50, NULLS=51, - OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, - LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, - LEFT_BRACES=69, RIGHT_BRACES=70, NAMED_OR_POSITIONAL_PARAM=71, 
OPENING_BRACKET=72, - CLOSING_BRACKET=73, UNQUOTED_IDENTIFIER=74, QUOTED_IDENTIFIER=75, EXPR_LINE_COMMENT=76, - EXPR_MULTILINE_COMMENT=77, EXPR_WS=78, EXPLAIN_WS=79, EXPLAIN_LINE_COMMENT=80, - EXPLAIN_MULTILINE_COMMENT=81, METADATA=82, UNQUOTED_SOURCE=83, FROM_LINE_COMMENT=84, - FROM_MULTILINE_COMMENT=85, FROM_WS=86, ID_PATTERN=87, PROJECT_LINE_COMMENT=88, - PROJECT_MULTILINE_COMMENT=89, PROJECT_WS=90, AS=91, RENAME_LINE_COMMENT=92, - RENAME_MULTILINE_COMMENT=93, RENAME_WS=94, ON=95, WITH=96, ENRICH_POLICY_NAME=97, - ENRICH_LINE_COMMENT=98, ENRICH_MULTILINE_COMMENT=99, ENRICH_WS=100, ENRICH_FIELD_LINE_COMMENT=101, - ENRICH_FIELD_MULTILINE_COMMENT=102, ENRICH_FIELD_WS=103, MVEXPAND_LINE_COMMENT=104, - MVEXPAND_MULTILINE_COMMENT=105, MVEXPAND_WS=106, INFO=107, SHOW_LINE_COMMENT=108, - SHOW_MULTILINE_COMMENT=109, SHOW_WS=110, SETTING=111, SETTING_LINE_COMMENT=112, - SETTTING_MULTILINE_COMMENT=113, SETTING_WS=114, LOOKUP_LINE_COMMENT=115, - LOOKUP_MULTILINE_COMMENT=116, LOOKUP_WS=117, LOOKUP_FIELD_LINE_COMMENT=118, - LOOKUP_FIELD_MULTILINE_COMMENT=119, LOOKUP_FIELD_WS=120, USING=121, JOIN_LINE_COMMENT=122, - JOIN_MULTILINE_COMMENT=123, JOIN_WS=124, METRICS_LINE_COMMENT=125, METRICS_MULTILINE_COMMENT=126, - METRICS_WS=127, CLOSING_METRICS_LINE_COMMENT=128, CLOSING_METRICS_MULTILINE_COMMENT=129, - CLOSING_METRICS_WS=130; + WHERE=16, JOIN_LOOKUP=17, DEV_INLINESTATS=18, DEV_LOOKUP=19, DEV_METRICS=20, + DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, UNKNOWN_CMD=24, + LINE_COMMENT=25, MULTILINE_COMMENT=26, WS=27, PIPE=28, QUOTED_STRING=29, + INTEGER_LITERAL=30, DECIMAL_LITERAL=31, BY=32, AND=33, ASC=34, ASSIGN=35, + CAST_OP=36, COLON=37, COMMA=38, DESC=39, DOT=40, FALSE=41, FIRST=42, IN=43, + IS=44, LAST=45, LIKE=46, LP=47, NOT=48, NULL=49, NULLS=50, OR=51, PARAM=52, + RLIKE=53, RP=54, TRUE=55, EQ=56, CIEQ=57, NEQ=58, LT=59, LTE=60, GT=61, + GTE=62, PLUS=63, MINUS=64, ASTERISK=65, SLASH=66, PERCENT=67, LEFT_BRACES=68, + RIGHT_BRACES=69, NAMED_OR_POSITIONAL_PARAM=70, OPENING_BRACKET=71, CLOSING_BRACKET=72, + UNQUOTED_IDENTIFIER=73, QUOTED_IDENTIFIER=74, EXPR_LINE_COMMENT=75, EXPR_MULTILINE_COMMENT=76, + EXPR_WS=77, EXPLAIN_WS=78, EXPLAIN_LINE_COMMENT=79, EXPLAIN_MULTILINE_COMMENT=80, + METADATA=81, UNQUOTED_SOURCE=82, FROM_LINE_COMMENT=83, FROM_MULTILINE_COMMENT=84, + FROM_WS=85, ID_PATTERN=86, PROJECT_LINE_COMMENT=87, PROJECT_MULTILINE_COMMENT=88, + PROJECT_WS=89, AS=90, RENAME_LINE_COMMENT=91, RENAME_MULTILINE_COMMENT=92, + RENAME_WS=93, ON=94, WITH=95, ENRICH_POLICY_NAME=96, ENRICH_LINE_COMMENT=97, + ENRICH_MULTILINE_COMMENT=98, ENRICH_WS=99, ENRICH_FIELD_LINE_COMMENT=100, + ENRICH_FIELD_MULTILINE_COMMENT=101, ENRICH_FIELD_WS=102, MVEXPAND_LINE_COMMENT=103, + MVEXPAND_MULTILINE_COMMENT=104, MVEXPAND_WS=105, INFO=106, SHOW_LINE_COMMENT=107, + SHOW_MULTILINE_COMMENT=108, SHOW_WS=109, SETTING=110, SETTING_LINE_COMMENT=111, + SETTTING_MULTILINE_COMMENT=112, SETTING_WS=113, LOOKUP_LINE_COMMENT=114, + LOOKUP_MULTILINE_COMMENT=115, LOOKUP_WS=116, LOOKUP_FIELD_LINE_COMMENT=117, + LOOKUP_FIELD_MULTILINE_COMMENT=118, LOOKUP_FIELD_WS=119, JOIN=120, USING=121, + JOIN_LINE_COMMENT=122, JOIN_MULTILINE_COMMENT=123, JOIN_WS=124, METRICS_LINE_COMMENT=125, + METRICS_MULTILINE_COMMENT=126, METRICS_WS=127, CLOSING_METRICS_LINE_COMMENT=128, + CLOSING_METRICS_MULTILINE_COMMENT=129, CLOSING_METRICS_WS=130; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, @@ 
-98,7 +98,7 @@ private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + "'sort'", "'stats'", "'where'", "'lookup'", null, null, null, null, null, null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", @@ -108,7 +108,7 @@ private static String[] makeLiteralNames() { null, null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, null, "'info'", null, null, null, null, null, null, null, null, null, - null, null, null, null, "'USING'" + null, null, null, null, "'join'", "'USING'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -116,13 +116,13 @@ private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", - "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", - "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", - "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", - "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "WHERE", "JOIN_LOOKUP", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", + "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "UNKNOWN_CMD", "LINE_COMMENT", + "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COLON", + "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", + "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", + "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "LEFT_BRACES", "RIGHT_BRACES", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", @@ -137,8 +137,8 @@ private static String[] makeSymbolicNames() { "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", - "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", + "JOIN", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", + "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } @@ -497,15 +497,15 @@ public EnrichCommandContext enrichCommand() { public MvExpandCommandContext mvExpandCommand() { return getRuleContext(MvExpandCommandContext.class,0); } + public JoinCommandContext joinCommand() { + return getRuleContext(JoinCommandContext.class,0); 
+ } public InlinestatsCommandContext inlinestatsCommand() { return getRuleContext(InlinestatsCommandContext.class,0); } public LookupCommandContext lookupCommand() { return getRuleContext(LookupCommandContext.class,0); } - public JoinCommandContext joinCommand() { - return getRuleContext(JoinCommandContext.class,0); - } @SuppressWarnings("this-escape") public ProcessingCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -530,7 +530,7 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(172); + setState(171); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: @@ -621,27 +621,25 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce enterOuterAlt(_localctx, 13); { setState(166); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(167); - inlinestatsCommand(); + joinCommand(); } break; case 14: enterOuterAlt(_localctx, 14); { - setState(168); + setState(167); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(169); - lookupCommand(); + setState(168); + inlinestatsCommand(); } break; case 15: enterOuterAlt(_localctx, 15); { - setState(170); + setState(169); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(171); - joinCommand(); + setState(170); + lookupCommand(); } break; } @@ -689,9 +687,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(174); + setState(173); match(WHERE); - setState(175); + setState(174); booleanExpression(0); } } @@ -907,7 +905,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(206); + setState(205); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -916,9 +914,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(178); + setState(177); match(NOT); - setState(179); + setState(178); booleanExpression(8); } break; @@ -927,7 +925,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(180); + setState(179); valueExpression(); } break; @@ -936,7 +934,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(181); + setState(180); regexBooleanExpression(); } break; @@ -945,41 +943,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(182); + setState(181); valueExpression(); - setState(184); + setState(183); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(183); + setState(182); match(NOT); } } - setState(186); + setState(185); match(IN); - setState(187); + setState(186); match(LP); - setState(188); + setState(187); valueExpression(); - setState(193); + setState(192); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { 
{ { - setState(189); + setState(188); match(COMMA); - setState(190); + setState(189); valueExpression(); } } - setState(195); + setState(194); _errHandler.sync(this); _la = _input.LA(1); } - setState(196); + setState(195); match(RP); } break; @@ -988,21 +986,21 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(198); + setState(197); valueExpression(); - setState(199); + setState(198); match(IS); - setState(201); + setState(200); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(200); + setState(199); match(NOT); } } - setState(203); + setState(202); match(NULL); } break; @@ -1011,13 +1009,13 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MatchExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(205); + setState(204); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(216); + setState(215); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1025,7 +1023,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(214); + setState(213); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1033,11 +1031,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(208); + setState(207); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(209); + setState(208); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(210); + setState(209); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1046,18 +1044,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(211); + setState(210); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(212); + setState(211); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(213); + setState(212); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } } } - setState(218); + setState(217); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1112,48 +1110,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(233); + setState(232); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(219); + setState(218); valueExpression(); - setState(221); + setState(220); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(220); + setState(219); match(NOT); } } - setState(223); + setState(222); ((RegexBooleanExpressionContext)_localctx).kind = 
match(LIKE); - setState(224); + setState(223); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(226); + setState(225); valueExpression(); - setState(228); + setState(227); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(227); + setState(226); match(NOT); } } - setState(230); + setState(229); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(231); + setState(230); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1213,23 +1211,23 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog try { enterOuterAlt(_localctx, 1); { - setState(235); + setState(234); ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); - setState(238); + setState(237); _errHandler.sync(this); _la = _input.LA(1); if (_la==CAST_OP) { { - setState(236); + setState(235); match(CAST_OP); - setState(237); + setState(236); ((MatchBooleanExpressionContext)_localctx).fieldType = dataType(); } } - setState(240); + setState(239); match(COLON); - setState(241); + setState(240); ((MatchBooleanExpressionContext)_localctx).matchQuery = constant(); } } @@ -1313,14 +1311,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, RULE_valueExpression); try { - setState(248); + setState(247); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(243); + setState(242); operatorExpression(0); } break; @@ -1328,11 +1326,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(244); + setState(243); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(245); + setState(244); comparisonOperator(); - setState(246); + setState(245); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1457,7 +1455,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(254); + setState(253); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1466,7 +1464,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(251); + setState(250); primaryExpression(0); } break; @@ -1475,7 +1473,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(252); + setState(251); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1486,13 +1484,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(253); + setState(252); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(264); + setState(263); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1500,7 +1498,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( 
_parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(262); + setState(261); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) { case 1: @@ -1508,12 +1506,12 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(256); + setState(255); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(257); + setState(256); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & 7L) != 0)) ) { + if ( !(((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & 7L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1521,7 +1519,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(258); + setState(257); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1530,9 +1528,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(259); + setState(258); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(260); + setState(259); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1543,14 +1541,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(261); + setState(260); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(266); + setState(265); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); } @@ -1708,7 +1706,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(275); + setState(274); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,17,_ctx) ) { case 1: @@ -1717,7 +1715,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(268); + setState(267); constant(); } break; @@ -1726,7 +1724,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(269); + setState(268); qualifiedName(); } break; @@ -1735,7 +1733,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(270); + setState(269); functionExpression(); } break; @@ -1744,17 +1742,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(271); + setState(270); match(LP); - setState(272); + setState(271); booleanExpression(0); - 
setState(273); + setState(272); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(282); + setState(281); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,18,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1765,16 +1763,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(277); + setState(276); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(278); + setState(277); match(CAST_OP); - setState(279); + setState(278); dataType(); } } } - setState(284); + setState(283); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,18,_ctx); } @@ -1840,50 +1838,50 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx int _alt; enterOuterAlt(_localctx, 1); { - setState(285); + setState(284); functionName(); - setState(286); + setState(285); match(LP); - setState(300); + setState(299); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: { - setState(287); + setState(286); match(ASTERISK); } break; case 2: { { - setState(288); + setState(287); booleanExpression(0); - setState(293); + setState(292); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(289); + setState(288); match(COMMA); - setState(290); + setState(289); booleanExpression(0); } } } - setState(295); + setState(294); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); } - setState(298); + setState(297); _errHandler.sync(this); _la = _input.LA(1); if (_la==COMMA) { { - setState(296); + setState(295); match(COMMA); - setState(297); + setState(296); mapExpression(); } } @@ -1892,7 +1890,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx } break; } - setState(302); + setState(301); match(RP); } } @@ -1938,7 +1936,7 @@ public final FunctionNameContext functionName() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(304); + setState(303); identifierOrParameter(); } } @@ -1994,27 +1992,27 @@ public final MapExpressionContext mapExpression() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(306); + setState(305); match(LEFT_BRACES); - setState(307); + setState(306); entryExpression(); - setState(312); + setState(311); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(308); + setState(307); match(COMMA); - setState(309); + setState(308); entryExpression(); } } - setState(314); + setState(313); _errHandler.sync(this); _la = _input.LA(1); } - setState(315); + setState(314); match(RIGHT_BRACES); } } @@ -2066,11 +2064,11 @@ public final EntryExpressionContext entryExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(317); + setState(316); ((EntryExpressionContext)_localctx).key = string(); - setState(318); + setState(317); match(COLON); - setState(319); + setState(318); ((EntryExpressionContext)_localctx).value = constant(); } } @@ -2128,7 +2126,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(321); + 
setState(320); identifier(); } } @@ -2175,9 +2173,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(323); + setState(322); match(ROW); - setState(324); + setState(323); fields(); } } @@ -2231,23 +2229,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(326); + setState(325); field(); - setState(331); + setState(330); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(327); + setState(326); match(COMMA); - setState(328); + setState(327); field(); } } } - setState(333); + setState(332); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); } @@ -2299,19 +2297,19 @@ public final FieldContext field() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(337); + setState(336); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(334); + setState(333); qualifiedName(); - setState(335); + setState(334); match(ASSIGN); } break; } - setState(339); + setState(338); booleanExpression(0); } } @@ -2369,34 +2367,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(341); + setState(340); match(FROM); - setState(342); + setState(341); indexPattern(); - setState(347); + setState(346); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,25,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(343); + setState(342); match(COMMA); - setState(344); + setState(343); indexPattern(); } } } - setState(349); + setState(348); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,25,_ctx); } - setState(351); + setState(350); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: { - setState(350); + setState(349); metadata(); } break; @@ -2449,19 +2447,19 @@ public final IndexPatternContext indexPattern() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(356); + setState(355); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { case 1: { - setState(353); + setState(352); clusterString(); - setState(354); + setState(353); match(COLON); } break; } - setState(358); + setState(357); indexString(); } } @@ -2505,7 +2503,7 @@ public final ClusterStringContext clusterString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(360); + setState(359); match(UNQUOTED_SOURCE); } } @@ -2551,7 +2549,7 @@ public final IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(362); + setState(361); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2612,25 +2610,25 @@ public final MetadataContext metadata() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(364); + setState(363); match(METADATA); - setState(365); + setState(364); match(UNQUOTED_SOURCE); - setState(370); + setState(369); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(366); + setState(365); 
match(COMMA); - setState(367); + setState(366); match(UNQUOTED_SOURCE); } } } - setState(372); + setState(371); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } @@ -2696,46 +2694,46 @@ public final MetricsCommandContext metricsCommand() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(373); + setState(372); match(DEV_METRICS); - setState(374); + setState(373); indexPattern(); - setState(379); + setState(378); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(375); + setState(374); match(COMMA); - setState(376); + setState(375); indexPattern(); } } } - setState(381); + setState(380); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); } - setState(383); + setState(382); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(382); + setState(381); ((MetricsCommandContext)_localctx).aggregates = aggFields(); } break; } - setState(387); + setState(386); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(385); + setState(384); match(BY); - setState(386); + setState(385); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2785,9 +2783,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(389); + setState(388); match(EVAL); - setState(390); + setState(389); fields(); } } @@ -2840,26 +2838,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(392); + setState(391); match(STATS); - setState(394); + setState(393); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(393); + setState(392); ((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(398); + setState(397); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(396); + setState(395); match(BY); - setState(397); + setState(396); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2916,23 +2914,23 @@ public final AggFieldsContext aggFields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(400); + setState(399); aggField(); - setState(405); + setState(404); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(401); + setState(400); match(COMMA); - setState(402); + setState(401); aggField(); } } } - setState(407); + setState(406); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -2984,16 +2982,16 @@ public final AggFieldContext aggField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(408); + setState(407); field(); - setState(411); + setState(410); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(409); + setState(408); match(WHERE); - setState(410); + setState(409); booleanExpression(0); } break; @@ -3050,23 +3048,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(413); + setState(412); identifierOrParameter(); - 
setState(418); + setState(417); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(414); + setState(413); match(DOT); - setState(415); + setState(414); identifierOrParameter(); } } } - setState(420); + setState(419); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } @@ -3122,23 +3120,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(421); + setState(420); identifierPattern(); - setState(426); + setState(425); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(422); + setState(421); match(DOT); - setState(423); + setState(422); identifierPattern(); } } } - setState(428); + setState(427); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } @@ -3194,23 +3192,23 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni int _alt; enterOuterAlt(_localctx, 1); { - setState(429); + setState(428); qualifiedNamePattern(); - setState(434); + setState(433); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(430); + setState(429); match(COMMA); - setState(431); + setState(430); qualifiedNamePattern(); } } } - setState(436); + setState(435); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); } @@ -3258,7 +3256,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(437); + setState(436); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3311,22 +3309,22 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(442); + setState(441); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(439); + setState(438); match(ID_PATTERN); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(440); + setState(439); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(441); + setState(440); parameter(); } break; @@ -3599,14 +3597,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(486); + setState(485); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(444); + setState(443); match(NULL); } break; @@ -3614,9 +3612,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(445); + setState(444); integerValue(); - setState(446); + setState(445); match(UNQUOTED_IDENTIFIER); } break; @@ -3624,7 +3622,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); 
enterOuterAlt(_localctx, 3); { - setState(448); + setState(447); decimalValue(); } break; @@ -3632,7 +3630,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(449); + setState(448); integerValue(); } break; @@ -3640,7 +3638,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(450); + setState(449); booleanValue(); } break; @@ -3648,7 +3646,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(451); + setState(450); parameter(); } break; @@ -3656,7 +3654,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(452); + setState(451); string(); } break; @@ -3664,27 +3662,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(453); + setState(452); match(OPENING_BRACKET); - setState(454); + setState(453); numericValue(); - setState(459); + setState(458); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(455); + setState(454); match(COMMA); - setState(456); + setState(455); numericValue(); } } - setState(461); + setState(460); _errHandler.sync(this); _la = _input.LA(1); } - setState(462); + setState(461); match(CLOSING_BRACKET); } break; @@ -3692,27 +3690,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(464); + setState(463); match(OPENING_BRACKET); - setState(465); + setState(464); booleanValue(); - setState(470); + setState(469); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(466); + setState(465); match(COMMA); - setState(467); + setState(466); booleanValue(); } } - setState(472); + setState(471); _errHandler.sync(this); _la = _input.LA(1); } - setState(473); + setState(472); match(CLOSING_BRACKET); } break; @@ -3720,27 +3718,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(475); + setState(474); match(OPENING_BRACKET); - setState(476); + setState(475); string(); - setState(481); + setState(480); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(477); + setState(476); match(COMMA); - setState(478); + setState(477); string(); } } - setState(483); + setState(482); _errHandler.sync(this); _la = _input.LA(1); } - setState(484); + setState(483); match(CLOSING_BRACKET); } break; @@ -3814,14 +3812,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 70, RULE_parameter); try { - setState(490); + setState(489); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(488); + setState(487); match(PARAM); } break; @@ -3829,7 +3827,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - 
setState(489); + setState(488); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3880,22 +3878,22 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(495); + setState(494); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(492); + setState(491); identifier(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(493); + setState(492); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(494); + setState(493); parameter(); } break; @@ -3942,9 +3940,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(497); + setState(496); match(LIMIT); - setState(498); + setState(497); match(INTEGER_LITERAL); } } @@ -3999,25 +3997,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(500); + setState(499); match(SORT); - setState(501); + setState(500); orderExpression(); - setState(506); + setState(505); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,46,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(502); + setState(501); match(COMMA); - setState(503); + setState(502); orderExpression(); } } } - setState(508); + setState(507); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,46,_ctx); } @@ -4073,14 +4071,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(509); + setState(508); booleanExpression(0); - setState(511); + setState(510); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(510); + setState(509); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -4094,14 +4092,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(515); + setState(514); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(513); + setState(512); match(NULLS); - setState(514); + setState(513); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4160,9 +4158,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(517); + setState(516); match(KEEP); - setState(518); + setState(517); qualifiedNamePatterns(); } } @@ -4209,9 +4207,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(520); + setState(519); match(DROP); - setState(521); + setState(520); qualifiedNamePatterns(); } } @@ -4266,25 +4264,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(523); + setState(522); match(RENAME); - setState(524); + setState(523); renameClause(); - setState(529); + setState(528); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( 
_alt==1 ) { { { - setState(525); + setState(524); match(COMMA); - setState(526); + setState(525); renameClause(); } } } - setState(531); + setState(530); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } @@ -4338,11 +4336,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(532); + setState(531); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(533); + setState(532); match(AS); - setState(534); + setState(533); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4395,18 +4393,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(536); + setState(535); match(DISSECT); - setState(537); + setState(536); primaryExpression(0); - setState(538); + setState(537); string(); - setState(540); + setState(539); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: { - setState(539); + setState(538); commandOptions(); } break; @@ -4459,11 +4457,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(542); + setState(541); match(GROK); - setState(543); + setState(542); primaryExpression(0); - setState(544); + setState(543); string(); } } @@ -4510,9 +4508,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(546); + setState(545); match(MV_EXPAND); - setState(547); + setState(546); qualifiedName(); } } @@ -4566,23 +4564,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(549); + setState(548); commandOption(); - setState(554); + setState(553); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,51,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(550); + setState(549); match(COMMA); - setState(551); + setState(550); commandOption(); } } } - setState(556); + setState(555); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,51,_ctx); } @@ -4634,11 +4632,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(557); + setState(556); identifier(); - setState(558); + setState(557); match(ASSIGN); - setState(559); + setState(558); constant(); } } @@ -4684,7 +4682,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(561); + setState(560); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4739,20 +4737,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 100, RULE_numericValue); try { - setState(565); + setState(564); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(563); + setState(562); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(564); + setState(563); integerValue(); } break; @@ -4801,12 +4799,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(568); + setState(567); 
_errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(567); + setState(566); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4819,7 +4817,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(570); + setState(569); match(DECIMAL_LITERAL); } } @@ -4866,12 +4864,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(573); + setState(572); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(572); + setState(571); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4884,7 +4882,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(575); + setState(574); match(INTEGER_LITERAL); } } @@ -4928,7 +4926,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(577); + setState(576); match(QUOTED_STRING); } } @@ -4978,9 +4976,9 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(579); + setState(578); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -432345564227567616L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 9007199254740992000L) != 0)) ) { _errHandler.recoverInline(this); } else { @@ -5033,9 +5031,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(581); + setState(580); match(EXPLAIN); - setState(582); + setState(581); subqueryExpression(); } } @@ -5083,11 +5081,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(584); + setState(583); match(OPENING_BRACKET); - setState(585); + setState(584); query(0); - setState(586); + setState(585); match(CLOSING_BRACKET); } } @@ -5144,9 +5142,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(588); + setState(587); match(SHOW); - setState(589); + setState(588); match(INFO); } } @@ -5209,46 +5207,46 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(591); + setState(590); match(ENRICH); - setState(592); + setState(591); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(595); + setState(594); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(593); + setState(592); match(ON); - setState(594); + setState(593); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(606); + setState(605); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(597); + setState(596); match(WITH); - setState(598); + setState(597); enrichWithClause(); - setState(603); + setState(602); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,56,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(599); + setState(598); match(COMMA); - setState(600); + setState(599); enrichWithClause(); } } } - setState(605); + setState(604); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,56,_ctx); } @@ -5305,19 +5303,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(611); + setState(610); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { case 1: { - setState(608); + setState(607); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(609); + setState(608); match(ASSIGN); } break; } - setState(613); + setState(612); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5370,13 +5368,13 @@ public final LookupCommandContext lookupCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(615); + setState(614); match(DEV_LOOKUP); - setState(616); + setState(615); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(617); + setState(616); match(ON); - setState(618); + setState(617); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5429,18 +5427,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(620); + setState(619); match(DEV_INLINESTATS); - setState(621); + setState(620); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - setState(624); + setState(623); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,59,_ctx) ) { case 1: { - setState(622); + setState(621); match(BY); - setState(623); + setState(622); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5461,14 +5459,14 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx @SuppressWarnings("CheckReturnValue") public static class JoinCommandContext extends ParserRuleContext { public Token type; - public TerminalNode DEV_JOIN() { return getToken(EsqlBaseParser.DEV_JOIN, 0); } + public TerminalNode JOIN() { return getToken(EsqlBaseParser.JOIN, 0); } public JoinTargetContext joinTarget() { return getRuleContext(JoinTargetContext.class,0); } public JoinConditionContext joinCondition() { return getRuleContext(JoinConditionContext.class,0); } - public TerminalNode DEV_JOIN_LOOKUP() { return getToken(EsqlBaseParser.DEV_JOIN_LOOKUP, 0); } + public TerminalNode JOIN_LOOKUP() { return getToken(EsqlBaseParser.JOIN_LOOKUP, 0); } public TerminalNode DEV_JOIN_LEFT() { return getToken(EsqlBaseParser.DEV_JOIN_LEFT, 0); } public TerminalNode DEV_JOIN_RIGHT() { return getToken(EsqlBaseParser.DEV_JOIN_RIGHT, 0); } @SuppressWarnings("this-escape") @@ -5498,30 +5496,22 @@ public final JoinCommandContext joinCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(627); - _errHandler.sync(this); + setState(625); + ((JoinCommandContext)_localctx).type = _input.LT(1); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) { - { - setState(626); - ((JoinCommandContext)_localctx).type = _input.LT(1); - _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) ) { - ((JoinCommandContext)_localctx).type = (Token)_errHandler.recoverInline(this); - } - else { - if ( _input.LA(1)==Token.EOF ) matchedEOF = true; - _errHandler.reportMatch(this); - consume(); - } - } + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 12713984L) != 0)) ) { + ((JoinCommandContext)_localctx).type = (Token)_errHandler.recoverInline(this); } - - setState(629); - match(DEV_JOIN); - setState(630); + else { + if ( _input.LA(1)==Token.EOF ) 
matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + setState(626); + match(JOIN); + setState(627); joinTarget(); - setState(631); + setState(628); joinCondition(); } } @@ -5539,14 +5529,9 @@ public final JoinCommandContext joinCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class JoinTargetContext extends ParserRuleContext { public IndexPatternContext index; - public IdentifierContext alias; public IndexPatternContext indexPattern() { return getRuleContext(IndexPatternContext.class,0); } - public TerminalNode AS() { return getToken(EsqlBaseParser.AS, 0); } - public IdentifierContext identifier() { - return getRuleContext(IdentifierContext.class,0); - } @SuppressWarnings("this-escape") public JoinTargetContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -5570,24 +5555,11 @@ public T accept(ParseTreeVisitor visitor) { public final JoinTargetContext joinTarget() throws RecognitionException { JoinTargetContext _localctx = new JoinTargetContext(_ctx, getState()); enterRule(_localctx, 126, RULE_joinTarget); - int _la; try { enterOuterAlt(_localctx, 1); { - setState(633); + setState(630); ((JoinTargetContext)_localctx).index = indexPattern(); - setState(636); - _errHandler.sync(this); - _la = _input.LA(1); - if (_la==AS) { - { - setState(634); - match(AS); - setState(635); - ((JoinTargetContext)_localctx).alias = identifier(); - } - } - } } catch (RecognitionException re) { @@ -5641,27 +5613,27 @@ public final JoinConditionContext joinCondition() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(638); + setState(632); match(ON); - setState(639); + setState(633); joinPredicate(); - setState(644); + setState(638); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,62,_ctx); + _alt = getInterpreter().adaptivePredict(_input,60,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(640); + setState(634); match(COMMA); - setState(641); + setState(635); joinPredicate(); } } } - setState(646); + setState(640); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,62,_ctx); + _alt = getInterpreter().adaptivePredict(_input,60,_ctx); } } } @@ -5707,7 +5679,7 @@ public final JoinPredicateContext joinPredicate() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(647); + setState(641); valueExpression(); } } @@ -5763,53 +5735,51 @@ private boolean processingCommand_sempred(ProcessingCommandContext _localctx, in return this.isDevVersion(); case 3: return this.isDevVersion(); - case 4: - return this.isDevVersion(); } return true; } private boolean booleanExpression_sempred(BooleanExpressionContext _localctx, int predIndex) { switch (predIndex) { - case 5: + case 4: return precpred(_ctx, 5); - case 6: + case 5: return precpred(_ctx, 4); } return true; } private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, int predIndex) { switch (predIndex) { - case 7: + case 6: return precpred(_ctx, 2); - case 8: + case 7: return precpred(_ctx, 1); } return true; } private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, int predIndex) { switch (predIndex) { - case 9: + case 8: return precpred(_ctx, 1); } return true; } private boolean identifierPattern_sempred(IdentifierPatternContext _localctx, int predIndex) { switch (predIndex) { - case 10: + case 9: return this.isDevVersion(); } return true; } private boolean 
identifierOrParameter_sempred(IdentifierOrParameterContext _localctx, int predIndex) { switch (predIndex) { - case 11: + case 10: return this.isDevVersion(); } return true; } public static final String _serializedATN = - "\u0004\u0001\u0082\u028a\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0004\u0001\u0082\u0284\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ @@ -5832,393 +5802,388 @@ private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _loca "\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0099\b\u0002\u0001\u0003"+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003"+ - "\u00ad\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005"+ - "\u00b9\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0005\u0005\u00c0\b\u0005\n\u0005\f\u0005\u00c3\t\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00ca\b\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00cf\b\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00d7"+ - "\b\u0005\n\u0005\f\u0005\u00da\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006"+ - "\u00de\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - "\u0003\u0006\u00e5\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006"+ - "\u00ea\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00ef\b"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0003\b\u00f9\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00ff"+ - "\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005\t\u0107\b\t"+ - "\n\t\f\t\u010a\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0003\n\u0114\b\n\u0001\n\u0001\n\u0001\n\u0005\n\u0119\b\n"+ - "\n\n\f\n\u011c\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0005\u000b\u0124\b\u000b\n\u000b\f\u000b\u0127\t\u000b"+ - "\u0001\u000b\u0001\u000b\u0003\u000b\u012b\b\u000b\u0003\u000b\u012d\b"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u00ac\b\u0003"+ + "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00b8\b\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005"+ + "\u00bf\b\u0005\n\u0005\f\u0005\u00c2\t\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00c9\b\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0003\u0005\u00ce\b\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00d6\b\u0005\n"+ + "\u0005\f\u0005\u00d9\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00dd"+ + "\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003"+ + "\u0006\u00e4\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00e9"+ + "\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00ee\b\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ + 
"\b\u0003\b\u00f8\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00fe\b\t"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005\t\u0106\b\t\n\t"+ + "\f\t\u0109\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n"+ + "\u0001\n\u0003\n\u0113\b\n\u0001\n\u0001\n\u0001\n\u0005\n\u0118\b\n\n"+ + "\n\f\n\u011b\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ + "\u000b\u0001\u000b\u0005\u000b\u0123\b\u000b\n\u000b\f\u000b\u0126\t\u000b"+ + "\u0001\u000b\u0001\u000b\u0003\u000b\u012a\b\u000b\u0003\u000b\u012c\b"+ "\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r"+ - "\u0001\r\u0005\r\u0137\b\r\n\r\f\r\u013a\t\r\u0001\r\u0001\r\u0001\u000e"+ + "\u0001\r\u0005\r\u0136\b\r\n\r\f\r\u0139\t\r\u0001\r\u0001\r\u0001\u000e"+ "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u0010"+ "\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011"+ - "\u014a\b\u0011\n\u0011\f\u0011\u014d\t\u0011\u0001\u0012\u0001\u0012\u0001"+ - "\u0012\u0003\u0012\u0152\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u015a\b\u0013\n\u0013\f\u0013"+ - "\u015d\t\u0013\u0001\u0013\u0003\u0013\u0160\b\u0013\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0003\u0014\u0165\b\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0149\b\u0011\n\u0011\f\u0011\u014c\t\u0011\u0001\u0012\u0001\u0012\u0001"+ + "\u0012\u0003\u0012\u0151\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ + "\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u0159\b\u0013\n\u0013\f\u0013"+ + "\u015c\t\u0013\u0001\u0013\u0003\u0013\u015f\b\u0013\u0001\u0014\u0001"+ + "\u0014\u0001\u0014\u0003\u0014\u0164\b\u0014\u0001\u0014\u0001\u0014\u0001"+ "\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0005\u0017\u0171\b\u0017\n\u0017\f\u0017\u0174\t\u0017"+ - "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u017a\b\u0018"+ - "\n\u0018\f\u0018\u017d\t\u0018\u0001\u0018\u0003\u0018\u0180\b\u0018\u0001"+ - "\u0018\u0001\u0018\u0003\u0018\u0184\b\u0018\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u018b\b\u001a\u0001\u001a\u0001"+ - "\u001a\u0003\u001a\u018f\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005"+ - "\u001b\u0194\b\u001b\n\u001b\f\u001b\u0197\t\u001b\u0001\u001c\u0001\u001c"+ - "\u0001\u001c\u0003\u001c\u019c\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ - "\u0005\u001d\u01a1\b\u001d\n\u001d\f\u001d\u01a4\t\u001d\u0001\u001e\u0001"+ - "\u001e\u0001\u001e\u0005\u001e\u01a9\b\u001e\n\u001e\f\u001e\u01ac\t\u001e"+ - "\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01b1\b\u001f\n\u001f"+ - "\f\u001f\u01b4\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u01bb"+ + "\u0017\u0001\u0017\u0005\u0017\u0170\b\u0017\n\u0017\f\u0017\u0173\t\u0017"+ + "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u0179\b\u0018"+ + "\n\u0018\f\u0018\u017c\t\u0018\u0001\u0018\u0003\u0018\u017f\b\u0018\u0001"+ + "\u0018\u0001\u0018\u0003\u0018\u0183\b\u0018\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u018a\b\u001a\u0001\u001a\u0001"+ + "\u001a\u0003\u001a\u018e\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005"+ + "\u001b\u0193\b\u001b\n\u001b\f\u001b\u0196\t\u001b\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0003\u001c\u019b\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0005\u001d\u01a0\b\u001d\n\u001d\f\u001d\u01a3\t\u001d\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0005\u001e\u01a8\b\u001e\n\u001e\f\u001e\u01ab\t\u001e"+ + 
"\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01b0\b\u001f\n\u001f"+ + "\f\u001f\u01b3\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u01ba"+ "\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01ca\b\"\n\"\f\"\u01cd\t\""+ - "\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01d5\b\"\n\""+ - "\f\"\u01d8\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\""+ - "\u01e0\b\"\n\"\f\"\u01e3\t\"\u0001\"\u0001\"\u0003\"\u01e7\b\"\u0001#"+ - "\u0001#\u0003#\u01eb\b#\u0001$\u0001$\u0001$\u0003$\u01f0\b$\u0001%\u0001"+ - "%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01f9\b&\n&\f&\u01fc\t&\u0001"+ - "\'\u0001\'\u0003\'\u0200\b\'\u0001\'\u0001\'\u0003\'\u0204\b\'\u0001("+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c9\b\"\n\"\f\"\u01cc\t\""+ + "\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01d4\b\"\n\""+ + "\f\"\u01d7\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\""+ + "\u01df\b\"\n\"\f\"\u01e2\t\"\u0001\"\u0001\"\u0003\"\u01e6\b\"\u0001#"+ + "\u0001#\u0003#\u01ea\b#\u0001$\u0001$\u0001$\u0003$\u01ef\b$\u0001%\u0001"+ + "%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01f8\b&\n&\f&\u01fb\t&\u0001"+ + "\'\u0001\'\u0003\'\u01ff\b\'\u0001\'\u0001\'\u0003\'\u0203\b\'\u0001("+ "\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005"+ - "*\u0210\b*\n*\f*\u0213\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ - ",\u0001,\u0003,\u021d\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001"+ - ".\u0001/\u0001/\u0001/\u0005/\u0229\b/\n/\f/\u022c\t/\u00010\u00010\u0001"+ - "0\u00010\u00011\u00011\u00012\u00012\u00032\u0236\b2\u00013\u00033\u0239"+ - "\b3\u00013\u00013\u00014\u00034\u023e\b4\u00014\u00014\u00015\u00015\u0001"+ + "*\u020f\b*\n*\f*\u0212\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ + ",\u0001,\u0003,\u021c\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001"+ + ".\u0001/\u0001/\u0001/\u0005/\u0228\b/\n/\f/\u022b\t/\u00010\u00010\u0001"+ + "0\u00010\u00011\u00011\u00012\u00012\u00032\u0235\b2\u00013\u00033\u0238"+ + "\b3\u00013\u00013\u00014\u00034\u023d\b4\u00014\u00014\u00015\u00015\u0001"+ "6\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u0001"+ - "9\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0254\b:\u0001:\u0001:\u0001"+ - ":\u0001:\u0005:\u025a\b:\n:\f:\u025d\t:\u0003:\u025f\b:\u0001;\u0001;"+ - "\u0001;\u0003;\u0264\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001"+ - "<\u0001=\u0001=\u0001=\u0001=\u0003=\u0271\b=\u0001>\u0003>\u0274\b>\u0001"+ - ">\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0003?\u027d\b?\u0001@\u0001"+ - "@\u0001@\u0001@\u0005@\u0283\b@\n@\f@\u0286\t@\u0001A\u0001A\u0001A\u0000"+ - "\u0004\u0002\n\u0012\u0014B\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ - "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ - "TVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000\t\u0001\u0000@A\u0001\u0000"+ - "BD\u0002\u0000\u001e\u001eSS\u0001\u0000JK\u0002\u0000##((\u0002\u0000"+ - "++..\u0002\u0000**88\u0002\u000099;?\u0001\u0000\u0016\u0018\u02a5\u0000"+ - "\u0084\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000\u0000\u0000\u0004"+ - "\u0098\u0001\u0000\u0000\u0000\u0006\u00ac\u0001\u0000\u0000\u0000\b\u00ae"+ - "\u0001\u0000\u0000\u0000\n\u00ce\u0001\u0000\u0000\u0000\f\u00e9\u0001"+ - "\u0000\u0000\u0000\u000e\u00eb\u0001\u0000\u0000\u0000\u0010\u00f8\u0001"+ - "\u0000\u0000\u0000\u0012\u00fe\u0001\u0000\u0000\u0000\u0014\u0113\u0001"+ - "\u0000\u0000\u0000\u0016\u011d\u0001\u0000\u0000\u0000\u0018\u0130\u0001"+ - 
"\u0000\u0000\u0000\u001a\u0132\u0001\u0000\u0000\u0000\u001c\u013d\u0001"+ - "\u0000\u0000\u0000\u001e\u0141\u0001\u0000\u0000\u0000 \u0143\u0001\u0000"+ - "\u0000\u0000\"\u0146\u0001\u0000\u0000\u0000$\u0151\u0001\u0000\u0000"+ - "\u0000&\u0155\u0001\u0000\u0000\u0000(\u0164\u0001\u0000\u0000\u0000*"+ - "\u0168\u0001\u0000\u0000\u0000,\u016a\u0001\u0000\u0000\u0000.\u016c\u0001"+ - "\u0000\u0000\u00000\u0175\u0001\u0000\u0000\u00002\u0185\u0001\u0000\u0000"+ - "\u00004\u0188\u0001\u0000\u0000\u00006\u0190\u0001\u0000\u0000\u00008"+ - "\u0198\u0001\u0000\u0000\u0000:\u019d\u0001\u0000\u0000\u0000<\u01a5\u0001"+ - "\u0000\u0000\u0000>\u01ad\u0001\u0000\u0000\u0000@\u01b5\u0001\u0000\u0000"+ - "\u0000B\u01ba\u0001\u0000\u0000\u0000D\u01e6\u0001\u0000\u0000\u0000F"+ - "\u01ea\u0001\u0000\u0000\u0000H\u01ef\u0001\u0000\u0000\u0000J\u01f1\u0001"+ - "\u0000\u0000\u0000L\u01f4\u0001\u0000\u0000\u0000N\u01fd\u0001\u0000\u0000"+ - "\u0000P\u0205\u0001\u0000\u0000\u0000R\u0208\u0001\u0000\u0000\u0000T"+ - "\u020b\u0001\u0000\u0000\u0000V\u0214\u0001\u0000\u0000\u0000X\u0218\u0001"+ - "\u0000\u0000\u0000Z\u021e\u0001\u0000\u0000\u0000\\\u0222\u0001\u0000"+ - "\u0000\u0000^\u0225\u0001\u0000\u0000\u0000`\u022d\u0001\u0000\u0000\u0000"+ - "b\u0231\u0001\u0000\u0000\u0000d\u0235\u0001\u0000\u0000\u0000f\u0238"+ - "\u0001\u0000\u0000\u0000h\u023d\u0001\u0000\u0000\u0000j\u0241\u0001\u0000"+ - "\u0000\u0000l\u0243\u0001\u0000\u0000\u0000n\u0245\u0001\u0000\u0000\u0000"+ - "p\u0248\u0001\u0000\u0000\u0000r\u024c\u0001\u0000\u0000\u0000t\u024f"+ - "\u0001\u0000\u0000\u0000v\u0263\u0001\u0000\u0000\u0000x\u0267\u0001\u0000"+ - "\u0000\u0000z\u026c\u0001\u0000\u0000\u0000|\u0273\u0001\u0000\u0000\u0000"+ - "~\u0279\u0001\u0000\u0000\u0000\u0080\u027e\u0001\u0000\u0000\u0000\u0082"+ - "\u0287\u0001\u0000\u0000\u0000\u0084\u0085\u0003\u0002\u0001\u0000\u0085"+ - "\u0086\u0005\u0000\u0000\u0001\u0086\u0001\u0001\u0000\u0000\u0000\u0087"+ - "\u0088\u0006\u0001\uffff\uffff\u0000\u0088\u0089\u0003\u0004\u0002\u0000"+ - "\u0089\u008f\u0001\u0000\u0000\u0000\u008a\u008b\n\u0001\u0000\u0000\u008b"+ - "\u008c\u0005\u001d\u0000\u0000\u008c\u008e\u0003\u0006\u0003\u0000\u008d"+ - "\u008a\u0001\u0000\u0000\u0000\u008e\u0091\u0001\u0000\u0000\u0000\u008f"+ - "\u008d\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000\u0000\u0090"+ - "\u0003\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000\u0000\u0092"+ - "\u0099\u0003n7\u0000\u0093\u0099\u0003&\u0013\u0000\u0094\u0099\u0003"+ - " \u0010\u0000\u0095\u0099\u0003r9\u0000\u0096\u0097\u0004\u0002\u0001"+ - "\u0000\u0097\u0099\u00030\u0018\u0000\u0098\u0092\u0001\u0000\u0000\u0000"+ - "\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001\u0000\u0000\u0000"+ - "\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000\u0000\u0000"+ - "\u0099\u0005\u0001\u0000\u0000\u0000\u009a\u00ad\u00032\u0019\u0000\u009b"+ - "\u00ad\u0003\b\u0004\u0000\u009c\u00ad\u0003P(\u0000\u009d\u00ad\u0003"+ - "J%\u0000\u009e\u00ad\u00034\u001a\u0000\u009f\u00ad\u0003L&\u0000\u00a0"+ - "\u00ad\u0003R)\u0000\u00a1\u00ad\u0003T*\u0000\u00a2\u00ad\u0003X,\u0000"+ - "\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad\u0003t:\u0000\u00a5\u00ad\u0003"+ - "\\.\u0000\u00a6\u00a7\u0004\u0003\u0002\u0000\u00a7\u00ad\u0003z=\u0000"+ - "\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9\u00ad\u0003x<\u0000\u00aa\u00ab"+ - "\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003|>\u0000\u00ac\u009a\u0001\u0000"+ - "\u0000\u0000\u00ac\u009b\u0001\u0000\u0000\u0000\u00ac\u009c\u0001\u0000"+ - 
"\u0000\u0000\u00ac\u009d\u0001\u0000\u0000\u0000\u00ac\u009e\u0001\u0000"+ - "\u0000\u0000\u00ac\u009f\u0001\u0000\u0000\u0000\u00ac\u00a0\u0001\u0000"+ - "\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000\u0000\u00ac\u00a2\u0001\u0000"+ - "\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000\u0000\u00ac\u00a4\u0001\u0000"+ - "\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000\u0000\u00ac\u00a6\u0001\u0000"+ - "\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001\u0000"+ - "\u0000\u0000\u00ad\u0007\u0001\u0000\u0000\u0000\u00ae\u00af\u0005\u0010"+ - "\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000\u00b0\t\u0001\u0000\u0000"+ - "\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000\u00b2\u00b3\u00051\u0000"+ - "\u0000\u00b3\u00cf\u0003\n\u0005\b\u00b4\u00cf\u0003\u0010\b\u0000\u00b5"+ - "\u00cf\u0003\f\u0006\u0000\u00b6\u00b8\u0003\u0010\b\u0000\u00b7\u00b9"+ - "\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000\u0000\u0000\u00b8\u00b9\u0001"+ - "\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u00bb\u0005"+ - ",\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000\u00bc\u00c1\u0003\u0010\b"+ - "\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be\u00c0\u0003\u0010\b\u0000"+ - "\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3\u0001\u0000\u0000\u0000"+ - "\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2\u0001\u0000\u0000\u0000"+ - "\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000\u0000\u0000"+ - "\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00cf\u0001\u0000\u0000\u0000\u00c6"+ - "\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-\u0000\u0000\u00c8\u00ca"+ - "\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000\u0000\u00c9\u00ca\u0001"+ - "\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000\u0000\u00cb\u00cc\u0005"+ - "2\u0000\u0000\u00cc\u00cf\u0001\u0000\u0000\u0000\u00cd\u00cf\u0003\u000e"+ - "\u0007\u0000\u00ce\u00b1\u0001\u0000\u0000\u0000\u00ce\u00b4\u0001\u0000"+ - "\u0000\u0000\u00ce\u00b5\u0001\u0000\u0000\u0000\u00ce\u00b6\u0001\u0000"+ - "\u0000\u0000\u00ce\u00c6\u0001\u0000\u0000\u0000\u00ce\u00cd\u0001\u0000"+ - "\u0000\u0000\u00cf\u00d8\u0001\u0000\u0000\u0000\u00d0\u00d1\n\u0005\u0000"+ - "\u0000\u00d1\u00d2\u0005\"\u0000\u0000\u00d2\u00d7\u0003\n\u0005\u0006"+ - "\u00d3\u00d4\n\u0004\u0000\u0000\u00d4\u00d5\u00054\u0000\u0000\u00d5"+ - "\u00d7\u0003\n\u0005\u0005\u00d6\u00d0\u0001\u0000\u0000\u0000\u00d6\u00d3"+ - "\u0001\u0000\u0000\u0000\u00d7\u00da\u0001\u0000\u0000\u0000\u00d8\u00d6"+ - "\u0001\u0000\u0000\u0000\u00d8\u00d9\u0001\u0000\u0000\u0000\u00d9\u000b"+ - "\u0001\u0000\u0000\u0000\u00da\u00d8\u0001\u0000\u0000\u0000\u00db\u00dd"+ - "\u0003\u0010\b\u0000\u00dc\u00de\u00051\u0000\u0000\u00dd\u00dc\u0001"+ - "\u0000\u0000\u0000\u00dd\u00de\u0001\u0000\u0000\u0000\u00de\u00df\u0001"+ - "\u0000\u0000\u0000\u00df\u00e0\u0005/\u0000\u0000\u00e0\u00e1\u0003j5"+ - "\u0000\u00e1\u00ea\u0001\u0000\u0000\u0000\u00e2\u00e4\u0003\u0010\b\u0000"+ - "\u00e3\u00e5\u00051\u0000\u0000\u00e4\u00e3\u0001\u0000\u0000\u0000\u00e4"+ - "\u00e5\u0001\u0000\u0000\u0000\u00e5\u00e6\u0001\u0000\u0000\u0000\u00e6"+ - "\u00e7\u00056\u0000\u0000\u00e7\u00e8\u0003j5\u0000\u00e8\u00ea\u0001"+ - "\u0000\u0000\u0000\u00e9\u00db\u0001\u0000\u0000\u0000\u00e9\u00e2\u0001"+ - "\u0000\u0000\u0000\u00ea\r\u0001\u0000\u0000\u0000\u00eb\u00ee\u0003:"+ - "\u001d\u0000\u00ec\u00ed\u0005%\u0000\u0000\u00ed\u00ef\u0003\u001e\u000f"+ - "\u0000\u00ee\u00ec\u0001\u0000\u0000\u0000\u00ee\u00ef\u0001\u0000\u0000"+ - "\u0000\u00ef\u00f0\u0001\u0000\u0000\u0000\u00f0\u00f1\u0005&\u0000\u0000"+ - 
"\u00f1\u00f2\u0003D\"\u0000\u00f2\u000f\u0001\u0000\u0000\u0000\u00f3"+ - "\u00f9\u0003\u0012\t\u0000\u00f4\u00f5\u0003\u0012\t\u0000\u00f5\u00f6"+ - "\u0003l6\u0000\u00f6\u00f7\u0003\u0012\t\u0000\u00f7\u00f9\u0001\u0000"+ - "\u0000\u0000\u00f8\u00f3\u0001\u0000\u0000\u0000\u00f8\u00f4\u0001\u0000"+ - "\u0000\u0000\u00f9\u0011\u0001\u0000\u0000\u0000\u00fa\u00fb\u0006\t\uffff"+ - "\uffff\u0000\u00fb\u00ff\u0003\u0014\n\u0000\u00fc\u00fd\u0007\u0000\u0000"+ - "\u0000\u00fd\u00ff\u0003\u0012\t\u0003\u00fe\u00fa\u0001\u0000\u0000\u0000"+ - "\u00fe\u00fc\u0001\u0000\u0000\u0000\u00ff\u0108\u0001\u0000\u0000\u0000"+ - "\u0100\u0101\n\u0002\u0000\u0000\u0101\u0102\u0007\u0001\u0000\u0000\u0102"+ - "\u0107\u0003\u0012\t\u0003\u0103\u0104\n\u0001\u0000\u0000\u0104\u0105"+ - "\u0007\u0000\u0000\u0000\u0105\u0107\u0003\u0012\t\u0002\u0106\u0100\u0001"+ - "\u0000\u0000\u0000\u0106\u0103\u0001\u0000\u0000\u0000\u0107\u010a\u0001"+ - "\u0000\u0000\u0000\u0108\u0106\u0001\u0000\u0000\u0000\u0108\u0109\u0001"+ - "\u0000\u0000\u0000\u0109\u0013\u0001\u0000\u0000\u0000\u010a\u0108\u0001"+ - "\u0000\u0000\u0000\u010b\u010c\u0006\n\uffff\uffff\u0000\u010c\u0114\u0003"+ - "D\"\u0000\u010d\u0114\u0003:\u001d\u0000\u010e\u0114\u0003\u0016\u000b"+ - "\u0000\u010f\u0110\u00050\u0000\u0000\u0110\u0111\u0003\n\u0005\u0000"+ - "\u0111\u0112\u00057\u0000\u0000\u0112\u0114\u0001\u0000\u0000\u0000\u0113"+ - "\u010b\u0001\u0000\u0000\u0000\u0113\u010d\u0001\u0000\u0000\u0000\u0113"+ - "\u010e\u0001\u0000\u0000\u0000\u0113\u010f\u0001\u0000\u0000\u0000\u0114"+ - "\u011a\u0001\u0000\u0000\u0000\u0115\u0116\n\u0001\u0000\u0000\u0116\u0117"+ - "\u0005%\u0000\u0000\u0117\u0119\u0003\u001e\u000f\u0000\u0118\u0115\u0001"+ - "\u0000\u0000\u0000\u0119\u011c\u0001\u0000\u0000\u0000\u011a\u0118\u0001"+ - "\u0000\u0000\u0000\u011a\u011b\u0001\u0000\u0000\u0000\u011b\u0015\u0001"+ - "\u0000\u0000\u0000\u011c\u011a\u0001\u0000\u0000\u0000\u011d\u011e\u0003"+ - "\u0018\f\u0000\u011e\u012c\u00050\u0000\u0000\u011f\u012d\u0005B\u0000"+ - "\u0000\u0120\u0125\u0003\n\u0005\u0000\u0121\u0122\u0005\'\u0000\u0000"+ - "\u0122\u0124\u0003\n\u0005\u0000\u0123\u0121\u0001\u0000\u0000\u0000\u0124"+ - "\u0127\u0001\u0000\u0000\u0000\u0125\u0123\u0001\u0000\u0000\u0000\u0125"+ - "\u0126\u0001\u0000\u0000\u0000\u0126\u012a\u0001\u0000\u0000\u0000\u0127"+ - "\u0125\u0001\u0000\u0000\u0000\u0128\u0129\u0005\'\u0000\u0000\u0129\u012b"+ - "\u0003\u001a\r\u0000\u012a\u0128\u0001\u0000\u0000\u0000\u012a\u012b\u0001"+ - "\u0000\u0000\u0000\u012b\u012d\u0001\u0000\u0000\u0000\u012c\u011f\u0001"+ - "\u0000\u0000\u0000\u012c\u0120\u0001\u0000\u0000\u0000\u012c\u012d\u0001"+ - "\u0000\u0000\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e\u012f\u0005"+ - "7\u0000\u0000\u012f\u0017\u0001\u0000\u0000\u0000\u0130\u0131\u0003H$"+ - "\u0000\u0131\u0019\u0001\u0000\u0000\u0000\u0132\u0133\u0005E\u0000\u0000"+ - "\u0133\u0138\u0003\u001c\u000e\u0000\u0134\u0135\u0005\'\u0000\u0000\u0135"+ - "\u0137\u0003\u001c\u000e\u0000\u0136\u0134\u0001\u0000\u0000\u0000\u0137"+ - "\u013a\u0001\u0000\u0000\u0000\u0138\u0136\u0001\u0000\u0000\u0000\u0138"+ - "\u0139\u0001\u0000\u0000\u0000\u0139\u013b\u0001\u0000\u0000\u0000\u013a"+ - "\u0138\u0001\u0000\u0000\u0000\u013b\u013c\u0005F\u0000\u0000\u013c\u001b"+ - "\u0001\u0000\u0000\u0000\u013d\u013e\u0003j5\u0000\u013e\u013f\u0005&"+ - "\u0000\u0000\u013f\u0140\u0003D\"\u0000\u0140\u001d\u0001\u0000\u0000"+ - "\u0000\u0141\u0142\u0003@ \u0000\u0142\u001f\u0001\u0000\u0000\u0000\u0143"+ - 
"\u0144\u0005\f\u0000\u0000\u0144\u0145\u0003\"\u0011\u0000\u0145!\u0001"+ - "\u0000\u0000\u0000\u0146\u014b\u0003$\u0012\u0000\u0147\u0148\u0005\'"+ - "\u0000\u0000\u0148\u014a\u0003$\u0012\u0000\u0149\u0147\u0001\u0000\u0000"+ - "\u0000\u014a\u014d\u0001\u0000\u0000\u0000\u014b\u0149\u0001\u0000\u0000"+ - "\u0000\u014b\u014c\u0001\u0000\u0000\u0000\u014c#\u0001\u0000\u0000\u0000"+ - "\u014d\u014b\u0001\u0000\u0000\u0000\u014e\u014f\u0003:\u001d\u0000\u014f"+ - "\u0150\u0005$\u0000\u0000\u0150\u0152\u0001\u0000\u0000\u0000\u0151\u014e"+ - "\u0001\u0000\u0000\u0000\u0151\u0152\u0001\u0000\u0000\u0000\u0152\u0153"+ - "\u0001\u0000\u0000\u0000\u0153\u0154\u0003\n\u0005\u0000\u0154%\u0001"+ - "\u0000\u0000\u0000\u0155\u0156\u0005\u0006\u0000\u0000\u0156\u015b\u0003"+ - "(\u0014\u0000\u0157\u0158\u0005\'\u0000\u0000\u0158\u015a\u0003(\u0014"+ - "\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u015a\u015d\u0001\u0000\u0000"+ - "\u0000\u015b\u0159\u0001\u0000\u0000\u0000\u015b\u015c\u0001\u0000\u0000"+ - "\u0000\u015c\u015f\u0001\u0000\u0000\u0000\u015d\u015b\u0001\u0000\u0000"+ - "\u0000\u015e\u0160\u0003.\u0017\u0000\u015f\u015e\u0001\u0000\u0000\u0000"+ - "\u015f\u0160\u0001\u0000\u0000\u0000\u0160\'\u0001\u0000\u0000\u0000\u0161"+ - "\u0162\u0003*\u0015\u0000\u0162\u0163\u0005&\u0000\u0000\u0163\u0165\u0001"+ - "\u0000\u0000\u0000\u0164\u0161\u0001\u0000\u0000\u0000\u0164\u0165\u0001"+ - "\u0000\u0000\u0000\u0165\u0166\u0001\u0000\u0000\u0000\u0166\u0167\u0003"+ - ",\u0016\u0000\u0167)\u0001\u0000\u0000\u0000\u0168\u0169\u0005S\u0000"+ - "\u0000\u0169+\u0001\u0000\u0000\u0000\u016a\u016b\u0007\u0002\u0000\u0000"+ - "\u016b-\u0001\u0000\u0000\u0000\u016c\u016d\u0005R\u0000\u0000\u016d\u0172"+ - "\u0005S\u0000\u0000\u016e\u016f\u0005\'\u0000\u0000\u016f\u0171\u0005"+ - "S\u0000\u0000\u0170\u016e\u0001\u0000\u0000\u0000\u0171\u0174\u0001\u0000"+ - "\u0000\u0000\u0172\u0170\u0001\u0000\u0000\u0000\u0172\u0173\u0001\u0000"+ - "\u0000\u0000\u0173/\u0001\u0000\u0000\u0000\u0174\u0172\u0001\u0000\u0000"+ - "\u0000\u0175\u0176\u0005\u0013\u0000\u0000\u0176\u017b\u0003(\u0014\u0000"+ - "\u0177\u0178\u0005\'\u0000\u0000\u0178\u017a\u0003(\u0014\u0000\u0179"+ - "\u0177\u0001\u0000\u0000\u0000\u017a\u017d\u0001\u0000\u0000\u0000\u017b"+ - "\u0179\u0001\u0000\u0000\u0000\u017b\u017c\u0001\u0000\u0000\u0000\u017c"+ - "\u017f\u0001\u0000\u0000\u0000\u017d\u017b\u0001\u0000\u0000\u0000\u017e"+ - "\u0180\u00036\u001b\u0000\u017f\u017e\u0001\u0000\u0000\u0000\u017f\u0180"+ - "\u0001\u0000\u0000\u0000\u0180\u0183\u0001\u0000\u0000\u0000\u0181\u0182"+ - "\u0005!\u0000\u0000\u0182\u0184\u0003\"\u0011\u0000\u0183\u0181\u0001"+ - "\u0000\u0000\u0000\u0183\u0184\u0001\u0000\u0000\u0000\u01841\u0001\u0000"+ - "\u0000\u0000\u0185\u0186\u0005\u0004\u0000\u0000\u0186\u0187\u0003\"\u0011"+ - "\u0000\u01873\u0001\u0000\u0000\u0000\u0188\u018a\u0005\u000f\u0000\u0000"+ - "\u0189\u018b\u00036\u001b\u0000\u018a\u0189\u0001\u0000\u0000\u0000\u018a"+ - "\u018b\u0001\u0000\u0000\u0000\u018b\u018e\u0001\u0000\u0000\u0000\u018c"+ - "\u018d\u0005!\u0000\u0000\u018d\u018f\u0003\"\u0011\u0000\u018e\u018c"+ - "\u0001\u0000\u0000\u0000\u018e\u018f\u0001\u0000\u0000\u0000\u018f5\u0001"+ - "\u0000\u0000\u0000\u0190\u0195\u00038\u001c\u0000\u0191\u0192\u0005\'"+ - "\u0000\u0000\u0192\u0194\u00038\u001c\u0000\u0193\u0191\u0001\u0000\u0000"+ - "\u0000\u0194\u0197\u0001\u0000\u0000\u0000\u0195\u0193\u0001\u0000\u0000"+ - "\u0000\u0195\u0196\u0001\u0000\u0000\u0000\u01967\u0001\u0000\u0000\u0000"+ - 
"\u0197\u0195\u0001\u0000\u0000\u0000\u0198\u019b\u0003$\u0012\u0000\u0199"+ - "\u019a\u0005\u0010\u0000\u0000\u019a\u019c\u0003\n\u0005\u0000\u019b\u0199"+ - "\u0001\u0000\u0000\u0000\u019b\u019c\u0001\u0000\u0000\u0000\u019c9\u0001"+ - "\u0000\u0000\u0000\u019d\u01a2\u0003H$\u0000\u019e\u019f\u0005)\u0000"+ - "\u0000\u019f\u01a1\u0003H$\u0000\u01a0\u019e\u0001\u0000\u0000\u0000\u01a1"+ - "\u01a4\u0001\u0000\u0000\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a2"+ - "\u01a3\u0001\u0000\u0000\u0000\u01a3;\u0001\u0000\u0000\u0000\u01a4\u01a2"+ - "\u0001\u0000\u0000\u0000\u01a5\u01aa\u0003B!\u0000\u01a6\u01a7\u0005)"+ - "\u0000\u0000\u01a7\u01a9\u0003B!\u0000\u01a8\u01a6\u0001\u0000\u0000\u0000"+ - "\u01a9\u01ac\u0001\u0000\u0000\u0000\u01aa\u01a8\u0001\u0000\u0000\u0000"+ - "\u01aa\u01ab\u0001\u0000\u0000\u0000\u01ab=\u0001\u0000\u0000\u0000\u01ac"+ - "\u01aa\u0001\u0000\u0000\u0000\u01ad\u01b2\u0003<\u001e\u0000\u01ae\u01af"+ - "\u0005\'\u0000\u0000\u01af\u01b1\u0003<\u001e\u0000\u01b0\u01ae\u0001"+ - "\u0000\u0000\u0000\u01b1\u01b4\u0001\u0000\u0000\u0000\u01b2\u01b0\u0001"+ - "\u0000\u0000\u0000\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3?\u0001\u0000"+ - "\u0000\u0000\u01b4\u01b2\u0001\u0000\u0000\u0000\u01b5\u01b6\u0007\u0003"+ - "\u0000\u0000\u01b6A\u0001\u0000\u0000\u0000\u01b7\u01bb\u0005W\u0000\u0000"+ - "\u01b8\u01b9\u0004!\n\u0000\u01b9\u01bb\u0003F#\u0000\u01ba\u01b7\u0001"+ - "\u0000\u0000\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bbC\u0001\u0000"+ - "\u0000\u0000\u01bc\u01e7\u00052\u0000\u0000\u01bd\u01be\u0003h4\u0000"+ - "\u01be\u01bf\u0005J\u0000\u0000\u01bf\u01e7\u0001\u0000\u0000\u0000\u01c0"+ - "\u01e7\u0003f3\u0000\u01c1\u01e7\u0003h4\u0000\u01c2\u01e7\u0003b1\u0000"+ - "\u01c3\u01e7\u0003F#\u0000\u01c4\u01e7\u0003j5\u0000\u01c5\u01c6\u0005"+ - "H\u0000\u0000\u01c6\u01cb\u0003d2\u0000\u01c7\u01c8\u0005\'\u0000\u0000"+ - "\u01c8\u01ca\u0003d2\u0000\u01c9\u01c7\u0001\u0000\u0000\u0000\u01ca\u01cd"+ - "\u0001\u0000\u0000\u0000\u01cb\u01c9\u0001\u0000\u0000\u0000\u01cb\u01cc"+ - "\u0001\u0000\u0000\u0000\u01cc\u01ce\u0001\u0000\u0000\u0000\u01cd\u01cb"+ - "\u0001\u0000\u0000\u0000\u01ce\u01cf\u0005I\u0000\u0000\u01cf\u01e7\u0001"+ - "\u0000\u0000\u0000\u01d0\u01d1\u0005H\u0000\u0000\u01d1\u01d6\u0003b1"+ - "\u0000\u01d2\u01d3\u0005\'\u0000\u0000\u01d3\u01d5\u0003b1\u0000\u01d4"+ - "\u01d2\u0001\u0000\u0000\u0000\u01d5\u01d8\u0001\u0000\u0000\u0000\u01d6"+ - "\u01d4\u0001\u0000\u0000\u0000\u01d6\u01d7\u0001\u0000\u0000\u0000\u01d7"+ - "\u01d9\u0001\u0000\u0000\u0000\u01d8\u01d6\u0001\u0000\u0000\u0000\u01d9"+ - "\u01da\u0005I\u0000\u0000\u01da\u01e7\u0001\u0000\u0000\u0000\u01db\u01dc"+ - "\u0005H\u0000\u0000\u01dc\u01e1\u0003j5\u0000\u01dd\u01de\u0005\'\u0000"+ - "\u0000\u01de\u01e0\u0003j5\u0000\u01df\u01dd\u0001\u0000\u0000\u0000\u01e0"+ - "\u01e3\u0001\u0000\u0000\u0000\u01e1\u01df\u0001\u0000\u0000\u0000\u01e1"+ - "\u01e2\u0001\u0000\u0000\u0000\u01e2\u01e4\u0001\u0000\u0000\u0000\u01e3"+ - "\u01e1\u0001\u0000\u0000\u0000\u01e4\u01e5\u0005I\u0000\u0000\u01e5\u01e7"+ - "\u0001\u0000\u0000\u0000\u01e6\u01bc\u0001\u0000\u0000\u0000\u01e6\u01bd"+ - "\u0001\u0000\u0000\u0000\u01e6\u01c0\u0001\u0000\u0000\u0000\u01e6\u01c1"+ - "\u0001\u0000\u0000\u0000\u01e6\u01c2\u0001\u0000\u0000\u0000\u01e6\u01c3"+ - "\u0001\u0000\u0000\u0000\u01e6\u01c4\u0001\u0000\u0000\u0000\u01e6\u01c5"+ - "\u0001\u0000\u0000\u0000\u01e6\u01d0\u0001\u0000\u0000\u0000\u01e6\u01db"+ - "\u0001\u0000\u0000\u0000\u01e7E\u0001\u0000\u0000\u0000\u01e8\u01eb\u0005"+ - 
"5\u0000\u0000\u01e9\u01eb\u0005G\u0000\u0000\u01ea\u01e8\u0001\u0000\u0000"+ - "\u0000\u01ea\u01e9\u0001\u0000\u0000\u0000\u01ebG\u0001\u0000\u0000\u0000"+ - "\u01ec\u01f0\u0003@ \u0000\u01ed\u01ee\u0004$\u000b\u0000\u01ee\u01f0"+ - "\u0003F#\u0000\u01ef\u01ec\u0001\u0000\u0000\u0000\u01ef\u01ed\u0001\u0000"+ - "\u0000\u0000\u01f0I\u0001\u0000\u0000\u0000\u01f1\u01f2\u0005\t\u0000"+ - "\u0000\u01f2\u01f3\u0005\u001f\u0000\u0000\u01f3K\u0001\u0000\u0000\u0000"+ - "\u01f4\u01f5\u0005\u000e\u0000\u0000\u01f5\u01fa\u0003N\'\u0000\u01f6"+ - "\u01f7\u0005\'\u0000\u0000\u01f7\u01f9\u0003N\'\u0000\u01f8\u01f6\u0001"+ - "\u0000\u0000\u0000\u01f9\u01fc\u0001\u0000\u0000\u0000\u01fa\u01f8\u0001"+ - "\u0000\u0000\u0000\u01fa\u01fb\u0001\u0000\u0000\u0000\u01fbM\u0001\u0000"+ - "\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fd\u01ff\u0003\n\u0005"+ - "\u0000\u01fe\u0200\u0007\u0004\u0000\u0000\u01ff\u01fe\u0001\u0000\u0000"+ - "\u0000\u01ff\u0200\u0001\u0000\u0000\u0000\u0200\u0203\u0001\u0000\u0000"+ - "\u0000\u0201\u0202\u00053\u0000\u0000\u0202\u0204\u0007\u0005\u0000\u0000"+ - "\u0203\u0201\u0001\u0000\u0000\u0000\u0203\u0204\u0001\u0000\u0000\u0000"+ - "\u0204O\u0001\u0000\u0000\u0000\u0205\u0206\u0005\b\u0000\u0000\u0206"+ - "\u0207\u0003>\u001f\u0000\u0207Q\u0001\u0000\u0000\u0000\u0208\u0209\u0005"+ - "\u0002\u0000\u0000\u0209\u020a\u0003>\u001f\u0000\u020aS\u0001\u0000\u0000"+ - "\u0000\u020b\u020c\u0005\u000b\u0000\u0000\u020c\u0211\u0003V+\u0000\u020d"+ - "\u020e\u0005\'\u0000\u0000\u020e\u0210\u0003V+\u0000\u020f\u020d\u0001"+ - "\u0000\u0000\u0000\u0210\u0213\u0001\u0000\u0000\u0000\u0211\u020f\u0001"+ - "\u0000\u0000\u0000\u0211\u0212\u0001\u0000\u0000\u0000\u0212U\u0001\u0000"+ - "\u0000\u0000\u0213\u0211\u0001\u0000\u0000\u0000\u0214\u0215\u0003<\u001e"+ - "\u0000\u0215\u0216\u0005[\u0000\u0000\u0216\u0217\u0003<\u001e\u0000\u0217"+ - "W\u0001\u0000\u0000\u0000\u0218\u0219\u0005\u0001\u0000\u0000\u0219\u021a"+ - "\u0003\u0014\n\u0000\u021a\u021c\u0003j5\u0000\u021b\u021d\u0003^/\u0000"+ - "\u021c\u021b\u0001\u0000\u0000\u0000\u021c\u021d\u0001\u0000\u0000\u0000"+ - "\u021dY\u0001\u0000\u0000\u0000\u021e\u021f\u0005\u0007\u0000\u0000\u021f"+ - "\u0220\u0003\u0014\n\u0000\u0220\u0221\u0003j5\u0000\u0221[\u0001\u0000"+ - "\u0000\u0000\u0222\u0223\u0005\n\u0000\u0000\u0223\u0224\u0003:\u001d"+ - "\u0000\u0224]\u0001\u0000\u0000\u0000\u0225\u022a\u0003`0\u0000\u0226"+ - "\u0227\u0005\'\u0000\u0000\u0227\u0229\u0003`0\u0000\u0228\u0226\u0001"+ - "\u0000\u0000\u0000\u0229\u022c\u0001\u0000\u0000\u0000\u022a\u0228\u0001"+ - "\u0000\u0000\u0000\u022a\u022b\u0001\u0000\u0000\u0000\u022b_\u0001\u0000"+ - "\u0000\u0000\u022c\u022a\u0001\u0000\u0000\u0000\u022d\u022e\u0003@ \u0000"+ - "\u022e\u022f\u0005$\u0000\u0000\u022f\u0230\u0003D\"\u0000\u0230a\u0001"+ - "\u0000\u0000\u0000\u0231\u0232\u0007\u0006\u0000\u0000\u0232c\u0001\u0000"+ - "\u0000\u0000\u0233\u0236\u0003f3\u0000\u0234\u0236\u0003h4\u0000\u0235"+ - "\u0233\u0001\u0000\u0000\u0000\u0235\u0234\u0001\u0000\u0000\u0000\u0236"+ - "e\u0001\u0000\u0000\u0000\u0237\u0239\u0007\u0000\u0000\u0000\u0238\u0237"+ - "\u0001\u0000\u0000\u0000\u0238\u0239\u0001\u0000\u0000\u0000\u0239\u023a"+ - "\u0001\u0000\u0000\u0000\u023a\u023b\u0005 \u0000\u0000\u023bg\u0001\u0000"+ - "\u0000\u0000\u023c\u023e\u0007\u0000\u0000\u0000\u023d\u023c\u0001\u0000"+ - "\u0000\u0000\u023d\u023e\u0001\u0000\u0000\u0000\u023e\u023f\u0001\u0000"+ - "\u0000\u0000\u023f\u0240\u0005\u001f\u0000\u0000\u0240i\u0001\u0000\u0000"+ - 
"\u0000\u0241\u0242\u0005\u001e\u0000\u0000\u0242k\u0001\u0000\u0000\u0000"+ - "\u0243\u0244\u0007\u0007\u0000\u0000\u0244m\u0001\u0000\u0000\u0000\u0245"+ - "\u0246\u0005\u0005\u0000\u0000\u0246\u0247\u0003p8\u0000\u0247o\u0001"+ - "\u0000\u0000\u0000\u0248\u0249\u0005H\u0000\u0000\u0249\u024a\u0003\u0002"+ - "\u0001\u0000\u024a\u024b\u0005I\u0000\u0000\u024bq\u0001\u0000\u0000\u0000"+ - "\u024c\u024d\u0005\r\u0000\u0000\u024d\u024e\u0005k\u0000\u0000\u024e"+ - "s\u0001\u0000\u0000\u0000\u024f\u0250\u0005\u0003\u0000\u0000\u0250\u0253"+ - "\u0005a\u0000\u0000\u0251\u0252\u0005_\u0000\u0000\u0252\u0254\u0003<"+ - "\u001e\u0000\u0253\u0251\u0001\u0000\u0000\u0000\u0253\u0254\u0001\u0000"+ - "\u0000\u0000\u0254\u025e\u0001\u0000\u0000\u0000\u0255\u0256\u0005`\u0000"+ - "\u0000\u0256\u025b\u0003v;\u0000\u0257\u0258\u0005\'\u0000\u0000\u0258"+ - "\u025a\u0003v;\u0000\u0259\u0257\u0001\u0000\u0000\u0000\u025a\u025d\u0001"+ - "\u0000\u0000\u0000\u025b\u0259\u0001\u0000\u0000\u0000\u025b\u025c\u0001"+ - "\u0000\u0000\u0000\u025c\u025f\u0001\u0000\u0000\u0000\u025d\u025b\u0001"+ - "\u0000\u0000\u0000\u025e\u0255\u0001\u0000\u0000\u0000\u025e\u025f\u0001"+ - "\u0000\u0000\u0000\u025fu\u0001\u0000\u0000\u0000\u0260\u0261\u0003<\u001e"+ - "\u0000\u0261\u0262\u0005$\u0000\u0000\u0262\u0264\u0001\u0000\u0000\u0000"+ - "\u0263\u0260\u0001\u0000\u0000\u0000\u0263\u0264\u0001\u0000\u0000\u0000"+ - "\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0266\u0003<\u001e\u0000\u0266"+ - "w\u0001\u0000\u0000\u0000\u0267\u0268\u0005\u0012\u0000\u0000\u0268\u0269"+ - "\u0003(\u0014\u0000\u0269\u026a\u0005_\u0000\u0000\u026a\u026b\u0003>"+ - "\u001f\u0000\u026by\u0001\u0000\u0000\u0000\u026c\u026d\u0005\u0011\u0000"+ - "\u0000\u026d\u0270\u00036\u001b\u0000\u026e\u026f\u0005!\u0000\u0000\u026f"+ - "\u0271\u0003\"\u0011\u0000\u0270\u026e\u0001\u0000\u0000\u0000\u0270\u0271"+ - "\u0001\u0000\u0000\u0000\u0271{\u0001\u0000\u0000\u0000\u0272\u0274\u0007"+ - "\b\u0000\u0000\u0273\u0272\u0001\u0000\u0000\u0000\u0273\u0274\u0001\u0000"+ - "\u0000\u0000\u0274\u0275\u0001\u0000\u0000\u0000\u0275\u0276\u0005\u0014"+ - "\u0000\u0000\u0276\u0277\u0003~?\u0000\u0277\u0278\u0003\u0080@\u0000"+ - "\u0278}\u0001\u0000\u0000\u0000\u0279\u027c\u0003(\u0014\u0000\u027a\u027b"+ - "\u0005[\u0000\u0000\u027b\u027d\u0003@ \u0000\u027c\u027a\u0001\u0000"+ - "\u0000\u0000\u027c\u027d\u0001\u0000\u0000\u0000\u027d\u007f\u0001\u0000"+ - "\u0000\u0000\u027e\u027f\u0005_\u0000\u0000\u027f\u0284\u0003\u0082A\u0000"+ - "\u0280\u0281\u0005\'\u0000\u0000\u0281\u0283\u0003\u0082A\u0000\u0282"+ - "\u0280\u0001\u0000\u0000\u0000\u0283\u0286\u0001\u0000\u0000\u0000\u0284"+ - "\u0282\u0001\u0000\u0000\u0000\u0284\u0285\u0001\u0000\u0000\u0000\u0285"+ - "\u0081\u0001\u0000\u0000\u0000\u0286\u0284\u0001\u0000\u0000\u0000\u0287"+ - "\u0288\u0003\u0010\b\u0000\u0288\u0083\u0001\u0000\u0000\u0000?\u008f"+ - "\u0098\u00ac\u00b8\u00c1\u00c9\u00ce\u00d6\u00d8\u00dd\u00e4\u00e9\u00ee"+ - "\u00f8\u00fe\u0106\u0108\u0113\u011a\u0125\u012a\u012c\u0138\u014b\u0151"+ - "\u015b\u015f\u0164\u0172\u017b\u017f\u0183\u018a\u018e\u0195\u019b\u01a2"+ - "\u01aa\u01b2\u01ba\u01cb\u01d6\u01e1\u01e6\u01ea\u01ef\u01fa\u01ff\u0203"+ - "\u0211\u021c\u022a\u0235\u0238\u023d\u0253\u025b\u025e\u0263\u0270\u0273"+ - "\u027c\u0284"; + "9\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0253\b:\u0001:\u0001:\u0001"+ + ":\u0001:\u0005:\u0259\b:\n:\f:\u025c\t:\u0003:\u025e\b:\u0001;\u0001;"+ + "\u0001;\u0003;\u0263\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001"+ + 
"<\u0001=\u0001=\u0001=\u0001=\u0003=\u0270\b=\u0001>\u0001>\u0001>\u0001"+ + ">\u0001>\u0001?\u0001?\u0001@\u0001@\u0001@\u0001@\u0005@\u027d\b@\n@"+ + "\f@\u0280\t@\u0001A\u0001A\u0001A\u0000\u0004\u0002\n\u0012\u0014B\u0000"+ + "\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c"+ + "\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000"+ + "\t\u0001\u0000?@\u0001\u0000AC\u0002\u0000\u001d\u001dRR\u0001\u0000I"+ + "J\u0002\u0000\"\"\'\'\u0002\u0000**--\u0002\u0000))77\u0002\u000088:>"+ + "\u0002\u0000\u0011\u0011\u0016\u0017\u029d\u0000\u0084\u0001\u0000\u0000"+ + "\u0000\u0002\u0087\u0001\u0000\u0000\u0000\u0004\u0098\u0001\u0000\u0000"+ + "\u0000\u0006\u00ab\u0001\u0000\u0000\u0000\b\u00ad\u0001\u0000\u0000\u0000"+ + "\n\u00cd\u0001\u0000\u0000\u0000\f\u00e8\u0001\u0000\u0000\u0000\u000e"+ + "\u00ea\u0001\u0000\u0000\u0000\u0010\u00f7\u0001\u0000\u0000\u0000\u0012"+ + "\u00fd\u0001\u0000\u0000\u0000\u0014\u0112\u0001\u0000\u0000\u0000\u0016"+ + "\u011c\u0001\u0000\u0000\u0000\u0018\u012f\u0001\u0000\u0000\u0000\u001a"+ + "\u0131\u0001\u0000\u0000\u0000\u001c\u013c\u0001\u0000\u0000\u0000\u001e"+ + "\u0140\u0001\u0000\u0000\u0000 \u0142\u0001\u0000\u0000\u0000\"\u0145"+ + "\u0001\u0000\u0000\u0000$\u0150\u0001\u0000\u0000\u0000&\u0154\u0001\u0000"+ + "\u0000\u0000(\u0163\u0001\u0000\u0000\u0000*\u0167\u0001\u0000\u0000\u0000"+ + ",\u0169\u0001\u0000\u0000\u0000.\u016b\u0001\u0000\u0000\u00000\u0174"+ + "\u0001\u0000\u0000\u00002\u0184\u0001\u0000\u0000\u00004\u0187\u0001\u0000"+ + "\u0000\u00006\u018f\u0001\u0000\u0000\u00008\u0197\u0001\u0000\u0000\u0000"+ + ":\u019c\u0001\u0000\u0000\u0000<\u01a4\u0001\u0000\u0000\u0000>\u01ac"+ + "\u0001\u0000\u0000\u0000@\u01b4\u0001\u0000\u0000\u0000B\u01b9\u0001\u0000"+ + "\u0000\u0000D\u01e5\u0001\u0000\u0000\u0000F\u01e9\u0001\u0000\u0000\u0000"+ + "H\u01ee\u0001\u0000\u0000\u0000J\u01f0\u0001\u0000\u0000\u0000L\u01f3"+ + "\u0001\u0000\u0000\u0000N\u01fc\u0001\u0000\u0000\u0000P\u0204\u0001\u0000"+ + "\u0000\u0000R\u0207\u0001\u0000\u0000\u0000T\u020a\u0001\u0000\u0000\u0000"+ + "V\u0213\u0001\u0000\u0000\u0000X\u0217\u0001\u0000\u0000\u0000Z\u021d"+ + "\u0001\u0000\u0000\u0000\\\u0221\u0001\u0000\u0000\u0000^\u0224\u0001"+ + "\u0000\u0000\u0000`\u022c\u0001\u0000\u0000\u0000b\u0230\u0001\u0000\u0000"+ + "\u0000d\u0234\u0001\u0000\u0000\u0000f\u0237\u0001\u0000\u0000\u0000h"+ + "\u023c\u0001\u0000\u0000\u0000j\u0240\u0001\u0000\u0000\u0000l\u0242\u0001"+ + "\u0000\u0000\u0000n\u0244\u0001\u0000\u0000\u0000p\u0247\u0001\u0000\u0000"+ + "\u0000r\u024b\u0001\u0000\u0000\u0000t\u024e\u0001\u0000\u0000\u0000v"+ + "\u0262\u0001\u0000\u0000\u0000x\u0266\u0001\u0000\u0000\u0000z\u026b\u0001"+ + "\u0000\u0000\u0000|\u0271\u0001\u0000\u0000\u0000~\u0276\u0001\u0000\u0000"+ + "\u0000\u0080\u0278\u0001\u0000\u0000\u0000\u0082\u0281\u0001\u0000\u0000"+ + "\u0000\u0084\u0085\u0003\u0002\u0001\u0000\u0085\u0086\u0005\u0000\u0000"+ + "\u0001\u0086\u0001\u0001\u0000\u0000\u0000\u0087\u0088\u0006\u0001\uffff"+ + "\uffff\u0000\u0088\u0089\u0003\u0004\u0002\u0000\u0089\u008f\u0001\u0000"+ + "\u0000\u0000\u008a\u008b\n\u0001\u0000\u0000\u008b\u008c\u0005\u001c\u0000"+ + "\u0000\u008c\u008e\u0003\u0006\u0003\u0000\u008d\u008a\u0001\u0000\u0000"+ + "\u0000\u008e\u0091\u0001\u0000\u0000\u0000\u008f\u008d\u0001\u0000\u0000"+ + "\u0000\u008f\u0090\u0001\u0000\u0000\u0000\u0090\u0003\u0001\u0000\u0000"+ + "\u0000\u0091\u008f\u0001\u0000\u0000\u0000\u0092\u0099\u0003n7\u0000\u0093"+ + "\u0099\u0003&\u0013\u0000\u0094\u0099\u0003 
\u0010\u0000\u0095\u0099\u0003"+ + "r9\u0000\u0096\u0097\u0004\u0002\u0001\u0000\u0097\u0099\u00030\u0018"+ + "\u0000\u0098\u0092\u0001\u0000\u0000\u0000\u0098\u0093\u0001\u0000\u0000"+ + "\u0000\u0098\u0094\u0001\u0000\u0000\u0000\u0098\u0095\u0001\u0000\u0000"+ + "\u0000\u0098\u0096\u0001\u0000\u0000\u0000\u0099\u0005\u0001\u0000\u0000"+ + "\u0000\u009a\u00ac\u00032\u0019\u0000\u009b\u00ac\u0003\b\u0004\u0000"+ + "\u009c\u00ac\u0003P(\u0000\u009d\u00ac\u0003J%\u0000\u009e\u00ac\u0003"+ + "4\u001a\u0000\u009f\u00ac\u0003L&\u0000\u00a0\u00ac\u0003R)\u0000\u00a1"+ + "\u00ac\u0003T*\u0000\u00a2\u00ac\u0003X,\u0000\u00a3\u00ac\u0003Z-\u0000"+ + "\u00a4\u00ac\u0003t:\u0000\u00a5\u00ac\u0003\\.\u0000\u00a6\u00ac\u0003"+ + "|>\u0000\u00a7\u00a8\u0004\u0003\u0002\u0000\u00a8\u00ac\u0003z=\u0000"+ + "\u00a9\u00aa\u0004\u0003\u0003\u0000\u00aa\u00ac\u0003x<\u0000\u00ab\u009a"+ + "\u0001\u0000\u0000\u0000\u00ab\u009b\u0001\u0000\u0000\u0000\u00ab\u009c"+ + "\u0001\u0000\u0000\u0000\u00ab\u009d\u0001\u0000\u0000\u0000\u00ab\u009e"+ + "\u0001\u0000\u0000\u0000\u00ab\u009f\u0001\u0000\u0000\u0000\u00ab\u00a0"+ + "\u0001\u0000\u0000\u0000\u00ab\u00a1\u0001\u0000\u0000\u0000\u00ab\u00a2"+ + "\u0001\u0000\u0000\u0000\u00ab\u00a3\u0001\u0000\u0000\u0000\u00ab\u00a4"+ + "\u0001\u0000\u0000\u0000\u00ab\u00a5\u0001\u0000\u0000\u0000\u00ab\u00a6"+ + "\u0001\u0000\u0000\u0000\u00ab\u00a7\u0001\u0000\u0000\u0000\u00ab\u00a9"+ + "\u0001\u0000\u0000\u0000\u00ac\u0007\u0001\u0000\u0000\u0000\u00ad\u00ae"+ + "\u0005\u0010\u0000\u0000\u00ae\u00af\u0003\n\u0005\u0000\u00af\t\u0001"+ + "\u0000\u0000\u0000\u00b0\u00b1\u0006\u0005\uffff\uffff\u0000\u00b1\u00b2"+ + "\u00050\u0000\u0000\u00b2\u00ce\u0003\n\u0005\b\u00b3\u00ce\u0003\u0010"+ + "\b\u0000\u00b4\u00ce\u0003\f\u0006\u0000\u00b5\u00b7\u0003\u0010\b\u0000"+ + "\u00b6\u00b8\u00050\u0000\u0000\u00b7\u00b6\u0001\u0000\u0000\u0000\u00b7"+ + "\u00b8\u0001\u0000\u0000\u0000\u00b8\u00b9\u0001\u0000\u0000\u0000\u00b9"+ + "\u00ba\u0005+\u0000\u0000\u00ba\u00bb\u0005/\u0000\u0000\u00bb\u00c0\u0003"+ + "\u0010\b\u0000\u00bc\u00bd\u0005&\u0000\u0000\u00bd\u00bf\u0003\u0010"+ + "\b\u0000\u00be\u00bc\u0001\u0000\u0000\u0000\u00bf\u00c2\u0001\u0000\u0000"+ + "\u0000\u00c0\u00be\u0001\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000\u0000"+ + "\u0000\u00c1\u00c3\u0001\u0000\u0000\u0000\u00c2\u00c0\u0001\u0000\u0000"+ + "\u0000\u00c3\u00c4\u00056\u0000\u0000\u00c4\u00ce\u0001\u0000\u0000\u0000"+ + "\u00c5\u00c6\u0003\u0010\b\u0000\u00c6\u00c8\u0005,\u0000\u0000\u00c7"+ + "\u00c9\u00050\u0000\u0000\u00c8\u00c7\u0001\u0000\u0000\u0000\u00c8\u00c9"+ + "\u0001\u0000\u0000\u0000\u00c9\u00ca\u0001\u0000\u0000\u0000\u00ca\u00cb"+ + "\u00051\u0000\u0000\u00cb\u00ce\u0001\u0000\u0000\u0000\u00cc\u00ce\u0003"+ + "\u000e\u0007\u0000\u00cd\u00b0\u0001\u0000\u0000\u0000\u00cd\u00b3\u0001"+ + "\u0000\u0000\u0000\u00cd\u00b4\u0001\u0000\u0000\u0000\u00cd\u00b5\u0001"+ + "\u0000\u0000\u0000\u00cd\u00c5\u0001\u0000\u0000\u0000\u00cd\u00cc\u0001"+ + "\u0000\u0000\u0000\u00ce\u00d7\u0001\u0000\u0000\u0000\u00cf\u00d0\n\u0005"+ + "\u0000\u0000\u00d0\u00d1\u0005!\u0000\u0000\u00d1\u00d6\u0003\n\u0005"+ + "\u0006\u00d2\u00d3\n\u0004\u0000\u0000\u00d3\u00d4\u00053\u0000\u0000"+ + "\u00d4\u00d6\u0003\n\u0005\u0005\u00d5\u00cf\u0001\u0000\u0000\u0000\u00d5"+ + "\u00d2\u0001\u0000\u0000\u0000\u00d6\u00d9\u0001\u0000\u0000\u0000\u00d7"+ + "\u00d5\u0001\u0000\u0000\u0000\u00d7\u00d8\u0001\u0000\u0000\u0000\u00d8"+ + "\u000b\u0001\u0000\u0000\u0000\u00d9\u00d7\u0001\u0000\u0000\u0000\u00da"+ + 
"\u00dc\u0003\u0010\b\u0000\u00db\u00dd\u00050\u0000\u0000\u00dc\u00db"+ + "\u0001\u0000\u0000\u0000\u00dc\u00dd\u0001\u0000\u0000\u0000\u00dd\u00de"+ + "\u0001\u0000\u0000\u0000\u00de\u00df\u0005.\u0000\u0000\u00df\u00e0\u0003"+ + "j5\u0000\u00e0\u00e9\u0001\u0000\u0000\u0000\u00e1\u00e3\u0003\u0010\b"+ + "\u0000\u00e2\u00e4\u00050\u0000\u0000\u00e3\u00e2\u0001\u0000\u0000\u0000"+ + "\u00e3\u00e4\u0001\u0000\u0000\u0000\u00e4\u00e5\u0001\u0000\u0000\u0000"+ + "\u00e5\u00e6\u00055\u0000\u0000\u00e6\u00e7\u0003j5\u0000\u00e7\u00e9"+ + "\u0001\u0000\u0000\u0000\u00e8\u00da\u0001\u0000\u0000\u0000\u00e8\u00e1"+ + "\u0001\u0000\u0000\u0000\u00e9\r\u0001\u0000\u0000\u0000\u00ea\u00ed\u0003"+ + ":\u001d\u0000\u00eb\u00ec\u0005$\u0000\u0000\u00ec\u00ee\u0003\u001e\u000f"+ + "\u0000\u00ed\u00eb\u0001\u0000\u0000\u0000\u00ed\u00ee\u0001\u0000\u0000"+ + "\u0000\u00ee\u00ef\u0001\u0000\u0000\u0000\u00ef\u00f0\u0005%\u0000\u0000"+ + "\u00f0\u00f1\u0003D\"\u0000\u00f1\u000f\u0001\u0000\u0000\u0000\u00f2"+ + "\u00f8\u0003\u0012\t\u0000\u00f3\u00f4\u0003\u0012\t\u0000\u00f4\u00f5"+ + "\u0003l6\u0000\u00f5\u00f6\u0003\u0012\t\u0000\u00f6\u00f8\u0001\u0000"+ + "\u0000\u0000\u00f7\u00f2\u0001\u0000\u0000\u0000\u00f7\u00f3\u0001\u0000"+ + "\u0000\u0000\u00f8\u0011\u0001\u0000\u0000\u0000\u00f9\u00fa\u0006\t\uffff"+ + "\uffff\u0000\u00fa\u00fe\u0003\u0014\n\u0000\u00fb\u00fc\u0007\u0000\u0000"+ + "\u0000\u00fc\u00fe\u0003\u0012\t\u0003\u00fd\u00f9\u0001\u0000\u0000\u0000"+ + "\u00fd\u00fb\u0001\u0000\u0000\u0000\u00fe\u0107\u0001\u0000\u0000\u0000"+ + "\u00ff\u0100\n\u0002\u0000\u0000\u0100\u0101\u0007\u0001\u0000\u0000\u0101"+ + "\u0106\u0003\u0012\t\u0003\u0102\u0103\n\u0001\u0000\u0000\u0103\u0104"+ + "\u0007\u0000\u0000\u0000\u0104\u0106\u0003\u0012\t\u0002\u0105\u00ff\u0001"+ + "\u0000\u0000\u0000\u0105\u0102\u0001\u0000\u0000\u0000\u0106\u0109\u0001"+ + "\u0000\u0000\u0000\u0107\u0105\u0001\u0000\u0000\u0000\u0107\u0108\u0001"+ + "\u0000\u0000\u0000\u0108\u0013\u0001\u0000\u0000\u0000\u0109\u0107\u0001"+ + "\u0000\u0000\u0000\u010a\u010b\u0006\n\uffff\uffff\u0000\u010b\u0113\u0003"+ + "D\"\u0000\u010c\u0113\u0003:\u001d\u0000\u010d\u0113\u0003\u0016\u000b"+ + "\u0000\u010e\u010f\u0005/\u0000\u0000\u010f\u0110\u0003\n\u0005\u0000"+ + "\u0110\u0111\u00056\u0000\u0000\u0111\u0113\u0001\u0000\u0000\u0000\u0112"+ + "\u010a\u0001\u0000\u0000\u0000\u0112\u010c\u0001\u0000\u0000\u0000\u0112"+ + "\u010d\u0001\u0000\u0000\u0000\u0112\u010e\u0001\u0000\u0000\u0000\u0113"+ + "\u0119\u0001\u0000\u0000\u0000\u0114\u0115\n\u0001\u0000\u0000\u0115\u0116"+ + "\u0005$\u0000\u0000\u0116\u0118\u0003\u001e\u000f\u0000\u0117\u0114\u0001"+ + "\u0000\u0000\u0000\u0118\u011b\u0001\u0000\u0000\u0000\u0119\u0117\u0001"+ + "\u0000\u0000\u0000\u0119\u011a\u0001\u0000\u0000\u0000\u011a\u0015\u0001"+ + "\u0000\u0000\u0000\u011b\u0119\u0001\u0000\u0000\u0000\u011c\u011d\u0003"+ + "\u0018\f\u0000\u011d\u012b\u0005/\u0000\u0000\u011e\u012c\u0005A\u0000"+ + "\u0000\u011f\u0124\u0003\n\u0005\u0000\u0120\u0121\u0005&\u0000\u0000"+ + "\u0121\u0123\u0003\n\u0005\u0000\u0122\u0120\u0001\u0000\u0000\u0000\u0123"+ + "\u0126\u0001\u0000\u0000\u0000\u0124\u0122\u0001\u0000\u0000\u0000\u0124"+ + "\u0125\u0001\u0000\u0000\u0000\u0125\u0129\u0001\u0000\u0000\u0000\u0126"+ + "\u0124\u0001\u0000\u0000\u0000\u0127\u0128\u0005&\u0000\u0000\u0128\u012a"+ + "\u0003\u001a\r\u0000\u0129\u0127\u0001\u0000\u0000\u0000\u0129\u012a\u0001"+ + "\u0000\u0000\u0000\u012a\u012c\u0001\u0000\u0000\u0000\u012b\u011e\u0001"+ + 
"\u0000\u0000\u0000\u012b\u011f\u0001\u0000\u0000\u0000\u012b\u012c\u0001"+ + "\u0000\u0000\u0000\u012c\u012d\u0001\u0000\u0000\u0000\u012d\u012e\u0005"+ + "6\u0000\u0000\u012e\u0017\u0001\u0000\u0000\u0000\u012f\u0130\u0003H$"+ + "\u0000\u0130\u0019\u0001\u0000\u0000\u0000\u0131\u0132\u0005D\u0000\u0000"+ + "\u0132\u0137\u0003\u001c\u000e\u0000\u0133\u0134\u0005&\u0000\u0000\u0134"+ + "\u0136\u0003\u001c\u000e\u0000\u0135\u0133\u0001\u0000\u0000\u0000\u0136"+ + "\u0139\u0001\u0000\u0000\u0000\u0137\u0135\u0001\u0000\u0000\u0000\u0137"+ + "\u0138\u0001\u0000\u0000\u0000\u0138\u013a\u0001\u0000\u0000\u0000\u0139"+ + "\u0137\u0001\u0000\u0000\u0000\u013a\u013b\u0005E\u0000\u0000\u013b\u001b"+ + "\u0001\u0000\u0000\u0000\u013c\u013d\u0003j5\u0000\u013d\u013e\u0005%"+ + "\u0000\u0000\u013e\u013f\u0003D\"\u0000\u013f\u001d\u0001\u0000\u0000"+ + "\u0000\u0140\u0141\u0003@ \u0000\u0141\u001f\u0001\u0000\u0000\u0000\u0142"+ + "\u0143\u0005\f\u0000\u0000\u0143\u0144\u0003\"\u0011\u0000\u0144!\u0001"+ + "\u0000\u0000\u0000\u0145\u014a\u0003$\u0012\u0000\u0146\u0147\u0005&\u0000"+ + "\u0000\u0147\u0149\u0003$\u0012\u0000\u0148\u0146\u0001\u0000\u0000\u0000"+ + "\u0149\u014c\u0001\u0000\u0000\u0000\u014a\u0148\u0001\u0000\u0000\u0000"+ + "\u014a\u014b\u0001\u0000\u0000\u0000\u014b#\u0001\u0000\u0000\u0000\u014c"+ + "\u014a\u0001\u0000\u0000\u0000\u014d\u014e\u0003:\u001d\u0000\u014e\u014f"+ + "\u0005#\u0000\u0000\u014f\u0151\u0001\u0000\u0000\u0000\u0150\u014d\u0001"+ + "\u0000\u0000\u0000\u0150\u0151\u0001\u0000\u0000\u0000\u0151\u0152\u0001"+ + "\u0000\u0000\u0000\u0152\u0153\u0003\n\u0005\u0000\u0153%\u0001\u0000"+ + "\u0000\u0000\u0154\u0155\u0005\u0006\u0000\u0000\u0155\u015a\u0003(\u0014"+ + "\u0000\u0156\u0157\u0005&\u0000\u0000\u0157\u0159\u0003(\u0014\u0000\u0158"+ + "\u0156\u0001\u0000\u0000\u0000\u0159\u015c\u0001\u0000\u0000\u0000\u015a"+ + "\u0158\u0001\u0000\u0000\u0000\u015a\u015b\u0001\u0000\u0000\u0000\u015b"+ + "\u015e\u0001\u0000\u0000\u0000\u015c\u015a\u0001\u0000\u0000\u0000\u015d"+ + "\u015f\u0003.\u0017\u0000\u015e\u015d\u0001\u0000\u0000\u0000\u015e\u015f"+ + "\u0001\u0000\u0000\u0000\u015f\'\u0001\u0000\u0000\u0000\u0160\u0161\u0003"+ + "*\u0015\u0000\u0161\u0162\u0005%\u0000\u0000\u0162\u0164\u0001\u0000\u0000"+ + "\u0000\u0163\u0160\u0001\u0000\u0000\u0000\u0163\u0164\u0001\u0000\u0000"+ + "\u0000\u0164\u0165\u0001\u0000\u0000\u0000\u0165\u0166\u0003,\u0016\u0000"+ + "\u0166)\u0001\u0000\u0000\u0000\u0167\u0168\u0005R\u0000\u0000\u0168+"+ + "\u0001\u0000\u0000\u0000\u0169\u016a\u0007\u0002\u0000\u0000\u016a-\u0001"+ + "\u0000\u0000\u0000\u016b\u016c\u0005Q\u0000\u0000\u016c\u0171\u0005R\u0000"+ + "\u0000\u016d\u016e\u0005&\u0000\u0000\u016e\u0170\u0005R\u0000\u0000\u016f"+ + "\u016d\u0001\u0000\u0000\u0000\u0170\u0173\u0001\u0000\u0000\u0000\u0171"+ + "\u016f\u0001\u0000\u0000\u0000\u0171\u0172\u0001\u0000\u0000\u0000\u0172"+ + "/\u0001\u0000\u0000\u0000\u0173\u0171\u0001\u0000\u0000\u0000\u0174\u0175"+ + "\u0005\u0014\u0000\u0000\u0175\u017a\u0003(\u0014\u0000\u0176\u0177\u0005"+ + "&\u0000\u0000\u0177\u0179\u0003(\u0014\u0000\u0178\u0176\u0001\u0000\u0000"+ + "\u0000\u0179\u017c\u0001\u0000\u0000\u0000\u017a\u0178\u0001\u0000\u0000"+ + "\u0000\u017a\u017b\u0001\u0000\u0000\u0000\u017b\u017e\u0001\u0000\u0000"+ + "\u0000\u017c\u017a\u0001\u0000\u0000\u0000\u017d\u017f\u00036\u001b\u0000"+ + "\u017e\u017d\u0001\u0000\u0000\u0000\u017e\u017f\u0001\u0000\u0000\u0000"+ + "\u017f\u0182\u0001\u0000\u0000\u0000\u0180\u0181\u0005 \u0000\u0000\u0181"+ + 
"\u0183\u0003\"\u0011\u0000\u0182\u0180\u0001\u0000\u0000\u0000\u0182\u0183"+ + "\u0001\u0000\u0000\u0000\u01831\u0001\u0000\u0000\u0000\u0184\u0185\u0005"+ + "\u0004\u0000\u0000\u0185\u0186\u0003\"\u0011\u0000\u01863\u0001\u0000"+ + "\u0000\u0000\u0187\u0189\u0005\u000f\u0000\u0000\u0188\u018a\u00036\u001b"+ + "\u0000\u0189\u0188\u0001\u0000\u0000\u0000\u0189\u018a\u0001\u0000\u0000"+ + "\u0000\u018a\u018d\u0001\u0000\u0000\u0000\u018b\u018c\u0005 \u0000\u0000"+ + "\u018c\u018e\u0003\"\u0011\u0000\u018d\u018b\u0001\u0000\u0000\u0000\u018d"+ + "\u018e\u0001\u0000\u0000\u0000\u018e5\u0001\u0000\u0000\u0000\u018f\u0194"+ + "\u00038\u001c\u0000\u0190\u0191\u0005&\u0000\u0000\u0191\u0193\u00038"+ + "\u001c\u0000\u0192\u0190\u0001\u0000\u0000\u0000\u0193\u0196\u0001\u0000"+ + "\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0194\u0195\u0001\u0000"+ + "\u0000\u0000\u01957\u0001\u0000\u0000\u0000\u0196\u0194\u0001\u0000\u0000"+ + "\u0000\u0197\u019a\u0003$\u0012\u0000\u0198\u0199\u0005\u0010\u0000\u0000"+ + "\u0199\u019b\u0003\n\u0005\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019a"+ + "\u019b\u0001\u0000\u0000\u0000\u019b9\u0001\u0000\u0000\u0000\u019c\u01a1"+ + "\u0003H$\u0000\u019d\u019e\u0005(\u0000\u0000\u019e\u01a0\u0003H$\u0000"+ + "\u019f\u019d\u0001\u0000\u0000\u0000\u01a0\u01a3\u0001\u0000\u0000\u0000"+ + "\u01a1\u019f\u0001\u0000\u0000\u0000\u01a1\u01a2\u0001\u0000\u0000\u0000"+ + "\u01a2;\u0001\u0000\u0000\u0000\u01a3\u01a1\u0001\u0000\u0000\u0000\u01a4"+ + "\u01a9\u0003B!\u0000\u01a5\u01a6\u0005(\u0000\u0000\u01a6\u01a8\u0003"+ + "B!\u0000\u01a7\u01a5\u0001\u0000\u0000\u0000\u01a8\u01ab\u0001\u0000\u0000"+ + "\u0000\u01a9\u01a7\u0001\u0000\u0000\u0000\u01a9\u01aa\u0001\u0000\u0000"+ + "\u0000\u01aa=\u0001\u0000\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000"+ + "\u01ac\u01b1\u0003<\u001e\u0000\u01ad\u01ae\u0005&\u0000\u0000\u01ae\u01b0"+ + "\u0003<\u001e\u0000\u01af\u01ad\u0001\u0000\u0000\u0000\u01b0\u01b3\u0001"+ + "\u0000\u0000\u0000\u01b1\u01af\u0001\u0000\u0000\u0000\u01b1\u01b2\u0001"+ + "\u0000\u0000\u0000\u01b2?\u0001\u0000\u0000\u0000\u01b3\u01b1\u0001\u0000"+ + "\u0000\u0000\u01b4\u01b5\u0007\u0003\u0000\u0000\u01b5A\u0001\u0000\u0000"+ + "\u0000\u01b6\u01ba\u0005V\u0000\u0000\u01b7\u01b8\u0004!\t\u0000\u01b8"+ + "\u01ba\u0003F#\u0000\u01b9\u01b6\u0001\u0000\u0000\u0000\u01b9\u01b7\u0001"+ + "\u0000\u0000\u0000\u01baC\u0001\u0000\u0000\u0000\u01bb\u01e6\u00051\u0000"+ + "\u0000\u01bc\u01bd\u0003h4\u0000\u01bd\u01be\u0005I\u0000\u0000\u01be"+ + "\u01e6\u0001\u0000\u0000\u0000\u01bf\u01e6\u0003f3\u0000\u01c0\u01e6\u0003"+ + "h4\u0000\u01c1\u01e6\u0003b1\u0000\u01c2\u01e6\u0003F#\u0000\u01c3\u01e6"+ + "\u0003j5\u0000\u01c4\u01c5\u0005G\u0000\u0000\u01c5\u01ca\u0003d2\u0000"+ + "\u01c6\u01c7\u0005&\u0000\u0000\u01c7\u01c9\u0003d2\u0000\u01c8\u01c6"+ + "\u0001\u0000\u0000\u0000\u01c9\u01cc\u0001\u0000\u0000\u0000\u01ca\u01c8"+ + "\u0001\u0000\u0000\u0000\u01ca\u01cb\u0001\u0000\u0000\u0000\u01cb\u01cd"+ + "\u0001\u0000\u0000\u0000\u01cc\u01ca\u0001\u0000\u0000\u0000\u01cd\u01ce"+ + "\u0005H\u0000\u0000\u01ce\u01e6\u0001\u0000\u0000\u0000\u01cf\u01d0\u0005"+ + "G\u0000\u0000\u01d0\u01d5\u0003b1\u0000\u01d1\u01d2\u0005&\u0000\u0000"+ + "\u01d2\u01d4\u0003b1\u0000\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d4\u01d7"+ + "\u0001\u0000\u0000\u0000\u01d5\u01d3\u0001\u0000\u0000\u0000\u01d5\u01d6"+ + "\u0001\u0000\u0000\u0000\u01d6\u01d8\u0001\u0000\u0000\u0000\u01d7\u01d5"+ + "\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005H\u0000\u0000\u01d9\u01e6\u0001"+ + 
"\u0000\u0000\u0000\u01da\u01db\u0005G\u0000\u0000\u01db\u01e0\u0003j5"+ + "\u0000\u01dc\u01dd\u0005&\u0000\u0000\u01dd\u01df\u0003j5\u0000\u01de"+ + "\u01dc\u0001\u0000\u0000\u0000\u01df\u01e2\u0001\u0000\u0000\u0000\u01e0"+ + "\u01de\u0001\u0000\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000\u01e1"+ + "\u01e3\u0001\u0000\u0000\u0000\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e3"+ + "\u01e4\u0005H\u0000\u0000\u01e4\u01e6\u0001\u0000\u0000\u0000\u01e5\u01bb"+ + "\u0001\u0000\u0000\u0000\u01e5\u01bc\u0001\u0000\u0000\u0000\u01e5\u01bf"+ + "\u0001\u0000\u0000\u0000\u01e5\u01c0\u0001\u0000\u0000\u0000\u01e5\u01c1"+ + "\u0001\u0000\u0000\u0000\u01e5\u01c2\u0001\u0000\u0000\u0000\u01e5\u01c3"+ + "\u0001\u0000\u0000\u0000\u01e5\u01c4\u0001\u0000\u0000\u0000\u01e5\u01cf"+ + "\u0001\u0000\u0000\u0000\u01e5\u01da\u0001\u0000\u0000\u0000\u01e6E\u0001"+ + "\u0000\u0000\u0000\u01e7\u01ea\u00054\u0000\u0000\u01e8\u01ea\u0005F\u0000"+ + "\u0000\u01e9\u01e7\u0001\u0000\u0000\u0000\u01e9\u01e8\u0001\u0000\u0000"+ + "\u0000\u01eaG\u0001\u0000\u0000\u0000\u01eb\u01ef\u0003@ \u0000\u01ec"+ + "\u01ed\u0004$\n\u0000\u01ed\u01ef\u0003F#\u0000\u01ee\u01eb\u0001\u0000"+ + "\u0000\u0000\u01ee\u01ec\u0001\u0000\u0000\u0000\u01efI\u0001\u0000\u0000"+ + "\u0000\u01f0\u01f1\u0005\t\u0000\u0000\u01f1\u01f2\u0005\u001e\u0000\u0000"+ + "\u01f2K\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u000e\u0000\u0000\u01f4"+ + "\u01f9\u0003N\'\u0000\u01f5\u01f6\u0005&\u0000\u0000\u01f6\u01f8\u0003"+ + "N\'\u0000\u01f7\u01f5\u0001\u0000\u0000\u0000\u01f8\u01fb\u0001\u0000"+ + "\u0000\u0000\u01f9\u01f7\u0001\u0000\u0000\u0000\u01f9\u01fa\u0001\u0000"+ + "\u0000\u0000\u01faM\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000\u0000"+ + "\u0000\u01fc\u01fe\u0003\n\u0005\u0000\u01fd\u01ff\u0007\u0004\u0000\u0000"+ + "\u01fe\u01fd\u0001\u0000\u0000\u0000\u01fe\u01ff\u0001\u0000\u0000\u0000"+ + "\u01ff\u0202\u0001\u0000\u0000\u0000\u0200\u0201\u00052\u0000\u0000\u0201"+ + "\u0203\u0007\u0005\u0000\u0000\u0202\u0200\u0001\u0000\u0000\u0000\u0202"+ + "\u0203\u0001\u0000\u0000\u0000\u0203O\u0001\u0000\u0000\u0000\u0204\u0205"+ + "\u0005\b\u0000\u0000\u0205\u0206\u0003>\u001f\u0000\u0206Q\u0001\u0000"+ + "\u0000\u0000\u0207\u0208\u0005\u0002\u0000\u0000\u0208\u0209\u0003>\u001f"+ + "\u0000\u0209S\u0001\u0000\u0000\u0000\u020a\u020b\u0005\u000b\u0000\u0000"+ + "\u020b\u0210\u0003V+\u0000\u020c\u020d\u0005&\u0000\u0000\u020d\u020f"+ + "\u0003V+\u0000\u020e\u020c\u0001\u0000\u0000\u0000\u020f\u0212\u0001\u0000"+ + "\u0000\u0000\u0210\u020e\u0001\u0000\u0000\u0000\u0210\u0211\u0001\u0000"+ + "\u0000\u0000\u0211U\u0001\u0000\u0000\u0000\u0212\u0210\u0001\u0000\u0000"+ + "\u0000\u0213\u0214\u0003<\u001e\u0000\u0214\u0215\u0005Z\u0000\u0000\u0215"+ + "\u0216\u0003<\u001e\u0000\u0216W\u0001\u0000\u0000\u0000\u0217\u0218\u0005"+ + "\u0001\u0000\u0000\u0218\u0219\u0003\u0014\n\u0000\u0219\u021b\u0003j"+ + "5\u0000\u021a\u021c\u0003^/\u0000\u021b\u021a\u0001\u0000\u0000\u0000"+ + "\u021b\u021c\u0001\u0000\u0000\u0000\u021cY\u0001\u0000\u0000\u0000\u021d"+ + "\u021e\u0005\u0007\u0000\u0000\u021e\u021f\u0003\u0014\n\u0000\u021f\u0220"+ + "\u0003j5\u0000\u0220[\u0001\u0000\u0000\u0000\u0221\u0222\u0005\n\u0000"+ + "\u0000\u0222\u0223\u0003:\u001d\u0000\u0223]\u0001\u0000\u0000\u0000\u0224"+ + "\u0229\u0003`0\u0000\u0225\u0226\u0005&\u0000\u0000\u0226\u0228\u0003"+ + "`0\u0000\u0227\u0225\u0001\u0000\u0000\u0000\u0228\u022b\u0001\u0000\u0000"+ + "\u0000\u0229\u0227\u0001\u0000\u0000\u0000\u0229\u022a\u0001\u0000\u0000"+ + 
"\u0000\u022a_\u0001\u0000\u0000\u0000\u022b\u0229\u0001\u0000\u0000\u0000"+ + "\u022c\u022d\u0003@ \u0000\u022d\u022e\u0005#\u0000\u0000\u022e\u022f"+ + "\u0003D\"\u0000\u022fa\u0001\u0000\u0000\u0000\u0230\u0231\u0007\u0006"+ + "\u0000\u0000\u0231c\u0001\u0000\u0000\u0000\u0232\u0235\u0003f3\u0000"+ + "\u0233\u0235\u0003h4\u0000\u0234\u0232\u0001\u0000\u0000\u0000\u0234\u0233"+ + "\u0001\u0000\u0000\u0000\u0235e\u0001\u0000\u0000\u0000\u0236\u0238\u0007"+ + "\u0000\u0000\u0000\u0237\u0236\u0001\u0000\u0000\u0000\u0237\u0238\u0001"+ + "\u0000\u0000\u0000\u0238\u0239\u0001\u0000\u0000\u0000\u0239\u023a\u0005"+ + "\u001f\u0000\u0000\u023ag\u0001\u0000\u0000\u0000\u023b\u023d\u0007\u0000"+ + "\u0000\u0000\u023c\u023b\u0001\u0000\u0000\u0000\u023c\u023d\u0001\u0000"+ + "\u0000\u0000\u023d\u023e\u0001\u0000\u0000\u0000\u023e\u023f\u0005\u001e"+ + "\u0000\u0000\u023fi\u0001\u0000\u0000\u0000\u0240\u0241\u0005\u001d\u0000"+ + "\u0000\u0241k\u0001\u0000\u0000\u0000\u0242\u0243\u0007\u0007\u0000\u0000"+ + "\u0243m\u0001\u0000\u0000\u0000\u0244\u0245\u0005\u0005\u0000\u0000\u0245"+ + "\u0246\u0003p8\u0000\u0246o\u0001\u0000\u0000\u0000\u0247\u0248\u0005"+ + "G\u0000\u0000\u0248\u0249\u0003\u0002\u0001\u0000\u0249\u024a\u0005H\u0000"+ + "\u0000\u024aq\u0001\u0000\u0000\u0000\u024b\u024c\u0005\r\u0000\u0000"+ + "\u024c\u024d\u0005j\u0000\u0000\u024ds\u0001\u0000\u0000\u0000\u024e\u024f"+ + "\u0005\u0003\u0000\u0000\u024f\u0252\u0005`\u0000\u0000\u0250\u0251\u0005"+ + "^\u0000\u0000\u0251\u0253\u0003<\u001e\u0000\u0252\u0250\u0001\u0000\u0000"+ + "\u0000\u0252\u0253\u0001\u0000\u0000\u0000\u0253\u025d\u0001\u0000\u0000"+ + "\u0000\u0254\u0255\u0005_\u0000\u0000\u0255\u025a\u0003v;\u0000\u0256"+ + "\u0257\u0005&\u0000\u0000\u0257\u0259\u0003v;\u0000\u0258\u0256\u0001"+ + "\u0000\u0000\u0000\u0259\u025c\u0001\u0000\u0000\u0000\u025a\u0258\u0001"+ + "\u0000\u0000\u0000\u025a\u025b\u0001\u0000\u0000\u0000\u025b\u025e\u0001"+ + "\u0000\u0000\u0000\u025c\u025a\u0001\u0000\u0000\u0000\u025d\u0254\u0001"+ + "\u0000\u0000\u0000\u025d\u025e\u0001\u0000\u0000\u0000\u025eu\u0001\u0000"+ + "\u0000\u0000\u025f\u0260\u0003<\u001e\u0000\u0260\u0261\u0005#\u0000\u0000"+ + "\u0261\u0263\u0001\u0000\u0000\u0000\u0262\u025f\u0001\u0000\u0000\u0000"+ + "\u0262\u0263\u0001\u0000\u0000\u0000\u0263\u0264\u0001\u0000\u0000\u0000"+ + "\u0264\u0265\u0003<\u001e\u0000\u0265w\u0001\u0000\u0000\u0000\u0266\u0267"+ + "\u0005\u0013\u0000\u0000\u0267\u0268\u0003(\u0014\u0000\u0268\u0269\u0005"+ + "^\u0000\u0000\u0269\u026a\u0003>\u001f\u0000\u026ay\u0001\u0000\u0000"+ + "\u0000\u026b\u026c\u0005\u0012\u0000\u0000\u026c\u026f\u00036\u001b\u0000"+ + "\u026d\u026e\u0005 \u0000\u0000\u026e\u0270\u0003\"\u0011\u0000\u026f"+ + "\u026d\u0001\u0000\u0000\u0000\u026f\u0270\u0001\u0000\u0000\u0000\u0270"+ + "{\u0001\u0000\u0000\u0000\u0271\u0272\u0007\b\u0000\u0000\u0272\u0273"+ + "\u0005x\u0000\u0000\u0273\u0274\u0003~?\u0000\u0274\u0275\u0003\u0080"+ + "@\u0000\u0275}\u0001\u0000\u0000\u0000\u0276\u0277\u0003(\u0014\u0000"+ + "\u0277\u007f\u0001\u0000\u0000\u0000\u0278\u0279\u0005^\u0000\u0000\u0279"+ + "\u027e\u0003\u0082A\u0000\u027a\u027b\u0005&\u0000\u0000\u027b\u027d\u0003"+ + "\u0082A\u0000\u027c\u027a\u0001\u0000\u0000\u0000\u027d\u0280\u0001\u0000"+ + "\u0000\u0000\u027e\u027c\u0001\u0000\u0000\u0000\u027e\u027f\u0001\u0000"+ + "\u0000\u0000\u027f\u0081\u0001\u0000\u0000\u0000\u0280\u027e\u0001\u0000"+ + "\u0000\u0000\u0281\u0282\u0003\u0010\b\u0000\u0282\u0083\u0001\u0000\u0000"+ + 
"\u0000=\u008f\u0098\u00ab\u00b7\u00c0\u00c8\u00cd\u00d5\u00d7\u00dc\u00e3"+ + "\u00e8\u00ed\u00f7\u00fd\u0105\u0107\u0112\u0119\u0124\u0129\u012b\u0137"+ + "\u014a\u0150\u015a\u015e\u0163\u0171\u017a\u017e\u0182\u0189\u018d\u0194"+ + "\u019a\u01a1\u01a9\u01b1\u01b9\u01ca\u01d5\u01e0\u01e5\u01e9\u01ee\u01f9"+ + "\u01fe\u0202\u0210\u021b\u0229\u0234\u0237\u023c\u0252\u025a\u025d\u0262"+ + "\u026f\u027e"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 46c1de31bb471..ba8aaf6251c57 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -18,6 +18,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.capabilities.TelemetryAware; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -525,11 +526,11 @@ public PlanFactory visitLookupCommand(EsqlBaseParser.LookupCommandContext ctx) { public PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { var source = source(ctx); - if (false == Build.current().isSnapshot()) { + if (false == EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled()) { throw new ParsingException(source, "JOIN is in preview and only available in SNAPSHOT build"); } - if (ctx.type != null && ctx.type.getType() != EsqlBaseParser.DEV_JOIN_LOOKUP) { + if (ctx.type != null && ctx.type.getType() != EsqlBaseParser.JOIN_LOOKUP) { String joinType = ctx.type == null ? 
"(INNER)" : ctx.type.getText(); throw new ParsingException(source, "only LOOKUP JOIN available, {} JOIN unsupported at the moment", joinType); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java index 18d8bc9fb0a75..2ca1d8c4d1288 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/GrammarInDevelopmentParsingTests.java @@ -19,7 +19,7 @@ public void testDevelopmentInline() throws Exception { } public void testDevelopmentLookup() throws Exception { - parse("row a = 1 | lookup \"foo\" on j", "lookup"); + parse("row a = 1 | lookup_\uD83D\uDC14 \"foo\" on j", "lookup_\uD83D\uDC14"); } public void testDevelopmentMetrics() throws Exception { From ff3950d0259c264b65e181a4ffcce2909798e211 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 02:27:39 +1100 Subject: [PATCH 286/383] Mute org.elasticsearch.entitlement.runtime.policy.PolicyManagerTests testDuplicateFlagEntitlements #121300 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index d093c905bde51..a8486ebaf16ec 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -370,6 +370,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/121293 - class: org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests issue: https://github.com/elastic/elasticsearch/issues/121294 +- class: org.elasticsearch.entitlement.runtime.policy.PolicyManagerTests + method: testDuplicateFlagEntitlements + issue: https://github.com/elastic/elasticsearch/issues/121300 # Examples: # From aac1409814850a9b5a7f0e023667071002bd4d2d Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 30 Jan 2025 16:01:24 +0000 Subject: [PATCH 287/383] Cheaper snapshot-related `toString()` impls (#121283) If the `MasterService` needs to log a create-snapshot task description then it will call `CreateSnapshotTask#toString`, which today calls `RepositoryData#toString` which is not overridden so ends up calling `RepositoryData#hashCode`. This can be extraordinarily expensive in a large repository. Worse, if there's masses of create-snapshot tasks to execute then it'll do this repeatedly, because each one only ends up yielding a short hex string so we don't reach the description length limit very easily. With this commit we provide a more efficient implementation of `CreateSnapshotTask#toString` and also override `RepositoryData#toString` to protect against some other caller running into the same issue. 
--- docs/changelog/121283.yaml | 5 ++++ .../snapshots/SnapshotsServiceIT.java | 28 +++++++++++++++++++ .../repositories/RepositoryData.java | 5 ++++ .../snapshots/SnapshotsService.java | 5 ++++ .../repositories/RepositoryDataTests.java | 16 +++++++++++ 5 files changed, 59 insertions(+) create mode 100644 docs/changelog/121283.yaml diff --git a/docs/changelog/121283.yaml b/docs/changelog/121283.yaml new file mode 100644 index 0000000000000..56fb62acdb5fa --- /dev/null +++ b/docs/changelog/121283.yaml @@ -0,0 +1,5 @@ +pr: 121283 +summary: Cheaper snapshot-related `toString()` impls +area: Snapshot/Restore +type: bug +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceIT.java index b9e47740e2945..b86cae1c2fb60 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceIT.java @@ -17,10 +17,12 @@ import org.elasticsearch.cluster.SnapshotDeletionsInProgress; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.snapshots.mockstore.MockRepository; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.MockLog; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.util.List; import java.util.concurrent.TimeUnit; @@ -223,4 +225,30 @@ public void testRerouteWhenShardSnapshotsCompleted() throws Exception { safeAwait(shardMovedListener); ensureGreen(indexName); } + + @TestLogging(reason = "testing task description, logged at DEBUG", value = "org.elasticsearch.cluster.service.MasterService:DEBUG") + public void testCreateSnapshotTaskDescription() { + createIndexWithRandomDocs(randomIdentifier(), randomIntBetween(1, 5)); + final var repositoryName = randomIdentifier(); + createRepository(repositoryName, "mock"); + + final var snapshotName = randomIdentifier(); + MockLog.assertThatLogger( + () -> createFullSnapshot(repositoryName, snapshotName), + MasterService.class, + new MockLog.SeenEventExpectation( + "executing cluster state update debug message", + MasterService.class.getCanonicalName(), + Level.DEBUG, + "executing cluster state update for [create_snapshot [" + + snapshotName + + "][CreateSnapshotTask{repository=" + + repositoryName + + ", snapshot=*" + + snapshotName + + "*}]]" + ) + ); + } + } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index 2ebbf24d65670..2c429954f5f49 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -605,6 +605,11 @@ public int hashCode() { return Objects.hash(snapshotIds, snapshotsDetails, indices, indexSnapshots, shardGenerations, indexMetaDataGenerations); } + @Override + public String toString() { + return Strings.format("RepositoryData[uuid=%s,gen=%s]", uuid, genId); + } + /** * Resolve the index name to the index id specific to the repository, * throwing an exception if the index could not be resolved. 
diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 008c75ed13473..5d8ef51af8d51 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -3885,6 +3885,11 @@ public void onFailure(Exception e) { logSnapshotFailure("create", snapshot, e); listener.onFailure(e); } + + @Override + public String toString() { + return "CreateSnapshotTask{repository=" + repository.getMetadata().name() + ", snapshot=" + snapshot + '}'; + } } private static void logSnapshotFailure(String operation, Snapshot snapshot, Exception e) { diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java index f5ebacde08820..250d10855b23f 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java @@ -40,9 +40,12 @@ import static org.elasticsearch.repositories.RepositoryData.EMPTY_REPO_GEN; import static org.elasticsearch.repositories.RepositoryData.MISSING_UUID; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.not; /** * Tests for the {@link RepositoryData} class. @@ -430,6 +433,19 @@ public void testFailsIfMinVersionNotSatisfied() throws IOException { } } + public void testToString() { + final var repositoryData = generateRandomRepoData(); + assertThat( + repositoryData.toString(), + allOf( + containsString("RepositoryData"), + containsString(repositoryData.getUuid()), + containsString(Long.toString(repositoryData.getGenId())), + not(containsString("@")) // not the default Object#toString which does a very expensive hashcode computation + ) + ); + } + public static RepositoryData generateRandomRepoData() { final int numIndices = randomIntBetween(1, 30); final List indices = new ArrayList<>(numIndices); From da65532e9c6ce9195298b98f0968bc21046aa2ed Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 30 Jan 2025 08:22:42 -0800 Subject: [PATCH 288/383] Add 8.19.0 version constant --- .buildkite/pipelines/periodic-packaging.yml | 16 ++++++++++++++++ .buildkite/pipelines/periodic.yml | 19 +++++++++++++++++++ .ci/bwcVersions | 1 + .../main/java/org/elasticsearch/Version.java | 1 + 4 files changed, 37 insertions(+) diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index aded97712d7a5..9565aa4072a61 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -335,6 +335,22 @@ steps: env: BWC_VERSION: 8.18.0 + - label: "{{matrix.image}} / 8.19.0 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.19.0 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 8.19.0 + - label: "{{matrix.image}} / 9.0.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true 
destructiveDistroUpgradeTest.v9.0.0 timeout_in_minutes: 300 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 3472e7edce0da..2eb617493eb18 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -363,6 +363,25 @@ steps: - signal_reason: agent_stop limit: 3 + - label: 8.19.0 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.19.0#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.19.0 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + - label: 9.0.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.0.0#bwcTest timeout_in_minutes: 300 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 9f4b86ffc7ada..63549d42b8822 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -18,4 +18,5 @@ BWC_VERSION: - "8.16.4" - "8.17.2" - "8.18.0" + - "8.19.0" - "9.0.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 1249f36745835..1f4ab769182dc 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -197,6 +197,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_17_1 = new Version(8_17_01_99); public static final Version V_8_17_2 = new Version(8_17_02_99); public static final Version V_8_18_0 = new Version(8_18_00_99); + public static final Version V_8_19_0 = new Version(8_19_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); public static final Version CURRENT = V_9_0_0; From 6c405093d7c6becb7c5542d4b6ef1db21f9530f8 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 30 Jan 2025 08:22:49 -0800 Subject: [PATCH 289/383] Update BWC versions to support multiple staged releases --- .../internal/fake_git/remote/settings.gradle | 1 + .../gradle/internal/BwcVersions.java | 5 ++- .../gradle/internal/BwcVersionsSpec.groovy | 33 +++++++++++++++++++ distribution/bwc/staged2/build.gradle | 0 settings.gradle | 1 + 5 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 distribution/bwc/staged2/build.gradle diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle index e931537fcd6e9..1774ac0b0a112 100644 --- a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle +++ b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle @@ -14,6 +14,7 @@ include ":distribution:bwc:bugfix2" include ":distribution:bwc:minor" include ":distribution:bwc:major" include ":distribution:bwc:staged" +include ":distribution:bwc:staged2" include ":distribution:bwc:maintenance" include ":distribution:archives:darwin-tar" include ":distribution:archives:oss-darwin-tar" diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java index 9f7645349e852..5c1caa08bfaa1 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java +++ 
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java @@ -166,6 +166,7 @@ private Map computeUnreleased(List devel .toList(); boolean existingBugfix = false; + boolean existingStaged = false; for (int i = 0; i < featureFreezeBranches.size(); i++) { String branch = featureFreezeBranches.get(i); Version version = versions.stream() @@ -193,7 +194,9 @@ private Map computeUnreleased(List devel if (i == featureFreezeBranches.size() - 1) { result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:maintenance")); } else if (version.getRevision() == 0) { // This is the next staged minor - result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:staged")); + String project = existingStaged ? "staged2" : "staged"; + result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:" + project)); + existingStaged = true; } else { // This is a bugfix String project = existingBugfix ? "bugfix2" : "bugfix"; result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:" + project)); diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy index 4d033564a42b4..a662a76db4da7 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy @@ -77,6 +77,39 @@ class BwcVersionsSpec extends Specification { bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0'), v('9.0.0')] } + def "current version is next major with two staged minors"() { + given: + addVersion('7.17.10', '8.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.16.2', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('8.18.0', '9.10.0') + addVersion('8.19.0', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.1.0', '10.1.0') + + when: + def bwc = new BwcVersions(versionLines, v('9.1.0'), ['main', '9.0', '8.x', '8.18', '8.17', '8.16', '7.17']) + def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } + + then: + unreleased == [ + (v('8.16.2')): new UnreleasedVersionInfo(v('8.16.2'), '8.16', ':distribution:bwc:bugfix2'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:staged2'), + (v('8.19.0')): new UnreleasedVersionInfo(v('8.19.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.0', ':distribution:bwc:staged'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution'), + ] + bwc.wireCompatible == [v('8.19.0'), v('9.0.0'), v('9.1.0')] + bwc.indexCompatible == [v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.16.2'), v('8.17.0'), v('8.17.1'), v('8.18.0'), v('8.19.0'), v('9.0.0'), v('9.1.0')] + } + def "current version is first new minor in major series"() { given: addVersion('7.17.10', '8.9.0') diff --git a/distribution/bwc/staged2/build.gradle b/distribution/bwc/staged2/build.gradle new file mode 100644 index 0000000000000..e69de29bb2d1d diff 
--git a/settings.gradle b/settings.gradle index 374b67ee78a13..a6b5367591ee2 100644 --- a/settings.gradle +++ b/settings.gradle @@ -79,6 +79,7 @@ List projects = [ 'distribution:bwc:maintenance', 'distribution:bwc:minor', 'distribution:bwc:staged', + 'distribution:bwc:staged2', 'distribution:bwc:main', 'distribution:tools:java-version-checker', 'distribution:tools:cli-launcher', From 6cb7f0e296d6b2ce70c9e02af746c7e24c70e197 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 30 Jan 2025 08:31:34 -0800 Subject: [PATCH 290/383] Fix backport mapping --- .backportrc.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.backportrc.json b/.backportrc.json index 20287f0bfc0e6..b1d10104dca17 100644 --- a/.backportrc.json +++ b/.backportrc.json @@ -4,7 +4,7 @@ "targetPRLabels" : [ "backport" ], "branchLabelMapping" : { "^v9.0.0$" : "main", - "^v8.18.0$" : "8.x", + "^v8.19.0$" : "8.x", "^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2" } } From befec5d0dffb249d423174b27c9fa26f2fb42d39 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 30 Jan 2025 08:32:53 -0800 Subject: [PATCH 291/383] Add 8.18 branch to backport config --- .backportrc.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.backportrc.json b/.backportrc.json index b1d10104dca17..eef30c5e93c34 100644 --- a/.backportrc.json +++ b/.backportrc.json @@ -1,6 +1,6 @@ { "upstream" : "elastic/elasticsearch", - "targetBranchChoices" : [ "main", "8.x", "8.17", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], + "targetBranchChoices" : [ "main", "8.x", "8.18", "8.17", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], "targetPRLabels" : [ "backport" ], "branchLabelMapping" : { "^v9.0.0$" : "main", From 011557d61b87558b0b781dd950dde4dfe2742c93 Mon Sep 17 00:00:00 2001 From: Dan Rubinstein Date: Thu, 30 Jan 2025 11:37:04 -0500 Subject: [PATCH 292/383] Fix inference update API calls with task_type in body or deployment_id defined (#121231) * Fix inference update API calls with task_type in body or deployment_id defined * Update docs/changelog/121231.yaml * Fixing test * Reuse existing deployment ID retrieval logic --------- Co-authored-by: Elastic Machine --- docs/changelog/121231.yaml | 6 ++ .../inference/CreateFromDeploymentIT.java | 46 ++++++++++++++++ .../inference/InferenceBaseRestTest.java | 5 ++ .../xpack/inference/InferenceCrudIT.java | 55 +++++++++++++++++++ .../TransportUpdateInferenceModelAction.java | 32 ++++++++--- .../rest/RestUpdateInferenceModelAction.java | 5 +- 6 files changed, 138 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/121231.yaml diff --git a/docs/changelog/121231.yaml b/docs/changelog/121231.yaml new file mode 100644 index 0000000000000..bd9eb934c8d08 --- /dev/null +++ b/docs/changelog/121231.yaml @@ -0,0 +1,6 @@ +pr: 121231 +summary: Fix inference update API calls with `task_type` in body or `deployment_id` + defined +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java index 0a2200ff912ac..e5eda9a71b472 100644 --- 
a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java @@ -43,6 +43,24 @@ public void testAttachToDeployment() throws IOException { var results = infer(inferenceId, List.of("washing machine")); assertNotNull(results.get("sparse_embedding")); + var updatedNumAllocations = randomIntBetween(1, 10); + var updatedEndpointConfig = updateEndpoint(inferenceId, updatedEndpointConfig(updatedNumAllocations), TaskType.SPARSE_EMBEDDING); + assertThat( + updatedEndpointConfig.get("service_settings"), + is( + Map.of( + "num_allocations", + updatedNumAllocations, + "num_threads", + 1, + "model_id", + "attach_to_deployment", + "deployment_id", + "existing_deployment" + ) + ) + ); + deleteModel(inferenceId); // assert deployment not stopped var stats = (List>) getTrainedModelStats(modelId).get("trained_model_stats"); @@ -83,6 +101,24 @@ public void testAttachWithModelId() throws IOException { var results = infer(inferenceId, List.of("washing machine")); assertNotNull(results.get("sparse_embedding")); + var updatedNumAllocations = randomIntBetween(1, 10); + var updatedEndpointConfig = updateEndpoint(inferenceId, updatedEndpointConfig(updatedNumAllocations), TaskType.SPARSE_EMBEDDING); + assertThat( + updatedEndpointConfig.get("service_settings"), + is( + Map.of( + "num_allocations", + updatedNumAllocations, + "num_threads", + 1, + "model_id", + "attach_with_model_id", + "deployment_id", + "existing_deployment_with_model_id" + ) + ) + ); + stopMlNodeDeployment(deploymentId); } @@ -189,6 +225,16 @@ private String endpointConfig(String modelId, String deploymentId) { """, modelId, deploymentId); } + private String updatedEndpointConfig(int numAllocations) { + return Strings.format(""" + { + "service_settings": { + "num_allocations": %d + } + } + """, numAllocations); + } + private Response startMlNodeDeploymemnt(String modelId, String deploymentId) throws IOException { String endPoint = "/_ml/trained_models/" + modelId diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index bb3f3e9b46c4d..950ff196e5136 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -238,6 +238,11 @@ static Map updateEndpoint(String inferenceID, String modelConfig return putRequest(endpoint, modelConfig); } + static Map updateEndpoint(String inferenceID, String modelConfig) throws IOException { + String endpoint = Strings.format("_inference/%s/_update", inferenceID); + return putRequest(endpoint, modelConfig); + } + protected Map putPipeline(String pipelineId, String modelId) throws IOException { String endpoint = Strings.format("_ingest/pipeline/%s", pipelineId); String body = """ diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 
b786cd1298495..793b3f7a9a349 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -369,6 +369,61 @@ public void testUnifiedCompletionInference() throws Exception { } } + public void testUpdateEndpointWithWrongTaskTypeInURL() throws IOException { + putModel("sparse_embedding_model", mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + var e = expectThrows( + ResponseException.class, + () -> updateEndpoint( + "sparse_embedding_model", + updateConfig(null, randomAlphaOfLength(10), randomIntBetween(1, 10)), + TaskType.TEXT_EMBEDDING + ) + ); + assertThat(e.getMessage(), containsString("Task type must match the task type of the existing endpoint")); + } + + public void testUpdateEndpointWithWrongTaskTypeInBody() throws IOException { + putModel("sparse_embedding_model", mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + var e = expectThrows( + ResponseException.class, + () -> updateEndpoint( + "sparse_embedding_model", + updateConfig(TaskType.TEXT_EMBEDDING, randomAlphaOfLength(10), randomIntBetween(1, 10)) + ) + ); + assertThat(e.getMessage(), containsString("Task type must match the task type of the existing endpoint")); + } + + public void testUpdateEndpointWithTaskTypeInURL() throws IOException { + testUpdateEndpoint(false, true); + } + + public void testUpdateEndpointWithTaskTypeInBody() throws IOException { + testUpdateEndpoint(true, false); + } + + public void testUpdateEndpointWithTaskTypeInBodyAndURL() throws IOException { + testUpdateEndpoint(true, true); + } + + @SuppressWarnings("unchecked") + private void testUpdateEndpoint(boolean taskTypeInBody, boolean taskTypeInURL) throws IOException { + String endpointId = "sparse_embedding_model"; + putModel(endpointId, mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + + int temperature = randomIntBetween(1, 10); + var expectedConfig = updateConfig(taskTypeInBody ? 
TaskType.SPARSE_EMBEDDING : null, randomAlphaOfLength(1), temperature); + Map updatedEndpoint; + if (taskTypeInURL) { + updatedEndpoint = updateEndpoint(endpointId, expectedConfig, TaskType.SPARSE_EMBEDDING); + } else { + updatedEndpoint = updateEndpoint(endpointId, expectedConfig); + } + + Map updatedTaskSettings = (Map) updatedEndpoint.get("task_settings"); + assertEquals(temperature, updatedTaskSettings.get("temperature")); + } + private static Iterator expectedResultsIterator(List input) { // The Locale needs to be ROOT to match what the test service is going to respond with return Stream.concat(input.stream().map(s -> s.toUpperCase(Locale.ROOT)).map(InferenceCrudIT::expectedResult), Stream.of("[DONE]")) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java index b857ef3068835..ed005a86d66b5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUpdateInferenceModelAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.inference.InferenceService; @@ -50,6 +51,7 @@ import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalModel; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalServiceSettings; @@ -255,14 +257,13 @@ private void updateInClusterEndpoint( ActionListener listener ) throws IOException { // The model we are trying to update must have a trained model associated with it if it is an in-cluster deployment - throwIfTrainedModelDoesntExist(request); + var deploymentId = getDeploymentIdForInClusterEndpoint(existingParsedModel); + throwIfTrainedModelDoesntExist(request.getInferenceEntityId(), deploymentId); Map serviceSettings = request.getContentAsSettings().serviceSettings(); if (serviceSettings != null && serviceSettings.get(NUM_ALLOCATIONS) instanceof Integer numAllocations) { - UpdateTrainedModelDeploymentAction.Request updateRequest = new UpdateTrainedModelDeploymentAction.Request( - request.getInferenceEntityId() - ); + UpdateTrainedModelDeploymentAction.Request updateRequest = new UpdateTrainedModelDeploymentAction.Request(deploymentId); updateRequest.setNumberOfAllocations(numAllocations); var delegate = listener.delegateFailure((l2, response) -> { @@ -270,7 +271,8 @@ private void updateInClusterEndpoint( }); logger.info( - "Updating trained model deployment for inference entity [{}] with [{}] num_allocations", + "Updating trained model deployment [{}] for inference entity [{}] with [{}] num_allocations", + deploymentId, request.getInferenceEntityId(), numAllocations ); @@ -293,12 +295,26 @@ private boolean 
isInClusterService(String name) { return List.of(ElasticsearchInternalService.NAME, ElasticsearchInternalService.OLD_ELSER_SERVICE_NAME).contains(name); } - private void throwIfTrainedModelDoesntExist(UpdateInferenceModelAction.Request request) throws ElasticsearchStatusException { - var assignments = TrainedModelAssignmentUtils.modelAssignments(request.getInferenceEntityId(), clusterService.state()); + private String getDeploymentIdForInClusterEndpoint(Model model) { + if (model instanceof ElasticsearchInternalModel esModel) { + return esModel.mlNodeDeploymentId(); + } else { + throw new IllegalStateException( + Strings.format( + "Cannot update inference endpoint [%s]. Class [%s] is not an Elasticsearch internal model", + model.getInferenceEntityId(), + model.getClass().getSimpleName() + ) + ); + } + } + + private void throwIfTrainedModelDoesntExist(String inferenceEntityId, String deploymentId) throws ElasticsearchStatusException { + var assignments = TrainedModelAssignmentUtils.modelAssignments(deploymentId, clusterService.state()); if ((assignments == null || assignments.isEmpty())) { throw ExceptionsHelper.entityNotFoundException( Messages.MODEL_ID_DOES_NOT_MATCH_EXISTING_MODEL_IDS_BUT_MUST_FOR_IN_CLUSTER_SERVICE, - request.getInferenceEntityId() + inferenceEntityId ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java index 120731a4f8e66..7b3c54c60cdcc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java @@ -7,13 +7,11 @@ package org.elasticsearch.xpack.inference.rest; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; @@ -48,7 +46,8 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient inferenceEntityId = restRequest.param(INFERENCE_ID); taskType = TaskType.fromStringOrStatusException(restRequest.param(TASK_TYPE_OR_INFERENCE_ID)); } else { - throw new ElasticsearchStatusException("Inference ID must be provided in the path", RestStatus.BAD_REQUEST); + inferenceEntityId = restRequest.param(TASK_TYPE_OR_INFERENCE_ID); + taskType = TaskType.ANY; } var content = restRequest.requiredContent(); From 2bbf7c72db4f082820af8b9ee141208d5f9002ef Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Thu, 30 Jan 2025 11:47:37 -0500 Subject: [PATCH 293/383] Fix PolicyManagerTests after package move (#121304) * Fix PolicyManagerTests after package move * Unmute --- .../entitlement/runtime/policy/PolicyManagerTests.java | 9 +++------ muted-tests.yml | 3 --- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index dc13703d85d2e..3e4896fd714e4 
100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -305,8 +305,7 @@ public void testDuplicateFlagEntitlements() { ) ); assertEquals( - "[server] using module [test] found duplicate flag entitlements " - + "[org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement]", + "[server] using module [test] found duplicate flag entitlements " + "[" + CreateClassLoaderEntitlement.class.getName() + "]", iae.getMessage() ); @@ -322,8 +321,7 @@ public void testDuplicateFlagEntitlements() { ) ); assertEquals( - "[agent] using module [unnamed] found duplicate flag entitlements " - + "[org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement]", + "[agent] using module [unnamed] found duplicate flag entitlements " + "[" + CreateClassLoaderEntitlement.class.getName() + "]", iae.getMessage() ); @@ -355,8 +353,7 @@ public void testDuplicateFlagEntitlements() { ) ); assertEquals( - "[plugin1] using module [test] found duplicate flag entitlements " - + "[org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement]", + "[plugin1] using module [test] found duplicate flag entitlements " + "[" + CreateClassLoaderEntitlement.class.getName() + "]", iae.getMessage() ); } diff --git a/muted-tests.yml b/muted-tests.yml index a8486ebaf16ec..d093c905bde51 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -370,9 +370,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/121293 - class: org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests issue: https://github.com/elastic/elasticsearch/issues/121294 -- class: org.elasticsearch.entitlement.runtime.policy.PolicyManagerTests - method: testDuplicateFlagEntitlements - issue: https://github.com/elastic/elasticsearch/issues/121300 # Examples: # From 69bdf465b05b92a4afa236b53e47ae89def9d00f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 30 Jan 2025 16:55:46 +0000 Subject: [PATCH 294/383] Bump to version 9.1.0 --- .backportrc.json | 6 ++--- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 16 +++++++++++++ .buildkite/pipelines/periodic.yml | 23 +++++++++++++++++-- .ci/bwcVersions | 1 + .ci/snapshotBwcVersions | 2 ++ build-tools-internal/version.properties | 2 +- docs/reference/migration/index.asciidoc | 2 ++ docs/reference/migration/migrate_9_1.asciidoc | 20 ++++++++++++++++ docs/reference/release-notes.asciidoc | 2 ++ docs/reference/release-notes/9.1.0.asciidoc | 8 +++++++ .../release-notes/highlights.asciidoc | 5 ++++ .../main/java/org/elasticsearch/Version.java | 3 ++- 13 files changed, 84 insertions(+), 8 deletions(-) create mode 100644 docs/reference/migration/migrate_9_1.asciidoc create mode 100644 docs/reference/release-notes/9.1.0.asciidoc diff --git a/.backportrc.json b/.backportrc.json index eef30c5e93c34..702113b5600e7 100644 --- a/.backportrc.json +++ b/.backportrc.json @@ -1,10 +1,10 @@ { "upstream" : "elastic/elasticsearch", - "targetBranchChoices" : [ "main", "8.x", "8.18", "8.17", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], + "targetBranchChoices" : [ "main", "8.x", "9.0", "8.18", "8.17", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], "targetPRLabels" : [ 
"backport" ], "branchLabelMapping" : { - "^v9.0.0$" : "main", + "^v9.1.0$" : "main", "^v8.19.0$" : "8.x", "^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2" } -} +} \ No newline at end of file diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index ea04a0340076d..a207eeafaaae6 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "8.19.0", "9.0.0", "9.1.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 9565aa4072a61..99ff9ec9ecf2b 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -367,6 +367,22 @@ steps: env: BWC_VERSION: 9.0.0 + - label: "{{matrix.image}} / 9.1.0 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.1.0 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 9.1.0 + - group: packaging-tests-windows steps: - label: "{{matrix.image}} / packaging-tests-windows" diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 2eb617493eb18..d925f7e2bffbf 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -401,6 +401,25 @@ steps: - signal_reason: agent_stop limit: 3 + - label: 9.1.0 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.1.0#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 9.1.0 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + - label: concurrent-search-tests command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true functionalTests timeout_in_minutes: 420 @@ -467,7 +486,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "8.19.0", "9.0.0", "9.1.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -509,7 +528,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "8.19.0", "9.0.0", "9.1.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 63549d42b8822..e80e036907a0c 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -20,3 +20,4 @@ BWC_VERSION: - "8.18.0" - "8.19.0" - "9.0.0" + - "9.1.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 38104e03edb5f..68663fdfb1782 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -2,4 +2,6 @@ BWC_VERSION: - "8.16.4" - "8.17.2" - "8.18.0" + - "8.19.0" - "9.0.0" + - "9.1.0" diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index c041bd2dd2156..a0c663b19a0c6 100644 --- a/build-tools-internal/version.properties +++ 
b/build-tools-internal/version.properties @@ -1,4 +1,4 @@ -elasticsearch = 9.0.0 +elasticsearch = 9.1.0 lucene = 10.1.0 bundled_jdk_vendor = openjdk diff --git a/docs/reference/migration/index.asciidoc b/docs/reference/migration/index.asciidoc index 11aca45b003fa..1624910aa3837 100644 --- a/docs/reference/migration/index.asciidoc +++ b/docs/reference/migration/index.asciidoc @@ -1,6 +1,8 @@ include::migration_intro.asciidoc[] +* <> * <> +include::migrate_9_1.asciidoc[] include::migrate_9_0.asciidoc[] diff --git a/docs/reference/migration/migrate_9_1.asciidoc b/docs/reference/migration/migrate_9_1.asciidoc new file mode 100644 index 0000000000000..07966c04ae109 --- /dev/null +++ b/docs/reference/migration/migrate_9_1.asciidoc @@ -0,0 +1,20 @@ +[[migrating-9.1]] +== Migrating to 9.1 +++++ +9.1 +++++ + +This section discusses the changes that you need to be aware of when migrating +your application to {es} 9.1. + +See also <> and <>. + +coming::[9.1.0] + + +[discrete] +[[breaking-changes-9.1]] +=== Breaking changes + +There are no breaking changes in {es} 9.1. + diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index 615e7135365cd..85a5af66aceb7 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -6,9 +6,11 @@ This section summarizes the changes in each release. +* <> * <> -- +include::release-notes/9.1.0.asciidoc[] include::release-notes/9.0.0.asciidoc[] diff --git a/docs/reference/release-notes/9.1.0.asciidoc b/docs/reference/release-notes/9.1.0.asciidoc new file mode 100644 index 0000000000000..2f614615005f1 --- /dev/null +++ b/docs/reference/release-notes/9.1.0.asciidoc @@ -0,0 +1,8 @@ +[[release-notes-9.1.0]] +== {es} version 9.1.0 + +coming[9.1.0] + +Also see <>. + + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index b87081639c684..eeac565778289 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -8,6 +8,11 @@ ifeval::["{release-state}"!="unreleased"] For detailed information about this release, see the <> and <>. 
+// Add previous release to the list +Other versions: + +{ref-bare}/9.0/release-highlights.html[9.0] + endif::[] // The notable-highlights tag marks entries that diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 1f4ab769182dc..49f1f6c0d4a13 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -199,7 +199,8 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_18_0 = new Version(8_18_00_99); public static final Version V_8_19_0 = new Version(8_19_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); - public static final Version CURRENT = V_9_0_0; + public static final Version V_9_1_0 = new Version(9_01_00_99); + public static final Version CURRENT = V_9_1_0; private static final NavigableMap VERSION_IDS; private static final Map VERSION_STRINGS; From d32583cd80f58045023583061ac2d370a544e2cb Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Thu, 30 Jan 2025 19:02:37 +0200 Subject: [PATCH 295/383] [TEST] Clean up skipped tests in rest-compat mode (#121298) * Skip tests in rest-compat that fail due to versioning issues * Clean up skipped tests in rest-compat mode * restore some * restore some --- rest-api-spec/build.gradle | 39 -------------------------------------- 1 file changed, 39 deletions(-) diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index c8861ecaea4b2..0069e34fe949e 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -62,33 +62,7 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("cat.aliases/10_basic/Deprecated local parameter", "CAT APIs not covered by compatibility policy") task.skipTest("cat.shards/10_basic/Help", "sync_id is removed in 9.0") task.skipTest("search/500_date_range/from, to, include_lower, include_upper deprecated", "deprecated parameters are removed in 9.0") - task.skipTest("logsdb/10_settings/logsdb with default ignore dynamic beyond limit and default sorting", "skip until pr/118968 gets backported") - task.skipTest("logsdb/10_settings/logsdb with default ignore dynamic beyond limit and too low limit", "skip until pr/118968 gets backported") - task.skipTest("logsdb/10_settings/logsdb with default ignore dynamic beyond limit and subobjects false", "skip until pr/118968 gets backported") - task.skipTest("logsdb/10_settings/override sort missing settings", "skip until pr/118968 gets backported") - task.skipTest("logsdb/10_settings/override sort order settings", "skip until pr/118968 gets backported") - task.skipTest("logsdb/10_settings/override sort mode settings", "skip until pr/118968 gets backported") - task.skipTest("search.vectors/41_knn_search_bbq_hnsw/Test knn search", "Scoring has changed in latest versions") - task.skipTest("search.vectors/42_knn_search_bbq_flat/Test knn search", "Scoring has changed in latest versions") - task.skipTest("search.vectors/180_update_dense_vector_type/Test create and update dense vector mapping with bulk indexing", "waiting for #118774 backport") - task.skipTest("search.vectors/160_knn_query_missing_params/kNN query in a bool clause - missing num_candidates", "waiting for #118774 backport") - task.skipTest("search.vectors/110_knn_query_with_filter/Simple knn query", "waiting for #118774 backport") - task.skipTest("search.vectors/160_knn_query_missing_params/kNN search used in nested field - 
missing num_candidates", "waiting for #118774 backport") - task.skipTest("search.vectors/180_update_dense_vector_type/Test create and update dense vector mapping to int4 with per-doc indexing and flush", "waiting for #118774 backport") - task.skipTest("search.vectors/110_knn_query_with_filter/PRE_FILTER: knn query with internal filter as pre-filter", "waiting for #118774 backport") - task.skipTest("search.vectors/180_update_dense_vector_type/Index, update and merge", "waiting for #118774 backport") - task.skipTest("search.vectors/160_knn_query_missing_params/kNN query with missing num_candidates param - size provided", "waiting for #118774 backport") - task.skipTest("search.vectors/110_knn_query_with_filter/POST_FILTER: knn query with filter from a parent bool query as post-filter", "waiting for #118774 backport") - task.skipTest("search.vectors/120_knn_query_multiple_shards/Aggregations with collected number of docs depends on num_candidates", "waiting for #118774 backport") - task.skipTest("search.vectors/180_update_dense_vector_type/Test create and update dense vector mapping with per-doc indexing and flush", "waiting for #118774 backport") - task.skipTest("search.vectors/110_knn_query_with_filter/PRE_FILTER: knn query with alias filter as pre-filter", "waiting for #118774 backport") - task.skipTest("search.vectors/140_knn_query_with_other_queries/Function score query with knn query", "waiting for #118774 backport") - task.skipTest("search.vectors/130_knn_query_nested_search/nested kNN search inner_hits size > 1", "waiting for #118774 backport") - task.skipTest("search.vectors/110_knn_query_with_filter/PRE_FILTER: pre-filter across multiple aliases", "waiting for #118774 backport") - task.skipTest("search.vectors/160_knn_query_missing_params/kNN search in a dis_max query - missing num_candidates", "waiting for #118774 backport") task.skipTest("search.highlight/30_max_analyzed_offset/Plain highlighter with max_analyzed_offset < 0 should FAIL", "semantics of test has changed") - task.skipTest("indices.create/10_basic/Create lookup index", "default auto_expand_replicas was removed") - task.skipTest("indices.create/10_basic/Create lookup index with one shard", "default auto_expand_replicas was removed") task.skipTest("range/20_synthetic_source/Double range", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("range/20_synthetic_source/Float range", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("range/20_synthetic_source/Integer range", "_source.mode mapping attribute is no-op since 9.0.0") @@ -99,17 +73,4 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("index/92_metrics_auto_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("index/91_metrics_no_subobjects/Metrics object indexing with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("index/91_metrics_no_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") - task.skipTest("logsdb/10_settings/routing path allowed in logs mode with routing on sort fields", "Unknown feature routing.logsb_route_on_sort_fields") - - task.skipTest("search/520_fetch_fields/fetch _seq_no via fields", "Skip until versioning between 8.x and 9.x is restored") - task.skipTest("search/90_search_after/_shard_doc sort", "Skip until versioning between 8.x and 9.x is restored") - 
task.skipTest("tsdb/90_unsupported_operations/search with routing", "Skip until versioning between 8.x and 9.x is restored") - task.skipTest("tsdb/90_unsupported_operations/noop update", "Skip until versioning between 8.x and 9.x is restored") - task.skipTest("tsdb/80_index_resize/split", "Skip until versioning between 8.x and 9.x is restored") - task.skipTest("tsdb/90_unsupported_operations/index with routing over _bulk", "Skip until versioning between 8.x and 9.x is restored") - task.skipTest("tsdb/90_unsupported_operations/index with routing", "Skip until versioning between 8.x and 9.x is restored") - task.skipTest("tsdb/90_unsupported_operations/update over _bulk", "Skip until versioning between 8.x and 9.x is restored") - task.skipTest("tsdb/90_unsupported_operations/regular update", "Skip until versioning between 8.x and 9.x is restored") - task.skipTest("tsdb/25_id_generation/delete over _bulk", "Skip until versioning between 8.x and 9.x is restored") - task.skipTest("tsdb/20_mapping/exact match object type", "Skip until versioning between 8.x and 9.x is restored") }) From d97b736662cd103ef5163f8dcf854d8aed5b48a2 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 30 Jan 2025 09:34:41 -0800 Subject: [PATCH 296/383] Preparation for 9.0.0-beta1 release --- .buildkite/scripts/dra-workflow.trigger.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.buildkite/scripts/dra-workflow.trigger.sh b/.buildkite/scripts/dra-workflow.trigger.sh index 43c4b42ecf006..71998b06bd3d2 100755 --- a/.buildkite/scripts/dra-workflow.trigger.sh +++ b/.buildkite/scripts/dra-workflow.trigger.sh @@ -7,8 +7,8 @@ echo "steps:" source .buildkite/scripts/branches.sh for BRANCH in "${BRANCHES[@]}"; do - if [[ "$BRANCH" == "main" ]]; then - export VERSION_QUALIFIER="alpha1" + if [[ "$BRANCH" == "9.0" ]]; then + export VERSION_QUALIFIER="beta1" fi INTAKE_PIPELINE_SLUG="elasticsearch-intake" From 9ad3f680d91d0510af53d78e42699197aea5a512 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 04:36:05 +1100 Subject: [PATCH 297/383] Mute org.elasticsearch.env.NodeEnvironmentTests testGetBestDowngradeVersion #121316 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index d093c905bde51..b9f018cdfa54a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -370,6 +370,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/121293 - class: org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests issue: https://github.com/elastic/elasticsearch/issues/121294 +- class: org.elasticsearch.env.NodeEnvironmentTests + method: testGetBestDowngradeVersion + issue: https://github.com/elastic/elasticsearch/issues/121316 # Examples: # From b381a1ddfcb70eb50b1859f130cdd580eaf33043 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Thu, 30 Jan 2025 14:55:32 -0500 Subject: [PATCH 298/383] [ES|QL] Fix MapExpression and named parameter related tests in StatementParserTests (#121075) * fix StatementParserTests.testNamedFunctionArgumentInMap, testNamedFunctionArgumentWithUnsupportedNamedParameterTypes * separate snapshot test from release test --------- Co-authored-by: Elastic Machine --- muted-tests.yml | 3 -- .../esql/parser/StatementParserTests.java | 33 ++++++++++++++++--- 2 files changed, 29 insertions(+), 7 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 
b9f018cdfa54a..ad2e56501082d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -267,9 +267,6 @@ tests: - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT method: test {p0=data_stream/140_data_stream_aliases/Create data stream alias with filter} issue: https://github.com/elastic/elasticsearch/issues/121014 -- class: org.elasticsearch.xpack.esql.parser.StatementParserTests - method: testNamedFunctionArgumentInMap - issue: https://github.com/elastic/elasticsearch/issues/121020 - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testSuggestProfilesWithName issue: https://github.com/elastic/elasticsearch/issues/121022 diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 9bbada3cca53b..1e1629e6f993b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -2466,8 +2466,29 @@ by fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"opt assertEquals(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields()); UnresolvedRelation ur = as(dissect.child(), UnresolvedRelation.class); assertEquals(ur, relation("test")); + } + public void testNamedFunctionArgumentInMapWithNamedParameters() { // map entry values provided in named parameter, arrays are not supported by named parameters yet + assumeTrue( + "named parameters for identifiers and patterns require snapshot build", + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() + ); + LinkedHashMap expectedMap1 = new LinkedHashMap<>(4); + expectedMap1.put("option1", "string"); + expectedMap1.put("option2", 1); + expectedMap1.put("option3", List.of(2.0, 3.0, 4.0)); + expectedMap1.put("option4", List.of(true, false)); + LinkedHashMap expectedMap2 = new LinkedHashMap<>(4); + expectedMap2.put("option1", List.of("string1", "string2")); + expectedMap2.put("option2", List.of(1, 2, 3)); + expectedMap2.put("option3", 2.0); + expectedMap2.put("option4", true); + LinkedHashMap expectedMap3 = new LinkedHashMap<>(4); + expectedMap3.put("option1", "string"); + expectedMap3.put("option2", 2.0); + expectedMap3.put("option3", List.of(1, 2, 3)); + expectedMap3.put("option4", List.of(true, false)); assertEquals( new Filter( EMPTY, @@ -2565,7 +2586,7 @@ by fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"opt ) ); - plan = statement( + LogicalPlan plan = statement( """ from test | dissect ?fn1(?n1, ?n2, {"option1":?n3,"option2":?n4,"option3":[2.0,3.0,4.0],"option4":[true,false]}) "%{bar}" @@ -2585,16 +2606,16 @@ by fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"opt ) ) ); - grok = as(plan, Grok.class); + Grok grok = as(plan, Grok.class); assertEquals(function("fn2", List.of(attribute("f3"), mapExpression(expectedMap2))), grok.input()); assertEquals("%{WORD:foo}", grok.parser().pattern()); assertEquals(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields()); - dissect = as(grok.child(), Dissect.class); + Dissect dissect = as(grok.child(), Dissect.class); assertEquals(function("fn1", List.of(attribute("f1"), attribute("f2"), mapExpression(expectedMap1))), dissect.input()); assertEquals("%{bar}", dissect.parser().pattern()); assertEquals("", dissect.parser().appendSeparator()); 
assertEquals(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields()); - ur = as(dissect.child(), UnresolvedRelation.class); + UnresolvedRelation ur = as(dissect.child(), UnresolvedRelation.class); assertEquals(ur, relation("test")); } @@ -2860,6 +2881,10 @@ public void testNamedFunctionArgumentInInvalidPositions() { } public void testNamedFunctionArgumentWithUnsupportedNamedParameterTypes() { + assumeTrue( + "named parameters for identifiers and patterns require snapshot build", + EsqlCapabilities.Cap.NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES_SIMPLIFIED_SYNTAX.isEnabled() + ); Map commands = Map.ofEntries( Map.entry("eval x = {}", "29"), Map.entry("where {}", "26"), From 385a4a60d9345fb9cabc658ff750a932c3fb3a24 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 30 Jan 2025 11:59:01 -0800 Subject: [PATCH 299/383] Ignore main branch in staging DRA jobs --- .buildkite/scripts/dra-workflow.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/scripts/dra-workflow.sh b/.buildkite/scripts/dra-workflow.sh index d6bc3063fab75..aa3e871b6dc18 100755 --- a/.buildkite/scripts/dra-workflow.sh +++ b/.buildkite/scripts/dra-workflow.sh @@ -6,7 +6,7 @@ WORKFLOW="${DRA_WORKFLOW:-snapshot}" BRANCH="${BUILDKITE_BRANCH:-}" # Don't publish main branch to staging -if [[ "$BRANCH" == *.x && "$WORKFLOW" == "staging" ]]; then +if [[ ("$BRANCH" == "main" || "$BRANCH" == *.x) && "$WORKFLOW" == "staging" ]]; then exit 0 fi From b3a23df83ffdca5a0cef17412efb561a69aec0dc Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 08:29:51 +1100 Subject: [PATCH 300/383] Mute org.elasticsearch.index.engine.ShuffleForcedMergePolicyTests testDiagnostics #121336 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index ad2e56501082d..c237820cbb69a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -370,6 +370,9 @@ tests: - class: org.elasticsearch.env.NodeEnvironmentTests method: testGetBestDowngradeVersion issue: https://github.com/elastic/elasticsearch/issues/121316 +- class: org.elasticsearch.index.engine.ShuffleForcedMergePolicyTests + method: testDiagnostics + issue: https://github.com/elastic/elasticsearch/issues/121336 # Examples: # From 6553f0ffcc8fba7d8b7f646ddfd35a2744d93443 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 08:39:48 +1100 Subject: [PATCH 301/383] Mute org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT test {yaml=reference/rest-api/security/invalidate-tokens/line_194} #121337 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index c237820cbb69a..f8e77e9657119 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -373,6 +373,9 @@ tests: - class: org.elasticsearch.index.engine.ShuffleForcedMergePolicyTests method: testDiagnostics issue: https://github.com/elastic/elasticsearch/issues/121336 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/rest-api/security/invalidate-tokens/line_194} + issue: https://github.com/elastic/elasticsearch/issues/121337 # Examples: # From b66035998f0aedf56fbb9156398c856663a15631 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 08:40:48 +1100 Subject: [PATCH 302/383] Mute org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT test 
{yaml=reference/rest-api/common-options/line_125} #121338 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f8e77e9657119..e8ef5992d3e95 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -376,6 +376,9 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/rest-api/security/invalidate-tokens/line_194} issue: https://github.com/elastic/elasticsearch/issues/121337 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/rest-api/common-options/line_125} + issue: https://github.com/elastic/elasticsearch/issues/121338 # Examples: # From 6cf38353c87fe7f48ae8dc3d85b0cae496d631d3 Mon Sep 17 00:00:00 2001 From: Larisa Motova Date: Thu, 30 Jan 2025 11:53:56 -1000 Subject: [PATCH 303/383] [ES|QL] Add aggregate metric double feature flag to its capability (#121318) AggregateMetricDouble should be behind a feature flag but on release builds it was getting added when it should not have been. This commit addresses that bug. --- .../org/elasticsearch/xpack/esql/action/EsqlCapabilities.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 20de3e443107d..25518220e308b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -20,6 +20,8 @@ import java.util.Locale; import java.util.Set; +import static org.elasticsearch.xpack.esql.core.plugin.EsqlCorePlugin.AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG; + /** * A {@link Set} of "capabilities" supported by the {@link RestEsqlQueryAction} * and {@link RestEsqlAsyncQueryAction} APIs. These are exposed over the @@ -784,7 +786,7 @@ public enum Cap { /** * Support for aggregate_metric_double type */ - AGGREGATE_METRIC_DOUBLE; + AGGREGATE_METRIC_DOUBLE(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG.isEnabled()); private final boolean enabled; From 1f95d2178a271b6505f4564938eab64d6e2164df Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 09:03:07 +1100 Subject: [PATCH 304/383] Mute org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_751} #121345 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index e8ef5992d3e95..7e6b30aba38b8 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -379,6 +379,9 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/rest-api/common-options/line_125} issue: https://github.com/elastic/elasticsearch/issues/121338 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_751} + issue: https://github.com/elastic/elasticsearch/issues/121345 # Examples: # From eeb745cfa2f34d61c48ccd57db8d832315344cab Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Thu, 30 Jan 2025 16:09:04 -0600 Subject: [PATCH 305/383] ReindexDataStreamIndex bug in assertion caused by reference equality (#121325) Assertion was using reference equality on two boxed longs. So assertion could produce false positives. 
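For illustration, a minimal standalone sketch (not part of this patch) of the pitfall: '==' on two boxed Long values compares references, so equal counts outside the small-value cache still compare unequal, while java.util.Objects.equals compares the underlying values and also tolerates nulls.

    import java.util.Objects;

    class BoxedLongComparison {
        public static void main(String[] args) {
            Long sourceCount = 1000L; // boxed values outside the Long cache (-128..127)
            Long destCount = 1000L;
            System.out.println(sourceCount == destCount);               // false: reference identity
            System.out.println(Objects.equals(sourceCount, destCount)); // true: value comparison, null-safe
        }
    }
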
Change to Objects.equals to check value and avoid null check. --- docs/changelog/121325.yaml | 5 +++++ .../action/ReindexDataStreamIndexTransportAction.java | 3 ++- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/121325.yaml diff --git a/docs/changelog/121325.yaml b/docs/changelog/121325.yaml new file mode 100644 index 0000000000000..9a9edc67d19fa --- /dev/null +++ b/docs/changelog/121325.yaml @@ -0,0 +1,5 @@ +pr: 121325 +summary: '`ReindexDataStreamIndex` bug in assertion caused by reference equality' +area: Data streams +type: bug +issues: [] diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java index 8c12011ca4bb1..e8110886cbd5a 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java @@ -54,6 +54,7 @@ import java.util.Locale; import java.util.Map; +import java.util.Objects; import static org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock.WRITE; @@ -372,7 +373,7 @@ private void sanityCheck( listener.delegateFailureAndWrap((delegate, ignored) -> { getIndexDocCount(sourceIndexName, parentTaskId, delegate.delegateFailureAndWrap((delegate1, sourceCount) -> { getIndexDocCount(destIndexName, parentTaskId, delegate1.delegateFailureAndWrap((delegate2, destCount) -> { - assert sourceCount == destCount + assert Objects.equals(sourceCount, destCount) : String.format( Locale.ROOT, "source index [%s] has %d docs and dest [%s] has %d docs", From c8aa582e7c2136a00635b33a59e736f23a751ba8 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 09:14:26 +1100 Subject: [PATCH 306/383] Mute org.elasticsearch.xpack.security.profile.ProfileIntegTests testHasPrivileges #121346 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 7e6b30aba38b8..7ffd4e477b419 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -382,6 +382,9 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_751} issue: https://github.com/elastic/elasticsearch/issues/121345 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testHasPrivileges + issue: https://github.com/elastic/elasticsearch/issues/121346 # Examples: # From 884b61e1cca7593cc2e7e83d23455c8a3d35ce5d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 09:14:40 +1100 Subject: [PATCH 307/383] Mute org.elasticsearch.xpack.security.profile.ProfileIntegTests testActivateProfile #121151 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 7ffd4e477b419..1cc851e2b5855 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -385,6 +385,9 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testHasPrivileges issue: https://github.com/elastic/elasticsearch/issues/121346 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testActivateProfile + issue: https://github.com/elastic/elasticsearch/issues/121151 # Examples: 
# From 0d2db063ec4d8e95941e3b92dc150e294e5f0689 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 30 Jan 2025 23:17:47 +0100 Subject: [PATCH 308/383] Update ESRestTestCase's ROLLUP_REQUESTS_OPTIONS (#121335) Sometimes there are multiple warning. --- .../org/elasticsearch/test/rest/ESRestTestCase.java | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 5738ab60f47eb..40b2bae2fc4b1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -169,14 +169,16 @@ public abstract class ESRestTestCase extends ESTestCase { private static final String EXPECTED_ROLLUP_WARNING_MESSAGE = "The rollup functionality will be removed in Elasticsearch 10.0. See docs for more information."; public static final RequestOptions.Builder ROLLUP_REQUESTS_OPTIONS = RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> { - // Either no warning, because of bwc integration test OR - // the expected warning, because on current version if (warnings.isEmpty()) { return false; - } else if (warnings.size() == 1 && EXPECTED_ROLLUP_WARNING_MESSAGE.equals(warnings.get(0))) { - return false; } else { - return true; + // Sometimes multiple rollup deprecation warnings. Transport actions can be invoked multiple time on different nodes. + for (String warning : warnings) { + if (EXPECTED_ROLLUP_WARNING_MESSAGE.equals(warning) == false) { + return true; + } + } + return false; } }); From 1c1f1950185abb7529b4f812864403c7887c2b50 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 31 Jan 2025 10:20:16 +1100 Subject: [PATCH 309/383] Mute org.elasticsearch.smoketest.SmokeTestMultiNodeClientYamlTestSuiteIT test {yaml=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only} #121350 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1cc851e2b5855..f6413da66de9d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -388,6 +388,9 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testActivateProfile issue: https://github.com/elastic/elasticsearch/issues/121151 +- class: org.elasticsearch.smoketest.SmokeTestMultiNodeClientYamlTestSuiteIT + method: test {yaml=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only} + issue: https://github.com/elastic/elasticsearch/issues/121350 # Examples: # From 1225b0720aba89247c14727bf2e8e29a1af96cd0 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Thu, 30 Jan 2025 15:26:58 -0800 Subject: [PATCH 310/383] Fix propagation of dynamic mapping parameter when applying copy_to (#121109) --- docs/changelog/121109.yaml | 6 +++ rest-api-spec/build.gradle | 1 + .../rest-api-spec/test/mapping/10_copy_to.yml | 41 +++++++++++++++++++ .../index/mapper/DocumentParserContext.java | 8 ++++ .../index/mapper/MapperFeatures.java | 4 +- .../DefaultMappingParametersHandler.java | 7 +--- 6 files changed, 60 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/121109.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mapping/10_copy_to.yml diff --git a/docs/changelog/121109.yaml b/docs/changelog/121109.yaml new file mode 100644 index 
0000000000000..6492eccbf975a --- /dev/null +++ b/docs/changelog/121109.yaml @@ -0,0 +1,6 @@ +pr: 121109 +summary: Fix propagation of dynamic mapping parameter when applying `copy_to` +area: Mapping +type: bug +issues: + - 113049 diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 0069e34fe949e..67f0b5a0714a9 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -73,4 +73,5 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("index/92_metrics_auto_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("index/91_metrics_no_subobjects/Metrics object indexing with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("index/91_metrics_no_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("indices.create/20_synthetic_source/synthetic_source with copy_to inside nested object", "temporary until backported") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mapping/10_copy_to.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mapping/10_copy_to.yml new file mode 100644 index 0000000000000..58f09ec71ad61 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mapping/10_copy_to.yml @@ -0,0 +1,41 @@ +--- +copy_to from object with dynamic strict to dynamic field: + - requires: + cluster_features: ["mapper.copy_to.dynamic_handling"] + reason: requires a fix + + - do: + indices.create: + index: test + body: + mappings: + properties: + one: + dynamic: strict + properties: + k: + type: keyword + copy_to: two.k + + - do: + index: + index: test + id: 1 + refresh: true + body: + one: + k: "hey" + + - do: + search: + index: test + body: + docvalue_fields: [ "two.k.keyword" ] + + - match: + hits.hits.0._source: + one: + k: "hey" + - match: + hits.hits.0.fields: + two.k.keyword: [ "hey" ] diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index ba9e902fee5d9..127ec05b25e63 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -50,6 +50,13 @@ private Wrapper(ObjectMapper parent, DocumentParserContext in) { this.in = in; } + // Used to create a copy_to context. + // It is important to reset `dynamic` here since it is possible that we copy into a completely different object. + private Wrapper(RootObjectMapper root, DocumentParserContext in) { + super(root, ObjectMapper.Dynamic.getRootDynamic(in.mappingLookup()), in); + this.in = in; + } + @Override public Iterable nonRootDocuments() { return in.nonRootDocuments(); @@ -711,6 +718,7 @@ in synthetic _source (to be consistent with stored _source). 
ContentPath path = new ContentPath(); XContentParser parser = DotExpandingXContentParser.expandDots(new CopyToParser(copyToField, parser()), path); + return new Wrapper(root(), this) { @Override public ContentPath path() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 0935e219fb5c0..7567fae7d73e6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -30,6 +30,7 @@ public class MapperFeatures implements FeatureSpecification { public static final NodeFeature META_FETCH_FIELDS_ERROR_CODE_CHANGED = new NodeFeature("meta_fetch_fields_error_code_changed"); public static final NodeFeature SPARSE_VECTOR_STORE_SUPPORT = new NodeFeature("mapper.sparse_vector.store_support"); public static final NodeFeature SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX = new NodeFeature("mapper.nested.sorting_fields_check_fix"); + public static final NodeFeature DYNAMIC_HANDLING_IN_COPY_TO = new NodeFeature("mapper.copy_to.dynamic_handling"); @Override public Set getTestFeatures() { @@ -45,8 +46,9 @@ public Set getTestFeatures() { CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX, META_FETCH_FIELDS_ERROR_CODE_CHANGED, SPARSE_VECTOR_STORE_SUPPORT, - SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX, COUNTED_KEYWORD_SYNTHETIC_SOURCE_NATIVE_SUPPORT, + SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX, + DYNAMIC_HANDLING_IN_COPY_TO, SourceFieldMapper.SYNTHETIC_RECOVERY_SOURCE, ObjectMapper.SUBOBJECTS_FALSE_MAPPING_UPDATE_FIX ); diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java index b639108ea6ad2..04cb9467270d4 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java @@ -11,7 +11,6 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.ObjectMapper; -import org.elasticsearch.logsdb.datageneration.fields.DynamicMapping; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -50,11 +49,7 @@ private Supplier> keywordMapping( // We only add copy_to to keywords because we get into trouble with numeric fields that are copied to dynamic fields. // If first copied value is numeric, dynamic field is created with numeric field type and then copy of text values fail. // Actual value being copied does not influence the core logic of copy_to anyway. - // - // TODO - // We don't use copy_to on fields that are inside an object with dynamic: strict - // because we'll hit https://github.com/elastic/elasticsearch/issues/113049. 
- if (request.dynamicMapping() != DynamicMapping.FORBIDDEN && ESTestCase.randomDouble() <= 0.05) { + if (ESTestCase.randomDouble() <= 0.05) { var options = request.eligibleCopyToFields() .stream() .filter(f -> f.equals(request.fieldName()) == false) From 09b1c6d912d8e4777758f577a131ea09c968d782 Mon Sep 17 00:00:00 2001 From: Saikat Sarkar <132922331+saikatsarkar056@users.noreply.github.com> Date: Thu, 30 Jan 2025 21:40:04 -0700 Subject: [PATCH 311/383] Integrate watsonx for re-ranking task (#117176) * Integrate watsonx reranking to inference api * Add api_version to the watsonx api call * Fix the return_doc option * Add top_n parameter to task_settings * Add truncate_input_tokens parameter to task_settings * Add test for IbmWatonxRankedResponseEntity * Add test for IbmWatonxRankedRequestEntity * Add test for IbmWatonxRankedRequest * [CI] Auto commit changes from spotless * Add changelog * Fix transport version * Add test for IbmWatsonxService * Remove canHandleStreamingResponses * Add requireNonNull for modelId and projectId * Remove maxInputToken method * Convert all optionals to required * [CI] Auto commit changes from spotless * Set minimal_supported version to be ML_INFERENCE_IBM_WATSONX_RERANK_ADDED * Remove extraction of unused fields from IbmWatsonxRerankServiceSettings * Add space * Add space --------- Co-authored-by: elasticsearchmachine --- docs/changelog/117176.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../InferenceNamedWriteablesProvider.java | 13 ++ .../ibmwatsonx/IbmWatsonxActionCreator.java | 14 +- .../ibmwatsonx/IbmWatsonxActionVisitor.java | 3 + .../IbmWatsonxRerankRequestManager.java | 72 +++++++ .../ibmwatsonx/IbmWatsonxResponseHandler.java | 1 - .../ibmwatsonx/IbmWatsonxRerankRequest.java | 110 ++++++++++ .../IbmWatsonxRerankRequestEntity.java | 77 +++++++ .../request/ibmwatsonx/IbmWatsonxUtils.java | 1 + .../IbmWatsonxRankedResponseEntity.java | 157 ++++++++++++++ .../services/ibmwatsonx/IbmWatsonxModel.java | 7 + .../ibmwatsonx/IbmWatsonxService.java | 10 + .../rerank/IbmWatsonxRerankModel.java | 121 +++++++++++ .../IbmWatsonxRerankServiceSettings.java | 190 +++++++++++++++++ .../rerank/IbmWatsonxRerankTaskSettings.java | 192 ++++++++++++++++++ .../IbmWatsonxRerankRequestEntityTests.java | 60 ++++++ .../rerank/IbmWatsonxRerankRequestTests.java | 107 ++++++++++ .../IbmWatsonxRankedResponseEntityTests.java | 166 +++++++++++++++ .../ibmwatsonx/IbmWatsonxServiceTests.java | 37 ++++ .../rerank/IbmWatsonxRerankModelTests.java | 28 +++ 21 files changed, 1370 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/117176.yaml create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/IbmWatsonxRerankRequestManager.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequest.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequestEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankServiceSettings.java create mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankTaskSettings.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModelTests.java diff --git a/docs/changelog/117176.yaml b/docs/changelog/117176.yaml new file mode 100644 index 0000000000000..26e0d3635bc9e --- /dev/null +++ b/docs/changelog/117176.yaml @@ -0,0 +1,5 @@ +pr: 117176 +summary: Integrate IBM watsonx to Inference API for re-ranking task +area: Experiences +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 8f747a59ae5e0..1144f94795713 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -171,6 +171,7 @@ static TransportVersion def(int id) { public static final TransportVersion LINEAR_RETRIEVER_SUPPORT = def(8_837_00_0); public static final TransportVersion TIMEOUT_GET_PARAM_FOR_RESOLVE_CLUSTER = def(8_838_00_0); public static final TransportVersion INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING = def(8_839_00_0); + public static final TransportVersion ML_INFERENCE_IBM_WATSONX_RERANK_ADDED = def(8_840_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 6fc9870034018..e8dc763116707 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -75,6 +75,8 @@ import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserServiceSettings; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsServiceSettings; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankServiceSettings; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankTaskSettings; import org.elasticsearch.xpack.inference.services.jinaai.JinaAIServiceSettings; import org.elasticsearch.xpack.inference.services.jinaai.embeddings.JinaAIEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.jinaai.embeddings.JinaAIEmbeddingsTaskSettings; @@ -364,6 +366,17 @@ private static void addIbmWatsonxNamedWritables(List namedWriteables) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionCreator.java index 7cad7c42bdcf1..6b1097256e97f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionCreator.java @@ -12,9 +12,11 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.SenderExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.IbmWatsonxEmbeddingsRequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.IbmWatsonxRerankRequestManager; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; import java.util.Map; import java.util.Objects; @@ -22,7 +24,6 @@ import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; public class IbmWatsonxActionCreator implements IbmWatsonxActionVisitor { - private final Sender sender; private final ServiceComponents serviceComponents; @@ -41,6 +42,17 @@ public ExecutableAction create(IbmWatsonxEmbeddingsModel model, Map taskSettings) { + var overriddenModel = IbmWatsonxRerankModel.of(model, taskSettings); + var requestCreator = IbmWatsonxRerankRequestManager.of(overriddenModel, serviceComponents.threadPool()); + var failedToSendRequestErrorMessage = constructFailedToSendRequestMessage( + overriddenModel.getServiceSettings().uri(), + "Ibm Watsonx rerank" + ); + return new SenderExecutableAction(sender, requestCreator, failedToSendRequestErrorMessage); + 
} + protected IbmWatsonxEmbeddingsRequestManager getEmbeddingsRequestManager( IbmWatsonxEmbeddingsModel model, Truncator truncator, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionVisitor.java index 0a13ec2fb4645..474533040e0c3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionVisitor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ibmwatsonx/IbmWatsonxActionVisitor.java @@ -9,9 +9,12 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; import java.util.Map; public interface IbmWatsonxActionVisitor { ExecutableAction create(IbmWatsonxEmbeddingsModel model, Map taskSettings); + + ExecutableAction create(IbmWatsonxRerankModel model, Map taskSettings); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/IbmWatsonxRerankRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/IbmWatsonxRerankRequestManager.java new file mode 100644 index 0000000000000..f503771510e72 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/IbmWatsonxRerankRequestManager.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.ibmwatsonx.IbmWatsonxResponseHandler; +import org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxRerankRequest; +import org.elasticsearch.xpack.inference.external.response.ibmwatsonx.IbmWatsonxRankedResponseEntity; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; + +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +public class IbmWatsonxRerankRequestManager extends IbmWatsonxRequestManager { + private static final Logger logger = LogManager.getLogger(IbmWatsonxRerankRequestManager.class); + private static final ResponseHandler HANDLER = createIbmWatsonxResponseHandler(); + + private static ResponseHandler createIbmWatsonxResponseHandler() { + return new IbmWatsonxResponseHandler( + "ibm watsonx rerank", + (request, response) -> IbmWatsonxRankedResponseEntity.fromResponse(response) + ); + } + + public static IbmWatsonxRerankRequestManager of(IbmWatsonxRerankModel model, ThreadPool threadPool) { + return new IbmWatsonxRerankRequestManager(Objects.requireNonNull(model), Objects.requireNonNull(threadPool)); + } + + private final IbmWatsonxRerankModel model; + + public IbmWatsonxRerankRequestManager(IbmWatsonxRerankModel model, ThreadPool threadPool) { + super(threadPool, model); + this.model = model; + } + + @Override + public void execute( + InferenceInputs inferenceInputs, + RequestSender requestSender, + Supplier hasRequestCompletedFunction, + ActionListener listener + ) { + var rerankInput = QueryAndDocsInputs.of(inferenceInputs); + + execute( + new ExecutableInferenceRequest( + requestSender, + logger, + getRerankRequest(rerankInput.getQuery(), rerankInput.getChunks(), model), + HANDLER, + hasRequestCompletedFunction, + listener + ) + ); + } + + protected IbmWatsonxRerankRequest getRerankRequest(String query, List chunks, IbmWatsonxRerankModel model) { + return new IbmWatsonxRerankRequest(query, chunks, model); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java index 6d1d3fb2a4f91..1f28a8cd61026 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java @@ -17,7 +17,6 @@ import static org.elasticsearch.core.Strings.format; public class IbmWatsonxResponseHandler extends BaseResponseHandler { - public IbmWatsonxResponseHandler(String requestType, ResponseParser parseFunction) { super(requestType, parseFunction, IbmWatsonxErrorResponseEntity::fromResponse); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequest.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequest.java new file mode 100644 index 0000000000000..cfc1f367be45c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequest.java @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.ibmwatsonx; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankTaskSettings; + +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +public class IbmWatsonxRerankRequest implements IbmWatsonxRequest { + + private final String query; + private final List input; + private final IbmWatsonxRerankTaskSettings taskSettings; + private final IbmWatsonxRerankModel model; + + public IbmWatsonxRerankRequest(String query, List input, IbmWatsonxRerankModel model) { + Objects.requireNonNull(model); + + this.input = Objects.requireNonNull(input); + this.query = Objects.requireNonNull(query); + taskSettings = model.getTaskSettings(); + this.model = model; + } + + @Override + public HttpRequest createHttpRequest() { + URI uri; + + try { + uri = new URI(model.uri().toString()); + } catch (URISyntaxException ex) { + throw new IllegalArgumentException("cannot parse URI patter"); + } + + HttpPost httpPost = new HttpPost(uri); + + ByteArrayEntity byteEntity = new ByteArrayEntity( + Strings.toString( + new IbmWatsonxRerankRequestEntity( + query, + input, + taskSettings, + model.getServiceSettings().modelId(), + model.getServiceSettings().projectId() + ) + ).getBytes(StandardCharsets.UTF_8) + ); + + httpPost.setEntity(byteEntity); + httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); + + decorateWithAuth(httpPost); + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + public void decorateWithAuth(HttpPost httpPost) { + IbmWatsonxRequest.decorateWithBearerToken(httpPost, model.getSecretSettings(), model.getInferenceEntityId()); + } + + @Override + public String getInferenceEntityId() { + return model.getInferenceEntityId(); + } + + @Override + public URI getURI() { + return model.uri(); + } + + @Override + public Request truncate() { + return this; + } + + public String getQuery() { + return query; + } + + public List getInput() { + return input; + } + + public IbmWatsonxRerankModel getModel() { + return model; + } + + @Override + public boolean[] getTruncationInfo() { + return null; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequestEntity.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequestEntity.java new file mode 100644 index 0000000000000..36e5951ebdc15 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRerankRequestEntity.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.ibmwatsonx; + +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankTaskSettings; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record IbmWatsonxRerankRequestEntity( + String query, + List inputs, + IbmWatsonxRerankTaskSettings taskSettings, + String modelId, + String projectId +) implements ToXContentObject { + + private static final String INPUTS_FIELD = "inputs"; + private static final String QUERY_FIELD = "query"; + private static final String MODEL_ID_FIELD = "model_id"; + private static final String PROJECT_ID_FIELD = "project_id"; + + public IbmWatsonxRerankRequestEntity { + Objects.requireNonNull(query); + Objects.requireNonNull(inputs); + Objects.requireNonNull(modelId); + Objects.requireNonNull(projectId); + Objects.requireNonNull(taskSettings); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(MODEL_ID_FIELD, modelId); + builder.field(QUERY_FIELD, query); + builder.startArray(INPUTS_FIELD); + for (String input : inputs) { + builder.startObject(); + builder.field("text", input); + builder.endObject(); + } + builder.endArray(); + builder.field(PROJECT_ID_FIELD, projectId); + + builder.startObject("parameters"); + { + if (taskSettings.getTruncateInputTokens() != null) { + builder.field("truncate_input_tokens", taskSettings.getTruncateInputTokens()); + } + + builder.startObject("return_options"); + { + if (taskSettings.getDoesReturnDocuments() != null) { + builder.field("inputs", taskSettings.getDoesReturnDocuments()); + } + if (taskSettings.getTopNDocumentsOnly() != null) { + builder.field("top_n", taskSettings.getTopNDocumentsOnly()); + } + } + builder.endObject(); + } + builder.endObject(); + + builder.endObject(); + + return builder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxUtils.java index a506a33385dfb..91679288e5ae3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxUtils.java @@ -13,6 +13,7 @@ public class IbmWatsonxUtils { public static final String V1 = "v1"; public static final String TEXT = "text"; public static final String EMBEDDINGS = "embeddings"; + public static final String RERANKS = "reranks"; private IbmWatsonxUtils() {} diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntity.java new file mode 100644 index 0000000000000..05f369bd8961e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntity.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + * + * this file was contributed to by a generative AI + */ + +package org.elasticsearch.xpack.inference.external.response.ibmwatsonx; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList; +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; + +public class IbmWatsonxRankedResponseEntity { + + private static final Logger logger = LogManager.getLogger(IbmWatsonxRankedResponseEntity.class); + + /** + * Parses the Ibm Watsonx ranked response. + * + * For a request like: + * "model": "rerank-english-v2.0", + * "query": "database", + * "return_documents": true, + * "top_n": 3, + * "input": ["greenland", "google","john", "mysql","potter", "grammar"] + *

+ * The response will look like (without whitespace): + * { + * "rerank": [ + * { + * "index": 3, + * "relevance_score": 0.7989932 + * }, + * { + * "index": 5, + * "relevance_score": 0.61281824 + * }, + * { + * "index": 1, + * "relevance_score": 0.5762553 + * }, + * { + * "index": 4, + * "relevance_score": 0.47395563 + * }, + * { + * "index": 0, + * "relevance_score": 0.4338926 + * }, + * { + * "index": 2, + * "relevance_score": 0.42638257 + * } + * ], + * } + * + * @param response the http response from ibm watsonx + * @return the parsed response + * @throws IOException if there is an error parsing the response + */ + public static InferenceServiceResults fromResponse(HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "results", FAILED_TO_FIND_FIELD_TEMPLATE); // TODO error message + + token = jsonParser.currentToken(); + if (token == XContentParser.Token.START_ARRAY) { + return new RankedDocsResults(parseList(jsonParser, IbmWatsonxRankedResponseEntity::parseRankedDocObject)); + } else { + throwUnknownToken(token, jsonParser); + } + + // This should never be reached. The above code should either return successfully or hit the throwUnknownToken + // or throw a parsing exception + throw new IllegalStateException("Reached an invalid state while parsing the Watsonx response"); + } + } + + private static RankedDocsResults.RankedDoc parseRankedDocObject(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + int index = -1; + float score = -1; + String documentText = null; + parser.nextToken(); + while (parser.currentToken() != XContentParser.Token.END_OBJECT) { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { + switch (parser.currentName()) { + case "index": + parser.nextToken(); // move to VALUE_NUMBER + index = parser.intValue(); + parser.nextToken(); // move to next FIELD_NAME or END_OBJECT + break; + case "score": + parser.nextToken(); // move to VALUE_NUMBER + score = parser.floatValue(); + parser.nextToken(); // move to next FIELD_NAME or END_OBJECT + break; + case "input": + parser.nextToken(); // move to START_OBJECT; document text is wrapped in an object + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + do { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME && parser.currentName().equals("text")) { + parser.nextToken(); // move to VALUE_STRING + documentText = parser.text(); + } + } while (parser.nextToken() != XContentParser.Token.END_OBJECT); + parser.nextToken();// move past END_OBJECT + // parser should now be at the next FIELD_NAME or END_OBJECT + break; + default: + throwUnknownField(parser.currentName(), parser); + } + } else { + parser.nextToken(); + } + } + + if (index == -1) { + logger.warn("Failed to find required field [index] in Watsonx rerank response"); + } + if (score == -1) { + logger.warn("Failed to find required field [relevance_score] in Watsonx rerank response"); + } + // documentText may or may not be present depending on the request parameter + + return new 
RankedDocsResults.RankedDoc(index, score, documentText); + } + + private IbmWatsonxRankedResponseEntity() {} + + static String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Watsonx rerank response"; +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxModel.java index 4f0b425cdaa51..09706f70e3684 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxModel.java @@ -12,6 +12,7 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.ibmwatsonx.IbmWatsonxActionVisitor; @@ -38,6 +39,12 @@ public IbmWatsonxModel(IbmWatsonxModel model, ServiceSettings serviceSettings) { rateLimitServiceSettings = model.rateLimitServiceSettings(); } + public IbmWatsonxModel(IbmWatsonxModel model, TaskSettings taskSettings) { + super(model, taskSettings); + + rateLimitServiceSettings = model.rateLimitServiceSettings(); + } + public abstract ExecutableAction accept(IbmWatsonxActionVisitor creator, Map taskSettings, InputType inputType); public IbmWatsonxRateLimitServiceSettings rateLimitServiceSettings() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java index 477225f00d22b..3fa423c2dae19 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java @@ -41,6 +41,7 @@ import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsServiceSettings; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder; import java.util.EnumSet; @@ -138,6 +139,15 @@ private static IbmWatsonxModel createModel( secretSettings, context ); + case RERANK -> new IbmWatsonxRerankModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + context + ); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModel.java new file mode 100644 index 0000000000000..cb4c509d88c2b --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModel.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank; + +import org.apache.http.client.utils.URIBuilder; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.ibmwatsonx.IbmWatsonxActionVisitor; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxModel; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxUtils.ML; +import static org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxUtils.RERANKS; +import static org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxUtils.TEXT; +import static org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxUtils.V1; + +public class IbmWatsonxRerankModel extends IbmWatsonxModel { + public static IbmWatsonxRerankModel of(IbmWatsonxRerankModel model, Map taskSettings) { + var requestTaskSettings = IbmWatsonxRerankTaskSettings.fromMap(taskSettings); + return new IbmWatsonxRerankModel(model, IbmWatsonxRerankTaskSettings.of(model.getTaskSettings(), requestTaskSettings)); + } + + public IbmWatsonxRerankModel( + String modelId, + TaskType taskType, + String service, + Map serviceSettings, + Map taskSettings, + @Nullable Map secrets, + ConfigurationParseContext context + ) { + this( + modelId, + taskType, + service, + IbmWatsonxRerankServiceSettings.fromMap(serviceSettings, context), + IbmWatsonxRerankTaskSettings.fromMap(taskSettings), + DefaultSecretSettings.fromMap(secrets) + ); + } + + // should only be used for testing + IbmWatsonxRerankModel( + String modelId, + TaskType taskType, + String service, + IbmWatsonxRerankServiceSettings serviceSettings, + IbmWatsonxRerankTaskSettings taskSettings, + @Nullable DefaultSecretSettings secretSettings + ) { + super( + new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secretSettings), + serviceSettings + ); + } + + private IbmWatsonxRerankModel(IbmWatsonxRerankModel model, IbmWatsonxRerankTaskSettings taskSettings) { + super(model, taskSettings); + } + + @Override + public IbmWatsonxRerankServiceSettings getServiceSettings() { + return (IbmWatsonxRerankServiceSettings) super.getServiceSettings(); + } + + @Override + public IbmWatsonxRerankTaskSettings getTaskSettings() { + return (IbmWatsonxRerankTaskSettings) super.getTaskSettings(); + } + + @Override + public DefaultSecretSettings getSecretSettings() { + return (DefaultSecretSettings) super.getSecretSettings(); + } + + public URI uri() { + URI uri; + try { + uri = buildUri(this.getServiceSettings().uri().toString(), this.getServiceSettings().apiVersion()); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + + return uri; + } + + /** + * Accepts a visitor to create an executable action. 
The returned action will not return documents in the response. + * @param visitor _ + * @param taskSettings _ + * @param inputType ignored for rerank task + * @return the rerank action + */ + @Override + public ExecutableAction accept(IbmWatsonxActionVisitor visitor, Map taskSettings, InputType inputType) { + return visitor.create(this, taskSettings); + } + + public static URI buildUri(String uri, String apiVersion) throws URISyntaxException { + return new URIBuilder().setScheme("https") + .setHost(uri) + .setPathSegments(ML, V1, TEXT, RERANKS) + .setParameter("version", apiVersion) + .build(); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankServiceSettings.java new file mode 100644 index 0000000000000..969622f9ba54f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankServiceSettings.java @@ -0,0 +1,190 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxService; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.io.IOException; +import java.net.URI; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; +import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxServiceFields.API_VERSION; +import static org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxServiceFields.PROJECT_ID; + +public class IbmWatsonxRerankServiceSettings extends FilteredXContentObject implements ServiceSettings, IbmWatsonxRateLimitServiceSettings { + public static final String NAME = "ibm_watsonx_rerank_service_settings"; + + /** + * Rate limits are defined at + * Watson Machine Learning plans. + * For Lite plan, you've 120 requests per minute. 
+ */ + private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(120); + + public static IbmWatsonxRerankServiceSettings fromMap(Map map, ConfigurationParseContext context) { + ValidationException validationException = new ValidationException(); + + String url = extractRequiredString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); + URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); + String apiVersion = extractRequiredString(map, API_VERSION, ModelConfigurations.SERVICE_SETTINGS, validationException); + + String modelId = extractRequiredString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + String projectId = extractRequiredString(map, PROJECT_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + IbmWatsonxService.NAME, + context + ); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new IbmWatsonxRerankServiceSettings(uri, apiVersion, modelId, projectId, rateLimitSettings); + } + + private final URI uri; + + private final String apiVersion; + + private final String modelId; + + private final String projectId; + + private final RateLimitSettings rateLimitSettings; + + public IbmWatsonxRerankServiceSettings( + URI uri, + String apiVersion, + String modelId, + String projectId, + @Nullable RateLimitSettings rateLimitSettings + ) { + this.uri = uri; + this.apiVersion = apiVersion; + this.projectId = projectId; + this.modelId = modelId; + this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS); + } + + public IbmWatsonxRerankServiceSettings(StreamInput in) throws IOException { + this.uri = createUri(in.readString()); + this.apiVersion = in.readString(); + this.modelId = in.readString(); + this.projectId = in.readString(); + this.rateLimitSettings = new RateLimitSettings(in); + + } + + public URI uri() { + return uri; + } + + public String apiVersion() { + return apiVersion; + } + + @Override + public String modelId() { + return modelId; + } + + public String projectId() { + return projectId; + } + + @Override + public RateLimitSettings rateLimitSettings() { + return rateLimitSettings; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + toXContentFragmentOfExposedFields(builder, params); + + builder.endObject(); + return builder; + } + + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + builder.field(URL, uri.toString()); + + builder.field(API_VERSION, apiVersion); + + builder.field(MODEL_ID, modelId); + + builder.field(PROJECT_ID, projectId); + + rateLimitSettings.toXContent(builder, params); + + return builder; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_IBM_WATSONX_RERANK_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(uri.toString()); + out.writeString(apiVersion); + + out.writeString(modelId); + out.writeString(projectId); + + rateLimitSettings.writeTo(out); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if 
(object == null || getClass() != object.getClass()) return false; + IbmWatsonxRerankServiceSettings that = (IbmWatsonxRerankServiceSettings) object; + return Objects.equals(uri, that.uri) + && Objects.equals(apiVersion, that.apiVersion) + && Objects.equals(modelId, that.modelId) + && Objects.equals(projectId, that.projectId) + && Objects.equals(rateLimitSettings, that.rateLimitSettings); + } + + @Override + public int hashCode() { + return Objects.hash(uri, apiVersion, modelId, projectId, rateLimitSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankTaskSettings.java new file mode 100644 index 0000000000000..12f4b8f6fa33e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankTaskSettings.java @@ -0,0 +1,192 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalBoolean; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; + +public class IbmWatsonxRerankTaskSettings implements TaskSettings { + + public static final String NAME = "ibm_watsonx_rerank_task_settings"; + public static final String RETURN_DOCUMENTS = "return_documents"; + public static final String TOP_N_DOCS_ONLY = "top_n"; + public static final String TRUNCATE_INPUT_TOKENS = "truncate_input_tokens"; + + static final IbmWatsonxRerankTaskSettings EMPTY_SETTINGS = new IbmWatsonxRerankTaskSettings(null, null, null); + + public static IbmWatsonxRerankTaskSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + if (map == null || map.isEmpty()) { + return EMPTY_SETTINGS; + } + + Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, validationException); + Integer topNDocumentsOnly = extractOptionalPositiveInteger( + map, + TOP_N_DOCS_ONLY, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + Integer truncateInputTokens = extractOptionalPositiveInteger( + map, + TRUNCATE_INPUT_TOKENS, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return of(topNDocumentsOnly, returnDocuments, truncateInputTokens); + } + + /** + * Creates a new {@link IbmWatsonxRerankTaskSettings} + * by preferring 
non-null fields from the request settings over the original settings. + * + * @param originalSettings the settings stored as part of the inference entity configuration + * @param requestTaskSettings the settings passed in within the task_settings field of the request + * @return a constructed {@link IbmWatsonxRerankTaskSettings} + */ + public static IbmWatsonxRerankTaskSettings of( + IbmWatsonxRerankTaskSettings originalSettings, + IbmWatsonxRerankTaskSettings requestTaskSettings + ) { + return new IbmWatsonxRerankTaskSettings( + requestTaskSettings.getTopNDocumentsOnly() != null + ? requestTaskSettings.getTopNDocumentsOnly() + : originalSettings.getTopNDocumentsOnly(), + requestTaskSettings.getReturnDocuments() != null + ? requestTaskSettings.getReturnDocuments() + : originalSettings.getReturnDocuments(), + requestTaskSettings.getTruncateInputTokens() != null + ? requestTaskSettings.getTruncateInputTokens() + : originalSettings.getTruncateInputTokens() + ); + } + + public static IbmWatsonxRerankTaskSettings of(Integer topNDocumentsOnly, Boolean returnDocuments, Integer maxChunksPerDoc) { + return new IbmWatsonxRerankTaskSettings(topNDocumentsOnly, returnDocuments, maxChunksPerDoc); + } + + private final Integer topNDocumentsOnly; + private final Boolean returnDocuments; + private final Integer truncateInputTokens; + + public IbmWatsonxRerankTaskSettings(StreamInput in) throws IOException { + this(in.readOptionalInt(), in.readOptionalBoolean(), in.readOptionalInt()); + } + + public IbmWatsonxRerankTaskSettings( + @Nullable Integer topNDocumentsOnly, + @Nullable Boolean doReturnDocuments, + @Nullable Integer truncateInputTokens + ) { + this.topNDocumentsOnly = topNDocumentsOnly; + this.returnDocuments = doReturnDocuments; + this.truncateInputTokens = truncateInputTokens; + } + + @Override + public boolean isEmpty() { + return topNDocumentsOnly == null && returnDocuments == null && truncateInputTokens == null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (topNDocumentsOnly != null) { + builder.field(TOP_N_DOCS_ONLY, topNDocumentsOnly); + } + if (returnDocuments != null) { + builder.field(RETURN_DOCUMENTS, returnDocuments); + } + if (truncateInputTokens != null) { + builder.field(TRUNCATE_INPUT_TOKENS, truncateInputTokens); + } + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_IBM_WATSONX_RERANK_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalInt(topNDocumentsOnly); + out.writeOptionalBoolean(returnDocuments); + out.writeOptionalInt(truncateInputTokens); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IbmWatsonxRerankTaskSettings that = (IbmWatsonxRerankTaskSettings) o; + return Objects.equals(returnDocuments, that.returnDocuments) + && Objects.equals(topNDocumentsOnly, that.topNDocumentsOnly) + && Objects.equals(truncateInputTokens, that.truncateInputTokens); + } + + @Override + public int hashCode() { + return Objects.hash(returnDocuments, topNDocumentsOnly, truncateInputTokens); + } + + public static String invalidInputTypeMessage(InputType inputType) { + return Strings.format("received invalid input type value [%s]", inputType.toString()); + } + + public Boolean 
getDoesReturnDocuments() { + return returnDocuments; + } + + public Integer getTopNDocumentsOnly() { + return topNDocumentsOnly; + } + + public Boolean getReturnDocuments() { + return returnDocuments; + } + + public Integer getTruncateInputTokens() { + return truncateInputTokens; + } + + @Override + public TaskSettings updatedTaskSettings(Map newSettings) { + IbmWatsonxRerankTaskSettings updatedSettings = IbmWatsonxRerankTaskSettings.fromMap(new HashMap<>(newSettings)); + return IbmWatsonxRerankTaskSettings.of(this, updatedSettings); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestEntityTests.java new file mode 100644 index 0000000000000..8278b76a1cee4 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestEntityTests.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.ibmwatsonx.rerank; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxRerankRequestEntity; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankTaskSettings; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.inference.MatchersUtils.equalToIgnoringWhitespaceInJsonString; + +public class IbmWatsonxRerankRequestEntityTests extends ESTestCase { + public void testXContent_Request() throws IOException { + IbmWatsonxRerankTaskSettings taskSettings = new IbmWatsonxRerankTaskSettings(5, true, 100); + var entity = new IbmWatsonxRerankRequestEntity( + "database", + List.of("greenland", "google", "john", "mysql", "potter", "grammar"), + taskSettings, + "model", + "project_id" + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString(""" + {"model_id":"model", + "query":"database", + "inputs":[ + {"text":"greenland"}, + {"text":"google"}, + {"text":"john"}, + {"text":"mysql"}, + {"text":"potter"}, + {"text":"grammar"} + ], + "project_id":"project_id", + "parameters":{ + "truncate_input_tokens":100, + "return_options":{ + "inputs":true, + "top_n":5 + } + } + } + """)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestTests.java new file mode 100644 index 0000000000000..8c95a01bc3230 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/rerank/IbmWatsonxRerankRequestTests.java @@ -0,0 
+1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.ibmwatsonx.rerank; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.core.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.ibmwatsonx.IbmWatsonxRerankRequest; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModelTests; + +import java.io.IOException; +import java.net.URI; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class IbmWatsonxRerankRequestTests extends ESTestCase { + private static final String AUTH_HEADER_VALUE = "foo"; + + public void testCreateRequest() throws IOException { + var model = "model"; + var projectId = "project_id"; + URI uri = null; + try { + uri = new URI("http://abc.com"); + } catch (Exception ignored) {} + var apiVersion = "2023-05-04"; + var apiKey = "api_key"; + var query = "database"; + List input = List.of("greenland", "google", "john", "mysql", "potter", "grammar"); + + var request = createRequest(model, projectId, uri, apiVersion, apiKey, query, input); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat(httpPost.getURI().toString(), endsWith(Strings.format("%s=%s", "version", apiVersion))); + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap, aMapWithSize(5)); + assertThat( + requestMap, + is( + + Map.of( + "project_id", + "project_id", + "model_id", + "model", + "inputs", + List.of( + Map.of("text", "greenland"), + Map.of("text", "google"), + Map.of("text", "john"), + Map.of("text", "mysql"), + Map.of("text", "potter"), + Map.of("text", "grammar") + ), + "query", + "database", + "parameters", + Map.of("return_options", Map.of("top_n", 2, "inputs", true), "truncate_input_tokens", 100) + ) + ) + ); + } + + public static IbmWatsonxRerankRequest createRequest( + String model, + String projectId, + URI uri, + String apiVersion, + String apiKey, + String query, + List input + ) { + var embeddingsModel = IbmWatsonxRerankModelTests.createModel(model, projectId, uri, apiVersion, apiKey); + + return new IbmWatsonxRerankWithoutAuthRequest(query, input, embeddingsModel); + } + + private static class IbmWatsonxRerankWithoutAuthRequest extends IbmWatsonxRerankRequest { + IbmWatsonxRerankWithoutAuthRequest(String query, List input, IbmWatsonxRerankModel model) { + super(query, input, model); + } + + @Override + public void decorateWithAuth(HttpPost httpPost) { + httpPost.setHeader(HttpHeaders.AUTHORIZATION, AUTH_HEADER_VALUE); + } + } +} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntityTests.java new file mode 100644 index 0000000000000..6b59f25896a48 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/ibmwatsonx/IbmWatsonxRankedResponseEntityTests.java @@ -0,0 +1,166 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.inference.external.response.ibmwatsonx; + +import org.apache.http.HttpResponse; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class IbmWatsonxRankedResponseEntityTests extends ESTestCase { + + public void testResponseLiteral() throws IOException { + InferenceServiceResults parsedResults = IbmWatsonxRankedResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseLiteral.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class)); + List expected = responseLiteralDocs(); + for (int i = 0; i < ((RankedDocsResults) parsedResults).getRankedDocs().size(); i++) { + assertEquals(((RankedDocsResults) parsedResults).getRankedDocs().get(i).index(), expected.get(i).index()); + } + } + + public void testGeneratedResponse() throws IOException { + int numDocs = randomIntBetween(1, 10); + + List expected = new ArrayList<>(numDocs); + StringBuilder responseBuilder = new StringBuilder(); + + responseBuilder.append("{"); + responseBuilder.append("\"results\": ["); + List indices = linear(numDocs); + List scores = linearFloats(numDocs); + for (int i = 0; i < numDocs; i++) { + int index = indices.remove(randomInt(indices.size() - 1)); + + responseBuilder.append("{"); + responseBuilder.append("\"index\":").append(index).append(","); + responseBuilder.append("\"score\":").append(scores.get(i).toString()).append("}"); + expected.add(new RankedDocsResults.RankedDoc(index, scores.get(i), null)); + if (i < numDocs - 1) { + responseBuilder.append(","); + } + } + responseBuilder.append("]"); + + responseBuilder.append(randomIntBetween(1, 10)).append("}"); + + InferenceServiceResults parsedResults = IbmWatsonxRankedResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseBuilder.toString().getBytes(StandardCharsets.UTF_8)) + ); + MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class)); + for (int i = 0; i < ((RankedDocsResults) parsedResults).getRankedDocs().size(); i++) { + assertEquals(((RankedDocsResults) parsedResults).getRankedDocs().get(i).index(), expected.get(i).index()); + } + } + + private ArrayList responseLiteralDocs() { + var list = new ArrayList(); + + list.add(new 
RankedDocsResults.RankedDoc(2, 0.98005307F, null)); + list.add(new RankedDocsResults.RankedDoc(3, 0.27904198F, null)); + list.add(new RankedDocsResults.RankedDoc(0, 0.10194652F, null)); + return list; + } + + private final String responseLiteral = """ + { + "results": [ + { + "index": 2, + "score": 0.98005307 + }, + { + "index": 3, + "score": 0.27904198 + }, + { + "index": 0, + "score": 0.10194652 + } + ] + } + """; + + public void testResponseLiteralWithDocuments() throws IOException { + InferenceServiceResults parsedResults = IbmWatsonxRankedResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseLiteralWithDocuments.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class)); + MatcherAssert.assertThat(((RankedDocsResults) parsedResults).getRankedDocs(), is(responseLiteralDocsWithText)); + } + + private final String responseLiteralWithDocuments = """ + { + "results": [ + { + "input": { + "text": "Washington, D.C.." + }, + "index": 2, + "score": 0.98005307 + }, + { + "input": { + "text": "Capital punishment has existed in the United States since before the United States was a country. " + }, + "index": 3, + "score": 0.27904198 + }, + { + "input": { + "text": "Carson City is the capital city of the American state of Nevada." + }, + "index": 0, + "score": 0.10194652 + } + ] + } + """; + + private final List responseLiteralDocsWithText = List.of( + new RankedDocsResults.RankedDoc(2, 0.98005307F, "Washington, D.C.."), + new RankedDocsResults.RankedDoc( + 3, + 0.27904198F, + "Capital punishment has existed in the United States since before the United States was a country. " + ), + new RankedDocsResults.RankedDoc(0, 0.10194652F, "Carson City is the capital city of the American state of Nevada.") + ); + + private ArrayList linear(int n) { + ArrayList list = new ArrayList<>(); + for (int i = 0; i <= n; i++) { + list.add(i); + } + return list; + } + + // creates a list of doubles of monotonically decreasing magnitude + private ArrayList linearFloats(int n) { + ArrayList list = new ArrayList<>(); + float startValue = 1.0f; + float decrement = startValue / n + 1; + for (int i = 0; i <= n; i++) { + list.add(startValue - (i * decrement)); + } + return list; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java index ff99101fc4ee5..99b7b3868b7f4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java @@ -50,6 +50,7 @@ import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModelTests; +import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel; import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; @@ -154,6 +155,42 @@ public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModel() throws IO } } + public void testParseRequestConfig_CreatesAIbmWatsonxRerankModel() throws 
IOException { + try (var service = createIbmWatsonxService()) { + ActionListener modelListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(IbmWatsonxRerankModel.class)); + + var rerankModel = (IbmWatsonxRerankModel) model; + assertThat(rerankModel.getServiceSettings().modelId(), is(modelId)); + assertThat(rerankModel.getServiceSettings().projectId(), is(projectId)); + assertThat(rerankModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(rerankModel.getSecretSettings().apiKey().toString(), is(apiKey)); + }, e -> fail("Model parsing should have succeeded, but failed: " + e.getMessage())); + + service.parseRequestConfig( + "id", + TaskType.RERANK, + getRequestConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + new HashMap<>(Map.of()), + getSecretSettingsMap(apiKey) + ), + modelListener + ); + } + } + public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createIbmWatsonxService()) { ActionListener modelListener = ActionListener.wrap(model -> { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModelTests.java new file mode 100644 index 0000000000000..0138952c11e07 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModelTests.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.net.URI; + +public class IbmWatsonxRerankModelTests extends ESTestCase { + public static IbmWatsonxRerankModel createModel(String model, String projectId, URI uri, String apiVersion, String apiKey) { + return new IbmWatsonxRerankModel( + "id", + TaskType.RERANK, + "service", + new IbmWatsonxRerankServiceSettings(uri, apiVersion, model, projectId, null), + new IbmWatsonxRerankTaskSettings(2, true, 100), + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } +} From 0a8605e8c2ba5df305b948c09b328b4e0632075f Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Fri, 31 Jan 2025 10:21:18 +0100 Subject: [PATCH 312/383] Remove feature flag check in BaseTransportInferenceAction and rely on Noop implementation (#121270) --- .../xpack/inference/action/BaseTransportInferenceAction.java | 5 ----- 1 file changed, 5 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java index 08d74a36d6503..23117d0daa35a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceAction.java @@ -55,7 +55,6 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.InferencePlugin.INFERENCE_API_FEATURE; -import static org.elasticsearch.xpack.inference.common.InferenceAPIClusterAwareRateLimitingFeature.INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG; import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.modelAttributes; import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.responseAttributes; @@ -188,10 +187,6 @@ private void validateRequest(Request request, UnparsedModel unparsedModel) { } private NodeRoutingDecision determineRouting(String serviceName, Request request, UnparsedModel unparsedModel) { - if (INFERENCE_API_CLUSTER_AWARE_RATE_LIMITING_FEATURE_FLAG.isEnabled() == false) { - return NodeRoutingDecision.handleLocally(); - } - var modelTaskType = unparsedModel.taskType(); // Rerouting not supported or request was already rerouted From eee69733897769329cf650cf8573bff531252144 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Fri, 31 Jan 2025 10:26:20 +0100 Subject: [PATCH 313/383] [Inference API] Fix tests in TransportInferenceActionTests (#121302) --- muted-tests.yml | 6 ---- .../BaseTransportInferenceActionTestCase.java | 10 +++---- .../action/TransportInferenceActionTests.java | 30 +++++++++++-------- ...TransportUnifiedCompletionActionTests.java | 6 ++-- 4 files changed, 25 insertions(+), 27 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index f6413da66de9d..05cdb0bc15721 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -359,12 +359,6 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=indices.get_alias/10_basic/Get aliases via /*/_alias/} issue: https://github.com/elastic/elasticsearch/issues/121290 -- class: 
org.elasticsearch.xpack.inference.action.TransportInferenceActionTests - method: testRerouting_HandlesTransportException_FromOtherNode - issue: https://github.com/elastic/elasticsearch/issues/121292 -- class: org.elasticsearch.xpack.inference.action.TransportInferenceActionTests - method: testRerouting_ToOtherNode - issue: https://github.com/elastic/elasticsearch/issues/121293 - class: org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests issue: https://github.com/elastic/elasticsearch/issues/121294 - class: org.elasticsearch.env.NodeEnvironmentTests diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java index 4fa0a1ec49c74..562c99c0887b5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java @@ -28,7 +28,7 @@ import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; -import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; import org.junit.Before; @@ -64,7 +64,7 @@ public abstract class BaseTransportInferenceActionTestCase createAction( InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, StreamingTaskManager streamingTaskManager, - InferenceServiceNodeLocalRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + InferenceServiceRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, NodeClient nodeClient, ThreadPool threadPool ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java index e71d15dbe0420..3129f0865a249 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; -import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.common.RateLimitAssignment; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; @@ -50,7 +50,7 @@ protected BaseTransportInferenceAction createAction( InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, StreamingTaskManager streamingTaskManager, - InferenceServiceNodeLocalRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + 
InferenceServiceRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, NodeClient nodeClient, ThreadPool threadPool ) { @@ -77,7 +77,7 @@ public void testNoRerouting_WhenTaskTypeNotSupported() { TaskType unsupportedTaskType = TaskType.COMPLETION; mockService(listener -> listener.onResponse(mock())); - when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, unsupportedTaskType)).thenReturn(false); + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, unsupportedTaskType)).thenReturn(false); var listener = doExecute(unsupportedTaskType); @@ -89,8 +89,8 @@ public void testNoRerouting_WhenTaskTypeNotSupported() { public void testNoRerouting_WhenNoGroupingCalculatedYet() { mockService(listener -> listener.onResponse(mock())); - when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); - when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(null); + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(null); var listener = doExecute(taskType); @@ -102,8 +102,8 @@ public void testNoRerouting_WhenNoGroupingCalculatedYet() { public void testNoRerouting_WhenEmptyNodeList() { mockService(listener -> listener.onResponse(mock())); - when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); - when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn( + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn( new RateLimitAssignment(List.of()) ); @@ -120,10 +120,10 @@ public void testRerouting_ToOtherNode() { // The local node is different to the "other-node" responsible for serviceId when(nodeClient.getLocalNodeId()).thenReturn("local-node"); - when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); // Requests for serviceId are always routed to "other-node" var assignment = new RateLimitAssignment(List.of(otherNode)); - when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); mockService(listener -> listener.onResponse(mock())); var listener = doExecute(taskType); @@ -141,9 +141,9 @@ public void testRerouting_ToLocalNode_WithoutGoingThroughTransportLayerAgain() { // The local node is the only one responsible for serviceId when(nodeClient.getLocalNodeId()).thenReturn(localNodeId); - when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); var assignment = new RateLimitAssignment(List.of(localNode)); - when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, 
taskType)).thenReturn(assignment); mockService(listener -> listener.onResponse(mock())); var listener = doExecute(taskType); @@ -158,9 +158,9 @@ public void testRerouting_HandlesTransportException_FromOtherNode() { when(otherNode.getId()).thenReturn("other-node"); when(nodeClient.getLocalNodeId()).thenReturn("local-node"); - when(inferenceServiceNodeLocalRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); + when(inferenceServiceRateLimitCalculator.isTaskTypeReroutingSupported(serviceId, taskType)).thenReturn(true); var assignment = new RateLimitAssignment(List.of(otherNode)); - when(inferenceServiceNodeLocalRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); + when(inferenceServiceRateLimitCalculator.getRateLimitAssignment(serviceId, taskType)).thenReturn(assignment); mockService(listener -> listener.onResponse(mock())); @@ -173,6 +173,10 @@ public void testRerouting_HandlesTransportException_FromOtherNode() { var listener = doExecute(taskType); + // Verify request was rerouted + verify(transportService).sendRequest(same(otherNode), eq(InferenceAction.NAME), any(), any()); + // Verify local execution didn't happen + verify(listener, never()).onResponse(any()); // Verify exception was propagated from "other-node" to "local-node" verify(listener).onFailure(same(expectedException)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java index 4ed69e5abe537..e6b5c6d336134 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; -import org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator; +import org.elasticsearch.xpack.inference.common.InferenceServiceRateLimitCalculator; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; @@ -49,7 +49,7 @@ protected BaseTransportInferenceAction createAc InferenceServiceRegistry serviceRegistry, InferenceStats inferenceStats, StreamingTaskManager streamingTaskManager, - InferenceServiceNodeLocalRateLimitCalculator inferenceServiceNodeLocalRateLimitCalculator, + InferenceServiceRateLimitCalculator inferenceServiceRateLimitCalculator, NodeClient nodeClient, ThreadPool threadPool ) { @@ -61,7 +61,7 @@ protected BaseTransportInferenceAction createAc serviceRegistry, inferenceStats, streamingTaskManager, - inferenceServiceNodeLocalRateLimitCalculator, + inferenceServiceRateLimitCalculator, nodeClient, threadPool ); From a89677927195dc0db83e2b9e64c307f721749bee Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Fri, 31 Jan 2025 10:34:48 +0100 Subject: [PATCH 314/383] Fix LambdaMatchers.transformedMatch to handle null values (#121371) --- .../java/org/elasticsearch/test/LambdaMatchers.java | 13 +++++-------- .../org/elasticsearch/test/LambdaMatchersTests.java | 3 +++ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java b/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java index dd2e8e4ec5506..67a6cd49b0724 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java +++ b/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java @@ -23,7 +23,7 @@ public class LambdaMatchers { - private static class TransformMatcher extends BaseMatcher { + private static class TransformMatcher extends TypeSafeMatcher { private final Matcher matcher; private final Function transform; @@ -33,24 +33,21 @@ private TransformMatcher(Matcher matcher, Function transform) { } @Override - @SuppressWarnings("unchecked") - public boolean matches(Object actual) { + protected boolean matchesSafely(T item) { U u; try { - u = transform.apply((T) actual); + u = transform.apply(item); } catch (ClassCastException e) { throw new AssertionError(e); } - return matcher.matches(u); } @Override - @SuppressWarnings("unchecked") - public void describeMismatch(Object item, Description description) { + protected void describeMismatchSafely(T item, Description description) { U u; try { - u = transform.apply((T) item); + u = transform.apply(item); } catch (ClassCastException e) { description.appendValue(item).appendText(" is not of the correct type (").appendText(e.getMessage()).appendText(")"); return; diff --git a/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java b/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java index f7ab2349ec1ce..20f9b354f2b4a 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java @@ -19,11 +19,13 @@ import static org.elasticsearch.test.LambdaMatchers.transformedItemsMatch; import static org.elasticsearch.test.LambdaMatchers.transformedMatch; import static org.elasticsearch.test.LambdaMatchers.trueWith; +import static org.hamcrest.Matchers.anything; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.emptyString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; public class LambdaMatchersTests extends ESTestCase { @@ -56,6 +58,7 @@ public void testTransformMatcher() { assertThat(new A("1"), transformedMatch(a -> a.str, equalTo("1"))); assertThat(new B("1"), transformedMatch((A a) -> a.str, equalTo("1"))); + assertMismatch((A) null, transformedMatch(A::toString, anything()), is("was null")); assertMismatch(new A("1"), transformedMatch(a -> a.str, emptyString()), equalTo("transformed value was \"1\"")); } From 0393e56fa72890d4b46aa5d2b5923a08faf27a69 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Fri, 31 Jan 2025 11:39:28 +0100 Subject: [PATCH 315/383] ESQL: introduce a pre-mapping logical plan processing step (#121260) This adds a pre-mapping logical plan processing step, occurring after the logical optimisation, but before mapping it to a physical plan. This step can perform async actions, if needed, and involves using a new `TransportActionServices` record with all available services. Furthermore, the query rewriting step part of the `FullTextFunction`s planning (occurring on the coordinator only) is refactored a bit to update the queries in-place. 
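For orientation, a rough sketch of where the query-rewriting part of the new step sits on the
coordinator. Apart from the `resolveQueryBuilders(LogicalPlan, TransportActionServices,
ActionListener)` shape introduced by this change, every name below (optimizer/mapper variables,
the static entry point, the continuation helper) is an illustrative assumption, not the exact API:

    // Sketch only: the pre-mapping step runs after logical optimisation and must
    // complete (possibly asynchronously) before the plan is mapped to a physical plan.
    LogicalPlan optimized = logicalPlanOptimizer.optimize(analyzedPlan);
    QueryBuilderResolver.resolveQueryBuilders(optimized, services, listener.delegateFailureAndWrap((l, ignored) -> {
        // Full-text functions in the optimized plan now hold their rewritten
        // QueryBuilders (updated in place), so physical mapping proceeds as before.
        PhysicalPlan physical = mapper.map(optimized);
        runPhysicalPlan(physical, l);   // illustrative continuation
    }));

Doing this rewriting before mapping keeps the physical planning itself synchronous while still
allowing the coordinator-only, potentially async work to use the services bundled in
`TransportActionServices`.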
The verification done by `Match` and `Term` involving checking on the argument type is also now pulled back from post-optimisation to post-analysis. Their respective tests are moved accordingly as well. --- docs/changelog/121260.yaml | 5 + .../xpack/esql/core/util/Holder.java | 10 ++ .../xpack/esql/EsqlTestUtils.java | 16 +- .../xpack/esql/MockQueryBuilderResolver.java | 30 ---- .../xpack/esql/plugin/MatchFunctionIT.java | 2 +- .../xpack/esql/plugin/MatchOperatorIT.java | 2 +- .../xpack/esql/execution/PlanExecutor.java | 6 +- .../function/fulltext/FullTextFunction.java | 8 +- .../expression/function/fulltext/Match.java | 65 +++---- .../fulltext/QueryBuilderResolver.java | 95 ++++++++++ .../expression/function/fulltext/Term.java | 35 ++-- .../esql/planner/premapper/PreMapper.java | 40 +++++ .../esql/plugin/TransportActionServices.java | 24 +++ .../esql/plugin/TransportEsqlQueryAction.java | 15 +- .../xpack/esql/session/EsqlSession.java | 24 ++- .../esql/session/QueryBuilderResolver.java | 167 ------------------ .../elasticsearch/xpack/esql/CsvTests.java | 2 +- .../xpack/esql/analysis/VerifierTests.java | 23 ++- .../optimizer/LogicalPlanOptimizerTests.java | 32 ---- .../telemetry/PlanExecutorMetricsTests.java | 4 +- .../queries/SemanticQueryBuilder.java | 5 +- 21 files changed, 300 insertions(+), 310 deletions(-) create mode 100644 docs/changelog/121260.yaml delete mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/MockQueryBuilderResolver.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryBuilderResolver.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/premapper/PreMapper.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportActionServices.java delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java diff --git a/docs/changelog/121260.yaml b/docs/changelog/121260.yaml new file mode 100644 index 0000000000000..40c7487f29b12 --- /dev/null +++ b/docs/changelog/121260.yaml @@ -0,0 +1,5 @@ +pr: 121260 +summary: Introduce a pre-mapping logical plan processing step +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/Holder.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/Holder.java index 1290bbca59ee7..9aadcefb84e84 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/Holder.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/Holder.java @@ -26,6 +26,16 @@ public void set(T value) { this.value = value; } + /** + * Sets a value in the holder, but only if none has already been set. + * @param value the new value to set. 
+ */ + public void setIfAbsent(T value) { + if (this.value == null) { + this.value = value; + } + } + public T get() { return value; } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 3e072e9a05c20..6deda725dcad4 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -11,6 +11,8 @@ import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; @@ -31,9 +33,11 @@ import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.index.IndexMode; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.search.SearchService; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; @@ -72,8 +76,8 @@ import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.elasticsearch.xpack.esql.plugin.TransportActionServices; import org.elasticsearch.xpack.esql.session.Configuration; -import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; import org.elasticsearch.xpack.esql.stats.SearchStats; import org.elasticsearch.xpack.esql.telemetry.Metrics; import org.elasticsearch.xpack.versionfield.Version; @@ -140,6 +144,7 @@ import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import static org.mockito.Mockito.mock; public final class EsqlTestUtils { @@ -360,7 +365,14 @@ public static LogicalOptimizerContext unboundLogicalOptimizerContext() { public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)); - public static final QueryBuilderResolver MOCK_QUERY_BUILDER_RESOLVER = new MockQueryBuilderResolver(); + public static final TransportActionServices MOCK_TRANSPORT_ACTION_SERVICES = new TransportActionServices( + mock(TransportService.class), + mock(SearchService.class), + null, + mock(ClusterService.class), + mock(IndexNameExpressionResolver.class), + null + ); private EsqlTestUtils() {} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/MockQueryBuilderResolver.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/MockQueryBuilderResolver.java deleted file mode 100644 index 7af3a89108fc0..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/MockQueryBuilderResolver.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; -import org.elasticsearch.xpack.esql.session.Result; - -import java.util.function.BiConsumer; - -public class MockQueryBuilderResolver extends QueryBuilderResolver { - public MockQueryBuilderResolver() { - super(null, null, null, null); - } - - @Override - public void resolveQueryBuilders( - LogicalPlan plan, - ActionListener listener, - BiConsumer> callback - ) { - callback.accept(plan, listener); - } -} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java index b928b25929401..2da9bee3701d7 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java @@ -246,7 +246,7 @@ public void testWhereMatchWithRow() { var error = expectThrows(ElasticsearchException.class, () -> run(query)); assertThat( error.getMessage(), - containsString("[MATCH] function cannot operate on [\"a brown fox\"], which is not a field from an index mapping") + containsString("line 2:15: [MATCH] function cannot operate on [content], which is not a field from an index mapping") ); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java index bd7246518c958..dbe115cc66176 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java @@ -230,7 +230,7 @@ public void testWhereMatchWithRow() { var error = expectThrows(ElasticsearchException.class, () -> run(query)); assertThat( error.getMessage(), - containsString("[:] operator cannot operate on [\"a brown fox\"], which is not a field from an index mapping") + containsString("line 2:9: [:] operator cannot operate on [content], which is not a field from an index mapping") ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index 81f63fd9d37a6..611516fc55342 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -21,10 +21,10 @@ import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer; import org.elasticsearch.xpack.esql.planner.mapper.Mapper; +import org.elasticsearch.xpack.esql.plugin.TransportActionServices; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.session.EsqlSession; import org.elasticsearch.xpack.esql.session.IndexResolver; -import 
org.elasticsearch.xpack.esql.session.QueryBuilderResolver; import org.elasticsearch.xpack.esql.session.Result; import org.elasticsearch.xpack.esql.telemetry.Metrics; import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; @@ -62,7 +62,7 @@ public void esql( EsqlExecutionInfo executionInfo, IndicesExpressionGrouper indicesExpressionGrouper, EsqlSession.PlanRunner planRunner, - QueryBuilderResolver queryBuilderResolver, + TransportActionServices services, ActionListener listener ) { final PlanTelemetry planTelemetry = new PlanTelemetry(functionRegistry); @@ -78,7 +78,7 @@ public void esql( verifier, planTelemetry, indicesExpressionGrouper, - queryBuilderResolver + services ); QueryMetric clientId = QueryMetric.fromString("rest"); metrics.total(clientId); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index 32a350ac7351e..cb0306775a711 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; -import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluator; import org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluator.ShardConfig; import org.elasticsearch.compute.operator.EvalOperator; @@ -110,11 +110,7 @@ public Expression query() { */ public Object queryAsObject() { Object queryAsObject = query().fold(FoldContext.small() /* TODO remove me */); - if (queryAsObject instanceof BytesRef bytesRef) { - return bytesRef.utf8ToString(); - } - - return queryAsObject; + return BytesRefs.toString(queryAsObject); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java index 3223e96da7136..ec599020e0b05 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.DataTypeConverter; import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; +import org.elasticsearch.xpack.esql.core.util.Check; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -38,6 +39,7 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import org.elasticsearch.xpack.esql.querydsl.query.MatchQuery; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; @@ -48,6 +50,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.BiConsumer; import static java.util.Map.entry; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; @@ -88,7 +91,7 @@ /** * Full text function that performs a {@link org.elasticsearch.xpack.esql.querydsl.query.MatchQuery} . */ -public class Match extends FullTextFunction implements OptionalArgument, PostOptimizationVerificationAware { +public class Match extends FullTextFunction implements OptionalArgument, PostAnalysisPlanVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Match", Match::readFrom); public static final Set FIELD_DATA_TYPES = Set.of( @@ -429,23 +432,23 @@ public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { } @Override - public void postOptimizationVerification(Failures failures) { - Expression fieldExpression = field(); - // Field may be converted to other data type (field_name :: data_type), so we need to check the original field - if (fieldExpression instanceof AbstractConvertFunction convertFunction) { - fieldExpression = convertFunction.field(); - } - if (fieldExpression instanceof FieldAttribute == false) { - failures.add( - Failure.fail( - field, - "[{}] {} cannot operate on [{}], which is not a field from an index mapping", - functionName(), - functionType(), - field.sourceText() - ) - ); - } + public BiConsumer postAnalysisPlanVerification() { + return (plan, failures) -> { + super.postAnalysisPlanVerification().accept(plan, failures); + plan.forEachExpression(Match.class, m -> { + if (m.fieldAsFieldAttribute() == null) { + failures.add( + Failure.fail( + m.field(), + "[{}] {} cannot operate on [{}], which is not a field from an index mapping", + functionName(), + functionType(), + m.field().sourceText() + ) + ); + } + }); + }; } @Override @@ -476,22 +479,24 @@ public Object queryAsObject() { @Override protected Query translate(TranslatorHandler handler) { + var fieldAttribute = fieldAsFieldAttribute(); + Check.notNull(fieldAttribute, "Match must have a field attribute as the first argument"); + String fieldName = fieldAttribute.name(); + if (fieldAttribute.field() instanceof MultiTypeEsField multiTypeEsField) { + // If we have multiple field types, we allow the query to be done, but getting the underlying field name + fieldName = multiTypeEsField.getName(); + } + // Make query lenient so mixed field types can be queried when a field type is incompatible with the value provided + return new MatchQuery(source(), fieldName, queryAsObject(), matchQueryOptions()); + } + + private FieldAttribute fieldAsFieldAttribute() { Expression fieldExpression = field; // Field may be converted to other data type (field_name :: data_type), so we need to check the original field if (fieldExpression instanceof AbstractConvertFunction convertFunction) { fieldExpression = convertFunction.field(); } - if (fieldExpression instanceof FieldAttribute fieldAttribute) { - String fieldName = fieldAttribute.name(); - if (fieldAttribute.field() instanceof MultiTypeEsField 
multiTypeEsField) { - // If we have multiple field types, we allow the query to be done, but getting the underlying field name - fieldName = multiTypeEsField.getName(); - } - // Make query lenient so mixed field types can be queried when a field type is incompatible with the value provided - return new MatchQuery(source(), fieldName, queryAsObject(), matchQueryOptions()); - } - - throw new IllegalArgumentException("Match must have a field attribute as the first argument"); + return fieldExpression instanceof FieldAttribute fieldAttribute ? fieldAttribute : null; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryBuilderResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryBuilderResolver.java new file mode 100644 index 0000000000000..14607de433630 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryBuilderResolver.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ResolvedIndices; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.Rewriteable; +import org.elasticsearch.xpack.esql.core.util.Holder; +import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; +import org.elasticsearch.xpack.esql.plugin.TransportActionServices; +import org.elasticsearch.xpack.esql.session.IndexResolver; + +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; + +/** + * Some {@link FullTextFunction} implementations such as {@link org.elasticsearch.xpack.esql.expression.function.fulltext.Match} + * will be translated to a {@link QueryBuilder} that require a rewrite phase on the coordinator. + * {@link QueryBuilderResolver#resolveQueryBuilders(LogicalPlan, TransportActionServices, ActionListener)} will rewrite the plan by + * replacing {@link FullTextFunction} expression with new ones that hold rewritten {@link QueryBuilder}s. 
+ */
+public final class QueryBuilderResolver {
+
+    private QueryBuilderResolver() {}
+
+    public static void resolveQueryBuilders(LogicalPlan plan, TransportActionServices services, ActionListener<LogicalPlan> listener) {
+        var hasFullTextFunctions = plan.anyMatch(p -> {
+            Holder<Boolean> hasFullTextFunction = new Holder<>(false);
+            p.forEachExpression(FullTextFunction.class, unused -> hasFullTextFunction.set(true));
+            return hasFullTextFunction.get();
+        });
+        if (hasFullTextFunctions) {
+            Rewriteable.rewriteAndFetch(
+                new FullTextFunctionsRewritable(plan),
+                queryRewriteContext(services, indexNames(plan)),
+                listener.delegateFailureAndWrap((l, r) -> l.onResponse(r.plan))
+            );
+        } else {
+            listener.onResponse(plan);
+        }
+    }
+
+    private static QueryRewriteContext queryRewriteContext(TransportActionServices services, Set<String> indexNames) {
+        ResolvedIndices resolvedIndices = ResolvedIndices.resolveWithIndexNamesAndOptions(
+            indexNames.toArray(String[]::new),
+            IndexResolver.FIELD_CAPS_INDICES_OPTIONS,
+            services.clusterService().state(),
+            services.indexNameExpressionResolver(),
+            services.transportService().getRemoteClusterService(),
+            System.currentTimeMillis()
+        );
+
+        return services.searchService().getRewriteContext(System::currentTimeMillis, resolvedIndices, null);
+    }
+
+    private static Set<String> indexNames(LogicalPlan plan) {
+        Set<String> indexNames = new HashSet<>();
+        plan.forEachDown(EsRelation.class, esRelation -> indexNames.addAll(esRelation.concreteIndices()));
+        return indexNames;
+    }
+
+    private record FullTextFunctionsRewritable(LogicalPlan plan) implements Rewriteable<FullTextFunctionsRewritable> {
+        @Override
+        public FullTextFunctionsRewritable rewrite(QueryRewriteContext ctx) throws IOException {
+            Holder<IOException> exceptionHolder = new Holder<>();
+            Holder<Boolean> updated = new Holder<>(false);
+            LogicalPlan newPlan = plan.transformExpressionsDown(FullTextFunction.class, f -> {
+                QueryBuilder builder = f.queryBuilder(), initial = builder;
+                builder = builder == null ? f.asQuery(TranslatorHandler.TRANSLATOR_HANDLER).asBuilder() : builder;
+                try {
+                    builder = builder.rewrite(ctx);
+                } catch (IOException e) {
+                    exceptionHolder.setIfAbsent(e);
+                }
+                var rewritten = builder != initial;
+                updated.set(updated.get() || rewritten);
+                return rewritten ? f.replaceQueryBuilder(builder) : f;
+            });
+            if (exceptionHolder.get() != null) {
+                throw exceptionHolder.get();
+            }
+            return updated.get() ?
new FullTextFunctionsRewritable(newPlan) : this; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java index 4db1c38694757..1da28b3069675 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -26,10 +26,12 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import java.io.IOException; import java.util.List; +import java.util.function.BiConsumer; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -39,7 +41,7 @@ /** * Full text function that performs a {@link TermQuery} . */ -public class Term extends FullTextFunction implements PostOptimizationVerificationAware { +public class Term extends FullTextFunction implements PostAnalysisPlanVerificationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Term", Term::readFrom); @@ -104,18 +106,23 @@ private TypeResolution resolveField() { } @Override - public void postOptimizationVerification(Failures failures) { - if (field instanceof FieldAttribute == false) { - failures.add( - Failure.fail( - field, - "[{}] {} cannot operate on [{}], which is not a field from an index mapping", - functionName(), - functionType(), - field.sourceText() - ) - ); - } + public BiConsumer postAnalysisPlanVerification() { + return (plan, failures) -> { + super.postAnalysisPlanVerification().accept(plan, failures); + plan.forEachExpression(Term.class, t -> { + if (t.field() instanceof FieldAttribute == false) { // TODO: is a conversion possible, similar to Match's case? + failures.add( + Failure.fail( + t.field(), + "[{}] {} cannot operate on [{}], which is not a field from an index mapping", + t.functionName(), + t.functionType(), + t.field().sourceText() + ) + ); + } + }); + }; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/premapper/PreMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/premapper/PreMapper.java new file mode 100644 index 0000000000000..f69754dc3ce89 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/premapper/PreMapper.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.planner.premapper;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.xpack.esql.expression.function.fulltext.QueryBuilderResolver;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plugin.TransportActionServices;
+
+/**
+ * The class is responsible for invoking any premapping steps that need to be applied to the logical plan,
+ * before this is being mapped to a physical one.
+ */
+public class PreMapper {
+
+    private final TransportActionServices services;
+
+    public PreMapper(TransportActionServices services) {
+        this.services = services;
+    }
+
+    /**
+     * Invokes any premapping steps that need to be applied to the logical plan, before this is being mapped to a physical one.
+     */
+    public void preMapper(LogicalPlan plan, ActionListener<LogicalPlan> listener) {
+        queryRewrite(plan, listener.delegateFailureAndWrap((l, p) -> {
+            p.setOptimized();
+            l.onResponse(p);
+        }));
+    }
+
+    private void queryRewrite(LogicalPlan plan, ActionListener<LogicalPlan> listener) {
+        QueryBuilderResolver.resolveQueryBuilders(plan, services, listener);
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportActionServices.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportActionServices.java
new file mode 100644
index 0000000000000..ad112542e000a
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportActionServices.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.search.SearchService; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.usage.UsageService; + +public record TransportActionServices( + TransportService transportService, + SearchService searchService, + ExchangeService exchangeService, + ClusterService clusterService, + IndexNameExpressionResolver indexNameExpressionResolver, + UsageService usageService +) {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index a32b4591943f4..b3a2c403137f3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -53,7 +53,6 @@ import org.elasticsearch.xpack.esql.execution.PlanExecutor; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.session.EsqlSession.PlanRunner; -import org.elasticsearch.xpack.esql.session.QueryBuilderResolver; import org.elasticsearch.xpack.esql.session.Result; import java.io.IOException; @@ -81,8 +80,8 @@ public class TransportEsqlQueryAction extends HandledTransportAction asyncTaskManagementService; private final RemoteClusterService remoteClusterService; - private final QueryBuilderResolver queryBuilderResolver; private final UsageService usageService; + private final TransportActionServices services; // Listeners for active async queries, key being the async task execution ID private final Map asyncListeners = ConcurrentCollections.newConcurrentMap(); @@ -153,8 +152,16 @@ public TransportEsqlQueryAction( bigArrays ); this.remoteClusterService = transportService.getRemoteClusterService(); - this.queryBuilderResolver = new QueryBuilderResolver(searchService, clusterService, transportService, indexNameExpressionResolver); this.usageService = usageService; + + this.services = new TransportActionServices( + transportService, + searchService, + exchangeService, + clusterService, + indexNameExpressionResolver, + usageService + ); } @Override @@ -258,7 +265,7 @@ private void innerExecute(Task task, EsqlQueryRequest request, ActionListener { recordCCSTelemetry(task, executionInfo, request, null); listener.onResponse(toResponse(task, request, configuration, result)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 94bf414da1b9d..ff1b599ef191e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -73,6 +73,8 @@ import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.mapper.Mapper; +import org.elasticsearch.xpack.esql.planner.premapper.PreMapper; +import org.elasticsearch.xpack.esql.plugin.TransportActionServices; import org.elasticsearch.xpack.esql.telemetry.PlanTelemetry; import java.util.ArrayList; @@ -109,12 +111,12 @@ 
public interface PlanRunner { private final Verifier verifier; private final EsqlFunctionRegistry functionRegistry; private final LogicalPlanOptimizer logicalPlanOptimizer; + private final PreMapper preMapper; private final Mapper mapper; private final PhysicalPlanOptimizer physicalPlanOptimizer; private final PlanTelemetry planTelemetry; private final IndicesExpressionGrouper indicesExpressionGrouper; - private final QueryBuilderResolver queryBuilderResolver; public EsqlSession( String sessionId, @@ -128,7 +130,7 @@ public EsqlSession( Verifier verifier, PlanTelemetry planTelemetry, IndicesExpressionGrouper indicesExpressionGrouper, - QueryBuilderResolver queryBuilderResolver + TransportActionServices services ) { this.sessionId = sessionId; this.configuration = configuration; @@ -142,7 +144,7 @@ public EsqlSession( this.physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(configuration)); this.planTelemetry = planTelemetry; this.indicesExpressionGrouper = indicesExpressionGrouper; - this.queryBuilderResolver = queryBuilderResolver; + this.preMapper = new PreMapper(services); } public String sessionId() { @@ -162,16 +164,12 @@ public void execute(EsqlQueryRequest request, EsqlExecutionInfo executionInfo, P new EsqlSessionCCSUtils.CssPartialErrorsActionListener(executionInfo, listener) { @Override public void onResponse(LogicalPlan analyzedPlan) { - try { - var optimizedPlan = optimizedPlan(analyzedPlan); - queryBuilderResolver.resolveQueryBuilders( - optimizedPlan, - listener, - (newPlan, next) -> executeOptimizedPlan(request, executionInfo, planRunner, newPlan, next) - ); - } catch (Exception e) { - listener.onFailure(e); - } + preMapper.preMapper( + analyzedPlan, + listener.delegateFailureAndWrap( + (l, p) -> executeOptimizedPlan(request, executionInfo, planRunner, optimizedPlan(p), l) + ) + ); } } ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java deleted file mode 100644 index 7db81069f9d3c..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.session; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ResolvedIndices; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryRewriteContext; -import org.elasticsearch.index.query.Rewriteable; -import org.elasticsearch.search.SearchService; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.esql.core.util.Holder; -import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; -import org.elasticsearch.xpack.esql.plan.logical.EsRelation; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; - -import java.io.IOException; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.function.BiConsumer; - -import static org.elasticsearch.xpack.esql.planner.TranslatorHandler.TRANSLATOR_HANDLER; - -/** - * Some {@link FullTextFunction} implementations such as {@link org.elasticsearch.xpack.esql.expression.function.fulltext.Match} - * will be translated to a {@link QueryBuilder} that require a rewrite phase on the coordinator. - * {@link QueryBuilderResolver#resolveQueryBuilders(LogicalPlan, ActionListener, BiConsumer)} will rewrite the plan by replacing - * {@link FullTextFunction} expression with new ones that hold rewritten {@link QueryBuilder}s. - */ -public class QueryBuilderResolver { - private final SearchService searchService; - private final ClusterService clusterService; - private final TransportService transportService; - private final IndexNameExpressionResolver indexNameExpressionResolver; - - public QueryBuilderResolver( - SearchService searchService, - ClusterService clusterService, - TransportService transportService, - IndexNameExpressionResolver indexNameExpressionResolver - ) { - this.searchService = searchService; - this.clusterService = clusterService; - this.transportService = transportService; - this.indexNameExpressionResolver = indexNameExpressionResolver; - } - - public void resolveQueryBuilders( - LogicalPlan plan, - ActionListener listener, - BiConsumer> callback - ) { - if (plan.optimized() == false) { - listener.onFailure(new IllegalStateException("Expected optimized plan before query builder rewrite.")); - return; - } - - Set unresolved = fullTextFunctions(plan); - Set indexNames = indexNames(plan); - - if (indexNames == null || indexNames.isEmpty() || unresolved.isEmpty()) { - callback.accept(plan, listener); - return; - } - QueryRewriteContext ctx = queryRewriteContext(indexNames); - FullTextFunctionsRewritable rewritable = new FullTextFunctionsRewritable(unresolved); - Rewriteable.rewriteAndFetch(rewritable, ctx, new ActionListener() { - @Override - public void onResponse(FullTextFunctionsRewritable fullTextFunctionsRewritable) { - try { - LogicalPlan newPlan = planWithResolvedQueryBuilders(plan, fullTextFunctionsRewritable.results()); - callback.accept(newPlan, listener); - } catch (Exception e) { - onFailure(e); - } - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - } - - private Set fullTextFunctions(LogicalPlan plan) { - Set functions = new HashSet<>(); - plan.forEachExpressionDown(FullTextFunction.class, func -> functions.add(func)); - return functions; - } - - public Set indexNames(LogicalPlan plan) { - Holder> indexNames = new Holder<>(); - 
plan.forEachDown(EsRelation.class, esRelation -> indexNames.set(esRelation.concreteIndices())); - return indexNames.get(); - } - - public LogicalPlan planWithResolvedQueryBuilders(LogicalPlan plan, Map newQueryBuilders) { - LogicalPlan newPlan = plan.transformExpressionsDown(FullTextFunction.class, m -> { - if (newQueryBuilders.keySet().contains(m)) { - return m.replaceQueryBuilder(newQueryBuilders.get(m)); - } - return m; - }); - // The given plan was already analyzed and optimized, so we set the resulted plan to optimized as well. - newPlan.setOptimized(); - return newPlan; - } - - private QueryRewriteContext queryRewriteContext(Set indexNames) { - ResolvedIndices resolvedIndices = ResolvedIndices.resolveWithIndexNamesAndOptions( - indexNames.toArray(String[]::new), - IndexResolver.FIELD_CAPS_INDICES_OPTIONS, - clusterService.state(), - indexNameExpressionResolver, - transportService.getRemoteClusterService(), - System.currentTimeMillis() - ); - - return searchService.getRewriteContext(() -> System.currentTimeMillis(), resolvedIndices, null); - } - - private class FullTextFunctionsRewritable implements Rewriteable { - - private final Map queryBuilderMap; - - FullTextFunctionsRewritable(Map queryBuilderMap) { - this.queryBuilderMap = queryBuilderMap; - } - - FullTextFunctionsRewritable(Set functions) { - this.queryBuilderMap = new HashMap<>(); - - for (FullTextFunction func : functions) { - queryBuilderMap.put(func, TRANSLATOR_HANDLER.asQuery(func).asBuilder()); - } - } - - @Override - public FullTextFunctionsRewritable rewrite(QueryRewriteContext ctx) throws IOException { - Map results = new HashMap<>(); - - boolean hasChanged = false; - for (var entry : queryBuilderMap.entrySet()) { - var initial = entry.getValue(); - var rewritten = initial.rewrite(ctx); - hasChanged |= rewritten != initial; - - results.put(entry.getKey(), rewritten); - } - - return hasChanged ? 
new FullTextFunctionsRewritable(results) : this; - } - - public Map results() { - return queryBuilderMap; - } - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index bae20bb9b26d3..340d5a00e80b7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -516,7 +516,7 @@ private ActualResults executePlan(BigArrays bigArrays) throws Exception { TEST_VERIFIER, new PlanTelemetry(functionRegistry), null, - EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER + EsqlTestUtils.MOCK_TRANSPORT_ACTION_SERVICES ); TestPhysicalOperationProviders physicalOperationProviders = testOperationProviders(foldCtx, testDatasets); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 4403477e51125..e7f2fa1b97fb7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1186,9 +1186,9 @@ public void testWeightedAvg() { public void testMatchInsideEval() throws Exception { assumeTrue("Match operator is available just for snapshots", Build.current().isSnapshot()); - assertEquals( - "1:36: [:] operator is only supported in WHERE commands", + "1:36: [:] operator is only supported in WHERE commands\n" + + "line 1:36: [:] operator cannot operate on [title], which is not a field from an index mapping", error("row title = \"brown fox\" | eval x = title:\"fox\" ") ); } @@ -1217,6 +1217,25 @@ public void testMatchFunctionAndOperatorHaveCorrectErrorMessages() throws Except assertEquals("1:24: [:] operator cannot be used after LIMIT", error("from test | limit 10 | where first_name : \"Anna\"")); } + // These should pass eventually once we lift some restrictions on match function + public void testMatchWithNonIndexedColumnCurrentlyUnsupported() { + assertEquals( + "1:67: [MATCH] function cannot operate on [initial], which is not a field from an index mapping", + error("from test | eval initial = substring(first_name, 1) | where match(initial, \"A\")") + ); + assertEquals( + "1:67: [MATCH] function cannot operate on [text], which is not a field from an index mapping", + error("from test | eval text=concat(first_name, last_name) | where match(text, \"cat\")") + ); + } + + public void testMatchFunctionIsNotNullable() { + assertEquals( + "1:48: [MATCH] function cannot operate on [text::keyword], which is not a field from an index mapping", + error("row n = null | eval text = n + 5 | where match(text::keyword, \"Anna\")") + ); + } + public void testQueryStringFunctionsNotAllowedAfterCommands() throws Exception { // Source commands assertEquals("1:13: [QSTR] function cannot be used after SHOW", error("show info | where qstr(\"8.16.0\")")); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index c80e374540d09..ea88f4fd5ba06 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -7240,38 
+7240,6 @@ public void testToDatePeriodToTimeDurationWithField() { assertEquals("1:60: argument of [to_timeduration(x)] must be a constant, received [x]", e.getMessage().substring(header.length())); } - // These should pass eventually once we lift some restrictions on match function - public void testMatchWithNonIndexedColumnCurrentlyUnsupported() { - final String header = "Found 1 problem\nline "; - VerificationException e = expectThrows(VerificationException.class, () -> plan(""" - from test | eval initial = substring(first_name, 1) | where match(initial, "A")""")); - assertTrue(e.getMessage().startsWith("Found ")); - assertEquals( - "1:67: [MATCH] function cannot operate on [initial], which is not a field from an index mapping", - e.getMessage().substring(header.length()) - ); - - e = expectThrows(VerificationException.class, () -> plan(""" - from test | eval text=concat(first_name, last_name) | where match(text, "cat")""")); - assertTrue(e.getMessage().startsWith("Found ")); - assertEquals( - "1:67: [MATCH] function cannot operate on [text], which is not a field from an index mapping", - e.getMessage().substring(header.length()) - ); - } - - public void testMatchFunctionIsNotNullable() { - String queryText = """ - row n = null | eval text = n + 5 | where match(text::keyword, "Anna") - """; - - VerificationException ve = expectThrows(VerificationException.class, () -> plan(queryText)); - assertThat( - ve.getMessage(), - containsString("[MATCH] function cannot operate on [text::keyword], which is not a field from an index mapping") - ); - } - public void testWhereNull() { var plan = plan(""" from test diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java index 4c2913031271f..aa735e5cb6d86 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java @@ -125,7 +125,7 @@ public void testFailedMetric() { new EsqlExecutionInfo(randomBoolean()), groupIndicesByCluster, runPhase, - EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER, + EsqlTestUtils.MOCK_TRANSPORT_ACTION_SERVICES, new ActionListener<>() { @Override public void onResponse(Result result) { @@ -156,7 +156,7 @@ public void onFailure(Exception e) { new EsqlExecutionInfo(randomBoolean()), groupIndicesByCluster, runPhase, - EsqlTestUtils.MOCK_QUERY_BUILDER_RESOLVER, + EsqlTestUtils.MOCK_TRANSPORT_ACTION_SERVICES, new ActionListener<>() { @Override public void onResponse(Result result) {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index 285739fe0936f..eafdb6366afd4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -332,11 +332,12 @@ private static String getInferenceIdForForField(Collection indexM protected boolean doEquals(SemanticQueryBuilder other) { return Objects.equals(fieldName, other.fieldName) && Objects.equals(query, other.query) - && Objects.equals(inferenceResults, other.inferenceResults); + && Objects.equals(inferenceResults, other.inferenceResults) + 
&& Objects.equals(inferenceResultsSupplier, other.inferenceResultsSupplier); } @Override protected int doHashCode() { - return Objects.hash(fieldName, query, inferenceResults); + return Objects.hash(fieldName, query, inferenceResults, inferenceResultsSupplier); } } From f6ca4e17f06aeb84a0e398f1e2e59e22211eaa6b Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 31 Jan 2025 11:38:27 +0000 Subject: [PATCH 316/383] Improve logging of put-mapping failures (#121372) No sense in converting to a list just to convert to a string, we may as well convert directly to a string. Also removes the unnecessary extra `[]` wrapper. --- .../indices/mapping/put/PutMappingIT.java | 55 +++++++++++++++++++ .../put/TransportPutMappingAction.java | 35 +++++------- .../metadata/MetadataMappingService.java | 2 +- 3 files changed, 71 insertions(+), 21 deletions(-) create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingIT.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingIT.java new file mode 100644 index 0000000000000..0a29b99ca6fdc --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingIT.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
+package org.elasticsearch.action.admin.indices.mapping.put;
+
+import org.apache.logging.log4j.Level;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.MockLog;
+import org.elasticsearch.test.junit.annotations.TestLogging;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class PutMappingIT extends ESSingleNodeTestCase {
+
+    @TestLogging(
+        reason = "testing DEBUG logging",
+        value = "org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction:DEBUG"
+    )
+    public void testFailureLogging() {
+        final var indexName = randomIdentifier();
+        createIndex(indexName);
+        final var fieldName = randomIdentifier();
+        safeGet(client().execute(TransportPutMappingAction.TYPE, new PutMappingRequest(indexName).source(fieldName, "type=keyword")));
+        MockLog.assertThatLogger(
+            () -> assertThat(
+                asInstanceOf(
+                    IllegalArgumentException.class,
+                    safeAwaitFailure(
+                        AcknowledgedResponse.class,
+                        l -> client().execute(
+                            TransportPutMappingAction.TYPE,
+                            new PutMappingRequest(indexName).source(fieldName, "type=long"),
+                            l
+                        )
+                    )
+                ).getMessage(),
+                equalTo("mapper [" + fieldName + "] cannot be changed from type [keyword] to [long]")
+            ),
+            TransportPutMappingAction.class,
+            new MockLog.SeenEventExpectation(
+                "failure message",
+                TransportPutMappingAction.class.getCanonicalName(),
+                Level.DEBUG,
+                "failed to put mappings on indices [[" + indexName
+            )
+        );
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java
index 2870a6538f8bb..1c99d84900866 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java
@@ -36,7 +36,6 @@
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -126,7 +125,7 @@ protected void masterOperation(
             performMappingUpdate(concreteIndices, request, listener, metadataMappingService, false);
         } catch (IndexNotFoundException ex) {
-            logger.debug(() -> "failed to put mappings on indices [" + Arrays.asList(request.indices() + "]"), ex);
+            logger.debug(() -> "failed to put mappings on indices " + Arrays.toString(request.indices()), ex);
             throw ex;
         }
     }
@@ -162,25 +161,21 @@ static void performMappingUpdate(
         MetadataMappingService metadataMappingService,
         boolean autoUpdate
     ) {
-        final ActionListener wrappedListener = listener.delegateResponse((l, e) -> {
-            logger.debug(() -> "failed to put mappings on indices [" + Arrays.asList(concreteIndices) + "]", e);
+        ActionListener.run(listener.delegateResponse((l, e) -> {
+            logger.debug(() -> "failed to put mappings on indices " + Arrays.toString(concreteIndices), e);
             l.onFailure(e);
-        });
-        final PutMappingClusterStateUpdateRequest updateRequest;
-        try {
-            updateRequest = new PutMappingClusterStateUpdateRequest(
-                request.masterNodeTimeout(),
-                request.ackTimeout(),
-                request.source(),
-                autoUpdate,
-                concreteIndices
-            );
-        } catch (IOException e) {
-            wrappedListener.onFailure(e);
-            return;
-        }
-
-        metadataMappingService.putMapping(updateRequest, wrappedListener);
+        }),
+            wrappedListener -> metadataMappingService.putMapping(
+                new
PutMappingClusterStateUpdateRequest( + request.masterNodeTimeout(), + request.ackTimeout(), + request.source(), + autoUpdate, + concreteIndices + ), + wrappedListener + ) + ); } static String checkForFailureStoreViolations(ClusterState clusterState, Index[] concreteIndices, PutMappingRequest request) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java index 5d45bf1ce127e..8b8c3f12cdf9f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java @@ -56,7 +56,7 @@ public class MetadataMappingService { public MetadataMappingService(ClusterService clusterService, IndicesService indicesService) { this.clusterService = clusterService; this.indicesService = indicesService; - taskQueue = clusterService.createTaskQueue("put-mapping", Priority.HIGH, new PutMappingExecutor()); + this.taskQueue = clusterService.createTaskQueue("put-mapping", Priority.HIGH, new PutMappingExecutor()); } record PutMappingClusterStateUpdateTask(PutMappingClusterStateUpdateRequest request, ActionListener listener) From 2e84950cb3476d7cf1f347326069a099e25ec3db Mon Sep 17 00:00:00 2001 From: David Kyle Date: Fri, 31 Jan 2025 12:15:35 +0000 Subject: [PATCH 317/383] [ML] Unmute XPackRestIT and mute all ml and transform tests (#121377) --- muted-tests.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 05cdb0bc15721..b41da58379675 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -231,9 +231,6 @@ tests: - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT method: test {p0=data_stream/140_data_stream_aliases/Create data stream aliases using wildcard expression} issue: https://github.com/elastic/elasticsearch/issues/120890 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=ml/inference_crud/*} - issue: https://github.com/elastic/elasticsearch/issues/120816 - class: org.elasticsearch.xpack.security.authc.service.ServiceAccountIT method: testAuthenticateShouldNotFallThroughInCaseOfFailure issue: https://github.com/elastic/elasticsearch/issues/120902 @@ -330,6 +327,10 @@ tests: method: testCrossClusterAsyncQueryStop issue: https://github.com/elastic/elasticsearch/issues/121249 - class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=transform/*} + issue: https://github.com/elastic/elasticsearch/issues/120816 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=ml/*} issue: https://github.com/elastic/elasticsearch/issues/120816 - class: org.elasticsearch.upgrades.VectorSearchIT method: testBBQVectorSearch {upgradedNodes=0} From 859d92cfd7c987f7d8deb749eba6e73d98c457a5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 1 Feb 2025 00:50:15 +1100 Subject: [PATCH 318/383] Mute org.elasticsearch.test.rest.yaml.RcsCcsCommonYamlTestSuiteIT test {p0=search.vectors/42_knn_search_int4_flat/KNN Vector similarity search only} #121395 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b41da58379675..aa43ad37c2871 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -386,6 +386,9 @@ tests: - class: org.elasticsearch.smoketest.SmokeTestMultiNodeClientYamlTestSuiteIT method: test 
{yaml=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only} issue: https://github.com/elastic/elasticsearch/issues/121350 +- class: org.elasticsearch.test.rest.yaml.RcsCcsCommonYamlTestSuiteIT + method: test {p0=search.vectors/42_knn_search_int4_flat/KNN Vector similarity search only} + issue: https://github.com/elastic/elasticsearch/issues/121395 # Examples: # From c3f752054e796e0c427eca0f1223bf4e126bd89c Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 31 Jan 2025 05:51:44 -0800 Subject: [PATCH 319/383] Pass environment paths into entitlement bootstrap (#121347) This commit adds the data dirs, config dir and temp dir into entitlement bootstrapping. It doesn't yet use them in entitlement policies, but makes them available to use within initialization. --- .../bootstrap/EntitlementBootstrap.java | 27 ++++++++++++++++--- .../bootstrap/Elasticsearch.java | 8 +++++- 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index 496a28a448381..e7312103f9921 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -31,10 +31,22 @@ public class EntitlementBootstrap { - public record BootstrapArgs(Map pluginPolicies, Function, String> pluginResolver) { + public record BootstrapArgs( + Map pluginPolicies, + Function, String> pluginResolver, + Path[] dataDirs, + Path configDir, + Path tempDir + ) { public BootstrapArgs { requireNonNull(pluginPolicies); requireNonNull(pluginResolver); + requireNonNull(dataDirs); + if (dataDirs.length == 0) { + throw new IllegalArgumentException("must provide at least one data directory"); + } + requireNonNull(configDir); + requireNonNull(tempDir); } } @@ -50,13 +62,22 @@ public static BootstrapArgs bootstrapArgs() { * * @param pluginPolicies a map holding policies for plugins (and modules), by plugin (or module) name. * @param pluginResolver a functor to map a Java Class to the plugin it belongs to (the plugin name). 
+ * @param dataDirs data directories for Elasticsearch + * @param configDir the config directory for Elasticsearch + * @param tempDir the temp directory for Elasticsearch */ - public static void bootstrap(Map pluginPolicies, Function, String> pluginResolver) { + public static void bootstrap( + Map pluginPolicies, + Function, String> pluginResolver, + Path[] dataDirs, + Path configDir, + Path tempDir + ) { logger.debug("Loading entitlement agent"); if (EntitlementBootstrap.bootstrapArgs != null) { throw new IllegalStateException("plugin data is already set"); } - EntitlementBootstrap.bootstrapArgs = new BootstrapArgs(pluginPolicies, pluginResolver); + EntitlementBootstrap.bootstrapArgs = new BootstrapArgs(pluginPolicies, pluginResolver, dataDirs, configDir, tempDir); exportInitializationToAgent(); loadAgent(findAgentJar()); selfTest(); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 1c959d4157f9b..9256a30298951 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -242,7 +242,13 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles, findPluginsWithNativeAccess(pluginPolicies)); var pluginsResolver = PluginsResolver.create(pluginsLoader); - EntitlementBootstrap.bootstrap(pluginPolicies, pluginsResolver::resolveClassToPluginName); + EntitlementBootstrap.bootstrap( + pluginPolicies, + pluginsResolver::resolveClassToPluginName, + nodeEnv.dataFiles(), + nodeEnv.configFile(), + nodeEnv.tmpFile() + ); } else if (RuntimeVersionFeature.isSecurityManagerAvailable()) { // no need to explicitly enable native access for legacy code pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles, Map.of()); From b1dcfb57b1334b03d142a7cf5686e4d7325a2341 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 31 Jan 2025 05:56:14 -0800 Subject: [PATCH 320/383] Always use String getLogger with log4j (#121250) This commit forces the delegate for ES logging to always use the String version of LogManager.getLogger instead of the one taking a Class. The reason is that if a classloader is not in the hierarchy of the app classloader, the ES logging configuration will not be found. By using the String variant, the app classloader is always used. --- .../common/logging/internal/LoggerFactoryImpl.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/common/logging/internal/LoggerFactoryImpl.java b/server/src/main/java/org/elasticsearch/common/logging/internal/LoggerFactoryImpl.java index 6b92f87a9be23..e8354be5ea225 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/internal/LoggerFactoryImpl.java +++ b/server/src/main/java/org/elasticsearch/common/logging/internal/LoggerFactoryImpl.java @@ -22,6 +22,12 @@ public Logger getLogger(String name) { @Override public Logger getLogger(Class clazz) { - return new LoggerImpl(LogManager.getLogger(clazz)); + // Elasticsearch configures logging at the root level, it does not support + // programmatic configuration at the logger level. Log4j's method for + // getting a logger by Class doesn't just use the class name, but also + // scans the classloader hierarchy for programmatic configuration. 
Here we + // just delegate to use the String class name so that regardless of which + // classloader a class comes from, we will use the root logging config. + return getLogger(clazz.getName()); } } From 2a1b4339822e29665d13b82c3aea08374b77bc56 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Fri, 31 Jan 2025 08:56:36 -0500 Subject: [PATCH 321/383] [ML] Adding fields for Inference service configuration API (#121103) * Adding fields and making some optional * Fixing tests --- .../AlibabaCloudSearchService.java | 2 +- .../amazonbedrock/AmazonBedrockService.java | 15 ++ .../azureaistudio/AzureAiStudioService.java | 15 ++ .../azureopenai/AzureOpenAiService.java | 15 ++ .../services/cohere/CohereService.java | 15 ++ .../googlevertexai/GoogleVertexAiService.java | 2 + .../services/jinaai/JinaAIService.java | 29 ++++ .../services/openai/OpenAiService.java | 19 ++- .../AlibabaCloudSearchServiceTests.java | 2 +- .../AmazonBedrockServiceTests.java | 133 ++++++++++-------- .../AzureAiStudioServiceTests.java | 115 ++++++++------- .../azureopenai/AzureOpenAiServiceTests.java | 9 ++ .../services/cohere/CohereServiceTests.java | 9 ++ .../services/jinaai/JinaAIServiceTests.java | 70 +++++---- .../services/openai/OpenAiServiceTests.java | 19 ++- 15 files changed, 308 insertions(+), 161 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java index 0fd0c281d8bc6..589ca1e033f06 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java @@ -409,7 +409,7 @@ public static InferenceServiceConfiguration get() { HTTP_SCHEMA_NAME, new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("") .setLabel("HTTP Schema") - .setRequired(true) + .setRequired(false) .setSensitive(false) .setUpdatable(false) .setType(SettingsConfigurationFieldType.STRING) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java index e13c668197a8f..493acd3c0cd1a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java @@ -54,6 +54,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -413,6 +414,20 @@ public static InferenceServiceConfiguration get() { .build() ); + configurationMap.put( + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. 
For more information refer to " + + "https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-embed-text.html." + ) + .setLabel("Dimensions") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.INTEGER) + .build() + ); + configurationMap.putAll(AmazonBedrockSecretSettings.Configuration.get()); configurationMap.putAll( RateLimitSettings.toSettingsConfigurationWithDescription( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java index 88d5b54398d06..34a5c2b4cc1e9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java @@ -53,6 +53,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -441,6 +442,20 @@ public static InferenceServiceConfiguration get() { .build() ); + configurationMap.put( + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. For more information refer to " + + "https://learn.microsoft.com/en-us/azure/ai-studio/reference/reference-model-inference-embeddings." + ) + .setLabel("Dimensions") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.INTEGER) + .build() + ); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index 5b622d68f2c25..9a77b63337978 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -50,6 +50,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -382,6 +383,20 @@ public static InferenceServiceConfiguration get() { .build() ); + configurationMap.put( + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. 
For more information refer to " + + "https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#request-body-1." + ) + .setLabel("Dimensions") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.INTEGER) + .build() + ); + configurationMap.putAll(AzureOpenAiSecretSettings.Configuration.get()); configurationMap.putAll( RateLimitSettings.toSettingsConfigurationWithDescription( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 60326a8a34ca3..6c2d3bb96d74d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -25,6 +25,7 @@ import org.elasticsearch.inference.SettingsConfiguration; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; @@ -51,6 +52,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -363,6 +365,19 @@ public static InferenceServiceConfiguration get() { () -> { var configurationMap = new HashMap(); + configurationMap.put( + MODEL_ID, + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "The name of the model to use for the inference task." 
+ ) + .setLabel("Model ID") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.STRING) + .build() + ); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java index 55397b2398d39..3e921f669e864 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java @@ -327,6 +327,8 @@ public static InferenceServiceConfiguration get() { () -> { var configurationMap = new HashMap(); + // TODO whether the model ID is required or not depends on the task type + // For rerank it is optional, for text_embedding it is required configurationMap.put( MODEL_ID, new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("ID of the LLM you're using.") diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java index 7ad70fc88054d..37add1e264704 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java @@ -25,6 +25,7 @@ import org.elasticsearch.inference.SettingsConfiguration; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; @@ -49,6 +50,7 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -339,6 +341,33 @@ public static InferenceServiceConfiguration get() { () -> { var configurationMap = new HashMap(); + configurationMap.put( + JinaAIServiceSettings.MODEL_ID, + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "The name of the model to use for the inference task." + ) + .setLabel("Model ID") + .setRequired(true) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.STRING) + .build() + ); + + configurationMap.put( + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. For more information refer to " + + "https://api.jina.ai/redoc#tag/embeddings/operation/create_embedding_v1_embeddings_post." 
+ ) + .setLabel("Dimensions") + .setRequired(false) + .setSensitive(false) + .setUpdatable(false) + .setType(SettingsConfigurationFieldType.INTEGER) + .build() + ); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 0ce5bc801b59f..8a420a62d1bce 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -56,8 +56,8 @@ import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; import static org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionCreator.COMPLETION_ERROR_PREFIX; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; -import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; @@ -440,19 +440,16 @@ public static InferenceServiceConfiguration get() { ); configurationMap.put( - URL, - new SettingsConfiguration.Builder(SUPPORTED_TASK_TYPES_FOR_SERVICES_API).setDefaultValue( - "https://api.openai.com/v1/chat/completions" + DIMENSIONS, + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "The number of dimensions the resulting embeddings should have. For more information refer to " + + "https://platform.openai.com/docs/api-reference/embeddings/create#embeddings-create-dimensions." ) - .setDescription( - "The OpenAI API endpoint URL. For more information on the URL, refer to the " - + "https://platform.openai.com/docs/api-reference." 
- ) - .setLabel("URL") - .setRequired(true) + .setLabel("Dimensions") + .setRequired(false) .setSensitive(false) .setUpdatable(false) - .setType(SettingsConfigurationFieldType.STRING) + .setType(SettingsConfigurationFieldType.INTEGER) .build() ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java index 92544d5535acb..1ca50d1887ee1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java @@ -490,7 +490,7 @@ public void testGetConfiguration() throws Exception { "http_schema": { "description": "", "label": "HTTP Schema", - "required": true, + "required": false, "sensitive": false, "updatable": false, "type": "str", diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java index c11d4b4c7923d..ec41388684df1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java @@ -154,69 +154,80 @@ public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOExcepti @SuppressWarnings("checkstyle:LineLength") public void testGetConfiguration() throws Exception { try (var service = createAmazonBedrockService()) { - String content = XContentHelper.stripWhitespace(""" - { - "service": "amazonbedrock", - "name": "Amazon Bedrock", - "task_types": ["text_embedding", "completion"], - "configurations": { - "secret_key": { - "description": "A valid AWS secret key that is paired with the access_key.", - "label": "Secret Key", - "required": true, - "sensitive": true, - "updatable": true, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "provider": { - "description": "The model provider for your deployment.", - "label": "Provider", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "access_key": { - "description": "A valid AWS access key that has permissions to use Amazon Bedrock.", - "label": "Access Key", - "required": true, - "sensitive": true, - "updatable": true, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "model": { - "description": "The base model ID or an ARN to a custom model based on a foundational model.", - "label": "Model", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "rate_limit.requests_per_minute": { - "description": "By default, the amazonbedrock service sets the number of requests allowed per minute to 240.", - "label": "Rate Limit", - "required": false, - "sensitive": false, - "updatable": false, - "type": "int", - "supported_task_types": ["text_embedding", "completion"] - }, - "region": { - "description": "The region that your model or ARN is 
deployed in.", - "label": "Region", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] + String content = XContentHelper.stripWhitespace( + """ + { + "service": "amazonbedrock", + "name": "Amazon Bedrock", + "task_types": ["text_embedding", "completion"], + "configurations": { + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. For more information refer to https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-embed-text.html.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, + "secret_key": { + "description": "A valid AWS secret key that is paired with the access_key.", + "label": "Secret Key", + "required": true, + "sensitive": true, + "updatable": true, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "provider": { + "description": "The model provider for your deployment.", + "label": "Provider", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "access_key": { + "description": "A valid AWS access key that has permissions to use Amazon Bedrock.", + "label": "Access Key", + "required": true, + "sensitive": true, + "updatable": true, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "model": { + "description": "The base model ID or an ARN to a custom model based on a foundational model.", + "label": "Model", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "rate_limit.requests_per_minute": { + "description": "By default, the amazonbedrock service sets the number of requests allowed per minute to 240.", + "label": "Rate Limit", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding", "completion"] + }, + "region": { + "description": "The region that your model or ARN is deployed in.", + "label": "Region", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + } } } - } - """); + """ + ); InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes( new BytesArray(content), XContentType.JSON diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java index d2e4652b96488..77ed889fc7361 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java @@ -1389,60 +1389,71 @@ public void testInfer_StreamRequest_ErrorResponse() throws Exception { @SuppressWarnings("checkstyle:LineLength") public void testGetConfiguration() throws Exception { try (var service = createService()) { - String content = XContentHelper.stripWhitespace(""" - { - "service": "azureaistudio", - "name": "Azure AI Studio", - "task_types": ["text_embedding", "completion"], - "configurations": { - "endpoint_type": { 
- "description": "Specifies the type of endpoint that is used in your model deployment.", - "label": "Endpoint Type", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "provider": { - "description": "The model provider for your deployment.", - "label": "Provider", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "api_key": { - "description": "API Key for the provider you're connecting to.", - "label": "API Key", - "required": true, - "sensitive": true, - "updatable": true, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] - }, - "rate_limit.requests_per_minute": { - "description": "Minimize the number of rate limit errors.", - "label": "Rate Limit", - "required": false, - "sensitive": false, - "updatable": false, - "type": "int", - "supported_task_types": ["text_embedding", "completion"] - }, - "target": { - "description": "The target URL of your Azure AI Studio model deployment.", - "label": "Target", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion"] + String content = XContentHelper.stripWhitespace( + """ + { + "service": "azureaistudio", + "name": "Azure AI Studio", + "task_types": ["text_embedding", "completion"], + "configurations": { + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. For more information refer to https://learn.microsoft.com/en-us/azure/ai-studio/reference/reference-model-inference-embeddings.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, + "endpoint_type": { + "description": "Specifies the type of endpoint that is used in your model deployment.", + "label": "Endpoint Type", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "provider": { + "description": "The model provider for your deployment.", + "label": "Provider", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "api_key": { + "description": "API Key for the provider you're connecting to.", + "label": "API Key", + "required": true, + "sensitive": true, + "updatable": true, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + }, + "rate_limit.requests_per_minute": { + "description": "Minimize the number of rate limit errors.", + "label": "Rate Limit", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding", "completion"] + }, + "target": { + "description": "The target URL of your Azure AI Studio model deployment.", + "label": "Target", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "completion"] + } } } - } - """); + """ + ); InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes( new BytesArray(content), XContentType.JSON diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index 52527d74aad19..3500f11b199af 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -1473,6 +1473,15 @@ public void testGetConfiguration() throws Exception { "type": "str", "supported_task_types": ["text_embedding", "completion"] }, + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. For more information refer to https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#request-body-1.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, "entra_id": { "description": "You must provide either an API key or an Entra ID.", "label": "Entra ID", diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 86b3edc4130da..b1c5e02fb6f51 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -1648,6 +1648,15 @@ public void testGetConfiguration() throws Exception { "type": "str", "supported_task_types": ["text_embedding", "rerank", "completion"] }, + "model_id": { + "description": "The name of the model to use for the inference task.", + "label": "Model ID", + "required": false, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "rerank", "completion"] + }, "rate_limit.requests_per_minute": { "description": "Minimize the number of rate limit errors.", "label": "Rate Limit", diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java index 5fa14da4ba733..2aeb0447f9c78 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java @@ -1831,33 +1831,53 @@ public void testDefaultSimilarity() { @SuppressWarnings("checkstyle:LineLength") public void testGetConfiguration() throws Exception { try (var service = createJinaAIService()) { - String content = XContentHelper.stripWhitespace(""" - { - "service": "jinaai", - "name": "Jina AI", - "task_types": ["text_embedding", "rerank"], - "configurations": { - "api_key": { - "description": "API Key for the provider you're connecting to.", - "label": "API Key", - "required": true, - "sensitive": true, - "updatable": true, - "type": "str", - "supported_task_types": ["text_embedding", "rerank"] - }, - "rate_limit.requests_per_minute": { - "description": "Minimize the number of rate limit errors.", - "label": "Rate Limit", - "required": false, - "sensitive": false, - "updatable": false, - "type": "int", - "supported_task_types": ["text_embedding", "rerank"] + String content = 
XContentHelper.stripWhitespace( + """ + { + "service": "jinaai", + "name": "Jina AI", + "task_types": ["text_embedding", "rerank"], + "configurations": { + "api_key": { + "description": "API Key for the provider you're connecting to.", + "label": "API Key", + "required": true, + "sensitive": true, + "updatable": true, + "type": "str", + "supported_task_types": ["text_embedding", "rerank"] + }, + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. For more information refer to https://api.jina.ai/redoc#tag/embeddings/operation/create_embedding_v1_embeddings_post.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, + "model_id": { + "description": "The name of the model to use for the inference task.", + "label": "Model ID", + "required": true, + "sensitive": false, + "updatable": false, + "type": "str", + "supported_task_types": ["text_embedding", "rerank"] + }, + "rate_limit.requests_per_minute": { + "description": "Minimize the number of rate limit errors.", + "label": "Rate Limit", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding", "rerank"] + } } } - } - """); + """ + ); InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes( new BytesArray(content), XContentType.JSON diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 6fddbf4450283..50c028fab28dd 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -1752,6 +1752,15 @@ public void testGetConfiguration() throws Exception { "type": "str", "supported_task_types": ["text_embedding", "completion", "chat_completion"] }, + "dimensions": { + "description": "The number of dimensions the resulting embeddings should have. For more information refer to https://platform.openai.com/docs/api-reference/embeddings/create#embeddings-create-dimensions.", + "label": "Dimensions", + "required": false, + "sensitive": false, + "updatable": false, + "type": "int", + "supported_task_types": ["text_embedding"] + }, "organization_id": { "description": "The unique identifier of your organization.", "label": "Organization ID", @@ -1778,16 +1787,6 @@ public void testGetConfiguration() throws Exception { "updatable": false, "type": "str", "supported_task_types": ["text_embedding", "completion", "chat_completion"] - }, - "url": { - "default_value": "https://api.openai.com/v1/chat/completions", - "description": "The OpenAI API endpoint URL. 
For more information on the URL, refer to the https://platform.openai.com/docs/api-reference.", - "label": "URL", - "required": true, - "sensitive": false, - "updatable": false, - "type": "str", - "supported_task_types": ["text_embedding", "completion", "chat_completion"] } } } From 9f572a310a6937e7a49fc0b84dbe2cefa4919a1a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 1 Feb 2025 01:05:31 +1100 Subject: [PATCH 322/383] Mute org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT #121407 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index aa43ad37c2871..d7ae2b22f1cb3 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -389,6 +389,8 @@ tests: - class: org.elasticsearch.test.rest.yaml.RcsCcsCommonYamlTestSuiteIT method: test {p0=search.vectors/42_knn_search_int4_flat/KNN Vector similarity search only} issue: https://github.com/elastic/elasticsearch/issues/121395 +- class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT + issue: https://github.com/elastic/elasticsearch/issues/121407 # Examples: # From 4642f1511b2f2513258deb712f6caa4548c6ab72 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Fri, 31 Jan 2025 15:08:15 +0100 Subject: [PATCH 323/383] [Inference API] Wait for assignments to happen in InferenceServiceNodeLocalRateLimitCalculatorTests. (#121379) --- muted-tests.yml | 2 - ...viceNodeLocalRateLimitCalculatorTests.java | 61 +++++++++++-------- 2 files changed, 37 insertions(+), 26 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index d7ae2b22f1cb3..f721b706a29e3 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -360,8 +360,6 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=indices.get_alias/10_basic/Get aliases via /*/_alias/} issue: https://github.com/elastic/elasticsearch/issues/121290 -- class: org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests - issue: https://github.com/elastic/elasticsearch/issues/121294 - class: org.elasticsearch.env.NodeEnvironmentTests method: testGetBestDowngradeVersion issue: https://github.com/elastic/elasticsearch/issues/121316 diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java index f6bc7e5981411..55026afd48e6c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java @@ -15,10 +15,10 @@ import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; -import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Set; +import java.util.concurrent.TimeUnit; import static org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator.DEFAULT_MAX_NODES_PER_GROUPING; import static org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator.SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS; @@ -27,38 +27,36 @@ 
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 0) public class InferenceServiceNodeLocalRateLimitCalculatorTests extends ESIntegTestCase { + private static final Integer RATE_LIMIT_ASSIGNMENT_MAX_WAIT_TIME_IN_SECONDS = 15; + public void setUp() throws Exception { super.setUp(); } - public void testInitialClusterGrouping_Correct() { + public void testInitialClusterGrouping_Correct() throws Exception { // Start with 2-5 nodes var numNodes = randomIntBetween(2, 5); var nodeNames = internalCluster().startNodes(numNodes); ensureStableCluster(numNodes); - RateLimitAssignment firstAssignment = null; + var firstCalculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + waitForRateLimitingAssignments(firstCalculator); - for (String nodeName : nodeNames) { - var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeName); + RateLimitAssignment firstAssignment = firstCalculator.getRateLimitAssignment( + ElasticInferenceService.NAME, + TaskType.SPARSE_EMBEDDING + ); - // Check first node's assignments - if (firstAssignment == null) { - // Get assignment for a specific service (e.g., EIS) - firstAssignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, TaskType.SPARSE_EMBEDDING); - - assertNotNull(firstAssignment); - // Verify there are assignments for this service - assertFalse(firstAssignment.responsibleNodes().isEmpty()); - } else { - // Verify other nodes see the same assignment - var currentAssignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, TaskType.SPARSE_EMBEDDING); - assertEquals(firstAssignment, currentAssignment); - } + // Verify that all other nodes land on the same assignment + for (String nodeName : nodeNames.subList(1, nodeNames.size())) { + var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeName); + waitForRateLimitingAssignments(calculator); + var currentAssignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, TaskType.SPARSE_EMBEDDING); + assertEquals(firstAssignment, currentAssignment); } } - public void testNumberOfNodesPerGroup_Decreases_When_NodeLeavesCluster() throws IOException { + public void testNumberOfNodesPerGroup_Decreases_When_NodeLeavesCluster() throws Exception { // Start with 3-5 nodes var numNodes = randomIntBetween(3, 5); var nodeNames = internalCluster().startNodes(numNodes); @@ -78,6 +76,7 @@ public void testNumberOfNodesPerGroup_Decreases_When_NodeLeavesCluster() throws } var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeLeftInCluster); + waitForRateLimitingAssignments(calculator); Set supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); @@ -93,13 +92,14 @@ public void testNumberOfNodesPerGroup_Decreases_When_NodeLeavesCluster() throws } } - public void testGrouping_RespectsMaxNodesPerGroupingLimit() { + public void testGrouping_RespectsMaxNodesPerGroupingLimit() throws Exception { // Start with more nodes possible per grouping var numNodes = DEFAULT_MAX_NODES_PER_GROUPING + randomIntBetween(1, 3); var nodeNames = internalCluster().startNodes(numNodes); ensureStableCluster(numNodes); var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + waitForRateLimitingAssignments(calculator); Set supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); @@ -111,13 +111,14 @@ 
public void testGrouping_RespectsMaxNodesPerGroupingLimit() { } } - public void testInitialRateLimitsCalculation_Correct() throws IOException { + public void testInitialRateLimitsCalculation_Correct() throws Exception { // Start with max nodes per grouping (=3) int numNodes = DEFAULT_MAX_NODES_PER_GROUPING; var nodeNames = internalCluster().startNodes(numNodes); ensureStableCluster(numNodes); var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + waitForRateLimitingAssignments(calculator); Set supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); @@ -129,7 +130,7 @@ public void testInitialRateLimitsCalculation_Correct() throws IOException { if ((service instanceof SenderService senderService)) { var sender = senderService.getSender(); - if (sender instanceof HttpRequestSender httpSender) { + if (sender instanceof HttpRequestSender) { var assignment = calculator.getRateLimitAssignment(service.name(), TaskType.SPARSE_EMBEDDING); assertNotNull(assignment); @@ -141,13 +142,14 @@ public void testInitialRateLimitsCalculation_Correct() throws IOException { } } - public void testRateLimits_Decrease_OnNodeJoin() { + public void testRateLimits_Decrease_OnNodeJoin() throws Exception { // Start with 2 nodes var initialNodes = 2; var nodeNames = internalCluster().startNodes(initialNodes); ensureStableCluster(initialNodes); var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + waitForRateLimitingAssignments(calculator); for (var serviceName : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet()) { var configs = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.get(serviceName); @@ -159,6 +161,7 @@ public void testRateLimits_Decrease_OnNodeJoin() { // Add a new node internalCluster().startNode(); ensureStableCluster(initialNodes + 1); + waitForRateLimitingAssignments(calculator); // Get updated assignments var updatedAssignment = calculator.getRateLimitAssignment(serviceName, config.taskType()); @@ -169,13 +172,14 @@ public void testRateLimits_Decrease_OnNodeJoin() { } } - public void testRateLimits_Increase_OnNodeLeave() throws IOException { + public void testRateLimits_Increase_OnNodeLeave() throws Exception { // Start with max nodes per grouping (=3) int numNodes = DEFAULT_MAX_NODES_PER_GROUPING; var nodeNames = internalCluster().startNodes(numNodes); ensureStableCluster(numNodes); var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + waitForRateLimitingAssignments(calculator); for (var serviceName : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet()) { var configs = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.get(serviceName); @@ -188,6 +192,7 @@ public void testRateLimits_Increase_OnNodeLeave() throws IOException { var nodeToRemove = nodeNames.get(numNodes - 1); internalCluster().stopNode(nodeToRemove); ensureStableCluster(numNodes - 1); + waitForRateLimitingAssignments(calculator); // Get updated assignments var updatedAssignment = calculator.getRateLimitAssignment(serviceName, config.taskType()); @@ -202,4 +207,12 @@ public void testRateLimits_Increase_OnNodeLeave() throws IOException { protected Collection> nodePlugins() { return Arrays.asList(LocalStateInferencePlugin.class); } + + private void waitForRateLimitingAssignments(InferenceServiceNodeLocalRateLimitCalculator calculator) throws Exception { + assertBusy(() -> { + var assignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, 
TaskType.SPARSE_EMBEDDING); + assertNotNull(assignment); + assertFalse(assignment.responsibleNodes().isEmpty()); + }, RATE_LIMIT_ASSIGNMENT_MAX_WAIT_TIME_IN_SECONDS, TimeUnit.SECONDS); + } } From 3444f91345699c41e66a0f3fe2154327ebe82ec7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 1 Feb 2025 02:02:03 +1100 Subject: [PATCH 324/383] Mute org.elasticsearch.xpack.ml.integration.ClassificationIT testDependentVariableIsAliasToNested #121415 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f721b706a29e3..a326f8d9eac4f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -389,6 +389,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/121395 - class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/121407 +- class: org.elasticsearch.xpack.ml.integration.ClassificationIT + method: testDependentVariableIsAliasToNested + issue: https://github.com/elastic/elasticsearch/issues/121415 # Examples: # From 198a187a8ae931f07750fb6fead48b75a14f8da6 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 31 Jan 2025 07:09:20 -0800 Subject: [PATCH 325/383] Trigger DRA snapshots for 9.x branches as well --- .buildkite/pipelines/intake.template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/pipelines/intake.template.yml b/.buildkite/pipelines/intake.template.yml index d1400bdb83da0..75c7a339b8cea 100644 --- a/.buildkite/pipelines/intake.template.yml +++ b/.buildkite/pipelines/intake.template.yml @@ -96,7 +96,7 @@ steps: - trigger: elasticsearch-dra-workflow label: Trigger DRA snapshot workflow async: true - branches: "main 8.* 7.17" + branches: "main 9.* 8.* 7.17" build: branch: "$BUILDKITE_BRANCH" commit: "$BUILDKITE_COMMIT" From 37c929b9dc3443b16eca3d92d946002703cde709 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 31 Jan 2025 07:09:51 -0800 Subject: [PATCH 326/383] Make entitlement IT tests reflective (#121355) This commit adds an EntitlementTest annotation that can be used on classes containing test actions for entitlements. The annotation mirrors the parameters of CheckAction. Only file check actions are currently converted, the rest can be moved and annotated as followups. Note that the check action name is simply the method name, no fancy name manipulation is done. --- .../entitlement/qa/test/EntitlementTest.java | 29 ++ .../entitlement/qa/test/FileCheckActions.java | 11 + .../qa/test/RestEntitlementsCheckAction.java | 318 ++++++++++-------- 3 files changed, 223 insertions(+), 135 deletions(-) create mode 100644 libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java new file mode 100644 index 0000000000000..953d02bccf1e4 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/EntitlementTest.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.test; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +public @interface EntitlementTest { + enum ExpectedAccess { + PLUGINS, + ES_MODULES_ONLY, + ALWAYS_DENIED + } + + ExpectedAccess expectedAccess(); + + int fromJavaVersion() default -1; +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java index 6e15ff4d0cdd1..6d43f58c532c9 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java @@ -22,6 +22,8 @@ import java.nio.file.attribute.UserPrincipal; import java.util.Scanner; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; + @SuppressForbidden(reason = "Explicitly checking APIs that are forbidden") class FileCheckActions { @@ -43,38 +45,47 @@ private static Path readWriteFile() { return testRootDir.resolve("read_write_file"); } + @EntitlementTest(expectedAccess = PLUGINS) static void createScannerFile() throws FileNotFoundException { new Scanner(readFile().toFile()); } + @EntitlementTest(expectedAccess = PLUGINS) static void createScannerFileWithCharset() throws IOException { new Scanner(readFile().toFile(), StandardCharsets.UTF_8); } + @EntitlementTest(expectedAccess = PLUGINS) static void createScannerFileWithCharsetName() throws FileNotFoundException { new Scanner(readFile().toFile(), "UTF-8"); } + @EntitlementTest(expectedAccess = PLUGINS) static void createFileOutputStreamString() throws IOException { new FileOutputStream(readWriteFile().toString()).close(); } + @EntitlementTest(expectedAccess = PLUGINS) static void createFileOutputStreamStringWithAppend() throws IOException { new FileOutputStream(readWriteFile().toString(), false).close(); } + @EntitlementTest(expectedAccess = PLUGINS) static void createFileOutputStreamFile() throws IOException { new FileOutputStream(readWriteFile().toFile()).close(); } + @EntitlementTest(expectedAccess = PLUGINS) static void createFileOutputStreamFileWithAppend() throws IOException { new FileOutputStream(readWriteFile().toFile(), false).close(); } + @EntitlementTest(expectedAccess = PLUGINS) static void filesProbeContentType() throws IOException { Files.probeContentType(readFile()); } + @EntitlementTest(expectedAccess = PLUGINS) static void filesSetOwner() throws IOException { UserPrincipal owner = EntitledActions.getFileOwner(readWriteFile()); Files.setOwner(readWriteFile(), owner); // set to existing owner, just trying to execute the method diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java 
b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java index 9b8cae1b72d29..dfca49d122673 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java @@ -33,6 +33,9 @@ import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.HttpURLConnection; @@ -51,8 +54,10 @@ import java.net.URLStreamHandler; import java.net.spi.URLStreamHandlerProvider; import java.security.NoSuchAlgorithmException; +import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -61,6 +66,7 @@ import javax.net.ssl.SSLContext; import static java.util.Map.entry; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; import static org.elasticsearch.entitlement.qa.test.RestEntitlementsCheckAction.CheckAction.alwaysDenied; import static org.elasticsearch.entitlement.qa.test.RestEntitlementsCheckAction.CheckAction.deniedToPlugins; import static org.elasticsearch.entitlement.qa.test.RestEntitlementsCheckAction.CheckAction.forPlugins; @@ -88,143 +94,185 @@ static CheckAction alwaysDenied(CheckedRunnable action) { } } - private static final Map checkActions = Stream.>of( - entry("runtime_exit", deniedToPlugins(RestEntitlementsCheckAction::runtimeExit)), - entry("runtime_halt", deniedToPlugins(RestEntitlementsCheckAction::runtimeHalt)), - entry("system_exit", deniedToPlugins(RestEntitlementsCheckAction::systemExit)), - entry("create_classloader", forPlugins(RestEntitlementsCheckAction::createClassLoader)), - entry("processBuilder_start", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_start)), - entry("processBuilder_startPipeline", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_startPipeline)), - entry("set_https_connection_properties", forPlugins(RestEntitlementsCheckAction::setHttpsConnectionProperties)), - entry("set_default_ssl_socket_factory", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLSocketFactory)), - entry("set_default_hostname_verifier", alwaysDenied(RestEntitlementsCheckAction::setDefaultHostnameVerifier)), - entry("set_default_ssl_context", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLContext)), - entry("system_setIn", alwaysDenied(RestEntitlementsCheckAction::system$$setIn)), - entry("system_setOut", alwaysDenied(RestEntitlementsCheckAction::system$$setOut)), - entry("system_setErr", alwaysDenied(RestEntitlementsCheckAction::system$$setErr)), - entry("runtime_addShutdownHook", alwaysDenied(RestEntitlementsCheckAction::runtime$addShutdownHook)), - entry("runtime_removeShutdownHook", alwaysDenied(RestEntitlementsCheckAction::runtime$$removeShutdownHook)), - entry( - "thread_setDefaultUncaughtExceptionHandler", - alwaysDenied(RestEntitlementsCheckAction::thread$$setDefaultUncaughtExceptionHandler) + private static final Map checkActions = Stream.concat( + Stream.>of( + entry("runtime_exit", deniedToPlugins(RestEntitlementsCheckAction::runtimeExit)), + entry("runtime_halt", 
deniedToPlugins(RestEntitlementsCheckAction::runtimeHalt)), + entry("system_exit", deniedToPlugins(RestEntitlementsCheckAction::systemExit)), + entry("create_classloader", forPlugins(RestEntitlementsCheckAction::createClassLoader)), + entry("processBuilder_start", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_start)), + entry("processBuilder_startPipeline", deniedToPlugins(RestEntitlementsCheckAction::processBuilder_startPipeline)), + entry("set_https_connection_properties", forPlugins(RestEntitlementsCheckAction::setHttpsConnectionProperties)), + entry("set_default_ssl_socket_factory", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLSocketFactory)), + entry("set_default_hostname_verifier", alwaysDenied(RestEntitlementsCheckAction::setDefaultHostnameVerifier)), + entry("set_default_ssl_context", alwaysDenied(RestEntitlementsCheckAction::setDefaultSSLContext)), + entry("system_setIn", alwaysDenied(RestEntitlementsCheckAction::system$$setIn)), + entry("system_setOut", alwaysDenied(RestEntitlementsCheckAction::system$$setOut)), + entry("system_setErr", alwaysDenied(RestEntitlementsCheckAction::system$$setErr)), + entry("runtime_addShutdownHook", alwaysDenied(RestEntitlementsCheckAction::runtime$addShutdownHook)), + entry("runtime_removeShutdownHook", alwaysDenied(RestEntitlementsCheckAction::runtime$$removeShutdownHook)), + entry( + "thread_setDefaultUncaughtExceptionHandler", + alwaysDenied(RestEntitlementsCheckAction::thread$$setDefaultUncaughtExceptionHandler) + ), + entry("localeServiceProvider", alwaysDenied(RestEntitlementsCheckAction::localeServiceProvider$)), + entry("breakIteratorProvider", alwaysDenied(RestEntitlementsCheckAction::breakIteratorProvider$)), + entry("collatorProvider", alwaysDenied(RestEntitlementsCheckAction::collatorProvider$)), + entry("dateFormatProvider", alwaysDenied(RestEntitlementsCheckAction::dateFormatProvider$)), + entry("dateFormatSymbolsProvider", alwaysDenied(RestEntitlementsCheckAction::dateFormatSymbolsProvider$)), + entry("decimalFormatSymbolsProvider", alwaysDenied(RestEntitlementsCheckAction::decimalFormatSymbolsProvider$)), + entry("numberFormatProvider", alwaysDenied(RestEntitlementsCheckAction::numberFormatProvider$)), + entry("calendarDataProvider", alwaysDenied(RestEntitlementsCheckAction::calendarDataProvider$)), + entry("calendarNameProvider", alwaysDenied(RestEntitlementsCheckAction::calendarNameProvider$)), + entry("currencyNameProvider", alwaysDenied(RestEntitlementsCheckAction::currencyNameProvider$)), + entry("localeNameProvider", alwaysDenied(RestEntitlementsCheckAction::localeNameProvider$)), + entry("timeZoneNameProvider", alwaysDenied(RestEntitlementsCheckAction::timeZoneNameProvider$)), + entry("logManager", alwaysDenied(RestEntitlementsCheckAction::logManager$)), + + entry("locale_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultLocale)), + entry("locale_setDefaultForCategory", alwaysDenied(WritePropertiesCheckActions::setDefaultLocaleForCategory)), + entry("timeZone_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultTimeZone)), + + entry("system_setProperty", forPlugins(WritePropertiesCheckActions::setSystemProperty)), + entry("system_clearProperty", forPlugins(WritePropertiesCheckActions::clearSystemProperty)), + entry("system_setSystemProperties", alwaysDenied(WritePropertiesCheckActions::setSystemProperties)), + + // This group is a bit nasty: if entitlements don't prevent these, then networking is + // irreparably borked for the remainder of the test run. 
+ entry( + "datagramSocket_setDatagramSocketImplFactory", + alwaysDenied(RestEntitlementsCheckAction::datagramSocket$$setDatagramSocketImplFactory) + ), + entry("httpURLConnection_setFollowRedirects", alwaysDenied(RestEntitlementsCheckAction::httpURLConnection$$setFollowRedirects)), + entry("serverSocket_setSocketFactory", alwaysDenied(RestEntitlementsCheckAction::serverSocket$$setSocketFactory)), + entry("socket_setSocketImplFactory", alwaysDenied(RestEntitlementsCheckAction::socket$$setSocketImplFactory)), + entry("url_setURLStreamHandlerFactory", alwaysDenied(RestEntitlementsCheckAction::url$$setURLStreamHandlerFactory)), + entry("urlConnection_setFileNameMap", alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setFileNameMap)), + entry( + "urlConnection_setContentHandlerFactory", + alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setContentHandlerFactory) + ), + + entry("proxySelector_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultProxySelector)), + entry("responseCache_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultResponseCache)), + entry( + "createInetAddressResolverProvider", + new CheckAction(VersionSpecificNetworkChecks::createInetAddressResolverProvider, true, 18) + ), + entry("createURLStreamHandlerProvider", alwaysDenied(RestEntitlementsCheckAction::createURLStreamHandlerProvider)), + entry("createURLWithURLStreamHandler", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler)), + entry("createURLWithURLStreamHandler2", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler2)), + entry("datagram_socket_bind", forPlugins(RestEntitlementsCheckAction::bindDatagramSocket)), + entry("datagram_socket_connect", forPlugins(RestEntitlementsCheckAction::connectDatagramSocket)), + entry("datagram_socket_send", forPlugins(RestEntitlementsCheckAction::sendDatagramSocket)), + entry("datagram_socket_receive", forPlugins(RestEntitlementsCheckAction::receiveDatagramSocket)), + entry("datagram_socket_join_group", forPlugins(RestEntitlementsCheckAction::joinGroupDatagramSocket)), + entry("datagram_socket_leave_group", forPlugins(RestEntitlementsCheckAction::leaveGroupDatagramSocket)), + + entry("create_socket_with_proxy", forPlugins(NetworkAccessCheckActions::createSocketWithProxy)), + entry("socket_bind", forPlugins(NetworkAccessCheckActions::socketBind)), + entry("socket_connect", forPlugins(NetworkAccessCheckActions::socketConnect)), + entry("server_socket_bind", forPlugins(NetworkAccessCheckActions::serverSocketBind)), + entry("server_socket_accept", forPlugins(NetworkAccessCheckActions::serverSocketAccept)), + + entry("url_open_connection_proxy", forPlugins(NetworkAccessCheckActions::urlOpenConnectionWithProxy)), + entry("http_client_send", forPlugins(VersionSpecificNetworkChecks::httpClientSend)), + entry("http_client_send_async", forPlugins(VersionSpecificNetworkChecks::httpClientSendAsync)), + entry("create_ldap_cert_store", forPlugins(NetworkAccessCheckActions::createLDAPCertStore)), + + entry("server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::serverSocketChannelBind)), + entry("server_socket_channel_bind_backlog", forPlugins(NetworkAccessCheckActions::serverSocketChannelBindWithBacklog)), + entry("server_socket_channel_accept", forPlugins(NetworkAccessCheckActions::serverSocketChannelAccept)), + entry("asynchronous_server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBind)), + entry( + "asynchronous_server_socket_channel_bind_backlog", + 
forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBindWithBacklog) + ), + entry( + "asynchronous_server_socket_channel_accept", + forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAccept) + ), + entry( + "asynchronous_server_socket_channel_accept_with_handler", + forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAcceptWithHandler) + ), + entry("socket_channel_bind", forPlugins(NetworkAccessCheckActions::socketChannelBind)), + entry("socket_channel_connect", forPlugins(NetworkAccessCheckActions::socketChannelConnect)), + entry("asynchronous_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelBind)), + entry("asynchronous_socket_channel_connect", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnect)), + entry( + "asynchronous_socket_channel_connect_with_completion", + forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnectWithCompletion) + ), + entry("datagram_channel_bind", forPlugins(NetworkAccessCheckActions::datagramChannelBind)), + entry("datagram_channel_connect", forPlugins(NetworkAccessCheckActions::datagramChannelConnect)), + entry("datagram_channel_send", forPlugins(NetworkAccessCheckActions::datagramChannelSend)), + entry("datagram_channel_receive", forPlugins(NetworkAccessCheckActions::datagramChannelReceive)), + + entry("runtime_load", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoad)), + entry("runtime_load_library", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoadLibrary)), + entry("system_load", forPlugins(LoadNativeLibrariesCheckActions::systemLoad)), + entry("system_load_library", forPlugins(LoadNativeLibrariesCheckActions::systemLoadLibrary)), + entry("enable_native_access", new CheckAction(VersionSpecificNativeChecks::enableNativeAccess, false, 22)), + entry("address_target_layout", new CheckAction(VersionSpecificNativeChecks::addressLayoutWithTargetLayout, false, 22)), + entry("donwncall_handle", new CheckAction(VersionSpecificNativeChecks::linkerDowncallHandle, false, 22)), + entry( + "donwncall_handle_with_address", + new CheckAction(VersionSpecificNativeChecks::linkerDowncallHandleWithAddress, false, 22) + ), + entry("upcall_stub", new CheckAction(VersionSpecificNativeChecks::linkerUpcallStub, false, 22)), + entry("reinterpret", new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpret, false, 22)), + entry("reinterpret_cleanup", new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpretWithCleanup, false, 22)), + entry( + "reinterpret_size_cleanup", + new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpretWithSizeAndCleanup, false, 22) + ), + entry("symbol_lookup_name", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithName, false, 22)), + entry("symbol_lookup_path", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithPath, false, 22)) ), - entry("localeServiceProvider", alwaysDenied(RestEntitlementsCheckAction::localeServiceProvider$)), - entry("breakIteratorProvider", alwaysDenied(RestEntitlementsCheckAction::breakIteratorProvider$)), - entry("collatorProvider", alwaysDenied(RestEntitlementsCheckAction::collatorProvider$)), - entry("dateFormatProvider", alwaysDenied(RestEntitlementsCheckAction::dateFormatProvider$)), - entry("dateFormatSymbolsProvider", alwaysDenied(RestEntitlementsCheckAction::dateFormatSymbolsProvider$)), - entry("decimalFormatSymbolsProvider", alwaysDenied(RestEntitlementsCheckAction::decimalFormatSymbolsProvider$)), - entry("numberFormatProvider", 
alwaysDenied(RestEntitlementsCheckAction::numberFormatProvider$)), - entry("calendarDataProvider", alwaysDenied(RestEntitlementsCheckAction::calendarDataProvider$)), - entry("calendarNameProvider", alwaysDenied(RestEntitlementsCheckAction::calendarNameProvider$)), - entry("currencyNameProvider", alwaysDenied(RestEntitlementsCheckAction::currencyNameProvider$)), - entry("localeNameProvider", alwaysDenied(RestEntitlementsCheckAction::localeNameProvider$)), - entry("timeZoneNameProvider", alwaysDenied(RestEntitlementsCheckAction::timeZoneNameProvider$)), - entry("logManager", alwaysDenied(RestEntitlementsCheckAction::logManager$)), - - entry("locale_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultLocale)), - entry("locale_setDefaultForCategory", alwaysDenied(WritePropertiesCheckActions::setDefaultLocaleForCategory)), - entry("timeZone_setDefault", alwaysDenied(WritePropertiesCheckActions::setDefaultTimeZone)), - - entry("system_setProperty", forPlugins(WritePropertiesCheckActions::setSystemProperty)), - entry("system_clearProperty", forPlugins(WritePropertiesCheckActions::clearSystemProperty)), - entry("system_setSystemProperties", alwaysDenied(WritePropertiesCheckActions::setSystemProperties)), - - // This group is a bit nasty: if entitlements don't prevent these, then networking is - // irreparably borked for the remainder of the test run. - entry( - "datagramSocket_setDatagramSocketImplFactory", - alwaysDenied(RestEntitlementsCheckAction::datagramSocket$$setDatagramSocketImplFactory) - ), - entry("httpURLConnection_setFollowRedirects", alwaysDenied(RestEntitlementsCheckAction::httpURLConnection$$setFollowRedirects)), - entry("serverSocket_setSocketFactory", alwaysDenied(RestEntitlementsCheckAction::serverSocket$$setSocketFactory)), - entry("socket_setSocketImplFactory", alwaysDenied(RestEntitlementsCheckAction::socket$$setSocketImplFactory)), - entry("url_setURLStreamHandlerFactory", alwaysDenied(RestEntitlementsCheckAction::url$$setURLStreamHandlerFactory)), - entry("urlConnection_setFileNameMap", alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setFileNameMap)), - entry("urlConnection_setContentHandlerFactory", alwaysDenied(RestEntitlementsCheckAction::urlConnection$$setContentHandlerFactory)), - - entry("proxySelector_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultProxySelector)), - entry("responseCache_setDefault", alwaysDenied(RestEntitlementsCheckAction::setDefaultResponseCache)), - entry( - "createInetAddressResolverProvider", - new CheckAction(VersionSpecificNetworkChecks::createInetAddressResolverProvider, true, 18) - ), - entry("createURLStreamHandlerProvider", alwaysDenied(RestEntitlementsCheckAction::createURLStreamHandlerProvider)), - entry("createURLWithURLStreamHandler", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler)), - entry("createURLWithURLStreamHandler2", alwaysDenied(RestEntitlementsCheckAction::createURLWithURLStreamHandler2)), - entry("datagram_socket_bind", forPlugins(RestEntitlementsCheckAction::bindDatagramSocket)), - entry("datagram_socket_connect", forPlugins(RestEntitlementsCheckAction::connectDatagramSocket)), - entry("datagram_socket_send", forPlugins(RestEntitlementsCheckAction::sendDatagramSocket)), - entry("datagram_socket_receive", forPlugins(RestEntitlementsCheckAction::receiveDatagramSocket)), - entry("datagram_socket_join_group", forPlugins(RestEntitlementsCheckAction::joinGroupDatagramSocket)), - entry("datagram_socket_leave_group", 
forPlugins(RestEntitlementsCheckAction::leaveGroupDatagramSocket)), - - entry("create_socket_with_proxy", forPlugins(NetworkAccessCheckActions::createSocketWithProxy)), - entry("socket_bind", forPlugins(NetworkAccessCheckActions::socketBind)), - entry("socket_connect", forPlugins(NetworkAccessCheckActions::socketConnect)), - entry("server_socket_bind", forPlugins(NetworkAccessCheckActions::serverSocketBind)), - entry("server_socket_accept", forPlugins(NetworkAccessCheckActions::serverSocketAccept)), - - entry("url_open_connection_proxy", forPlugins(NetworkAccessCheckActions::urlOpenConnectionWithProxy)), - entry("http_client_send", forPlugins(VersionSpecificNetworkChecks::httpClientSend)), - entry("http_client_send_async", forPlugins(VersionSpecificNetworkChecks::httpClientSendAsync)), - entry("create_ldap_cert_store", forPlugins(NetworkAccessCheckActions::createLDAPCertStore)), - - entry("server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::serverSocketChannelBind)), - entry("server_socket_channel_bind_backlog", forPlugins(NetworkAccessCheckActions::serverSocketChannelBindWithBacklog)), - entry("server_socket_channel_accept", forPlugins(NetworkAccessCheckActions::serverSocketChannelAccept)), - entry("asynchronous_server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBind)), - entry( - "asynchronous_server_socket_channel_bind_backlog", - forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBindWithBacklog) - ), - entry("asynchronous_server_socket_channel_accept", forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAccept)), - entry( - "asynchronous_server_socket_channel_accept_with_handler", - forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAcceptWithHandler) - ), - entry("socket_channel_bind", forPlugins(NetworkAccessCheckActions::socketChannelBind)), - entry("socket_channel_connect", forPlugins(NetworkAccessCheckActions::socketChannelConnect)), - entry("asynchronous_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelBind)), - entry("asynchronous_socket_channel_connect", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnect)), - entry( - "asynchronous_socket_channel_connect_with_completion", - forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnectWithCompletion) - ), - entry("datagram_channel_bind", forPlugins(NetworkAccessCheckActions::datagramChannelBind)), - entry("datagram_channel_connect", forPlugins(NetworkAccessCheckActions::datagramChannelConnect)), - entry("datagram_channel_send", forPlugins(NetworkAccessCheckActions::datagramChannelSend)), - entry("datagram_channel_receive", forPlugins(NetworkAccessCheckActions::datagramChannelReceive)), - - entry("runtime_load", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoad)), - entry("runtime_load_library", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoadLibrary)), - entry("system_load", forPlugins(LoadNativeLibrariesCheckActions::systemLoad)), - entry("system_load_library", forPlugins(LoadNativeLibrariesCheckActions::systemLoadLibrary)), - entry("enable_native_access", new CheckAction(VersionSpecificNativeChecks::enableNativeAccess, false, 22)), - entry("address_target_layout", new CheckAction(VersionSpecificNativeChecks::addressLayoutWithTargetLayout, false, 22)), - entry("donwncall_handle", new CheckAction(VersionSpecificNativeChecks::linkerDowncallHandle, false, 22)), - entry("donwncall_handle_with_address", new 
CheckAction(VersionSpecificNativeChecks::linkerDowncallHandleWithAddress, false, 22)), - entry("upcall_stub", new CheckAction(VersionSpecificNativeChecks::linkerUpcallStub, false, 22)), - entry("reinterpret", new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpret, false, 22)), - entry("reinterpret_cleanup", new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpretWithCleanup, false, 22)), - entry( - "reinterpret_size_cleanup", - new CheckAction(VersionSpecificNativeChecks::memorySegmentReinterpretWithSizeAndCleanup, false, 22) - ), - entry("symbol_lookup_name", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithName, false, 22)), - entry("symbol_lookup_path", new CheckAction(VersionSpecificNativeChecks::symbolLookupWithPath, false, 22)), - entry("create_scanner", forPlugins(FileCheckActions::createScannerFile)), - entry("create_scanner_with_charset", forPlugins(FileCheckActions::createScannerFileWithCharset)), - entry("create_scanner_with_charset_name", forPlugins(FileCheckActions::createScannerFileWithCharsetName)), - entry("create_file_output_stream_string", forPlugins(FileCheckActions::createFileOutputStreamString)), - entry("create_file_output_stream_string_with_append", forPlugins(FileCheckActions::createFileOutputStreamStringWithAppend)), - entry("create_file_output_stream_file", forPlugins(FileCheckActions::createFileOutputStreamFile)), - entry("create_file_output_stream_file_with_append", forPlugins(FileCheckActions::createFileOutputStreamFileWithAppend)), - entry("files_probe_content_type", forPlugins(FileCheckActions::filesProbeContentType)), - entry("files_set_owner", forPlugins(FileCheckActions::filesSetOwner)) + getTestEntries(FileCheckActions.class) ) .filter(entry -> entry.getValue().fromJavaVersion() == null || Runtime.version().feature() >= entry.getValue().fromJavaVersion()) - .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); + .collect(Collectors.toUnmodifiableMap(Entry::getKey, Entry::getValue)); + + @SuppressForbidden(reason = "Need package private methods so we don't have to make them all public") + private static Method[] getDeclaredMethods(Class clazz) { + return clazz.getDeclaredMethods(); + } + + private static Stream> getTestEntries(Class actionsClass) { + List> entries = new ArrayList<>(); + for (var method : getDeclaredMethods(actionsClass)) { + var testAnnotation = method.getAnnotation(EntitlementTest.class); + if (testAnnotation == null) { + continue; + } + if (Modifier.isStatic(method.getModifiers()) == false) { + throw new AssertionError("Entitlement test method [" + method + "] must be static"); + } + if (method.getParameterTypes().length != 0) { + throw new AssertionError("Entitlement test method [" + method + "] must not have parameters"); + } + + CheckedRunnable runnable = () -> { + try { + method.invoke(null); + } catch (IllegalAccessException e) { + throw new AssertionError(e); + } catch (InvocationTargetException e) { + if (e.getCause() instanceof Exception exc) { + throw exc; + } else { + throw new AssertionError(e); + } + } + }; + boolean deniedToPlugins = testAnnotation.expectedAccess() == PLUGINS; + Integer fromJavaVersion = testAnnotation.fromJavaVersion() == -1 ? 
null : testAnnotation.fromJavaVersion(); + entries.add(entry(method.getName(), new CheckAction(runnable, deniedToPlugins, fromJavaVersion))); + } + return entries.stream(); + } private static void createURLStreamHandlerProvider() { var x = new URLStreamHandlerProvider() { @@ -470,7 +518,7 @@ public static Set getCheckActionsAllowedInPlugins() { return checkActions.entrySet() .stream() .filter(kv -> kv.getValue().isAlwaysDeniedToPlugins() == false) - .map(Map.Entry::getKey) + .map(Entry::getKey) .collect(Collectors.toSet()); } From 82b9b56039915133dca8eaa828bb50cd3a8ac890 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 31 Jan 2025 07:11:21 -0800 Subject: [PATCH 327/383] Update buildkite pipeline --- .buildkite/pipelines/intake.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index a207eeafaaae6..dbe6422356e10 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -97,7 +97,7 @@ steps: - trigger: elasticsearch-dra-workflow label: Trigger DRA snapshot workflow async: true - branches: "main 8.* 7.17" + branches: "main 9.* 8.* 7.17" build: branch: "$BUILDKITE_BRANCH" commit: "$BUILDKITE_COMMIT" From 3fafb5f1610cb6702a9643d2df816c1db26e61af Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Fri, 31 Jan 2025 10:20:04 -0500 Subject: [PATCH 328/383] Improve resolve/cluster yaml test (#121315) Updated indices.resolve_cluster.json to match new resolve/cluster spec. Added new test for the no-index-expression endpoint. Adjusted syntax in 10_basic_resolve_cluster.yml so that the elasticsearch-specification validation tests pass. --- .../api/indices.resolve_cluster.json | 79 ++++++++++--------- .../10_basic_resolve_cluster.yml | 35 +++++--- 2 files changed, 63 insertions(+), 51 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_cluster.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_cluster.json index 8af2dde4f8032..c41233664de0e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_cluster.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.resolve_cluster.json @@ -1,55 +1,56 @@ { - "indices.resolve_cluster":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-resolve-cluster-api.html", - "description":"Resolves the specified index expressions to return information about each cluster, including the local cluster, if included." + "indices.resolve_cluster": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-resolve-cluster-api.html", + "description": "Resolves the specified index expressions to return information about each cluster. If no index expression is provided, this endpoint will return information about all the remote clusters that are configured on the local cluster."
}, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/json"] + "stability": "stable", + "visibility": "public", + "headers": { + "accept": ["application/json"] }, - "url":{ - "paths":[ + "url": { + "paths": [ { - "path":"/_resolve/cluster/{name}", - "methods":[ - "GET" - ], - "parts":{ - "name":{ - "type":"list", - "description":"A comma-separated list of cluster:index names or wildcard expressions" + "path": "/_resolve/cluster", + "methods": ["GET"] + }, + { + "path": "/_resolve/cluster/{name}", + "methods": ["GET"], + "parts": { + "name": { + "type": "list", + "description": "A comma-separated list of cluster:index names or wildcard expressions" } } } ] }, - "params":{ - "ignore_unavailable":{ - "type":"boolean", - "description":"Whether specified concrete indices should be ignored when unavailable (missing or closed)" + "params": { + "ignore_unavailable": { + "type": "boolean", + "description": "Whether specified concrete indices should be ignored when unavailable (missing or closed). Only allowed when providing an index expression." + }, + "ignore_throttled": { + "type": "boolean", + "description": "Whether specified concrete, expanded or aliased indices should be ignored when throttled. Only allowed when providing an index expression." }, - "ignore_throttled":{ - "type":"boolean", - "description":"Whether specified concrete, expanded or aliased indices should be ignored when throttled" + "allow_no_indices": { + "type": "boolean", + "description": "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified). Only allowed when providing an index expression." }, - "allow_no_indices":{ - "type":"boolean", - "description":"Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" + "expand_wildcards": { + "type": "enum", + "options": ["open", "closed", "hidden", "none", "all"], + "default": "open", + "description": "Whether wildcard expressions should get expanded to open or closed indices (default: open). Only allowed when providing an index expression." 
}, - "expand_wildcards":{ - "type":"enum", - "options":[ - "open", - "closed", - "hidden", - "none", - "all" - ], - "default":"open", - "description":"Whether wildcard expressions should get expanded to open or closed indices (default: open)" + "timeout": { + "type": "time", + "description": "The maximum time to wait for remote clusters to respond" } } } } + diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.resolve_cluster/10_basic_resolve_cluster.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.resolve_cluster/10_basic_resolve_cluster.yml index 46bd0b8099e4a..0d1d93513aa68 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.resolve_cluster/10_basic_resolve_cluster.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.resolve_cluster/10_basic_resolve_cluster.yml @@ -39,7 +39,7 @@ setup: - do: indices.resolve_cluster: name: '*' - expand_wildcards: [closed] + expand_wildcards: closed - match: {(local).connected: true} - match: {(local).skip_unavailable: false} @@ -65,7 +65,7 @@ setup: - do: indices.resolve_cluster: name: 'index2*' - expand_wildcards: [open,closed] + expand_wildcards: open,closed - match: {(local).connected: true} - match: {(local).skip_unavailable: false} @@ -75,7 +75,7 @@ setup: - do: indices.resolve_cluster: name: 'index2*' - expand_wildcards: [closed] + expand_wildcards: closed - match: {(local).connected: true} - match: {(local).skip_unavailable: false} @@ -115,7 +115,7 @@ setup: - do: indices.resolve_cluster: name: 'my_alias2,doesnotexist*' - expand_wildcards: [all] + expand_wildcards: all - match: {(local).connected: true} - match: {(local).skip_unavailable: false} @@ -141,10 +141,10 @@ setup: - do: indices.resolve_cluster: name: '*' - expand_wildcards: [all] - ignore_unavailable: [true] - ignore_throttled: [true] - allow_no_indices: [true] + expand_wildcards: all + ignore_unavailable: true + ignore_throttled: true + allow_no_indices: true allowed_warnings: - "[ignore_throttled] parameter is deprecated because frozen indices have been deprecated. Consider cold or frozen tiers in place of frozen indices." @@ -157,10 +157,10 @@ setup: - do: indices.resolve_cluster: name: '*' - expand_wildcards: [open] - ignore_unavailable: [false] - ignore_throttled: [false] - allow_no_indices: [false] + expand_wildcards: open + ignore_unavailable: false + ignore_throttled: false + allow_no_indices: false allowed_warnings: - "[ignore_throttled] parameter is deprecated because frozen indices have been deprecated. Consider cold or frozen tiers in place of frozen indices." @@ -170,3 +170,14 @@ setup: - is_false: (local).error # should not be present - exists: (local).version.number +--- +"Resolve cluster with no index expression": + - requires: + cluster_features: ["gte_v8.18.0"] + reason: "resolve cluster with no index expression introduced in 8.18" + + - do: + indices.resolve_cluster: + timeout: 400s + + - is_false: (local).error # should not be present - body should be empty since no remotes configured From f205061e91f72473a8eddb4a10e21cf37fb76b23 Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Fri, 31 Jan 2025 10:29:53 -0500 Subject: [PATCH 329/383] Reduce duplicate and dead entitlements code (#121409) * Refactor: remove duplicate canWrite methods. This serves as a good example of how Path and File handling could be specialized in the future, but as long as they are identical, the duplication causes more harm than good. 
* Refactor: just one neverEntitled. The original motivation was to avoid allocating a lambda object on each call, but since that's a highly optimized operation in the JVM, it's unlikely to make a difference in practice, and this smacks of premature optimization. We're pretty liberal about lambdas elsewhere, so let's not sweat it here until we have some evidence that it matters. * Remove dead code --- .../runtime/policy/FileAccessTree.java | 12 ---- .../runtime/policy/PolicyManager.java | 59 +------------------ .../runtime/policy/PolicyManagerTests.java | 8 --- 3 files changed, 3 insertions(+), 76 deletions(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java index c16f776176d88..d574609d13218 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java @@ -9,10 +9,8 @@ package org.elasticsearch.entitlement.runtime.policy; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.runtime.policy.entitlements.FileEntitlement; -import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; @@ -51,20 +49,10 @@ boolean canRead(Path path) { return checkPath(normalize(path), readPaths); } - @SuppressForbidden(reason = "Explicitly checking File apis") - boolean canRead(File file) { - return checkPath(normalize(file.toPath()), readPaths); - } - boolean canWrite(Path path) { return checkPath(normalize(path), writePaths); } - @SuppressForbidden(reason = "Explicitly checking File apis") - boolean canWrite(File file) { - return checkPath(normalize(file.toPath()), writePaths); - } - private static String normalize(Path path) { return path.toAbsolutePath().normalize().toString(); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index 2243d94911ca4..092f5ce8455cb 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -169,23 +169,7 @@ private static void validateEntitlementsPerModule(String sourceName, String modu } public void checkStartProcess(Class callerClass) { - neverEntitled(callerClass, "start process"); - } - - private void neverEntitled(Class callerClass, String operationDescription) { - var requestingClass = requestingClass(callerClass); - if (isTriviallyAllowed(requestingClass)) { - return; - } - - throw new NotEntitledException( - Strings.format( - "Not entitled: caller [%s], module [%s], operation [%s]", - callerClass, - requestingClass.getModule() == null ? "" : requestingClass.getModule().getName(), - operationDescription - ) - ); + neverEntitled(callerClass, () -> "start process"); } /** @@ -241,31 +225,9 @@ public void checkChangeNetworkHandling(Class callerClass) { checkChangeJVMGlobalState(callerClass); } - /** - * Check for operations that can access sensitive network information, e.g. 
secrets, tokens or SSL sessions - */ - public void checkReadSensitiveNetworkInformation(Class callerClass) { - neverEntitled(callerClass, "access sensitive network information"); - } - @SuppressForbidden(reason = "Explicitly checking File apis") public void checkFileRead(Class callerClass, File file) { - var requestingClass = requestingClass(callerClass); - if (isTriviallyAllowed(requestingClass)) { - return; - } - - ModuleEntitlements entitlements = getEntitlements(requestingClass); - if (entitlements.fileAccess().canRead(file) == false) { - throw new NotEntitledException( - Strings.format( - "Not entitled: caller [%s], module [%s], entitlement [file], operation [read], path [%s]", - callerClass, - requestingClass.getModule(), - file - ) - ); - } + checkFileRead(callerClass, file.toPath()); } public void checkFileRead(Class callerClass, Path path) { @@ -289,22 +251,7 @@ public void checkFileRead(Class callerClass, Path path) { @SuppressForbidden(reason = "Explicitly checking File apis") public void checkFileWrite(Class callerClass, File file) { - var requestingClass = requestingClass(callerClass); - if (isTriviallyAllowed(requestingClass)) { - return; - } - - ModuleEntitlements entitlements = getEntitlements(requestingClass); - if (entitlements.fileAccess().canWrite(file) == false) { - throw new NotEntitledException( - Strings.format( - "Not entitled: caller [%s], module [%s], entitlement [file], operation [write], path [%s]", - callerClass, - requestingClass.getModule(), - file - ) - ); - } + checkFileWrite(callerClass, file.toPath()); } public void checkFileWrite(Class callerClass, Path path) { diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index 3e4896fd714e4..24be0f6f43a4c 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -238,7 +238,6 @@ public void testRequestingClassFastPath() throws IOException, ClassNotFoundExcep } public void testRequestingModuleWithStackWalk() throws IOException, ClassNotFoundException { - var agentsClass = new TestAgent(); var entitlementsClass = makeClassInItsOwnModule(); // A class in the entitlements library itself var requestingClass = makeClassInItsOwnModule(); // This guy is always the right answer var instrumentedClass = makeClassInItsOwnModule(); // The class that called the check method @@ -365,13 +364,6 @@ private static Class makeClassInItsOwnModule() throws IOException, ClassNotFo return layer.findLoader("org.example.plugin").loadClass("q.B"); } - private static Class makeClassInItsOwnUnnamedModule() throws IOException, ClassNotFoundException { - final Path home = createTempDir(); - Path jar = createMockPluginJar(home); - var layer = createLayerForJar(jar, "org.example.plugin"); - return layer.findLoader("org.example.plugin").loadClass("q.B"); - } - private static PolicyManager policyManager(String agentsPackageName, Module entitlementsModule) { return new PolicyManager(createEmptyTestServerPolicy(), List.of(), Map.of(), c -> "test", agentsPackageName, entitlementsModule); } From 0cf00091fddda542d4f38f7cc0d95cfeea6ab26e Mon Sep 17 00:00:00 2001 From: Kathleen DeRusso Date: Fri, 31 Jan 2025 10:36:42 -0500 Subject: [PATCH 330/383] Fix bug where intercepted semantic knn queries did not respect filters (#121410) --- 
.../xpack/inference/InferenceFeatures.java | 4 +- ...anticKnnVectorQueryRewriteInterceptor.java | 10 ++++- .../test/inference/47_semantic_text_knn.yml | 40 +++++++++++++++++++ 3 files changed, 51 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 8c2be17777cca..d63e3f773b14b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -15,6 +15,7 @@ import java.util.Set; +import static org.elasticsearch.xpack.inference.queries.SemanticKnnVectorQueryRewriteInterceptor.SEMANTIC_KNN_FILTER_FIX; import static org.elasticsearch.xpack.inference.queries.SemanticKnnVectorQueryRewriteInterceptor.SEMANTIC_KNN_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED; import static org.elasticsearch.xpack.inference.queries.SemanticMatchQueryRewriteInterceptor.SEMANTIC_MATCH_QUERY_REWRITE_INTERCEPTION_SUPPORTED; import static org.elasticsearch.xpack.inference.queries.SemanticSparseVectorQueryRewriteInterceptor.SEMANTIC_SPARSE_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED; @@ -42,7 +43,8 @@ public Set getTestFeatures() { SEMANTIC_KNN_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED, TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_ALIAS_HANDLING_FIX, SemanticInferenceMetadataFieldsMapper.INFERENCE_METADATA_FIELDS_ENABLED_BY_DEFAULT, - SEMANTIC_TEXT_HIGHLIGHTER_DEFAULT + SEMANTIC_TEXT_HIGHLIGHTER_DEFAULT, + SEMANTIC_KNN_FILTER_FIX ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java index 57805d5277ffc..9e513a1ed9226 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticKnnVectorQueryRewriteInterceptor.java @@ -28,6 +28,7 @@ public class SemanticKnnVectorQueryRewriteInterceptor extends SemanticQueryRewri public static final NodeFeature SEMANTIC_KNN_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED = new NodeFeature( "search.semantic_knn_vector_query_rewrite_interception_supported" ); + public static final NodeFeature SEMANTIC_KNN_FILTER_FIX = new NodeFeature("search.semantic_knn_filter_fix"); public SemanticKnnVectorQueryRewriteInterceptor() {} @@ -147,6 +148,7 @@ private KnnVectorQueryBuilder addIndexFilterToKnnVectorQuery(Collection ); } + copy.addFilterQueries(original.filterQueries()); copy.addFilterQuery(new TermsQueryBuilder(IndexFieldMapper.NAME, indices)); return copy; } @@ -165,8 +167,9 @@ private KnnVectorQueryBuilder buildNewKnnVectorQuery( KnnVectorQueryBuilder original, QueryVectorBuilder queryVectorBuilder ) { + KnnVectorQueryBuilder newQueryBuilder; if (original.queryVectorBuilder() != null) { - return new KnnVectorQueryBuilder( + newQueryBuilder = new KnnVectorQueryBuilder( fieldName, queryVectorBuilder, original.k(), @@ -174,7 +177,7 @@ private KnnVectorQueryBuilder buildNewKnnVectorQuery( original.getVectorSimilarity() ); } else { - return new KnnVectorQueryBuilder( + newQueryBuilder = new KnnVectorQueryBuilder( fieldName, original.queryVector(), original.k(), @@ -183,6 
+186,9 @@ private KnnVectorQueryBuilder buildNewKnnVectorQuery( original.getVectorSimilarity() ); } + + newQueryBuilder.addFilterQueries(original.filterQueries()); + return newQueryBuilder; } @Override diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml index dec4e127e501c..64ecb0f2d882c 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/47_semantic_text_knn.yml @@ -43,6 +43,8 @@ setup: body: mappings: properties: + keyword_field: + type: keyword inference_field: type: semantic_text inference_id: dense-inference-id @@ -53,6 +55,8 @@ setup: body: mappings: properties: + keyword_field: + type: keyword inference_field: type: semantic_text inference_id: dense-inference-id-2 @@ -63,6 +67,8 @@ setup: body: mappings: properties: + keyword_field: + type: keyword inference_field: type: dense_vector dims: 10 @@ -74,6 +80,8 @@ setup: body: mappings: properties: + keyword_field: + type: keyword inference_field: type: dense_vector dims: 3 @@ -84,6 +92,7 @@ setup: index: test-semantic-text-index id: doc_1 body: + keyword_field: "foo" inference_field: [ "inference test", "another inference test" ] refresh: true @@ -92,6 +101,7 @@ setup: index: test-semantic-text-index-2 id: doc_2 body: + keyword_field: "bar" inference_field: [ "inference test", "another inference test" ] refresh: true @@ -100,6 +110,7 @@ setup: index: test-dense-vector-index id: doc_3 body: + keyword_field: "baz" inference_field: [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ] refresh: true @@ -108,6 +119,7 @@ setup: index: test-incompatible-dense-vector-index id: doc_4 body: + keyword_field: "qux" inference_field: [ 1, 2, 3 ] refresh: true @@ -311,6 +323,34 @@ setup: - match: { hits.total.value: 2 } +--- +"knn query respects filters": + - requires: + cluster_features: "search.semantic_knn_filter_fix" + reason: filters fixed in 8.18.0 + + - do: + search: + index: + - test-semantic-text-index + - test-semantic-text-index-2 + body: + query: + knn: + field: inference_field + k: 10 + num_candidates: 100 + query_vector_builder: + text_embedding: + model_text: test + filter: + term: + keyword_field: "foo" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + + --- "knn query against multiple semantic_text fields with multiple inference IDs specified in semantic_text fields with smaller k returns k for each index": From 0c787bd8f01aceda6299df8eb827f3ef787665b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Fern=C3=A1ndez=20Casta=C3=B1o?= Date: Fri, 31 Jan 2025 16:50:55 +0100 Subject: [PATCH 331/383] Rename IndexShardRoutingTable unpromotable related methods (#121155) --- .../unpromotable/TransportBroadcastUnpromotableAction.java | 2 +- .../action/support/replication/PostWriteRefresh.java | 2 +- .../cluster/routing/IndexShardRoutingTable.java | 4 ++-- .../TransportBroadcastUnpromotableActionTests.java | 2 +- .../action/support/replication/PostWriteRefreshTests.java | 6 +++--- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableAction.java index 1255dbdf7419d..c44a0118111f3 
100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableAction.java @@ -76,7 +76,7 @@ protected TransportBroadcastUnpromotableAction( @Override protected void doExecute(Task task, Request request, ActionListener listener) { - final var unpromotableShards = request.indexShardRoutingTable.unpromotableShards(); + final var unpromotableShards = request.indexShardRoutingTable.assignedUnpromotableShards(); final var responses = new ArrayList(unpromotableShards.size()); try (var listeners = new RefCountingListener(listener.map(v -> combineUnpromotableShardResponses(responses)))) { diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java index 997d859ec35a2..cefb27376f9ea 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java @@ -65,7 +65,7 @@ public void onFailure(Exception e) { } }); case IMMEDIATE -> immediate(indexShard, listener.delegateFailureAndWrap((l, r) -> { - if (indexShard.getReplicationGroup().getRoutingTable().allUnpromotableShards().size() > 0) { + if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0) { sendUnpromotableRequests(indexShard, r.generation(), true, l, postWriteRefreshTimeout); } else { l.onResponse(true); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index 74c2c1d14b77c..14a512a206775 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -193,7 +193,7 @@ public List assignedShards() { * * @return a {@link List} of shards */ - public List unpromotableShards() { + public List assignedUnpromotableShards() { return this.assignedUnpromotableShards; } @@ -202,7 +202,7 @@ public List unpromotableShards() { * * @return a {@link List} of shards */ - public List allUnpromotableShards() { + public List unpromotableShards() { return this.unpromotableShards; } diff --git a/server/src/test/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableActionTests.java b/server/src/test/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableActionTests.java index ed12a3fda6eec..f5152dfbe7a5e 100644 --- a/server/src/test/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/broadcast/unpromotable/TransportBroadcastUnpromotableActionTests.java @@ -333,7 +333,7 @@ public void testInvalidNodes() throws Exception { // We were able to mark shards as stale, so the request finishes successfully assertThat(safeAwait(broadcastUnpromotableRequest(wrongRoutingTable, true)), equalTo(ActionResponse.Empty.INSTANCE)); - for (var shardRouting : wrongRoutingTable.unpromotableShards()) { + for (var shardRouting : wrongRoutingTable.assignedUnpromotableShards()) { Mockito.verify(shardStateAction) .remoteShardFailed( eq(shardRouting.shardId()), diff --git 
a/server/src/test/java/org/elasticsearch/action/support/replication/PostWriteRefreshTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/PostWriteRefreshTests.java index 9897fe9a42547..4337d4c3d9e99 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/PostWriteRefreshTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/PostWriteRefreshTests.java @@ -162,7 +162,7 @@ public void testPrimaryWithUnpromotables() throws IOException { new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "message"), ShardRouting.Role.SEARCH_ONLY ); - when(routingTable.allUnpromotableShards()).thenReturn(List.of(shardRouting)); + when(routingTable.unpromotableShards()).thenReturn(List.of(shardRouting)); when(routingTable.shardId()).thenReturn(shardId); WriteRequest.RefreshPolicy policy = randomFrom(WriteRequest.RefreshPolicy.IMMEDIATE, WriteRequest.RefreshPolicy.WAIT_UNTIL); postWriteRefresh.refreshShard(policy, primary, result.getTranslogLocation(), f, postWriteRefreshTimeout); @@ -238,9 +238,9 @@ public void testWaitForWithNullLocationCompletedImmediately() throws IOException ); // Randomly test scenarios with and without unpromotables if (randomBoolean()) { - when(routingTable.allUnpromotableShards()).thenReturn(Collections.emptyList()); + when(routingTable.unpromotableShards()).thenReturn(Collections.emptyList()); } else { - when(routingTable.allUnpromotableShards()).thenReturn(List.of(shardRouting)); + when(routingTable.unpromotableShards()).thenReturn(List.of(shardRouting)); } WriteRequest.RefreshPolicy policy = WriteRequest.RefreshPolicy.WAIT_UNTIL; postWriteRefresh.refreshShard(policy, primary, null, f, postWriteRefreshTimeout); From a4455d42e1f32c5de550f29055c9b22cf9f56e03 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 31 Jan 2025 16:55:14 +0100 Subject: [PATCH 332/383] Remove AwaitsFix for #99929 (#118147) This has long been fixed, but we forgot to remove the AwaitsFix.
closes #99929 --- .../java/org/elasticsearch/search/SearchCancellationIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index 5f8bc57dcbe09..0cc1c89b36d19 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -238,7 +238,6 @@ public void testCancelMultiSearch() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99929") public void testCancelFailedSearchWhenPartialResultDisallowed() throws Exception { // Have at least two nodes so that we have parallel execution of two request guaranteed even if max concurrent requests per node // are limited to 1 From 2993998b3277a7fe59aa8d6d9cae9c8e80d8fd7d Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Fri, 31 Jan 2025 17:18:51 +0100 Subject: [PATCH 333/383] [Inference API] Remove second calculator instance as component and update tests (#121284) --- .../xpack/inference/InferencePlugin.java | 1 - ...viceNodeLocalRateLimitCalculatorTests.java | 37 +++++++++++++++---- 2 files changed, 30 insertions(+), 8 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 6f302f944c005..23df62caab430 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -331,7 +331,6 @@ public Collection createComponents(PluginServices services) { // Add binding for interface -> implementation components.add(new PluginComponentBinding<>(InferenceServiceRateLimitCalculator.class, calculator)); - components.add(calculator); return components; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java index 55026afd48e6c..8e25931d9a8e3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/InferenceServiceNodeLocalRateLimitCalculatorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.services.SenderService; @@ -23,6 +24,7 @@ import static org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator.DEFAULT_MAX_NODES_PER_GROUPING; import static org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculator.SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 0) public 
class InferenceServiceNodeLocalRateLimitCalculatorTests extends ESIntegTestCase { @@ -39,7 +41,7 @@ public void testInitialClusterGrouping_Correct() throws Exception { var nodeNames = internalCluster().startNodes(numNodes); ensureStableCluster(numNodes); - var firstCalculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + var firstCalculator = getCalculatorInstance(internalCluster(), nodeNames.getFirst()); waitForRateLimitingAssignments(firstCalculator); RateLimitAssignment firstAssignment = firstCalculator.getRateLimitAssignment( @@ -49,7 +51,7 @@ public void testInitialClusterGrouping_Correct() throws Exception { // Verify that all other nodes land on the same assignment for (String nodeName : nodeNames.subList(1, nodeNames.size())) { - var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeName); + var calculator = getCalculatorInstance(internalCluster(), nodeName); waitForRateLimitingAssignments(calculator); var currentAssignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, TaskType.SPARSE_EMBEDDING); assertEquals(firstAssignment, currentAssignment); @@ -75,7 +77,7 @@ public void testNumberOfNodesPerGroup_Decreases_When_NodeLeavesCluster() throws ensureStableCluster(currentNumberOfNodes); } - var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeLeftInCluster); + var calculator = getCalculatorInstance(internalCluster(), nodeLeftInCluster); waitForRateLimitingAssignments(calculator); Set supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); @@ -98,7 +100,7 @@ public void testGrouping_RespectsMaxNodesPerGroupingLimit() throws Exception { var nodeNames = internalCluster().startNodes(numNodes); ensureStableCluster(numNodes); - var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + var calculator = getCalculatorInstance(internalCluster(), nodeNames.getFirst()); waitForRateLimitingAssignments(calculator); Set supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); @@ -117,7 +119,7 @@ public void testInitialRateLimitsCalculation_Correct() throws Exception { var nodeNames = internalCluster().startNodes(numNodes); ensureStableCluster(numNodes); - var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + var calculator = getCalculatorInstance(internalCluster(), nodeNames.getFirst()); waitForRateLimitingAssignments(calculator); Set supportedServices = SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet(); @@ -148,7 +150,7 @@ public void testRateLimits_Decrease_OnNodeJoin() throws Exception { var nodeNames = internalCluster().startNodes(initialNodes); ensureStableCluster(initialNodes); - var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + var calculator = getCalculatorInstance(internalCluster(), nodeNames.getFirst()); waitForRateLimitingAssignments(calculator); for (var serviceName : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet()) { @@ -178,7 +180,7 @@ public void testRateLimits_Increase_OnNodeLeave() throws Exception { var nodeNames = internalCluster().startNodes(numNodes); ensureStableCluster(numNodes); - var calculator = internalCluster().getInstance(InferenceServiceNodeLocalRateLimitCalculator.class, nodeNames.getFirst()); + var calculator = getCalculatorInstance(internalCluster(), 
nodeNames.getFirst()); waitForRateLimitingAssignments(calculator); for (var serviceName : SERVICE_NODE_LOCAL_RATE_LIMIT_CONFIGS.keySet()) { @@ -208,6 +210,27 @@ protected Collection> nodePlugins() { return Arrays.asList(LocalStateInferencePlugin.class); } + private InferenceServiceNodeLocalRateLimitCalculator getCalculatorInstance(InternalTestCluster internalTestCluster, String nodeName) { + InferenceServiceRateLimitCalculator calculatorInstance = internalTestCluster.getInstance( + InferenceServiceRateLimitCalculator.class, + nodeName + ); + assertThat( + "[" + + InferenceServiceNodeLocalRateLimitCalculatorTests.class.getName() + + "] should use [" + + InferenceServiceNodeLocalRateLimitCalculator.class.getName() + + "] as implementation for [" + + InferenceServiceRateLimitCalculator.class.getName() + + "]. Provided implementation was [" + + calculatorInstance.getClass().getName() + + "].", + calculatorInstance, + instanceOf(InferenceServiceNodeLocalRateLimitCalculator.class) + ); + return (InferenceServiceNodeLocalRateLimitCalculator) calculatorInstance; + } + private void waitForRateLimitingAssignments(InferenceServiceNodeLocalRateLimitCalculator calculator) throws Exception { assertBusy(() -> { var assignment = calculator.getRateLimitAssignment(ElasticInferenceService.NAME, TaskType.SPARSE_EMBEDDING); From d3a8a4ba04a427ee674b1ff27d1563e0024ec211 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Fri, 31 Jan 2025 16:42:27 +0000 Subject: [PATCH 334/383] [ML] Fix infer on and elasticsearch service endpoint created with a deployment id (#121428) Fixes a bug where the deployment Id was lost creating the text embedding model configuration --- docs/changelog/121428.yaml | 6 +++ .../inference/CreateFromDeploymentIT.java | 27 ++++++++++ .../CustomElandInternalServiceSettings.java | 10 ---- ...dInternalTextEmbeddingServiceSettings.java | 26 +++------ .../ElasticRerankerServiceSettings.java | 10 +++- .../ElasticsearchInternalService.java | 18 ++----- .../ElasticsearchInternalServiceSettings.java | 16 +----- .../ElserInternalServiceSettings.java | 35 +++++------- ...lingualE5SmallInternalServiceSettings.java | 16 +++++- ...rnalTextEmbeddingServiceSettingsTests.java | 6 +++ ...ticsearchInternalServiceSettingsTests.java | 2 +- .../ElasticsearchInternalServiceTests.java | 54 ++++++++++++++----- .../ElserInternalModelTests.java | 2 +- .../ElserInternalServiceSettingsTests.java | 7 ++- 14 files changed, 133 insertions(+), 102 deletions(-) create mode 100644 docs/changelog/121428.yaml diff --git a/docs/changelog/121428.yaml b/docs/changelog/121428.yaml new file mode 100644 index 0000000000000..b0a31b2399611 --- /dev/null +++ b/docs/changelog/121428.yaml @@ -0,0 +1,6 @@ +pr: 121428 +summary: Fix infer on and elasticsearch service endpoint created with a deployment + id +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java index e5eda9a71b472..47f34fa486daf 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/CreateFromDeploymentIT.java @@ -40,6 +40,14 @@ public void testAttachToDeployment() throws IOException { 
is(Map.of("num_allocations", 1, "num_threads", 1, "model_id", "attach_to_deployment", "deployment_id", "existing_deployment")) ); + var getModel = getModel(inferenceId); + serviceSettings = getModel.get("service_settings"); + assertThat( + getModel.toString(), + serviceSettings, + is(Map.of("num_allocations", 1, "num_threads", 1, "model_id", "attach_to_deployment", "deployment_id", "existing_deployment")) + ); + var results = infer(inferenceId, List.of("washing machine")); assertNotNull(results.get("sparse_embedding")); @@ -98,6 +106,25 @@ public void testAttachWithModelId() throws IOException { ) ); + var getModel = getModel(inferenceId); + serviceSettings = getModel.get("service_settings"); + assertThat( + getModel.toString(), + serviceSettings, + is( + Map.of( + "num_allocations", + 1, + "num_threads", + 1, + "model_id", + "attach_with_model_id", + "deployment_id", + "existing_deployment_with_model_id" + ) + ) + ); + var results = infer(inferenceId, List.of("washing machine")); assertNotNull(results.get("sparse_embedding")); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java index 3cc7e0c6c2b53..4591418419ded 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import java.io.IOException; @@ -20,15 +19,6 @@ public CustomElandInternalServiceSettings(ElasticsearchInternalServiceSettings o super(other); } - public CustomElandInternalServiceSettings( - Integer numAllocations, - int numThreads, - String modelId, - AdaptiveAllocationsSettings adaptiveAllocationsSettings - ) { - super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); - } - public CustomElandInternalServiceSettings(StreamInput in) throws IOException { super(in); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java index 133be5e2b7623..d5f4143b65d36 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettings.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.SimilarityMeasure; @@ -105,33 +106,17 @@ private static CommonFields commonFieldsFromMap(Map map, Validat private final SimilarityMeasure 
similarityMeasure; private final DenseVectorFieldMapper.ElementType elementType; - public CustomElandInternalTextEmbeddingServiceSettings( - int numAllocations, - int numThreads, - String modelId, - AdaptiveAllocationsSettings adaptiveAllocationsSettings - ) { - this( - numAllocations, - numThreads, - modelId, - adaptiveAllocationsSettings, - null, - SimilarityMeasure.COSINE, - DenseVectorFieldMapper.ElementType.FLOAT - ); - } - - public CustomElandInternalTextEmbeddingServiceSettings( + CustomElandInternalTextEmbeddingServiceSettings( int numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings, + @Nullable String deploymentId, Integer dimensions, SimilarityMeasure similarityMeasure, DenseVectorFieldMapper.ElementType elementType ) { - super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, deploymentId); this.dimensions = dimensions; this.similarityMeasure = Objects.requireNonNull(similarityMeasure); this.elementType = Objects.requireNonNull(elementType); @@ -159,7 +144,8 @@ private CustomElandInternalTextEmbeddingServiceSettings(CommonFields commonField commonFields.internalServiceSettings.getNumAllocations(), commonFields.internalServiceSettings.getNumThreads(), commonFields.internalServiceSettings.modelId(), - commonFields.internalServiceSettings.getAdaptiveAllocationsSettings() + commonFields.internalServiceSettings.getAdaptiveAllocationsSettings(), + commonFields.internalServiceSettings.getDeploymentId() ); this.dimensions = dimensions; similarityMeasure = commonFields.similarityMeasure; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java index 316dc092e03c7..2b7904e615682 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java @@ -14,21 +14,27 @@ import java.io.IOException; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.RERANKER_ID; + public class ElasticRerankerServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "elastic_reranker_service_settings"; + public static ElasticRerankerServiceSettings defaultEndpointSettings() { + return new ElasticRerankerServiceSettings(null, 1, RERANKER_ID, new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32)); + } + public ElasticRerankerServiceSettings(ElasticsearchInternalServiceSettings other) { super(other); } - public ElasticRerankerServiceSettings( + private ElasticRerankerServiceSettings( Integer numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings ) { - super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, null); } public ElasticRerankerServiceSettings(StreamInput in) throws IOException { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 9dfa21a323c33..9a150a5e13778 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -39,7 +39,6 @@ import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; -import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentStats; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; @@ -562,6 +561,7 @@ private static CustomElandEmbeddingModel updateModelWithEmbeddingDetails(CustomE model.getServiceSettings().getNumThreads(), model.getServiceSettings().modelId(), model.getServiceSettings().getAdaptiveAllocationsSettings(), + model.getServiceSettings().getDeploymentId(), embeddingSize, model.getServiceSettings().similarity(), model.getServiceSettings().elementType() @@ -903,12 +903,7 @@ private List defaultConfigs(boolean useLinuxOptimizedModel) { DEFAULT_ELSER_ID, TaskType.SPARSE_EMBEDDING, NAME, - new ElserInternalServiceSettings( - null, - 1, - useLinuxOptimizedModel ? ELSER_V2_MODEL_LINUX_X86 : ELSER_V2_MODEL, - new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) - ), + ElserInternalServiceSettings.defaultEndpointSettings(useLinuxOptimizedModel), ElserMlNodeTaskSettings.DEFAULT, ChunkingSettingsBuilder.DEFAULT_SETTINGS ); @@ -916,19 +911,14 @@ private List defaultConfigs(boolean useLinuxOptimizedModel) { DEFAULT_E5_ID, TaskType.TEXT_EMBEDDING, NAME, - new MultilingualE5SmallInternalServiceSettings( - null, - 1, - useLinuxOptimizedModel ? 
MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 : MULTILINGUAL_E5_SMALL_MODEL_ID, - new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) - ), + MultilingualE5SmallInternalServiceSettings.defaultEndpointSettings(useLinuxOptimizedModel), ChunkingSettingsBuilder.DEFAULT_SETTINGS ); var defaultRerank = new ElasticRerankerModel( DEFAULT_RERANK_ID, TaskType.RERANK, NAME, - new ElasticRerankerServiceSettings(null, 1, RERANKER_ID, new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32)), + ElasticRerankerServiceSettings.defaultEndpointSettings(), RerankTaskSettings.DEFAULT_SETTINGS ); return List.of(defaultElser, defaultE5, defaultRerank); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index 244108edc3dd4..98730f33d10f9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.ToXContentObject; @@ -108,25 +109,12 @@ protected static ElasticsearchInternalServiceSettings.Builder fromMap( .setDeploymentId(deploymentId); } - public ElasticsearchInternalServiceSettings( - Integer numAllocations, - int numThreads, - String modelId, - AdaptiveAllocationsSettings adaptiveAllocationsSettings - ) { - this.numAllocations = numAllocations; - this.numThreads = numThreads; - this.modelId = Objects.requireNonNull(modelId); - this.adaptiveAllocationsSettings = adaptiveAllocationsSettings; - this.deploymentId = null; - } - public ElasticsearchInternalServiceSettings( Integer numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings, - String deploymentId + @Nullable String deploymentId ) { this.numAllocations = numAllocations; this.numThreads = numThreads; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java index da9164bf3f288..b94b9feb8a049 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettings.java @@ -9,14 +9,14 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.inference.MinimalServiceSettings; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import java.io.IOException; -import java.util.Arrays; -import java.util.Map; + +import static 
org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels.ELSER_V2_MODEL; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElserModels.ELSER_V2_MODEL_LINUX_X86; public class ElserInternalServiceSettings extends ElasticsearchInternalServiceSettings { @@ -26,37 +26,26 @@ public static MinimalServiceSettings minimalServiceSettings() { return MinimalServiceSettings.sparseEmbedding(); } - public static Builder fromRequestMap(Map map) { - ValidationException validationException = new ValidationException(); - var baseSettings = ElasticsearchInternalServiceSettings.fromMap(map, validationException); - - String modelId = baseSettings.getModelId(); - if (modelId != null && ElserModels.isValidModel(modelId) == false) { - var ve = new ValidationException(); - ve.addValidationError( - "Unknown ELSER model ID [" + modelId + "]. Valid models are " + Arrays.toString(ElserModels.VALID_ELSER_MODEL_IDS.toArray()) - ); - throw ve; - } - - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; - } - - return baseSettings; + public static ElserInternalServiceSettings defaultEndpointSettings(boolean useLinuxOptimizedModel) { + return new ElserInternalServiceSettings( + null, + 1, + useLinuxOptimizedModel ? ELSER_V2_MODEL_LINUX_X86 : ELSER_V2_MODEL, + new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) + ); } public ElserInternalServiceSettings(ElasticsearchInternalServiceSettings other) { super(other); } - public ElserInternalServiceSettings( + private ElserInternalServiceSettings( Integer numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings ) { - this(new ElasticsearchInternalServiceSettings(numAllocations, numThreads, modelId, adaptiveAllocationsSettings)); + this(new ElasticsearchInternalServiceSettings(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, null)); } public ElserInternalServiceSettings(StreamInput in) throws IOException { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java index 317cc48172fca..45d52d3c8deaa 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java @@ -18,6 +18,9 @@ import java.util.Arrays; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86; + public class MultilingualE5SmallInternalServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "multilingual_e5_small_service_settings"; @@ -29,17 +32,26 @@ public static MinimalServiceSettings minimalServiceSettings() { return MinimalServiceSettings.textEmbedding(DIMENSIONS, SIMILARITY, DenseVectorFieldMapper.ElementType.FLOAT); } + public static MultilingualE5SmallInternalServiceSettings defaultEndpointSettings(boolean useLinuxOptimizedModel) { + return new MultilingualE5SmallInternalServiceSettings( + null, + 1, + 
useLinuxOptimizedModel ? MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 : MULTILINGUAL_E5_SMALL_MODEL_ID, + new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32) + ); + } + public MultilingualE5SmallInternalServiceSettings(ElasticsearchInternalServiceSettings other) { super(other); } - public MultilingualE5SmallInternalServiceSettings( + MultilingualE5SmallInternalServiceSettings( Integer numAllocations, int numThreads, String modelId, AdaptiveAllocationsSettings adaptiveAllocationsSettings ) { - super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, null); } public MultilingualE5SmallInternalServiceSettings(StreamInput in) throws IOException { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettingsTests.java index ebb9c964e4c9a..3b8ce3a7cc64c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalTextEmbeddingServiceSettingsTests.java @@ -48,6 +48,7 @@ public static CustomElandInternalTextEmbeddingServiceSettings createRandom() { numThreads, modelId, null, + null, dims, similarityMeasure, elementType @@ -86,6 +87,7 @@ public void testFromMap_Request_CreatesSettingsCorrectly() { modelId, null, null, + null, SimilarityMeasure.DOT_PRODUCT, DenseVectorFieldMapper.ElementType.FLOAT ) @@ -111,6 +113,7 @@ public void testFromMap_Request_DoesNotDefaultSimilarityElementType() { modelId, null, null, + null, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT ) @@ -152,6 +155,7 @@ public void testFromMap_Request_IgnoresDimensions() { modelId, null, null, + null, SimilarityMeasure.DOT_PRODUCT, DenseVectorFieldMapper.ElementType.FLOAT ) @@ -192,6 +196,7 @@ public void testFromMap_Persistent_CreatesSettingsCorrectly() { numThreads, modelId, null, + null, 1, SimilarityMeasure.DOT_PRODUCT, DenseVectorFieldMapper.ElementType.FLOAT @@ -206,6 +211,7 @@ public void testToXContent_WritesAllValues() throws IOException { 1, "model_id", null, + null, 100, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.BYTE diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java index 0db0a7669c8aa..4ec575420613f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettingsTests.java @@ -103,7 +103,7 @@ public void testFromMap() { ) ) ).build(); - assertEquals(new ElasticsearchInternalServiceSettings(1, 4, ".elser_model_1", null), serviceSettings); + assertEquals(new ElasticsearchInternalServiceSettings(1, 4, ".elser_model_1", null, null), serviceSettings); } public void testFromMapMissingOptions() { diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 3b634f45dc751..1615d46b349ea 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -351,7 +351,9 @@ public void testParseRequestConfig_elser() { ) ); - var elserServiceSettings = new ElserInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null); + var elserServiceSettings = new ElserInternalServiceSettings( + new ElasticsearchInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null, null) + ); service.parseRequestConfig( randomInferenceEntityId, @@ -381,7 +383,9 @@ public void testParseRequestConfig_elser() { ) ); - var elserServiceSettings = new ElserInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null); + var elserServiceSettings = new ElserInternalServiceSettings( + new ElasticsearchInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null, null) + ); String criticalWarning = "Putting elasticsearch service inference endpoints (including elser service) without a model_id field is" @@ -450,7 +454,9 @@ public void testParseRequestConfig_elser() { ); config.put(ModelConfigurations.CHUNKING_SETTINGS, createRandomChunkingSettingsMap()); - var elserServiceSettings = new ElserInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null); + var elserServiceSettings = new ElserInternalServiceSettings( + new ElasticsearchInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null, null) + ); service.parseRequestConfig( randomInferenceEntityId, @@ -486,7 +492,9 @@ public void testParseRequestConfig_elser() { ) ); - var elserServiceSettings = new ElserInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null); + var elserServiceSettings = new ElserInternalServiceSettings( + new ElasticsearchInternalServiceSettings(1, 4, ElserModels.ELSER_V2_MODEL, null, null) + ); service.parseRequestConfig( randomInferenceEntityId, @@ -742,7 +750,16 @@ public void testParsePersistedConfig() { TaskType.TEXT_EMBEDDING, settings ); - var elandServiceSettings = new CustomElandInternalTextEmbeddingServiceSettings(1, 4, "invalid", null); + var elandServiceSettings = new CustomElandInternalTextEmbeddingServiceSettings( + 1, + 4, + "invalid", + null, + null, + null, + SimilarityMeasure.COSINE, + DenseVectorFieldMapper.ElementType.FLOAT + ); assertEquals( new CustomElandEmbeddingModel( randomInferenceEntityId, @@ -933,7 +950,7 @@ private void testChunkInfer_Sparse(ChunkingSettings chunkingSettings) throws Int "foo", TaskType.SPARSE_EMBEDDING, "elasticsearch", - new ElasticsearchInternalServiceSettings(1, 1, "model-id", null), + new ElasticsearchInternalServiceSettings(1, 1, "model-id", null, null), chunkingSettings ); var service = createService(client); @@ -1003,7 +1020,7 @@ private void testChunkInfer_Elser(ChunkingSettings chunkingSettings) throws Inte "foo", TaskType.SPARSE_EMBEDDING, "elasticsearch", - new ElserInternalServiceSettings(1, 1, "model-id", null), + new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, "model-id", null, null)), new ElserMlNodeTaskSettings(), chunkingSettings ); @@ -1328,11 +1345,20 @@ private CustomElandModel 
getCustomElandModel(TaskType taskType) { randomInferenceEntityId, taskType, ElasticsearchInternalService.NAME, - new CustomElandInternalServiceSettings(1, 4, "custom-model", null), + new CustomElandInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 4, "custom-model", null, null)), RerankTaskSettings.DEFAULT_SETTINGS ); } else if (taskType == TaskType.TEXT_EMBEDDING) { - var serviceSettings = new CustomElandInternalTextEmbeddingServiceSettings(1, 4, "custom-model", null); + var serviceSettings = new CustomElandInternalTextEmbeddingServiceSettings( + 1, + 4, + "custom-model", + null, + null, + null, + SimilarityMeasure.COSINE, + DenseVectorFieldMapper.ElementType.FLOAT + ); expectedModel = new CustomElandEmbeddingModel( randomInferenceEntityId, @@ -1346,7 +1372,7 @@ private CustomElandModel getCustomElandModel(TaskType taskType) { randomInferenceEntityId, taskType, ElasticsearchInternalService.NAME, - new CustomElandInternalServiceSettings(1, 4, "custom-model", null), + new CustomElandInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 4, "custom-model", null, null)), (ChunkingSettings) null ); } @@ -1438,6 +1464,7 @@ public void testParseRequestConfigEland_SetsDimensionsToOne() { 4, "custom-model", null, + null, 1, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT @@ -1463,6 +1490,7 @@ public void testParseRequestConfigEland_SetsDimensionsToOne() { "custom-model", null, null, + null, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT ), @@ -1511,7 +1539,7 @@ public void testEmbeddingTypeFromTaskTypeAndSettings() { EmbeddingRequestChunker.EmbeddingType.SPARSE, ElasticsearchInternalService.embeddingTypeFromTaskTypeAndSettings( TaskType.SPARSE_EMBEDDING, - new ElasticsearchInternalServiceSettings(1, 1, "foo", null) + new ElasticsearchInternalServiceSettings(1, 1, "foo", null, null) ) ); assertEquals( @@ -1526,7 +1554,7 @@ public void testEmbeddingTypeFromTaskTypeAndSettings() { ElasticsearchStatusException.class, () -> ElasticsearchInternalService.embeddingTypeFromTaskTypeAndSettings( TaskType.COMPLETION, - new ElasticsearchInternalServiceSettings(1, 1, "foo", null) + new ElasticsearchInternalServiceSettings(1, 1, "foo", null, null) ) ); assertThat(e1.getMessage(), containsString("Chunking is not supported for task type [completion]")); @@ -1535,7 +1563,7 @@ public void testEmbeddingTypeFromTaskTypeAndSettings() { ElasticsearchStatusException.class, () -> ElasticsearchInternalService.embeddingTypeFromTaskTypeAndSettings( TaskType.RERANK, - new ElasticsearchInternalServiceSettings(1, 1, "foo", null) + new ElasticsearchInternalServiceSettings(1, 1, "foo", null, null) ) ); assertThat(e2.getMessage(), containsString("Chunking is not supported for task type [rerank]")); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java index 96cd42efa42f5..5b21717ac03e4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModelTests.java @@ -16,7 +16,7 @@ public void testUpdateNumAllocation() { "foo", TaskType.SPARSE_EMBEDDING, ElasticsearchInternalService.NAME, - new ElserInternalServiceSettings(null, 1, "elser", null), + new 
ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(null, 1, "elser", null, null)), new ElserMlNodeTaskSettings(), null ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettingsTests.java index f4e97b2c2e5e0..dd4513db0d50a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalServiceSettingsTests.java @@ -24,12 +24,12 @@ public static ElserInternalServiceSettings createRandom() { public void testBwcWrite() throws IOException { { - var settings = new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, ".elser_model_1", null)); + var settings = new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, ".elser_model_1", null, null)); var copy = copyInstance(settings, TransportVersions.V_8_12_0); assertEquals(settings, copy); } { - var settings = new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, ".elser_model_1", null)); + var settings = new ElserInternalServiceSettings(new ElasticsearchInternalServiceSettings(1, 1, ".elser_model_1", null, null)); var copy = copyInstance(settings, TransportVersions.V_8_11_X); assertEquals(settings, copy); } @@ -53,6 +53,7 @@ protected ElserInternalServiceSettings mutateInstance(ElserInternalServiceSettin instance.getNumAllocations() == null ? 1 : instance.getNumAllocations() + 1, instance.getNumThreads(), instance.modelId(), + null, null ) ); @@ -61,6 +62,7 @@ protected ElserInternalServiceSettings mutateInstance(ElserInternalServiceSettin instance.getNumAllocations(), instance.getNumThreads() + 1, instance.modelId(), + null, null ) ); @@ -72,6 +74,7 @@ yield new ElserInternalServiceSettings( instance.getNumAllocations(), instance.getNumThreads(), versions.iterator().next(), + null, null ) ); From 67c2f41724af6d0a8fde350f0c3292f10a6939f3 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Fri, 31 Jan 2025 17:45:52 +0100 Subject: [PATCH 335/383] Fix serverless test - wait for index green just after first insertion (#121180) --- .../synonyms/90_synonyms_reloading_for_synset.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml index 9543783f0d6a3..02db799e52e51 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml @@ -14,6 +14,12 @@ setup: - synonyms: "bye => goodbye" id: "synonym-rule-2" + # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. + - do: + cluster.health: + index: .synonyms + wait_for_status: green + # Create synonyms synonyms_set2 - do: synonyms.put_synonym: @@ -25,12 +31,6 @@ setup: - synonyms: "bye => goodbye" id: "synonym-rule-2" - # This is to ensure that all index shards (write and read) are available. 
In serverless this can take some time. - - do: - cluster.health: - index: .synonyms - wait_for_status: green - # Create my_index1 with synonym_filter that uses synonyms_set1 - do: indices.create: From e1c6c3f9b2516574267000e33563d90c75e9d673 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 31 Jan 2025 17:53:12 +0000 Subject: [PATCH 336/383] Configurable limit on concurrent shard closing (#121267) Today we limit the number of shards concurrently closed by the `IndicesClusterStateService`, but this limit is currently a function of the CPU count of the node. On nodes with plentiful CPU but poor IO performance we may want to restrict this limit further. This commit exposes the throttling limit as a setting. --- .../common/settings/ClusterSettings.java | 1 + .../cluster/IndicesClusterStateService.java | 21 +++++- .../cluster/ShardCloseExecutorTests.java | 69 +++++++++++++++++++ 3 files changed, 88 insertions(+), 3 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/indices/cluster/ShardCloseExecutorTests.java diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index aecc750bd4e39..e9b9a5ea4ab9e 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -614,6 +614,7 @@ public void apply(Settings value, Settings current, Settings previous) { DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING, IndicesClusterStateService.SHARD_LOCK_RETRY_INTERVAL_SETTING, IndicesClusterStateService.SHARD_LOCK_RETRY_TIMEOUT_SETTING, + IndicesClusterStateService.CONCURRENT_SHARD_CLOSE_LIMIT, IngestSettings.GROK_WATCHDOG_INTERVAL, IngestSettings.GROK_WATCHDOG_MAX_EXECUTION_TIME, TDigestExecutionHint.SETTING, diff --git a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 0e9901bc05682..9e31bc1aef9a7 100644 --- a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -116,6 +116,18 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple Setting.Property.NodeScope ); + /** + * Maximum number of shards to try and close concurrently. Defaults to the smaller of {@code node.processors} and {@code 10}, but can be + * set to any positive integer. + */ + public static final Setting CONCURRENT_SHARD_CLOSE_LIMIT = Setting.intSetting( + "indices.store.max_concurrent_closing_shards", + settings -> Integer.toString(Math.min(10, EsExecutors.NODE_PROCESSORS_SETTING.get(settings).roundUp())), + 1, + Integer.MAX_VALUE, + Setting.Property.NodeScope + ); + final AllocatedIndices> indicesService; private final ClusterService clusterService; private final ThreadPool threadPool; @@ -1347,7 +1359,7 @@ enum IndexRemovalReason { } } - private static class ShardCloseExecutor implements Executor { + static class ShardCloseExecutor implements Executor { private final ThrottledTaskRunner throttledTaskRunner; @@ -1360,8 +1372,11 @@ private static class ShardCloseExecutor implements Executor { // can't close the old ones down fast enough. Maybe we could block or throttle new shards starting while old shards are still // shutting down, given that starting new shards is already async. 
Since this seems unlikely in practice, we opt for the simple // approach here. - final var maxThreads = Math.max(EsExecutors.NODE_PROCESSORS_SETTING.get(settings).roundUp(), 10); - throttledTaskRunner = new ThrottledTaskRunner(IndicesClusterStateService.class.getCanonicalName(), maxThreads, delegate); + throttledTaskRunner = new ThrottledTaskRunner( + IndicesClusterStateService.class.getCanonicalName(), + CONCURRENT_SHARD_CLOSE_LIMIT.get(settings), + delegate + ); } @Override diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ShardCloseExecutorTests.java b/server/src/test/java/org/elasticsearch/indices/cluster/ShardCloseExecutorTests.java new file mode 100644 index 0000000000000..d4699454a4b6e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ShardCloseExecutorTests.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.indices.cluster; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.concurrent.atomic.AtomicInteger; + +public class ShardCloseExecutorTests extends ESTestCase { + + public void testThrottling() { + // This defaults to the number of CPUs of the machine running the tests which could be either side of 10. 
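+ // For example, a node.processors value of 4 yields a default limit of min(10, 4) = 4, while a
+ // 32-processor machine is still capped at min(10, 32) = 10.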
+ final var defaultProcessors = EsExecutors.NODE_PROCESSORS_SETTING.get(Settings.EMPTY).roundUp(); + ensureThrottling(Math.min(10, defaultProcessors), Settings.EMPTY); + + if (10 < defaultProcessors) { + ensureThrottling( + 10, + Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), between(10, defaultProcessors - 1)).build() + ); + } // else we cannot run this check, the machine running the tests doesn't have enough CPUs + + if (1 < defaultProcessors) { + final var fewProcessors = between(1, Math.min(10, defaultProcessors - 1)); + ensureThrottling(fewProcessors, Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), fewProcessors).build()); + } // else we cannot run this check, the machine running the tests has less than 2 whole CPUs (and we already tested the 1 case) + + // but in any case we can override the throttle regardless of its default value + final var override = between(1, defaultProcessors * 2); + ensureThrottling( + override, + Settings.builder().put(IndicesClusterStateService.CONCURRENT_SHARD_CLOSE_LIMIT.getKey(), override).build() + ); + } + + private static void ensureThrottling(int expectedLimit, Settings settings) { + final var tasksToRun = new ArrayList(expectedLimit + 1); + final var executor = new IndicesClusterStateService.ShardCloseExecutor(settings, tasksToRun::add); + final var runCount = new AtomicInteger(); + + // enqueue one more task than the throttling limit + for (int i = 0; i < expectedLimit + 1; i++) { + executor.execute(runCount::incrementAndGet); + } + + // check that we submitted tasks up to the expected limit, holding back the final task behind the throttle for now + assertEquals(expectedLimit, tasksToRun.size()); + + // now execute all the tasks one by one + for (int i = 0; i < expectedLimit + 1; i++) { + assertEquals(i, runCount.get()); + tasksToRun.get(i).run(); + assertEquals(i + 1, runCount.get()); + + // executing the first task enqueues the final task + assertEquals(expectedLimit + 1, tasksToRun.size()); + } + } +} From d9da7c9940ec3a1ddd42aeade88b1d1d407ec216 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 31 Jan 2025 13:11:13 -0500 Subject: [PATCH 337/383] ESQL: Expand HeapAttack for LOOKUP (#120754) * ESQL: Expand HeapAttack for LOOKUP This expands the heap attack tests for LOOKUP. Now there are three flavors: 1. LOOKUP a single geo_point - about 30 bytes or so. 2. LOOKUP a one mb string. 3. LOOKUP no fields - just JOIN to alter cardinality. Fetching a geo_point is fine with about 500 repeated docs before it circuit breaks which works out to about 256mb of buffered results. That's sensible on our 512mb heap and likely to work ok for most folks. We'll flip to a streaming method eventually and this won't be a problem any more. But for now, we buffer. The no lookup fields is fine with like 7500 matches per incoming row. That's quite a lot, really. The 1mb string is trouble! We circuit break properly which is great and safe, but if you join 1mb worth of columns in LOOKUP you are going to need bigger heaps than our test. Again, we'll move from buffering these results to streaming them and it'll work better, but for now we buffer. 
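For reference, the three flavors differ only in which lookup column they fetch; these are the query shapes, taken verbatim from the test bodies in the diff below:

```esql
FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(location)  // 1: small geo_point per match
FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(string)    // 2: ~1mb string per match
FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(*)         // 3: no fields fetched, the join only changes cardinality
```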
* updates --- .../xpack/esql/heap_attack/HeapAttackIT.java | 86 ++++++++++++++++++- 1 file changed, 83 insertions(+), 3 deletions(-) diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index f732f7cbbf00d..59ce3c2dbb1cf 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -628,7 +628,7 @@ private Response fetchMvLongs() throws IOException { } public void testLookupExplosion() throws IOException { - int sensorDataCount = 7500; + int sensorDataCount = 500; int lookupEntries = 10000; Map map = lookupExplosion(sensorDataCount, lookupEntries); assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount * lookupEntries)))); @@ -636,16 +636,71 @@ public void testLookupExplosion() throws IOException { public void testLookupExplosionManyMatches() throws IOException { assertCircuitBreaks(() -> { - Map result = lookupExplosion(8500, 10000); + Map result = lookupExplosion(1500, 10000); + logger.error("should have failed but got {}", result); + }); + } + + public void testLookupExplosionNoFetch() throws IOException { + int sensorDataCount = 7500; + int lookupEntries = 10000; + Map map = lookupExplosionNoFetch(sensorDataCount, lookupEntries); + assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount * lookupEntries)))); + } + + public void testLookupExplosionNoFetchManyMatches() throws IOException { + assertCircuitBreaks(() -> { + Map result = lookupExplosionNoFetch(8500, 10000); + logger.error("should have failed but got {}", result); + }); + } + + public void testLookupExplosionBigString() throws IOException { + int sensorDataCount = 150; + int lookupEntries = 1; + Map map = lookupExplosionBigString(sensorDataCount, lookupEntries); + assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount * lookupEntries)))); + } + + public void testLookupExplosionBigStringManyMatches() throws IOException { + assertCircuitBreaks(() -> { + Map result = lookupExplosionBigString(500, 1); logger.error("should have failed but got {}", result); }); } private Map lookupExplosion(int sensorDataCount, int lookupEntries) throws IOException { + lookupExplosionData(sensorDataCount, lookupEntries); + StringBuilder query = startQuery(); + query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(location)\"}"); + return responseAsMap(query(query.toString(), null)); + } + + private Map lookupExplosionNoFetch(int sensorDataCount, int lookupEntries) throws IOException { + lookupExplosionData(sensorDataCount, lookupEntries); + StringBuilder query = startQuery(); + query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(*)\"}"); + return responseAsMap(query(query.toString(), null)); + } + + private void lookupExplosionData(int sensorDataCount, int lookupEntries) throws IOException { initSensorData(sensorDataCount, 1); initSensorLookup(lookupEntries, 1, i -> "73.9857 40.7484"); + } + + private Map lookupExplosionBigString(int sensorDataCount, int lookupEntries) throws IOException { + initSensorData(sensorDataCount, 1); + initSensorLookupString(lookupEntries, 1, i -> { + int target = 
Math.toIntExact(ByteSizeValue.ofMb(1).getBytes()); + StringBuilder str = new StringBuilder(Math.toIntExact(ByteSizeValue.ofMb(2).getBytes())); + while (str.length() < target) { + str.append("Lorem ipsum dolor sit amet, consectetur adipiscing elit."); + } + logger.info("big string is {} characters", str.length()); + return str.toString(); + }); StringBuilder query = startQuery(); - query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(*)\"}"); + query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(string)\"}"); return responseAsMap(query(query.toString(), null)); } @@ -834,6 +889,31 @@ private void initSensorLookup(int lookupEntries, int sensorCount, IntFunction string) throws IOException { + logger.info("loading sensor lookup with huge strings"); + createIndex("sensor_lookup", Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.LOOKUP.getName()).build(), """ + { + "properties": { + "id": { "type": "long" }, + "string": { "type": "text" } + } + }"""); + int docsPerBulk = 10; + StringBuilder data = new StringBuilder(); + for (int i = 0; i < lookupEntries; i++) { + int sensor = i % sensorCount; + data.append(String.format(Locale.ROOT, """ + {"create":{}} + {"id": %d, "string": "%s"} + """, sensor, string.apply(sensor))); + if (i % docsPerBulk == docsPerBulk - 1) { + bulk("sensor_lookup", data.toString()); + data.setLength(0); + } + } + initIndex("sensor_lookup", data.toString()); + } + private void initSensorEnrich(int lookupEntries, int sensorCount, IntFunction location) throws IOException { initSensorLookup(lookupEntries, sensorCount, location); logger.info("loading sensor enrich"); From 06fee76fb1067d7ac25d6ec643f7ddb283ce892b Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Fri, 31 Jan 2025 13:13:17 -0500 Subject: [PATCH 338/383] [ES|QL] Correct line and column numbers of missing named parameters (#120852) * correct line and column numbers of missing named parameters --- docs/changelog/120852.yaml | 5 +++ .../xpack/esql/qa/rest/RestEsqlTestCase.java | 27 +++++++++++++ .../xpack/esql/parser/LogicalPlanBuilder.java | 12 +----- .../xpack/esql/parser/ParsingException.java | 38 +++++++++++++++++++ 4 files changed, 71 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/120852.yaml diff --git a/docs/changelog/120852.yaml b/docs/changelog/120852.yaml new file mode 100644 index 0000000000000..90a05aa860f3f --- /dev/null +++ b/docs/changelog/120852.yaml @@ -0,0 +1,5 @@ +pr: 120852 +summary: Correct line and column numbers of missing named parameters +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 66333421eeb75..88fc8a9a36312 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -773,6 +773,33 @@ public void testNamedParamsForIdentifierAndIdentifierPatterns() throws IOExcepti } } + public void testErrorMessageForMissingParams() throws IOException { + ResponseException re = expectThrows( + ResponseException.class, + () -> runEsql(requestObjectBuilder().query("from idx | where x == ?n1").params("[]")) + ); + assertThat( + EntityUtils.toString(re.getResponse().getEntity()).replaceAll("\\\\\n\s+\\\\", 
""), + containsString("line 1:23: Unknown query parameter [n1]") + ); + + re = expectThrows( + ResponseException.class, + () -> runEsql(requestObjectBuilder().query("from idx | where x == ?n1 and y == ?n2").params("[{\"n\" : \"v\"}]")) + ); + assertThat(EntityUtils.toString(re.getResponse().getEntity()).replaceAll("\\\\\n\s+\\\\", ""), containsString(""" + line 1:23: Unknown query parameter [n1], did you mean [n]?; line 1:36: Unknown query parameter [n2], did you mean [n]?""")); + + re = expectThrows( + ResponseException.class, + () -> runEsql(requestObjectBuilder().query("from idx | where x == ?n1 and y == ?n2").params("[{\"n1\" : \"v1\"}]")) + ); + assertThat( + EntityUtils.toString(re.getResponse().getEntity()).replaceAll("\\\\\n\s+\\\\", ""), + containsString("line 1:36: Unknown query parameter [n2], did you mean [n1]") + ); + } + public void testErrorMessageForLiteralDateMathOverflow() throws IOException { List dateMathOverflowExpressions = List.of( "2147483647 day + 1 day", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index ba8aaf6251c57..5ccdda5b1839b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -108,17 +108,7 @@ protected LogicalPlan plan(ParseTree ctx) { if (errors.hasNext() == false) { return p; } else { - StringBuilder message = new StringBuilder(); - int i = 0; - - while (errors.hasNext()) { - if (i > 0) { - message.append("; "); - } - message.append(errors.next().getMessage()); - i++; - } - throw new ParsingException(message.toString()); + throw ParsingException.combineParsingExceptions(errors); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java index c25ab92437bfc..119e96bbd865c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java @@ -9,6 +9,8 @@ import org.elasticsearch.xpack.esql.EsqlClientException; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.util.Iterator; + import static org.elasticsearch.common.logging.LoggerMessageFormat.format; public class ParsingException extends EsqlClientException { @@ -21,6 +23,10 @@ public ParsingException(String message, Exception cause, int line, int charPosit this.charPositionInLine = charPositionInLine + 1; } + /** + * To be used only if the exception cannot be associated with a specific position in the query. + * Error message will start with {@code line -1:-1:} instead of using specific location. + */ public ParsingException(String message, Object... args) { this(Source.EMPTY, message, args); } @@ -37,6 +43,38 @@ public ParsingException(Exception cause, Source source, String message, Object.. this.charPositionInLine = source.source().getColumnNumber(); } + private ParsingException(int line, int charPositionInLine, String message, Object... 
args) { + super(message, args); + this.line = line; + this.charPositionInLine = charPositionInLine; + } + + /** + * Combine multiple {@code ParsingException} into one, this is used by {@code LogicalPlanBuilder} to + * consolidate multiple named parameters related {@code ParsingException}. + */ + public static ParsingException combineParsingExceptions(Iterator parsingExceptions) { + StringBuilder message = new StringBuilder(); + int i = 0; + int line = -1; + int charPositionInLine = -1; + + while (parsingExceptions.hasNext()) { + ParsingException e = parsingExceptions.next(); + if (i > 0) { + message.append("; "); + message.append(e.getMessage()); + } else { + // line and column numbers are the associated with the first error + line = e.getLineNumber(); + charPositionInLine = e.getColumnNumber(); + message.append(e.getErrorMessage()); + } + i++; + } + return new ParsingException(line, charPositionInLine, message.toString()); + } + public int getLineNumber() { return line; } From 38a384444169c0458186cb1ddd9b2e08c87a930e Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Fri, 31 Jan 2025 13:18:17 -0500 Subject: [PATCH 339/383] Entitlement tests using reflection (#121436) * Entitlement IT cases for reflection * EntitlementBootstrap selfTest using reflection * Remove errant logging setting * Lambdas instead of booleans * [CI] Auto commit changes from spotless * Refactor: Extract lambdas to method refs --------- Co-authored-by: elasticsearchmachine --- .../elasticsearch/core/CheckedSupplier.java | 18 ++++++++ .../qa/test/DummyImplementations.java | 5 +-- .../qa/test/RestEntitlementsCheckAction.java | 17 +++++++ .../bootstrap/EntitlementBootstrap.java | 44 ++++++++++++++----- 4 files changed, 71 insertions(+), 13 deletions(-) create mode 100644 libs/core/src/main/java/org/elasticsearch/core/CheckedSupplier.java diff --git a/libs/core/src/main/java/org/elasticsearch/core/CheckedSupplier.java b/libs/core/src/main/java/org/elasticsearch/core/CheckedSupplier.java new file mode 100644 index 0000000000000..5d3831881f285 --- /dev/null +++ b/libs/core/src/main/java/org/elasticsearch/core/CheckedSupplier.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.core; + +/** + * A {@link java.util.function.Supplier}-like interface which allows throwing checked exceptions. + */ +@FunctionalInterface +public interface CheckedSupplier { + T get() throws E; +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java index 6564e0eed41e1..2169b60df21c5 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java @@ -52,10 +52,9 @@ *

* A bit like Mockito but way more painful. */ -class DummyImplementations { - - static class DummyLocaleServiceProvider extends LocaleServiceProvider { +public class DummyImplementations { + public static class DummyLocaleServiceProvider extends LocaleServiceProvider { @Override public Locale[] getAvailableLocales() { throw unexpected(); diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java index dfca49d122673..2581593b730f3 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java @@ -96,6 +96,9 @@ static CheckAction alwaysDenied(CheckedRunnable action) { private static final Map checkActions = Stream.concat( Stream.>of( + entry("static_reflection", deniedToPlugins(RestEntitlementsCheckAction::staticMethodNeverEntitledViaReflection)), + entry("nonstatic_reflection", deniedToPlugins(RestEntitlementsCheckAction::nonstaticMethodNeverEntitledViaReflection)), + entry("constructor_reflection", deniedToPlugins(RestEntitlementsCheckAction::constructorNeverEntitledViaReflection)), entry("runtime_exit", deniedToPlugins(RestEntitlementsCheckAction::runtimeExit)), entry("runtime_halt", deniedToPlugins(RestEntitlementsCheckAction::runtimeHalt)), entry("system_exit", deniedToPlugins(RestEntitlementsCheckAction::systemExit)), @@ -338,6 +341,11 @@ private static void systemExit() { System.exit(123); } + private static void staticMethodNeverEntitledViaReflection() throws Exception { + Method systemExit = System.class.getMethod("exit", int.class); + systemExit.invoke(null, 123); + } + private static void createClassLoader() throws IOException { try (var classLoader = new URLClassLoader("test", new URL[0], RestEntitlementsCheckAction.class.getClassLoader())) { logger.info("Created URLClassLoader [{}]", classLoader.getName()); @@ -348,6 +356,11 @@ private static void processBuilder_start() throws IOException { new ProcessBuilder("").start(); } + private static void nonstaticMethodNeverEntitledViaReflection() throws Exception { + Method processBuilderStart = ProcessBuilder.class.getMethod("start"); + processBuilderStart.invoke(new ProcessBuilder("")); + } + private static void processBuilder_startPipeline() throws IOException { ProcessBuilder.startPipeline(List.of()); } @@ -386,6 +399,10 @@ private static void setHttpsConnectionProperties() { new DummyLocaleServiceProvider(); } + private static void constructorNeverEntitledViaReflection() throws Exception { + DummyLocaleServiceProvider.class.getConstructor().newInstance(); + } + private static void breakIteratorProvider$() { new DummyBreakIteratorProvider(); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index e7312103f9921..4badc4bb3a44e 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -14,6 +14,8 @@ import com.sun.tools.attach.AttachNotSupportedException; import com.sun.tools.attach.VirtualMachine; 
+import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.CheckedSupplier; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.initialization.EntitlementInitialization; import org.elasticsearch.entitlement.runtime.api.NotEntitledException; @@ -22,8 +24,10 @@ import org.elasticsearch.logging.Logger; import java.io.IOException; +import java.lang.reflect.InvocationTargetException; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.attribute.FileAttribute; import java.util.Map; import java.util.function.Function; @@ -144,30 +148,31 @@ private static String findAgentJar() { * @throws IllegalStateException if the entitlements system can't prevent an unauthorized action of our choosing */ private static void selfTest() { - ensureCannotStartProcess(); - ensureCanCreateTempFile(); + ensureCannotStartProcess(ProcessBuilder::start); + ensureCanCreateTempFile(EntitlementBootstrap::createTempFile); + + // Try again with reflection + ensureCannotStartProcess(EntitlementBootstrap::reflectiveStartProcess); + ensureCanCreateTempFile(EntitlementBootstrap::reflectiveCreateTempFile); } - private static void ensureCannotStartProcess() { + private static void ensureCannotStartProcess(CheckedConsumer startProcess) { try { // The command doesn't matter; it doesn't even need to exist - new ProcessBuilder("").start(); + startProcess.accept(new ProcessBuilder("")); } catch (NotEntitledException e) { logger.debug("Success: Entitlement protection correctly prevented process creation"); return; - } catch (IOException e) { + } catch (Exception e) { throw new IllegalStateException("Failed entitlement protection self-test", e); } throw new IllegalStateException("Entitlement protection self-test was incorrectly permitted"); } - /** - * Originally {@code Security.selfTest}. - */ @SuppressForbidden(reason = "accesses jvm default tempdir as a self-test") - private static void ensureCanCreateTempFile() { + private static void ensureCanCreateTempFile(CheckedSupplier createTempFile) { try { - Path p = Files.createTempFile(null, null); + Path p = createTempFile.get(); p.toFile().deleteOnExit(); // Make an effort to clean up the file immediately; also, deleteOnExit leaves the file if the JVM exits abnormally. 
@@ -184,5 +189,24 @@ private static void ensureCanCreateTempFile() { logger.debug("Success: Entitlement protection correctly permitted temp file creation"); } + @SuppressForbidden(reason = "accesses jvm default tempdir as a self-test") + private static Path createTempFile() throws Exception { + return Files.createTempFile(null, null); + } + + private static void reflectiveStartProcess(ProcessBuilder pb) throws Exception { + try { + var start = ProcessBuilder.class.getMethod("start"); + start.invoke(pb); + } catch (InvocationTargetException e) { + throw (Exception) e.getCause(); + } + } + + private static Path reflectiveCreateTempFile() throws Exception { + return (Path) Files.class.getMethod("createTempFile", String.class, String.class, FileAttribute[].class) + .invoke(null, null, null, new FileAttribute[0]); + } + private static final Logger logger = LogManager.getLogger(EntitlementBootstrap.class); } From f7901f0795b8909e7c898d4e2c101bad96cbf4c6 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Fri, 31 Jan 2025 13:58:14 -0500 Subject: [PATCH 340/383] Support duplicate suggestions in completion field (#121324) Currently if a document has duplicate suggestions across different contexts, only the first gets indexed, and when a user tries to search using the second context, she will get 0 results. This PR addresses this, but adding support for duplicate suggestions across different contexts, so documents like below with duplicate inputs can be searched across all provided contexts. ```json { "my_suggest": [ { "input": [ "foox", "boo" ], "weight" : 2, "contexts": { "color": [ "red" ] } }, { "input": [ "foox" ], "weight" : 3, "contexts": { "color": [ "blue" ] } } ] } ``` Closes #82432 --- docs/changelog/121324.yaml | 6 ++ .../rest-api-spec/test/suggest/30_context.yml | 72 ++++++++++++++ .../50_completion_with_multi_fields.yml | 77 +++++++++++++++ .../index/mapper/CompletionFieldMapper.java | 94 +++++++++++++++---- .../elasticsearch/search/SearchFeatures.java | 5 +- .../mapper/CompletionFieldMapperTests.java | 49 ++++++++++ 6 files changed, 283 insertions(+), 20 deletions(-) create mode 100644 docs/changelog/121324.yaml diff --git a/docs/changelog/121324.yaml b/docs/changelog/121324.yaml new file mode 100644 index 0000000000000..d105ea0b46b4c --- /dev/null +++ b/docs/changelog/121324.yaml @@ -0,0 +1,6 @@ +pr: 121324 +summary: Support duplicate suggestions in completion field +area: Suggesters +type: bug +issues: + - 82432 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml index f88726469f51c..71b4ec9c128d8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml @@ -395,3 +395,75 @@ setup: field: suggest_multi_contexts contexts: location: [] + +--- +"Duplicate suggestions in different contexts": + - requires: + cluster_features: [ "search.completion_field.duplicate.support" ] + reason: "Support for duplicate suggestions in different contexts" + + - do: + index: + refresh: true + index: test + id: "1" + body: + suggest_context: + - + input: "foox" + weight: 2 + contexts: + color: ["red", "yellow"] + - + input: "foox" + weight: 3 + contexts: + color: ["blue", "green", "yellow"] + - do: + search: + body: + suggest: + result: + text: "foo" + completion: + field: suggest_context + contexts: + color: "red" + + - length: { suggest.result: 1 } + 
- length: { suggest.result.0.options: 1 } + - match: { suggest.result.0.options.0.text: "foox" } + - match: { suggest.result.0.options.0._score: 2 } + + - do: + search: + body: + suggest: + result: + text: "foo" + completion: + field: suggest_context + contexts: + color: "yellow" + + - length: { suggest.result: 1 } + - length: { suggest.result.0.options: 1 } + - match: { suggest.result.0.options.0.text: "foox" } + # the highest weight wins + - match: { suggest.result.0.options.0._score: 3 } + + - do: + search: + body: + suggest: + result: + text: "foo" + completion: + field: suggest_context + contexts: + color: "blue" + + - length: { suggest.result: 1 } + - length: { suggest.result.0.options: 1 } + - match: { suggest.result.0.options.0.text: "foox" } + - match: { suggest.result.0.options.0._score: 3 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml index 8bbda56db7e53..37a937bd59b5a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml @@ -268,3 +268,80 @@ - length: { suggest.result: 1 } - length: { suggest.result.0.options: 1 } + +--- +"Duplicate suggestions in different contexts in sub-fields": + - requires: + cluster_features: [ "search.completion_field.duplicate.support" ] + reason: "Support for duplicate suggestions in different contexts" + + - do: + indices.create: + index: completion_with_context + body: + mappings: + "properties": + "suggest_1": + "type": "completion" + "contexts": + - + "name": "color" + "type": "category" + "fields": + "suggest_2": + "type": "completion" + "contexts": + - + "name": "color" + "type": "category" + + + - do: + index: + refresh: true + index: completion_with_context + id: "1" + body: + suggest_1: + - + input: "foox" + weight: 2 + contexts: + color: ["red"] + - + input: "foox" + weight: 3 + contexts: + color: ["blue", "green"] + - do: + search: + body: + suggest: + result: + text: "foo" + completion: + field: suggest_1.suggest_2 + contexts: + color: "red" + + - length: { suggest.result: 1 } + - length: { suggest.result.0.options: 1 } + - match: { suggest.result.0.options.0.text: "foox" } + - match: { suggest.result.0.options.0._score: 2 } + + + - do: + search: + body: + suggest: + result: + text: "foo" + completion: + field: suggest_1.suggest_2 + contexts: + color: "blue" + + - length: { suggest.result: 1 } + - length: { suggest.result.0.options: 1 } + - match: { suggest.result.0.options.0.text: "foox" } + - match: { suggest.result.0.options.0._score: 3 } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index f0c679d4f4994..af691c61abe2e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -392,7 +392,7 @@ public void parse(DocumentParserContext context) throws IOException { // parse XContentParser parser = context.parser(); Token token = parser.currentToken(); - Map inputMap = Maps.newMapWithExpectedSize(1); + Map inputMap = Maps.newMapWithExpectedSize(1); if (token == Token.VALUE_NULL) { // ignore null values return; @@ -405,7 +405,7 @@ public void 
parse(DocumentParserContext context) throws IOException { } // index - for (Map.Entry completionInput : inputMap.entrySet()) { + for (Map.Entry completionInput : inputMap.entrySet()) { String input = completionInput.getKey(); if (input.trim().isEmpty()) { context.addIgnoredField(mappedFieldType.name()); @@ -420,21 +420,33 @@ public void parse(DocumentParserContext context) throws IOException { } input = input.substring(0, len); } - CompletionInputMetadata metadata = completionInput.getValue(); + CompletionInputMetadataContainer cmc = completionInput.getValue(); if (fieldType().hasContextMappings()) { - fieldType().getContextMappings().addField(context.doc(), fieldType().name(), input, metadata.weight, metadata.contexts); + for (CompletionInputMetadata metadata : cmc.getValues()) { + fieldType().getContextMappings().addField(context.doc(), fieldType().name(), input, metadata.weight, metadata.contexts); + } } else { - context.doc().add(new SuggestField(fieldType().name(), input, metadata.weight)); + context.doc().add(new SuggestField(fieldType().name(), input, cmc.getWeight())); } } - context.addToFieldNames(fieldType().name()); - for (CompletionInputMetadata metadata : inputMap.values()) { - multiFields().parse( - this, - context, - () -> context.switchParser(new MultiFieldParser(metadata, fieldType().name(), context.parser().getTokenLocation())) - ); + for (CompletionInputMetadataContainer cmc : inputMap.values()) { + if (fieldType().hasContextMappings()) { + for (CompletionInputMetadata metadata : cmc.getValues()) { + multiFields().parse( + this, + context, + () -> context.switchParser(new MultiFieldParser(metadata, fieldType().name(), context.parser().getTokenLocation())) + ); + } + } else { + CompletionInputMetadata metadata = cmc.getValue(); + multiFields().parse( + this, + context, + () -> context.switchParser(new MultiFieldParser(metadata, fieldType().name(), context.parser().getTokenLocation())) + ); + } } } @@ -447,11 +459,13 @@ private void parse( DocumentParserContext documentParserContext, Token token, XContentParser parser, - Map inputMap + Map inputMap ) throws IOException { String currentFieldName = null; if (token == Token.VALUE_STRING) { - inputMap.put(parser.text(), new CompletionInputMetadata(parser.text(), Collections.>emptyMap(), 1)); + CompletionInputMetadataContainer cmc = new CompletionInputMetadataContainer(fieldType().hasContextMappings()); + cmc.add(new CompletionInputMetadata(parser.text(), Collections.emptyMap(), 1)); + inputMap.put(parser.text(), cmc); } else if (token == Token.START_OBJECT) { Set inputs = new HashSet<>(); int weight = 1; @@ -531,8 +545,14 @@ private void parse( } } for (String input : inputs) { - if (inputMap.containsKey(input) == false || inputMap.get(input).weight < weight) { - inputMap.put(input, new CompletionInputMetadata(input, contextsMap, weight)); + CompletionInputMetadata cm = new CompletionInputMetadata(input, contextsMap, weight); + CompletionInputMetadataContainer cmc = inputMap.get(input); + if (cmc != null) { + cmc.add(cm); + } else { + cmc = new CompletionInputMetadataContainer(fieldType().hasContextMappings()); + cmc.add(cm); + inputMap.put(input, cmc); } } } else { @@ -543,10 +563,46 @@ private void parse( } } + static class CompletionInputMetadataContainer { + private final boolean hasContexts; + private final List list; + private CompletionInputMetadata single; + + CompletionInputMetadataContainer(boolean hasContexts) { + this.hasContexts = hasContexts; + this.list = hasContexts ? 
new ArrayList<>() : null; + } + + void add(CompletionInputMetadata cm) { + if (hasContexts) { + list.add(cm); + } else { + if (single == null || single.weight < cm.weight) { + single = cm; + } + } + } + + List getValues() { + assert hasContexts; + return list; + } + + CompletionInputMetadata getValue() { + assert hasContexts == false; + return single; + } + + int getWeight() { + assert hasContexts == false; + return single.weight; + } + } + static class CompletionInputMetadata { - public final String input; - public final Map> contexts; - public final int weight; + private final String input; + private final Map> contexts; + private final int weight; CompletionInputMetadata(String input, Map> contexts, int weight) { this.input = input; diff --git a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java index 8077da130c34e..3970b6effe70c 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java +++ b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java @@ -25,9 +25,12 @@ public Set getFeatures() { } public static final NodeFeature RETRIEVER_RESCORER_ENABLED = new NodeFeature("search.retriever.rescorer.enabled"); + public static final NodeFeature COMPLETION_FIELD_SUPPORTS_DUPLICATE_SUGGESTIONS = new NodeFeature( + "search.completion_field.duplicate.support" + ); @Override public Set getTestFeatures() { - return Set.of(RETRIEVER_RESCORER_ENABLED); + return Set.of(RETRIEVER_RESCORER_ENABLED, COMPLETION_FIELD_SUPPORTS_DUPLICATE_SUGGESTIONS); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index 755d5dde2f162..b093307f3733b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -303,6 +303,55 @@ public void testKeywordWithSubCompletionAndContext() throws Exception { ); } + public void testDuplicateSuggestionsWithContexts() throws IOException { + DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { + b.field("type", "completion"); + b.startArray("contexts"); + { + b.startObject(); + b.field("name", "place"); + b.field("type", "category"); + b.endObject(); + } + b.endArray(); + })); + + ParsedDocument parsedDocument = defaultMapper.parse(source(b -> { + b.startArray("field"); + { + b.startObject(); + { + b.array("input", "timmy", "starbucks"); + b.startObject("contexts").array("place", "cafe", "food").endObject(); + b.field("weight", 10); + } + b.endObject(); + b.startObject(); + { + b.array("input", "timmy", "starbucks"); + b.startObject("contexts").array("place", "restaurant").endObject(); + b.field("weight", 1); + } + b.endObject(); + } + b.endArray(); + })); + + List indexedFields = parsedDocument.rootDoc().getFields("field"); + assertThat(indexedFields, hasSize(4)); + + assertThat( + indexedFields, + containsInAnyOrder( + contextSuggestField("timmy"), + contextSuggestField("timmy"), + contextSuggestField("starbucks"), + contextSuggestField("starbucks") + ) + ); + + } + public void testCompletionWithContextAndSubCompletion() throws Exception { DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); From 0698d7384ef92202f7d41bbddc8347f705bcb294 Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Fri, 31 Jan 
2025 14:52:58 -0500 Subject: [PATCH 341/383] Add length limit FileSettingsHealthIndicatorService.description (#121334) * Add length limit FileSettingsHealthIndicatorService.description * [CI] Auto commit changes from spotless * Add javadocs explaining `fileSettings.descriptionLengthLimit` setting --------- Co-authored-by: elasticsearchmachine --- .../elasticsearch/node/NodeConstruction.java | 2 +- .../service/FileSettingsService.java | 33 +++++++++++++++++-- .../ingest/ReservedPipelineActionTests.java | 2 +- ...leSettingsHealthIndicatorServiceTests.java | 28 +++++++++++++++- .../service/FileSettingsServiceTests.java | 2 +- 5 files changed, 61 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 61ac8bbbfc69a..beeb1c3c86a44 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -1067,7 +1067,7 @@ public Map searchFields() { actionModule.getReservedClusterStateService().installStateHandler(new ReservedRepositoryAction(repositoriesService)); actionModule.getReservedClusterStateService().installStateHandler(new ReservedPipelineAction()); - FileSettingsHealthIndicatorService fileSettingsHealthIndicatorService = new FileSettingsHealthIndicatorService(); + FileSettingsHealthIndicatorService fileSettingsHealthIndicatorService = new FileSettingsHealthIndicatorService(settings); FileSettingsService fileSettingsService = new FileSettingsService( clusterService, actionModule.getReservedClusterStateService(), diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index e36604f9a58c8..035e0c6c70365 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -21,6 +21,8 @@ import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.file.MasterNodeFileWatchingService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; @@ -212,7 +214,7 @@ protected void onProcessFileChangesException(Exception e) { } @Override - protected void processInitialFileMissing() throws ExecutionException, InterruptedException, IOException { + protected void processInitialFileMissing() throws ExecutionException, InterruptedException { PlainActionFuture completion = new PlainActionFuture<>(); logger.info("setting file [{}] not found, initializing [{}] as empty", watchedFile(), NAMESPACE); stateService.initEmpty(NAMESPACE, completion); @@ -236,11 +238,29 @@ public static class FileSettingsHealthIndicatorService implements HealthIndicato ) ); + /** + * We want a length limit so we don't blow past the indexing limit in the case of a long description string. + * This is an {@code OperatorDynamic} setting so that if the truncation hampers troubleshooting efforts, + * the operator could override it and retry the operation without necessarily restarting the cluster. 
+ */ + public static final String DESCRIPTION_LENGTH_LIMIT_KEY = "fileSettings.descriptionLengthLimit"; + static final Setting DESCRIPTION_LENGTH_LIMIT = Setting.intSetting( + DESCRIPTION_LENGTH_LIMIT_KEY, + 100, + 1, // Need room for the ellipsis + Setting.Property.OperatorDynamic + ); + + private final Settings settings; private boolean isActive = false; private long changeCount = 0; private long failureStreak = 0; private String mostRecentFailure = null; + public FileSettingsHealthIndicatorService(Settings settings) { + this.settings = settings; + } + public synchronized void startOccurred() { isActive = true; failureStreak = 0; @@ -262,7 +282,16 @@ public synchronized void successOccurred() { public synchronized void failureOccurred(String description) { ++failureStreak; - mostRecentFailure = description; + mostRecentFailure = limitLength(description); + } + + private String limitLength(String description) { + int descriptionLengthLimit = DESCRIPTION_LENGTH_LIMIT.get(settings); + if (description.length() > descriptionLengthLimit) { + return description.substring(0, descriptionLengthLimit - 1) + "…"; + } else { + return description; + } } @Override diff --git a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java index dc1698e3459ec..41a5919060095 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java @@ -138,7 +138,7 @@ public void setup() { clusterService, mock(ReservedClusterStateService.class), newEnvironment(Settings.EMPTY), - new FileSettingsService.FileSettingsHealthIndicatorService() + new FileSettingsService.FileSettingsHealthIndicatorService(Settings.EMPTY) ) ); } diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java index 20ea43910e68d..e973073efb184 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.reservedstate.service; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorResult; import org.elasticsearch.health.SimpleHealthIndicatorDetails; @@ -21,6 +22,7 @@ import static org.elasticsearch.health.HealthStatus.GREEN; import static org.elasticsearch.health.HealthStatus.YELLOW; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.DESCRIPTION_LENGTH_LIMIT_KEY; import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.FAILURE_SYMPTOM; import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.INACTIVE_SYMPTOM; import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.NO_CHANGES_SYMPTOM; @@ -37,7 +39,7 @@ public class FileSettingsHealthIndicatorServiceTests extends ESTestCase { @Before public void initialize() { - healthIndicatorService = new FileSettingsHealthIndicatorService(); + healthIndicatorService = new 
FileSettingsHealthIndicatorService(Settings.EMPTY); } public void testInitiallyGreen() {} @@ -101,4 +103,28 @@ public void testGreenYellowYellowGreen() { healthIndicatorService.calculate(false, null) ); } + + public void testDescriptionIsTruncated() { + checkTruncatedDescription(9, "123456789", "123456789"); + checkTruncatedDescription(8, "123456789", "1234567…"); + checkTruncatedDescription(1, "12", "…"); + } + + private void checkTruncatedDescription(int lengthLimit, String description, String expectedTruncatedDescription) { + var service = new FileSettingsHealthIndicatorService(Settings.builder().put(DESCRIPTION_LENGTH_LIMIT_KEY, lengthLimit).build()); + service.startOccurred(); + service.changeOccurred(); + service.failureOccurred(description); + assertEquals( + new HealthIndicatorResult( + "file_settings", + YELLOW, + FAILURE_SYMPTOM, + new SimpleHealthIndicatorDetails(Map.of("failure_streak", 1L, "most_recent_failure", expectedTruncatedDescription)), + STALE_SETTINGS_IMPACT, + List.of() + ), + service.calculate(false, null) + ); + } } diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 7cb12c1b316e8..78d57d62e21e1 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -138,7 +138,7 @@ public void setUp() throws Exception { List.of(new ReservedClusterSettingsAction(clusterSettings)) ) ); - healthIndicatorService = spy(new FileSettingsHealthIndicatorService()); + healthIndicatorService = spy(new FileSettingsHealthIndicatorService(Settings.EMPTY)); fileSettingsService = spy(new FileSettingsService(clusterService, controller, env, healthIndicatorService)); } From 38052b25943e90bda912189327cb16316b9ef775 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 1 Feb 2025 09:06:05 +1100 Subject: [PATCH 342/383] Mute org.elasticsearch.datastreams.TSDBPassthroughIndexingIT org.elasticsearch.datastreams.TSDBPassthroughIndexingIT #121464 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index a326f8d9eac4f..f1d598e972395 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -392,6 +392,8 @@ tests: - class: org.elasticsearch.xpack.ml.integration.ClassificationIT method: testDependentVariableIsAliasToNested issue: https://github.com/elastic/elasticsearch/issues/121415 +- class: org.elasticsearch.datastreams.TSDBPassthroughIndexingIT + issue: https://github.com/elastic/elasticsearch/issues/121464 # Examples: # From 09e1426616b3d6d95b7c6b4378e67926acabfb42 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 1 Feb 2025 09:18:19 +1100 Subject: [PATCH 343/383] Mute org.elasticsearch.xpack.esql.heap_attack.HeapAttackIT testLookupExplosionBigStringManyMatches #121465 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f1d598e972395..c61e5e98518cf 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -394,6 +394,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/121415 - class: org.elasticsearch.datastreams.TSDBPassthroughIndexingIT issue: https://github.com/elastic/elasticsearch/issues/121464 +- class: 
org.elasticsearch.xpack.esql.heap_attack.HeapAttackIT + method: testLookupExplosionBigStringManyMatches + issue: https://github.com/elastic/elasticsearch/issues/121465 # Examples: # From a589e1f83614a65494f565edb4db8fca8dffad37 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 1 Feb 2025 09:29:13 +1100 Subject: [PATCH 344/383] Mute org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests testClientSecretRotation #120985 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index c61e5e98518cf..502acaa57455d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -397,6 +397,9 @@ tests: - class: org.elasticsearch.xpack.esql.heap_attack.HeapAttackIT method: testLookupExplosionBigStringManyMatches issue: https://github.com/elastic/elasticsearch/issues/121465 +- class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests + method: testClientSecretRotation + issue: https://github.com/elastic/elasticsearch/issues/120985 # Examples: # From fd1bd79b85d46ccd34931dc28ce9c9c4a50f949f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 31 Jan 2025 23:56:59 +0100 Subject: [PATCH 345/383] WIP (#121463) Under very unfortunate conditions tests that check xContent objects roundtrip parsing (like i.e. [SearchHitsTests testFromXContent](https://github.com/elastic/elasticsearch/issues/97716) can fail when we happen to randomly pick YAML xContent type and create random (realistic)Unicode character sequences that may contain the character U+0085 (133) from the [Latin1 code page](https://de.wikipedia.org/wiki/Unicodeblock_Lateinisch-1,_Erg%C3%A4nzung). That specific character doesn't get parsed back to its original form for YAML xContent, which can lead to [rare but hard to diagnose test failures](https://github.com/elastic/elasticsearch/issues/97716#issuecomment-2464465939). This change adds logic to AbstractXContentTestCase#test() which lies at the core of most of our xContent roundtrip tests that disallows test instances containing that particular character when using YAML xContent type. 
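[Editor's note: a minimal sketch of the mitigation described above, in plain Java rather than the actual test-framework API. The helper name mirrors randomValueOtherThanMany, which the change below uses against the YAML rendering of the random instance; the class, main method, and sample strings are illustrative only.]

import java.util.function.Predicate;
import java.util.function.Supplier;

final class YamlSafeRandomSketch {
    // Keep drawing candidates until the rejection predicate no longer matches; the change
    // below applies this idea to the serialized YAML output and rejects anything that
    // contains U+0085 (NEXT LINE), which does not survive the YAML round trip.
    static <T> T randomValueOtherThanMany(Predicate<T> reject, Supplier<T> generator) {
        T candidate = generator.get();
        while (reject.test(candidate)) {
            candidate = generator.get();
        }
        return candidate;
    }

    static boolean containsNextLine(String serialized) {
        return serialized.indexOf('\u0085') >= 0;
    }

    public static void main(String[] args) {
        // Randomly produces a "problematic" or an okay value, like the test added below.
        Supplier<String> random = () -> Math.random() < 0.5 ? "a\u0085b" : "a b";
        String safe = randomValueOtherThanMany(YamlSafeRandomSketch::containsNextLine, random);
        System.out.println(safe); // always "a b"
    }
}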
Closes #97716 --- .../test/AbstractXContentTestCase.java | 19 ++++++++- .../test/AbstractXContentTestCaseTests.java | 40 +++++++++++++++++++ 2 files changed, 57 insertions(+), 2 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java index cc35f63d289eb..24b853c8f6ddb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java @@ -145,8 +145,21 @@ private XContentTester( public void test() throws IOException { for (int runs = 0; runs < numberOfTestRuns; runs++) { XContentType xContentType = randomFrom(XContentType.values()).canonical(); - T testInstance = instanceSupplier.apply(xContentType); + T testInstance = null; try { + if (xContentType.equals(XContentType.YAML)) { + testInstance = randomValueOtherThanMany(instance -> { + // unicode character U+0085 (NEXT LINE (NEL)) doesn't survive YAML round trip tests (see #97716) + // get a new random instance if we detect this character in the xContent output + try { + return toXContent.apply(instance, xContentType).utf8ToString().contains("\u0085"); + } catch (IOException e) { + throw new RuntimeException(e); + } + }, () -> instanceSupplier.apply(xContentType)); + } else { + testInstance = instanceSupplier.apply(xContentType); + } BytesReference originalXContent = toXContent.apply(testInstance, xContentType); BytesReference shuffledContent = insertRandomFieldsAndShuffle( originalXContent, @@ -173,7 +186,9 @@ public void test() throws IOException { dispose.accept(parsed); } } finally { - dispose.accept(testInstance); + if (testInstance != null) { + dispose.accept(testInstance); + } } } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java b/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java index b8f4dcb399ec7..e3cc3bba94a5c 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java @@ -12,11 +12,13 @@ import com.carrotsearch.randomizedtesting.RandomizedContext; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import java.io.IOException; import java.util.Map; import static org.hamcrest.Matchers.equalTo; @@ -49,4 +51,42 @@ public void testInsertRandomFieldsAndShuffle() throws Exception { assertThat(mapOrdered.keySet().iterator().next(), not(equalTo("field"))); } } + + private record TestToXContent(String field, String value) implements ToXContentFragment { + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.field(field, value); + } + } + + public void testYamlXContentRoundtripSanitization() throws Exception { + var test = new AbstractXContentTestCase() { + + @Override + protected TestToXContent createTestInstance() { + // we need to randomly create both a "problematic" and an okay version in order to ensure that the sanitization code + // can draw at least one okay version if polled often enough + return randomBoolean() ? 
new TestToXContent("a\u0085b", "def") : new TestToXContent("a b", "def"); + } + + @Override + protected TestToXContent doParseInstance(XContentParser parser) throws IOException { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + String name = parser.currentName(); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + String value = parser.text(); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return new TestToXContent(name, value); + }; + + @Override + protected boolean supportsUnknownFields() { + return false; + } + }; + // testFromXContent runs 20 repetitions, enough to hit a YAML xcontent version very likely + test.testFromXContent(); + } } From bf9c93d95aa9995c2a3f5fa6ca94f25c0ea32040 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 1 Feb 2025 16:32:06 +1100 Subject: [PATCH 346/383] Mute org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests testGrantApiKeyForJWT #121039 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 502acaa57455d..277fb2840854a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -400,6 +400,9 @@ tests: - class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests method: testClientSecretRotation issue: https://github.com/elastic/elasticsearch/issues/120985 +- class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests + method: testGrantApiKeyForJWT + issue: https://github.com/elastic/elasticsearch/issues/121039 # Examples: # From 61e1fb65f3a480cdd930b89de999a67019f529b3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 1 Feb 2025 16:45:25 +1100 Subject: [PATCH 347/383] Mute org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT test {yaml=cluster.health/10_basic/cluster health basic test} #121478 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 277fb2840854a..22cd9f61e570b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -403,6 +403,9 @@ tests: - class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests method: testGrantApiKeyForJWT issue: https://github.com/elastic/elasticsearch/issues/121039 +- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT + method: test {yaml=cluster.health/10_basic/cluster health basic test} + issue: https://github.com/elastic/elasticsearch/issues/121478 # Examples: # From a9f03b5ae482f3ccbc9eef136b4f0d7c5bacab22 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 1 Feb 2025 18:18:11 +1100 Subject: [PATCH 348/383] Mute org.elasticsearch.xpack.esql.heap_attack.HeapAttackIT testLookupExplosionManyMatches #121481 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 22cd9f61e570b..134e58e6f005d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -406,6 +406,9 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cluster.health/10_basic/cluster health basic test} issue: https://github.com/elastic/elasticsearch/issues/121478 +- class: org.elasticsearch.xpack.esql.heap_attack.HeapAttackIT + method: testLookupExplosionManyMatches + issue: 
https://github.com/elastic/elasticsearch/issues/121481 # Examples: # From 4207fe52adcdfb4079652699111d68744eaaef69 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 2 Feb 2025 01:35:47 +1100 Subject: [PATCH 349/383] Mute org.elasticsearch.xpack.security.profile.ProfileIntegTests testGetUsersWithProfileUid #121483 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 134e58e6f005d..41c930ae88809 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -409,6 +409,9 @@ tests: - class: org.elasticsearch.xpack.esql.heap_attack.HeapAttackIT method: testLookupExplosionManyMatches issue: https://github.com/elastic/elasticsearch/issues/121481 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testGetUsersWithProfileUid + issue: https://github.com/elastic/elasticsearch/issues/121483 # Examples: # From fc72c64738dedee5eef0b6921a0622fec140fc98 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 2 Feb 2025 01:43:08 +1100 Subject: [PATCH 350/383] Mute org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT test {yaml=cat.aliases/10_basic/Empty cluster} #121484 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 41c930ae88809..62f3d7d49117a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -412,6 +412,9 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testGetUsersWithProfileUid issue: https://github.com/elastic/elasticsearch/issues/121483 +- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT + method: test {yaml=cat.aliases/10_basic/Empty cluster} + issue: https://github.com/elastic/elasticsearch/issues/121484 # Examples: # From 06dab2d48c9cb62c5a42bdd76491dbe61f7aa9a8 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 2 Feb 2025 01:45:27 +1100 Subject: [PATCH 351/383] Mute org.elasticsearch.xpack.transform.checkpoint.TransformCCSCanMatchIT testTransformLifecycle_RangeQueryThatMatchesNoShards #121480 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 62f3d7d49117a..5003834625eb3 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -415,6 +415,9 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cat.aliases/10_basic/Empty cluster} issue: https://github.com/elastic/elasticsearch/issues/121484 +- class: org.elasticsearch.xpack.transform.checkpoint.TransformCCSCanMatchIT + method: testTransformLifecycle_RangeQueryThatMatchesNoShards + issue: https://github.com/elastic/elasticsearch/issues/121480 # Examples: # From 2b410c44eb8fc684d3986247a49f9696b44f2d4c Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Sat, 1 Feb 2025 15:54:59 +0100 Subject: [PATCH 352/383] Remove outdated assertion from #118214 (#121435) Asserting that we definitely saw the "received a single result" flag and can now deal with null responses, isn't applicable after a few recent fixes. New requests are sent out before responses are fully processed to keep data nodes in a tighter loop (as well as other relaxed ordering relative to when this assertion was added) so the flag is not guaranteed to show up as true for lower numbers of shard requests any longer. 
Lets just remove it, it was always best effort and accidental that this worked for the numbers the test randomizes over. --- muted-tests.yml | 3 --- .../action/search/SearchQueryThenFetchAsyncActionTests.java | 1 - 2 files changed, 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 5003834625eb3..8e40d709e598e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -143,9 +143,6 @@ tests: - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT method: test {p0=data_stream/120_data_streams_stats/Multiple data stream} issue: https://github.com/elastic/elasticsearch/issues/118217 -- class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests - method: testBottomFieldSort - issue: https://github.com/elastic/elasticsearch/issues/118214 - class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests method: testSearcherId issue: https://github.com/elastic/elasticsearch/issues/118374 diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index f005f862720ff..661a9fd8c854c 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -220,7 +220,6 @@ protected void run() { assertFalse(canReturnNullResponse.get()); assertThat(numWithTopDocs.get(), equalTo(0)); } else { - assertTrue(canReturnNullResponse.get()); if (withCollapse) { assertThat(numWithTopDocs.get(), equalTo(0)); } else { From 9e73518097d8fae4203be61e60ec6acd9997c702 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 2 Feb 2025 09:15:05 +1100 Subject: [PATCH 353/383] Mute org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryIT testStopQueryLocal #121487 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8e40d709e598e..37119fa6750a8 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -415,6 +415,9 @@ tests: - class: org.elasticsearch.xpack.transform.checkpoint.TransformCCSCanMatchIT method: testTransformLifecycle_RangeQueryThatMatchesNoShards issue: https://github.com/elastic/elasticsearch/issues/121480 +- class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryIT + method: testStopQueryLocal + issue: https://github.com/elastic/elasticsearch/issues/121487 # Examples: # From 660419812d14330abc17dbde9f67f3308041623d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 2 Feb 2025 09:15:21 +1100 Subject: [PATCH 354/383] Mute org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryIT testSuccessfulPathways #121488 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 37119fa6750a8..61cd02f4b2a2e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -418,6 +418,9 @@ tests: - class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryIT method: testStopQueryLocal issue: https://github.com/elastic/elasticsearch/issues/121487 +- class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryIT + method: testSuccessfulPathways + issue: https://github.com/elastic/elasticsearch/issues/121488 # Examples: # From 6c9e64e0146b4234a95287aa7d8b5df17d078af0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine 
<58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 2 Feb 2025 09:15:30 +1100 Subject: [PATCH 355/383] Mute org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryIT testAsyncQueriesWithLimit0 #121489 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 61cd02f4b2a2e..41f1801c511d9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -421,6 +421,9 @@ tests: - class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryIT method: testSuccessfulPathways issue: https://github.com/elastic/elasticsearch/issues/121488 +- class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryIT + method: testAsyncQueriesWithLimit0 + issue: https://github.com/elastic/elasticsearch/issues/121489 # Examples: # From edcec6207ddbf75d7ce6fd213ab8a80cb921c4b9 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 2 Feb 2025 09:48:24 +1100 Subject: [PATCH 356/383] Mute org.elasticsearch.xpack.security.profile.ProfileIntegTests testSuggestProfilesWithHint #121116 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 41f1801c511d9..caa3343993388 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -424,6 +424,9 @@ tests: - class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryIT method: testAsyncQueriesWithLimit0 issue: https://github.com/elastic/elasticsearch/issues/121489 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testSuggestProfilesWithHint + issue: https://github.com/elastic/elasticsearch/issues/121116 # Examples: # From 543fa00e626a690a95c6e83652787be58819df57 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 2 Feb 2025 16:39:07 +1100 Subject: [PATCH 357/383] Mute org.elasticsearch.xpack.sql.qa.single_node.JdbcDocCsvSpecIT test {docs.testFilterToday} #121474 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index caa3343993388..bab5e475d9988 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -427,6 +427,9 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testSuggestProfilesWithHint issue: https://github.com/elastic/elasticsearch/issues/121116 +- class: org.elasticsearch.xpack.sql.qa.single_node.JdbcDocCsvSpecIT + method: test {docs.testFilterToday} + issue: https://github.com/elastic/elasticsearch/issues/121474 # Examples: # From 2b6b7da1905635068cb54754711bf1d174940c5e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 2 Feb 2025 17:36:16 +1100 Subject: [PATCH 358/383] Mute org.elasticsearch.xpack.security.profile.ProfileIntegTests testSuggestProfileWithData #121258 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index bab5e475d9988..1667bef06f324 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -430,6 +430,9 @@ tests: - class: org.elasticsearch.xpack.sql.qa.single_node.JdbcDocCsvSpecIT method: test {docs.testFilterToday} issue: https://github.com/elastic/elasticsearch/issues/121474 +- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests + method: testSuggestProfileWithData + issue: https://github.com/elastic/elasticsearch/issues/121258 # Examples: # From 10fe2d724a74a93234c42aa2e7e2b6217cef7af1 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine 
<58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 2 Feb 2025 17:40:14 +1100 Subject: [PATCH 359/383] Mute org.elasticsearch.ingest.geoip.FullClusterRestartIT testGeoIpSystemFeaturesMigration {cluster=UPGRADED} #121115 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1667bef06f324..f01f363ca0cf4 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -433,6 +433,9 @@ tests: - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests method: testSuggestProfileWithData issue: https://github.com/elastic/elasticsearch/issues/121258 +- class: org.elasticsearch.ingest.geoip.FullClusterRestartIT + method: testGeoIpSystemFeaturesMigration {cluster=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/121115 # Examples: # From f5a3de7ef8f2aeb885a57253c247a6f142129f2c Mon Sep 17 00:00:00 2001 From: Gal Lalouche Date: Sun, 2 Feb 2025 14:56:02 +0200 Subject: [PATCH 360/383] ESQL: Even more test type error testing movements (#121321) * Multivalue error tests * Date error tests * Spatial error tests * String error tests * Aggregate error tests * Remove deprecated AbstractScalarFunction checks * Rename DefaultChecks test in AbstractAggregation * [CI] Auto commit changes from spotless --------- Co-authored-by: elasticsearchmachine --- .../function/AbstractAggregationTestCase.java | 23 ++++++--- .../AbstractScalarFunctionTestCase.java | 51 ------------------- .../function/aggregate/AvgErrorTests.java | 37 ++++++++++++++ .../function/aggregate/AvgTests.java | 2 +- .../aggregate/CountDistinctTests.java | 1 - .../function/aggregate/CountTests.java | 1 - .../function/aggregate/MaxErrorTests.java | 39 ++++++++++++++ .../function/aggregate/MaxTests.java | 6 +-- .../MedianAbsoluteDeviationTests.java | 2 +- .../function/aggregate/MedianTests.java | 2 +- .../function/aggregate/MinErrorTests.java | 39 ++++++++++++++ .../function/aggregate/MinTests.java | 6 +-- .../aggregate/PercentileErrorTests.java | 37 ++++++++++++++ .../function/aggregate/PercentileTests.java | 2 +- .../aggregate/SpatialCentroidTests.java | 1 - .../aggregate/SpatialExtentTests.java | 1 - .../function/aggregate/StdDevTests.java | 2 +- .../function/aggregate/SumTests.java | 2 +- .../function/aggregate/TopTests.java | 2 +- .../function/aggregate/ValuesErrorTests.java | 37 ++++++++++++++ .../function/aggregate/ValuesTests.java | 6 +-- .../function/aggregate/WeightedAvgTests.java | 2 +- .../function/scalar/date/NowTests.java | 5 +- .../MvPSeriesWeightedSumErrorTests.java | 37 ++++++++++++++ .../multivalue/MvPSeriesWeightedSumTests.java | 5 +- .../multivalue/MvPercentileErrorTests.java | 37 ++++++++++++++ .../scalar/multivalue/MvPercentileTests.java | 5 +- .../scalar/multivalue/MvSumErrorTests.java | 37 ++++++++++++++ .../scalar/multivalue/MvSumTests.java | 2 +- .../scalar/spatial/StEnvelopeErrorTests.java | 39 ++++++++++++++ .../scalar/spatial/StEnvelopeTests.java | 6 +-- .../scalar/spatial/StXErrorTests.java | 37 ++++++++++++++ .../scalar/spatial/StXMaxErrorTests.java | 39 ++++++++++++++ .../function/scalar/spatial/StXMaxTests.java | 6 +-- .../scalar/spatial/StXMinErrorTests.java | 39 ++++++++++++++ .../function/scalar/spatial/StXMinTests.java | 6 +-- .../function/scalar/spatial/StXTests.java | 2 +- .../scalar/spatial/StYErrorTests.java | 37 ++++++++++++++ .../scalar/spatial/StYMaxErrorTests.java | 39 ++++++++++++++ .../function/scalar/spatial/StYMaxTests.java | 6 +-- .../scalar/spatial/StYMinErrorTests.java | 39 ++++++++++++++ 
.../function/scalar/spatial/StYMinTests.java | 6 +-- .../function/scalar/spatial/StYTests.java | 2 +- .../scalar/string/RepeatErrorTests.java | 42 +++++++++++++++ .../function/scalar/string/RepeatTests.java | 6 +-- .../scalar/string/ReverseErrorTests.java | 37 ++++++++++++++ .../function/scalar/string/ReverseTests.java | 2 +- 47 files changed, 692 insertions(+), 127 deletions(-) create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatErrorTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseErrorTests.java diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java index 87ea6315d4f3b..9f0fc34b7d539 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java @@ -61,7 +61,7 @@ public abstract class AbstractAggregationTestCase extends AbstractFunctionTestCa * Use if possible, as this method may get updated with new checks in the future. *

*/ - protected static Iterable parameterSuppliersFromTypedDataWithDefaultChecks( + protected static Iterable parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( List suppliers, boolean entirelyNullPreservesType, PositionalErrorMessageSupplier positionalErrorMessageSupplier @@ -74,13 +74,24 @@ protected static Iterable parameterSuppliersFromTypedDataWithDefaultCh ); } - // TODO: Remove and migrate everything to the method with all the parameters /** - * @deprecated Use {@link #parameterSuppliersFromTypedDataWithDefaultChecks(List, boolean, PositionalErrorMessageSupplier)} instead. - * This method doesn't add all the default checks. + * Converts a list of test cases into a list of parameter suppliers. + * Also, adds a default set of extra test cases. + *

+ * Use if possible, as this method may get updated with new checks in the future. + *

+ * + * @param entirelyNullPreservesType See {@link #anyNullIsNull(boolean, List)} */ - @Deprecated - protected static Iterable parameterSuppliersFromTypedDataWithDefaultChecks(List suppliers) { + protected static Iterable parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( + // TODO remove after removing parameterSuppliersFromTypedDataWithDefaultChecks rename this to that. + List suppliers, + boolean entirelyNullPreservesType + ) { + return parameterSuppliersFromTypedData(anyNullIsNull(entirelyNullPreservesType, randomizeBytesRefsOffset(suppliers))); + } + + protected static Iterable parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(List suppliers) { return parameterSuppliersFromTypedData(withNoRowsExpectingNull(randomizeBytesRefsOffset(suppliers))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java index 429e6685a201c..05202159a1bcd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java @@ -51,33 +51,6 @@ * which can be automatically tested against several scenarios (null handling, concurrency, etc). */ public abstract class AbstractScalarFunctionTestCase extends AbstractFunctionTestCase { - - /** - * Converts a list of test cases into a list of parameter suppliers. - * Also, adds a default set of extra test cases. - *

- * Use if possible, as this method may get updated with new checks in the future. - *

- * - * @param entirelyNullPreservesType See {@link #anyNullIsNull(boolean, List)} - * @deprecated use {@link #parameterSuppliersFromTypedDataWithDefaultChecksNoErrors} - * and make a subclass of {@link ErrorsForCasesWithoutExamplesTestCase}. - * It's a long faster. - */ - @Deprecated - protected static Iterable parameterSuppliersFromTypedDataWithDefaultChecks( - boolean entirelyNullPreservesType, - List suppliers, - PositionalErrorMessageSupplier positionalErrorMessageSupplier - ) { - return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples( - anyNullIsNull(entirelyNullPreservesType, randomizeBytesRefsOffset(suppliers)), - positionalErrorMessageSupplier - ) - ); - } - /** * Converts a list of test cases into a list of parameter suppliers. * Also, adds a default set of extra test cases. @@ -113,30 +86,6 @@ protected static Iterable parameterSuppliersFromTypedDataWithDefaultCh return parameterSuppliersFromTypedData(anyNullIsNull(randomizeBytesRefsOffset(suppliers), nullsExpectedType, evaluatorToString)); } - /** - * Converts a list of test cases into a list of parameter suppliers. - * Also, adds a default set of extra test cases. - *

- * Use if possible, as this method may get updated with new checks in the future. - *

- * - * @param nullsExpectedType See {@link #anyNullIsNull(List, ExpectedType, ExpectedEvaluatorToString)} - * @param evaluatorToString See {@link #anyNullIsNull(List, ExpectedType, ExpectedEvaluatorToString)} - */ - protected static Iterable parameterSuppliersFromTypedDataWithDefaultChecks( - ExpectedType nullsExpectedType, - ExpectedEvaluatorToString evaluatorToString, - List suppliers, - PositionalErrorMessageSupplier positionalErrorMessageSupplier - ) { - return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples( - anyNullIsNull(randomizeBytesRefsOffset(suppliers), nullsExpectedType, evaluatorToString), - positionalErrorMessageSupplier - ) - ); - } - public final void testEvaluate() { assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); boolean readFloating = randomBoolean(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgErrorTests.java new file mode 100644 index 0000000000000..16f80e4564cff --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class AvgErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(AvgTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Avg(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "numeric except unsigned_long or counter types")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java index ac599c7ff05f8..75d95c3eeac96 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgTests.java @@ -53,7 +53,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers, true, (v, p) -> "numeric except unsigned_long or counter types"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, true); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java index e0b8c1356d087..c632909c7d8e1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java @@ -93,7 +93,6 @@ public static Iterable parameters() { } // "No rows" expects 0 here instead of null - // return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java index 0485714959f63..3d14bc1b4bca7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java @@ -82,7 +82,6 @@ public static Iterable parameters() { } // "No rows" expects 0 here instead of null - // return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxErrorTests.java new file mode 100644 index 0000000000000..15fb2c053b981 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MaxErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MaxTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Max(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "representable except unsigned_long and spatial types") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java index ae5b3691b0a7d..edae496d27a93 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java @@ -157,11 +157,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks( - suppliers, - false, - (v, p) -> "representable except unsigned_long and spatial types" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, false); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationTests.java index ea36170866b0e..047e204c0e0c2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationTests.java @@ -39,7 +39,7 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true) ).flatMap(List::stream).map(MedianAbsoluteDeviationTests::makeSupplier).toList(); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, true); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianTests.java index 0f7ed1b3e9b10..1c2c06c1ede94 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianTests.java @@ -73,7 +73,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return 
parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, true); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinErrorTests.java new file mode 100644 index 0000000000000..a9b4730f12fac --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MinErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MinTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Min(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "representable except unsigned_long and spatial types") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java index ad2953f057635..0016876b1a198 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java @@ -157,11 +157,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks( - suppliers, - false, - (v, p) -> "representable except unsigned_long and spatial types" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, false); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileErrorTests.java new file mode 100644 index 0000000000000..b2f701f41792b --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class PercentileErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(PercentileTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Percentile(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> "numeric except unsigned_long")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileTests.java index 1bbac376edcf3..0033f98222903 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileTests.java @@ -53,7 +53,7 @@ public static Iterable parameters() { } } - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers, false, (v, p) -> "numeric except unsigned_long"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, false); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java index b92b32aa7ad09..a99cb8f60e3fa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java @@ -47,7 +47,6 @@ public static Iterable parameters() { ).flatMap(List::stream).map(SpatialCentroidTests::makeSupplier).toList(); // The withNoRowsExpectingNull() cases don't work here, as this aggregator doesn't return nulls. 
- // return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java index 225e10f99c853..9a0a62ce2d06e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java @@ -48,7 +48,6 @@ public static Iterable parameters() { ).flatMap(List::stream).map(SpatialExtentTests::makeSupplier).toList(); // The withNoRowsExpectingNull() cases don't work here, as this aggregator doesn't return nulls. - // return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java index 85b96e29d1f6a..409bb5bcba6fb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java @@ -41,7 +41,7 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true) ).flatMap(List::stream).map(StdDevTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers)); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, true); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SumTests.java index 4f14dafc8b30d..6730c2591ebbf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SumTests.java @@ -77,7 +77,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java index f236e4d8faf98..1d18d66110fe0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java @@ -280,7 +280,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesErrorTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesErrorTests.java new file mode 100644 index 0000000000000..f9dafc954b6f5 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class ValuesErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(ValuesTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Values(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "any type except unsigned_long and spatial types")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java index 5f35f8cada397..80e6a7fc09d56 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java @@ -55,11 +55,7 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.stringCases(1, 20, DataType.SEMANTIC_TEXT) ).flatMap(List::stream).map(ValuesTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers)); - return parameterSuppliersFromTypedDataWithDefaultChecks( - suppliers, - false, - (v, p) -> "any type except unsigned_long and spatial types" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, false); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvgTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvgTests.java index 2c2ffc97f268c..1ad6cdf4c2494 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvgTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvgTests.java @@ -90,7 +90,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java index c667747a8ba75..ed2c45f8c2321 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java @@ -32,7 +32,7 @@ public NowTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSu @ParametersFactory public static Iterable<Object[]> parameters() { - return parameterSuppliersFromTypedDataWithDefaultChecks( + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( true, List.of( new TestCaseSupplier( @@ -45,8 +45,7 @@ public static Iterable<Object[]> parameters() { equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()) ) ) - ), - (valid, position) -> "" + ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumErrorTests.java new file mode 100644 index 0000000000000..4f1f8f911c306 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvPSeriesWeightedSumErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List<TestCaseSupplier> cases() { + return paramsToSuppliers(MvPSeriesWeightedSumTests.parameters()); + } + + @Override + protected Expression build(Source source, List<Expression> args) { + return new MvPSeriesWeightedSum(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> "double")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java index 0c905b28ac931..47669cba71894 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java @@ -35,7 +35,7 @@ public static Iterable<Object[]> parameters() { doubles(cases); - return parameterSuppliersFromTypedDataWithDefaultChecks( + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( (nullPosition, nullValueDataType, original) ->
nullValueDataType == DataType.NULL ? DataType.NULL : original.expectedType(), (nullPosition, nullData, original) -> { if (nullData.isForceLiteral()) { @@ -43,8 +43,7 @@ public static Iterable parameters() { } return nullData.type() == DataType.NULL ? equalTo("LiteralsEvaluator[lit=null]") : original; }, - cases, - (valid, position) -> "double" + cases ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileErrorTests.java new file mode 100644 index 0000000000000..25e7100b7c418 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvPercentileErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvPercentileTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvPercentile(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> "numeric except unsigned_long")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java index 0a419d44e3448..9c506ee0b5954 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java @@ -352,13 +352,12 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks( + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( (nullPosition, nullValueDataType, original) -> nullValueDataType == DataType.NULL && nullPosition == 0 ? 
DataType.NULL : original.expectedType(), (nullPosition, nullData, original) -> original, - cases, - (v, p) -> "numeric except unsigned_long" + cases ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumErrorTests.java new file mode 100644 index 0000000000000..bd8168f274f09 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvSumErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvSumTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvSum(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java index 89b148144fc83..19bb915b405db 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java @@ -65,7 +65,7 @@ public static Iterable parameters() { data.add(asLongUnsigned(UNSIGNED_LONG_MAX)); return data; })); - return parameterSuppliersFromTypedDataWithDefaultChecks(false, cases, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, cases); } private static TestCaseSupplier arithmeticExceptionCase(DataType dataType, Supplier dataSupplier) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeErrorTests.java new file mode 100644 index 0000000000000..1c5b867bef73b --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StEnvelopeErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StEnvelopeTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StEnvelope(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java index 9f629d9127673..6b0449788b1c8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java @@ -55,11 +55,7 @@ public static Iterable parameters() { StEnvelopeTests::valueOfCartesian, List.of() ); - return parameterSuppliersFromTypedDataWithDefaultChecks( - false, - suppliers, - (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, suppliers); } private static BytesRef valueOfGeo(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXErrorTests.java new file mode 100644 index 0000000000000..77e85ea9c1882 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StXErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StXTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StX(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point or cartesian_point")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxErrorTests.java new file mode 100644 index 0000000000000..e209304305aee --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StXMaxErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StXMaxTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StXMax(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java index 9205879fa1cb9..aa7ced1d4251d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java @@ -42,11 +42,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryCartesianPoint(suppliers, expectedCartesian, DOUBLE, StXMaxTests::valueOfCartesian, List.of()); TestCaseSupplier.forUnaryGeoShape(suppliers, expectedGeo, DOUBLE, StXMaxTests::valueOfGeo, List.of()); TestCaseSupplier.forUnaryCartesianShape(suppliers, expectedCartesian, DOUBLE, StXMaxTests::valueOfCartesian, List.of()); - return parameterSuppliersFromTypedDataWithDefaultChecks( - true, - suppliers, - (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } private static double valueOfGeo(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinErrorTests.java new file mode 100644 index 0000000000000..7673d3663df18 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StXMinErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StXMinTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StXMin(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java index 3603bff9656fe..f728f50cc6260 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java @@ -42,11 +42,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryCartesianPoint(suppliers, expectedCartesian, DOUBLE, StXMinTests::valueOfCartesian, List.of()); TestCaseSupplier.forUnaryGeoShape(suppliers, expectedGeo, DOUBLE, StXMinTests::valueOfGeo, List.of()); TestCaseSupplier.forUnaryCartesianShape(suppliers, expectedCartesian, DOUBLE, StXMinTests::valueOfCartesian, List.of()); - return parameterSuppliersFromTypedDataWithDefaultChecks( - true, - suppliers, - (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } private static double valueOfGeo(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java index 96cddfdd64099..4e14c23a1bba4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java @@ -36,7 +36,7 @@ public static Iterable parameters() { final List suppliers = new ArrayList<>(); TestCaseSupplier.forUnaryGeoPoint(suppliers, expectedEvaluator, DOUBLE, StXTests::valueOf, List.of()); TestCaseSupplier.forUnaryCartesianPoint(suppliers, expectedEvaluator, DOUBLE, StXTests::valueOf, List.of()); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "geo_point or cartesian_point"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } private static double valueOf(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYErrorTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYErrorTests.java new file mode 100644 index 0000000000000..ddad9f3e4902f --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StYErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StYTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StY(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point or cartesian_point")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxErrorTests.java new file mode 100644 index 0000000000000..0090da0bc4238 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StYMaxErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StYMaxTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StYMax(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java index cb2a03c3a9473..9aeda6b106236 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java @@ -42,11 +42,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryCartesianPoint(suppliers, expectedCartesian, DOUBLE, StYMaxTests::valueOfCartesian, List.of()); TestCaseSupplier.forUnaryGeoShape(suppliers, expectedGeo, DOUBLE, StYMaxTests::valueOfGeo, List.of()); TestCaseSupplier.forUnaryCartesianShape(suppliers, expectedCartesian, DOUBLE, StYMaxTests::valueOfCartesian, List.of()); - return parameterSuppliersFromTypedDataWithDefaultChecks( - true, - suppliers, - (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } private static double valueOfGeo(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinErrorTests.java new file mode 100644 index 0000000000000..29ffac0bac1ff --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinErrorTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class StYMinErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(StYMinTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new StYMin(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape") + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java index 0c191f6dc4c5b..db577b536048b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java @@ -42,11 +42,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryCartesianPoint(suppliers, expectedCartesian, DOUBLE, StYMinTests::valueOfCartesian, List.of()); TestCaseSupplier.forUnaryGeoShape(suppliers, expectedGeo, DOUBLE, StYMinTests::valueOfGeo, List.of()); TestCaseSupplier.forUnaryCartesianShape(suppliers, expectedCartesian, DOUBLE, StYMinTests::valueOfCartesian, List.of()); - return parameterSuppliersFromTypedDataWithDefaultChecks( - true, - suppliers, - (v, p) -> "geo_point, cartesian_point, geo_shape or cartesian_shape" - ); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } private static double valueOfGeo(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java index 165dbb2c0ab77..33ee6f6c4cdce 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java @@ -36,7 +36,7 @@ public static Iterable parameters() { final List suppliers = new ArrayList<>(); TestCaseSupplier.forUnaryGeoPoint(suppliers, expectedEvaluator, DOUBLE, StYTests::valueOf, List.of()); TestCaseSupplier.forUnaryCartesianPoint(suppliers, expectedEvaluator, DOUBLE, StYTests::valueOf, List.of()); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "geo_point or cartesian_point"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } private static double valueOf(BytesRef wkb) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatErrorTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatErrorTests.java new file mode 100644 index 0000000000000..48cbe2fbc1007 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatErrorTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class RepeatErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + + @Override + protected List cases() { + return paramsToSuppliers(RepeatTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Repeat(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> switch (p) { + case 0 -> "string"; + case 1 -> "integer"; + default -> ""; + })); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java index 5eb654b0d8235..2f1c2e7853c7e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java @@ -122,11 +122,7 @@ public static Iterable parameters() { .withFoldingException(IllegalArgumentException.class, "Number parameter cannot be negative, found [" + number + "]"); })); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, cases, (v, p) -> switch (p) { - case 0 -> "string"; - case 1 -> "integer"; - default -> ""; - }); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, cases); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseErrorTests.java new file mode 100644 index 0000000000000..e77bc574a2acf --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class ReverseErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(ReverseTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Reverse(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "string")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java index 397fb8064626c..8c4f77535c7b0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReverseTests.java @@ -39,7 +39,7 @@ public static Iterable parameters() { } } - return parameterSuppliersFromTypedDataWithDefaultChecks(false, suppliers, (v, p) -> "string"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, suppliers); } @Override From 2838dbb98e8dad7c0af768d7ed5aa89bf0611471 Mon Sep 17 00:00:00 2001 From: Gal Lalouche Date: Sun, 2 Feb 2025 14:56:22 +0200 Subject: [PATCH 361/383] ESQL: Support for _index metadata field in CsvTests (#121261) * ESQL: Support for _index metadata field in CsvTests * Extract INDEX constant to MetadataAttribute * Add comment on capability --- .../core/expression/MetadataAttribute.java | 3 +- .../main/resources/metadata-remote.csv-spec | 2 +- .../src/main/resources/metadata.csv-spec | 6 +-- .../src/main/resources/union_types.csv-spec | 54 +++++++++---------- .../xpack/esql/action/EsqlCapabilities.java | 7 ++- .../xpack/esql/session/IndexResolver.java | 3 +- .../TestPhysicalOperationProviders.java | 10 +++- 7 files changed, 49 insertions(+), 36 deletions(-) diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java index 0f1cfbb85039c..dc75ac3a96248 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java @@ -32,6 +32,7 @@ public class MetadataAttribute extends TypedAttribute { public static final String TIMESTAMP_FIELD = "@timestamp"; public static final String TSID_FIELD = "_tsid"; public static final String SCORE = "_score"; + public static final String INDEX = "_index"; static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Attribute.class, @@ -42,7 +43,7 @@ public class MetadataAttribute extends TypedAttribute { private 
static final Map> ATTRIBUTES_MAP = Map.of( "_version", tuple(DataType.LONG, false), // _version field is not searchable - "_index", + INDEX, tuple(DataType.KEYWORD, true), IdFieldMapper.NAME, tuple(DataType.KEYWORD, false), // actually searchable, but fielddata access on the _id field is disallowed by default diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec index 4d7ee9b1b5af6..88c4fbf7de6cc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec @@ -39,7 +39,7 @@ max:integer |_index:keyword ; metaIndexAliasedInAggs -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: metadata_fields_remote_test from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i | SORT _i; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec index a213c378d33d8..1f41ffdb60691 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec @@ -40,7 +40,7 @@ max:integer |_index:keyword ; metaIndexSorted -required_capability: metadata_fields +required_capability: index_metadata_field from employees metadata _index | sort _index, emp_no desc | keep emp_no, _index | limit 2; @@ -50,7 +50,7 @@ emp_no:integer |_index:keyword ; metaIndexWithInPredicate -required_capability: metadata_fields +required_capability: index_metadata_field from employees metadata _index | where _index in ("employees", "foobar") | sort emp_no desc | keep emp_no, _index | limit 2; @@ -60,7 +60,7 @@ emp_no:integer |_index:keyword ; metaIndexAliasedInAggs -required_capability: metadata_fields +required_capability: index_metadata_field from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec index a2f491e20e3b9..8b19bc589fcff 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec @@ -133,7 +133,7 @@ mc:l | count:l multiIndexIpString required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: casting_operator required_capability: union_types_remove_fields @@ -162,7 +162,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringRename required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: casting_operator required_capability: union_types_remove_fields @@ -191,7 +191,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringRenameToString required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_str METADATA _index @@ -219,7 +219,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexWhereIpString required_capability: union_types -required_capability: metadata_fields 
+required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_str METADATA _index @@ -237,7 +237,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 3450233 | Connected multiIndexWhereIpStringLike required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_str METADATA _index @@ -445,7 +445,7 @@ count:long | message:keyword multiIndexMissingIpToString required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_missing_field FROM sample_data, sample_data_str, missing_ip_sample_data METADATA _index @@ -480,7 +480,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450 multiIndexMissingIpToIp required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_missing_field FROM sample_data, sample_data_str, missing_ip_sample_data METADATA _index @@ -515,7 +515,7 @@ sample_data_str | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexTsLong required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_long METADATA _index @@ -543,7 +543,7 @@ sample_data_ts_long | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexTsLongRename required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_long METADATA _index @@ -573,7 +573,7 @@ sample_data_ts_long | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexTsNanosRename required_capability: to_date_nanos required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_nanos METADATA _index @@ -602,7 +602,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexTsNanosRenameToNanos required_capability: to_date_nanos required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_nanos METADATA _index @@ -631,7 +631,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360123456Z | 172.21.2.162 | 34502 multiIndex sort millis and nanos as nanos required_capability: to_date_nanos required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_nanos METADATA _index @@ -660,7 +660,7 @@ sample_data | 2023-10-23T12:15:03.360000000Z | 172.21.2.162 | 34502 multiIndex sort millis and nanos as millis required_capability: to_date_nanos required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_nanos METADATA _index @@ -691,7 +691,7 @@ multiIndexTsNanosRenameToNanosWithFiltering required_capability: to_date_nanos required_capability: date_nanos_binary_comparison required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field 
required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_nanos METADATA _index @@ -716,7 +716,7 @@ sample_data_ts_nanos | 2023-10-23T13:33:34.937123456Z | 172.21.0.5 | 12323 multiIndexTsLongRenameToString required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_long METADATA _index @@ -744,7 +744,7 @@ sample_data_ts_long | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexWhereTsLong required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields FROM sample_data, sample_data_ts_long METADATA _index @@ -979,7 +979,7 @@ count:long | message:keyword multiIndexIpStringTsLong required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1022,7 +1022,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringTsLongDropped required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: to_date_nanos FROM sample_data* METADATA _index @@ -1064,7 +1064,7 @@ sample_data_ts_nanos | 8268153 | Connection error multiIndexIpStringTsLongRename required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1107,7 +1107,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringTsLongRenameDropped required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: to_date_nanos FROM sample_data* METADATA _index @@ -1149,7 +1149,7 @@ sample_data_ts_nanos | 8268153 | Connection error multiIndexIpStringTsLongRenameToString required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1192,7 +1192,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexWhereIpStringTsLong required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1226,7 +1226,7 @@ count:long | message:keyword multiIndexWhereIpStringLikeTsLong required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1260,7 +1260,7 @@ count:long | message:keyword multiIndexMultiColumnTypesRename required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1279,7 +1279,7 @@ null | null | 8268153 | Connectio multiIndexMultiColumnTypesRenameAndKeep required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1299,7 +1299,7 @@ sample_data_ts_nanos | 2023-10-23T13:52:55.015Z | 2023-10-23T13:52:55.015123456 
multiIndexMultiColumnTypesRenameAndDrop required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: union_types_remove_fields required_capability: to_date_nanos @@ -1591,7 +1591,7 @@ FROM sample_data, sample_data_ts_long shortIntegerWidening required_capability: union_types -required_capability: metadata_fields +required_capability: index_metadata_field required_capability: casting_operator required_capability: union_types_numeric_widening diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 25518220e308b..b7ec21b96be37 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -121,12 +121,17 @@ public enum Cap { * Cast string literals to a desired data type for IN predicate and more types for BinaryComparison. */ STRING_LITERAL_AUTO_CASTING_EXTENDED, - /** * Support for metadata fields. */ METADATA_FIELDS, + /** + * Support specifically for *just* the _index METADATA field. Used by CsvTests, since that is the only metadata field currently + * supported. + */ + INDEX_METADATA_FIELD, + /** * Support for timespan units abbreviations */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java index b11a8580a1e18..3e4dd6849478a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.esql.action.EsqlResolveFieldsAction; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.DateEsField; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -50,7 +51,7 @@ public class IndexResolver { public static final Set ALL_FIELDS = Set.of("*"); - public static final Set INDEX_METADATA_FIELD = Set.of("_index"); + public static final Set INDEX_METADATA_FIELD = Set.of(MetadataAttribute.INDEX); public static final String UNMAPPED = "unmapped"; public static final IndicesOptions FIELD_CAPS_INDICES_OPTIONS = IndicesOptions.builder() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java index 780045077f7b8..1009eaea9b54c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java @@ -48,6 +48,7 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; import 
org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; @@ -292,6 +293,10 @@ private Block getBlockForMultiType(DocBlock indexDoc, MultiTypeEsField multiType private Block extractBlockForSingleDoc(DocBlock docBlock, String columnName, TestBlockCopier blockCopier) { var indexId = docBlock.asVector().shards().getInt(0); var indexPage = indexPages.get(indexId); + if (MetadataAttribute.INDEX.equals(columnName)) { + return docBlock.blockFactory() + .newConstantBytesRefBlockWith(new BytesRef(indexPage.index), blockCopier.docIndices.getPositionCount()); + } int columnIndex = indexPage.columnIndex(columnName) .orElseThrow(() -> new EsqlIllegalArgumentException("Cannot find column named [{}] in {}", columnName, indexPage.columnNames)); var originalData = indexPage.page.getBlock(columnIndex); @@ -410,8 +415,9 @@ private Block extractBlockForColumn( ) { foreachIndexDoc(docBlock, indexDoc -> { TestBlockCopier blockCopier = blockCopier(dataType, extractPreference, indexDoc.asVector().docs()); - Block blockForIndex = extractBlock.apply(indexDoc, blockCopier); - blockBuilder.copyFrom(blockForIndex, 0, blockForIndex.getPositionCount()); + try (Block blockForIndex = extractBlock.apply(indexDoc, blockCopier)) { + blockBuilder.copyFrom(blockForIndex, 0, blockForIndex.getPositionCount()); + } }); var result = blockBuilder.build(); assert result.getPositionCount() == docBlock.getPositionCount() From 7eeb908d88fc85717434c5ba724f22e41f2a834f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 3 Feb 2025 01:02:35 +1100 Subject: [PATCH 362/383] Mute org.elasticsearch.xpack.core.ilm.SetSingleNodeAllocateStepTests testPerformActionSomeShardsOnlyOnNewNodesButNewNodesInvalidAttrs #121495 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f01f363ca0cf4..ea300a1f03d8f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -436,6 +436,9 @@ tests: - class: org.elasticsearch.ingest.geoip.FullClusterRestartIT method: testGeoIpSystemFeaturesMigration {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/121115 +- class: org.elasticsearch.xpack.core.ilm.SetSingleNodeAllocateStepTests + method: testPerformActionSomeShardsOnlyOnNewNodesButNewNodesInvalidAttrs + issue: https://github.com/elastic/elasticsearch/issues/121495 # Examples: # From 7743380509369f2afccca208201d21b7d872702e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 3 Feb 2025 01:41:41 +1100 Subject: [PATCH 363/383] Mute org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT test {yaml=cat.aliases/40_hidden/Test cat aliases output with a visible index with a hidden alias} #121128 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index ea300a1f03d8f..001957a88e332 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -439,6 +439,9 @@ tests: - class: org.elasticsearch.xpack.core.ilm.SetSingleNodeAllocateStepTests method: testPerformActionSomeShardsOnlyOnNewNodesButNewNodesInvalidAttrs issue: https://github.com/elastic/elasticsearch/issues/121495 +- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT + method: test {yaml=cat.aliases/40_hidden/Test cat aliases output with a visible index with a hidden alias} + issue: https://github.com/elastic/elasticsearch/issues/121128 # Examples: # 
From e7c1ee1a707c7fe4510b22395174fecb992c2bdf Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 3 Feb 2025 02:44:06 +1100 Subject: [PATCH 364/383] Mute org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT #121411 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 001957a88e332..cbbdcc1e82f68 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -442,6 +442,8 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cat.aliases/40_hidden/Test cat aliases output with a visible index with a hidden alias} issue: https://github.com/elastic/elasticsearch/issues/121128 +- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT + issue: https://github.com/elastic/elasticsearch/issues/121411 # Examples: # From 25c300a1e1d7f40061a951857cb589b13598fc95 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Sun, 2 Feb 2025 13:08:04 -0800 Subject: [PATCH 365/383] Unmute EsqlSpecIT for more logging (#121500) Tracked at #121411 --- muted-tests.yml | 2 -- .../java/org/elasticsearch/index/translog/TranslogWriter.java | 3 +++ 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index cbbdcc1e82f68..001957a88e332 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -442,8 +442,6 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cat.aliases/40_hidden/Test cat aliases output with a visible index with a hidden alias} issue: https://github.com/elastic/elasticsearch/issues/121128 -- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT - issue: https://github.com/elastic/elasticsearch/issues/121411 # Examples: # diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 8cf631b660b1e..36b6709661017 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -29,6 +29,7 @@ import org.elasticsearch.index.engine.TranslogOperationAsserter; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.lookup.Source; import java.io.Closeable; import java.io.IOException; @@ -298,8 +299,10 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc + "], with different data. " + "prvOp [" + prvOp + + (prvOp instanceof Translog.Index index ? " source: " + Source.fromBytes(index.source()).source() : "") + "], newOp [" + newOp + + (newOp instanceof Translog.Index index ? 
" source: " + Source.fromBytes(index.source()).source() : "") + "]", previous.v2() ); From d8b764c0a5cfbe3d314e631c05635a7db95f778a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 3 Feb 2025 09:04:14 +1100 Subject: [PATCH 366/383] Mute org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT test {p0=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only} #121412 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 001957a88e332..b102c4015abff 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -442,6 +442,9 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cat.aliases/40_hidden/Test cat aliases output with a visible index with a hidden alias} issue: https://github.com/elastic/elasticsearch/issues/121128 +- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT + method: test {p0=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only} + issue: https://github.com/elastic/elasticsearch/issues/121412 # Examples: # From 406b4a3dcdb2b931d7cb56f46915d5b791c78536 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 3 Feb 2025 09:22:33 +1100 Subject: [PATCH 367/383] Mute org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests #121294 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b102c4015abff..dbfbc7dd243f0 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -445,6 +445,8 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only} issue: https://github.com/elastic/elasticsearch/issues/121412 +- class: org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests + issue: https://github.com/elastic/elasticsearch/issues/121294 # Examples: # From 10a6f27282919b1c4013468cc9f5af7f9aef684a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 3 Feb 2025 16:56:24 +1100 Subject: [PATCH 368/383] Mute org.elasticsearch.xpack.ml.integration.ClassificationIT testDependentVariableIsAliasToKeyword #121492 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index dbfbc7dd243f0..9c2c9319b0bda 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -447,6 +447,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/121412 - class: org.elasticsearch.xpack.inference.common.InferenceServiceNodeLocalRateLimitCalculatorTests issue: https://github.com/elastic/elasticsearch/issues/121294 +- class: org.elasticsearch.xpack.ml.integration.ClassificationIT + method: testDependentVariableIsAliasToKeyword + issue: https://github.com/elastic/elasticsearch/issues/121492 # Examples: # From 1058df407f55235ef974be4eb3517986712fd711 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Mon, 3 Feb 2025 10:14:49 +0100 Subject: [PATCH 369/383] Add expectThrows with messageMatcher (#120290) --- .../org/elasticsearch/test/ESTestCase.java | 9 +++ .../esql/parser/StatementParserTests.java | 59 +++++++++++-------- 2 files changed, 44 insertions(+), 24 deletions(-) diff 
--git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index a271c999a2ba7..227d7ca3046f8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -2659,6 +2659,15 @@ public static T expectThrows(Class expectedType, Reques ); } + /** + * Checks a specific exception class with matched message is thrown by the given runnable, and returns it. + */ + public static T expectThrows(Class expectedType, Matcher messageMatcher, ThrowingRunnable runnable) { + var e = expectThrows(expectedType, runnable); + assertThat(e.getMessage(), messageMatcher); + return e; + } + /** * Same as {@link #runInParallel(int, IntConsumer)} but also attempts to start all tasks at the same time by blocking execution on a * barrier until all threads are started and ready to execute their task. diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 1e1629e6f993b..efa3226ee3308 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -398,8 +398,11 @@ public void testStatsWithoutGroupKeyMixedAggAndFilter() { public void testInlineStatsWithGroups() { var query = "inlinestats b = min(a) by c, d.e"; if (Build.current().isSnapshot() == false) { - var e = expectThrows(ParsingException.class, () -> processingCommand(query)); - assertThat(e.getMessage(), containsString("line 1:13: mismatched input 'inlinestats' expecting {")); + expectThrows( + ParsingException.class, + containsString("line 1:13: mismatched input 'inlinestats' expecting {"), + () -> processingCommand(query) + ); return; } assertEquals( @@ -424,8 +427,11 @@ public void testInlineStatsWithGroups() { public void testInlineStatsWithoutGroups() { var query = "inlinestats min(a), c = 1"; if (Build.current().isSnapshot() == false) { - var e = expectThrows(ParsingException.class, () -> processingCommand(query)); - assertThat(e.getMessage(), containsString("line 1:13: mismatched input 'inlinestats' expecting {")); + expectThrows( + ParsingException.class, + containsString("line 1:13: mismatched input 'inlinestats' expecting {"), + () -> processingCommand(query) + ); return; } assertEquals( @@ -858,16 +864,17 @@ public void testSuggestAvailableSourceCommandsOnParsingError() { Tuple.tuple("a/*hi*/", "a"), Tuple.tuple("explain [ frm a ]", "frm") )) { - ParsingException pe = expectThrows(ParsingException.class, () -> statement(queryWithUnexpectedCmd.v1())); - assertThat( - pe.getMessage(), + expectThrows( + ParsingException.class, allOf( containsString("mismatched input '" + queryWithUnexpectedCmd.v2() + "'"), containsString("'explain'"), containsString("'from'"), containsString("'row'") - ) + ), + () -> statement(queryWithUnexpectedCmd.v1()) ); + } } @@ -882,15 +889,15 @@ public void testSuggestAvailableProcessingCommandsOnParsingError() { Tuple.tuple("from a | a/*hi*/", "a"), Tuple.tuple("explain [ from a | evl b = c ]", "evl") )) { - ParsingException pe = expectThrows(ParsingException.class, () -> statement(queryWithUnexpectedCmd.v1())); - assertThat( - pe.getMessage(), + expectThrows( + ParsingException.class, allOf( containsString("mismatched input '" + 
queryWithUnexpectedCmd.v2() + "'"), containsString("'eval'"), containsString("'stats'"), containsString("'where'") - ) + ), + () -> statement(queryWithUnexpectedCmd.v1()) ); } } @@ -981,10 +988,10 @@ public void testGrokPattern() { assertEquals("%{WORD:foo}", grok.parser().pattern()); assertEquals(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields()); - ParsingException pe = expectThrows(ParsingException.class, () -> statement("row a = \"foo bar\" | grok a \"%{_invalid_:x}\"")); - assertThat( - pe.getMessage(), - containsString("Invalid pattern [%{_invalid_:x}] for grok: Unable to find pattern [_invalid_] in Grok's pattern dictionary") + expectThrows( + ParsingException.class, + containsString("Invalid pattern [%{_invalid_:x}] for grok: Unable to find pattern [_invalid_] in Grok's pattern dictionary"), + () -> statement("row a = \"foo bar\" | grok a \"%{_invalid_:x}\"") ); cmd = processingCommand("grok a \"%{WORD:foo} %{WORD:foo}\""); @@ -1110,8 +1117,7 @@ public void testKeepStarMvExpand() { public void testUsageOfProject() { String query = "from test | project foo, bar"; - ParsingException e = expectThrows(ParsingException.class, "Expected syntax error for " + query, () -> statement(query)); - assertThat(e.getMessage(), containsString("mismatched input 'project' expecting")); + expectThrows(ParsingException.class, containsString("mismatched input 'project' expecting"), () -> statement(query)); } public void testInputParams() { @@ -2046,8 +2052,7 @@ public void testQuotedName() { private void assertStringAsIndexPattern(String string, String statement) { if (Build.current().isSnapshot() == false && statement.contains("METRIC")) { - var e = expectThrows(ParsingException.class, () -> statement(statement)); - assertThat(e.getMessage(), containsString("mismatched input 'METRICS' expecting {")); + expectThrows(ParsingException.class, containsString("mismatched input 'METRICS' expecting {"), () -> statement(statement)); return; } LogicalPlan from = statement(statement); @@ -2058,8 +2063,11 @@ private void assertStringAsIndexPattern(String string, String statement) { private void assertStringAsLookupIndexPattern(String string, String statement) { if (Build.current().isSnapshot() == false) { - var e = expectThrows(ParsingException.class, () -> statement(statement)); - assertThat(e.getMessage(), containsString("line 1:14: LOOKUP_🐔 is in preview and only available in SNAPSHOT build")); + expectThrows( + ParsingException.class, + containsString("line 1:14: LOOKUP_🐔 is in preview and only available in SNAPSHOT build"), + () -> statement(statement) + ); return; } var plan = statement(statement); @@ -2126,8 +2134,11 @@ public void testInlineConvertWithNonexistentType() { public void testLookup() { String query = "ROW a = 1 | LOOKUP_🐔 t ON j"; if (Build.current().isSnapshot() == false) { - var e = expectThrows(ParsingException.class, () -> statement(query)); - assertThat(e.getMessage(), containsString("line 1:13: mismatched input 'LOOKUP_🐔' expecting {")); + expectThrows( + ParsingException.class, + containsString("line 1:13: mismatched input 'LOOKUP_🐔' expecting {"), + () -> statement(query) + ); return; } var plan = statement(query); From befc5783b79b567a222a514930a69c33b7ff4d0c Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Mon, 3 Feb 2025 10:20:24 +0100 Subject: [PATCH 370/383] Fix docs.testFilterToday JDBC test (#121504) --- .../plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec index 2fa82c05cc1aa..0bdd3fbc1b450 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec @@ -3353,7 +3353,7 @@ Alejandro Amabile Anoosh Basil -Brendon +Cristinel // end::filterToday ; From 541c94160fe7f263f2940d75753fd2202aa8577e Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 3 Feb 2025 11:25:25 +0200 Subject: [PATCH 371/383] Unmute TSDBPassthroughIndexingIT (#121505) --- muted-tests.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 9c2c9319b0bda..9580db6b4deb7 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -389,8 +389,6 @@ tests: - class: org.elasticsearch.xpack.ml.integration.ClassificationIT method: testDependentVariableIsAliasToNested issue: https://github.com/elastic/elasticsearch/issues/121415 -- class: org.elasticsearch.datastreams.TSDBPassthroughIndexingIT - issue: https://github.com/elastic/elasticsearch/issues/121464 - class: org.elasticsearch.xpack.esql.heap_attack.HeapAttackIT method: testLookupExplosionBigStringManyMatches issue: https://github.com/elastic/elasticsearch/issues/121465 From 487b217afe200028d1f27a25546e29cf8160983f Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 3 Feb 2025 10:27:19 +0100 Subject: [PATCH 372/383] Remove ServerlessScope annotation from RestGraphAction (#120789) --- .../elasticsearch/xpack/graph/rest/action/RestGraphAction.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java index 78870cbb9530b..fe9bdef9bebb9 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java @@ -18,8 +18,6 @@ import org.elasticsearch.protocol.xpack.graph.VertexRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.Scope; -import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -38,7 +36,6 @@ /** * @see GraphExploreRequest */ -@ServerlessScope(Scope.PUBLIC) public class RestGraphAction extends BaseRestHandler { public static final ParseField TIMEOUT_FIELD = new ParseField("timeout"); From 3200c06011e0562fdf2e46f6a6c5eb757f33d140 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 3 Feb 2025 20:53:04 +1100 Subject: [PATCH 373/383] Mute org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT test {yaml=cat.aliases/10_basic/Complex alias} #121513 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 9580db6b4deb7..094233d48b88b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -448,6 +448,9 @@ tests: - class: org.elasticsearch.xpack.ml.integration.ClassificationIT method: testDependentVariableIsAliasToKeyword issue: https://github.com/elastic/elasticsearch/issues/121492 +- class: 
org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT + method: test {yaml=cat.aliases/10_basic/Complex alias} + issue: https://github.com/elastic/elasticsearch/issues/121513 # Examples: # From c8b6d2fe65c62b87b03b696ac2d66bd185274dc5 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Mon, 3 Feb 2025 09:53:38 +0000 Subject: [PATCH 374/383] Create transport version for 9.0 release (#120936) Also bump the minimum compatible version to something similar to what 8.18 will be --- .../org/elasticsearch/TransportVersions.java | 7 +- .../elasticsearch/TransportVersionTests.java | 37 ----- .../FieldCapabilitiesNodeResponseTests.java | 44 ----- .../FieldCapabilitiesResponseTests.java | 45 ----- .../action/shard/ShardStateActionTests.java | 6 +- .../cluster/node/DiscoveryNodeTests.java | 36 ---- .../index/mapper/MappingParserTests.java | 7 +- ...oordinatedInferenceActionRequestTests.java | 4 +- .../security/authc/ApiKeyServiceTests.java | 156 ------------------ ...usterAccessAuthenticationServiceTests.java | 52 ------ .../authz/store/NativeRolesStoreTests.java | 102 ------------ 11 files changed, 8 insertions(+), 488 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 1144f94795713..30d0301bf9517 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -172,6 +172,7 @@ static TransportVersion def(int id) { public static final TransportVersion TIMEOUT_GET_PARAM_FOR_RESOLVE_CLUSTER = def(8_838_00_0); public static final TransportVersion INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING = def(8_839_00_0); public static final TransportVersion ML_INFERENCE_IBM_WATSONX_RERANK_ADDED = def(8_840_00_0); + public static final TransportVersion ELASTICSEARCH_9_0 = def(9_000_00_0); /* * STOP! READ THIS FIRST! No, really, @@ -230,15 +231,13 @@ static TransportVersion def(int id) { * Reference to the earliest compatible transport version to this version of the codebase. * This should be the transport version used by the highest minor version of the previous major. */ - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - // This needs to be bumped to the 8.last - public static final TransportVersion MINIMUM_COMPATIBLE = V_7_17_0; + public static final TransportVersion MINIMUM_COMPATIBLE = BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1; /** * Reference to the minimum transport version that can be used with CCS. * This should be the transport version used by the previous minor release. 
*/ - public static final TransportVersion MINIMUM_CCS_VERSION = V_8_15_0; + public static final TransportVersion MINIMUM_CCS_VERSION = BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1; /** * Sorted list of all versions defined in this class diff --git a/server/src/test/java/org/elasticsearch/TransportVersionTests.java b/server/src/test/java/org/elasticsearch/TransportVersionTests.java index 00429035f97d3..8ffc2eae4d7b4 100644 --- a/server/src/test/java/org/elasticsearch/TransportVersionTests.java +++ b/server/src/test/java/org/elasticsearch/TransportVersionTests.java @@ -16,7 +16,6 @@ import java.util.Collections; import java.util.List; import java.util.Set; -import java.util.TreeSet; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -199,40 +198,4 @@ public void testToString() { assertEquals("2000099", TransportVersion.fromId(2_00_00_99).toString()); assertEquals("5000099", TransportVersion.fromId(5_00_00_99).toString()); } - - /** - * Until 9.0 bumps its transport version to 9_000_00_0, all transport changes must be backported to 8.x. - * This test ensures transport versions are dense, so that we have confidence backports have not been missed. - * Note that it does not ensure patches are not missed, but it should catch the majority of misordered - * or missing transport versions. - */ - public void testDenseTransportVersions() { - Set missingVersions = new TreeSet<>(); - TransportVersion previous = null; - for (var tv : TransportVersion.getAllVersions()) { - if (tv.before(TransportVersions.V_8_16_0)) { - continue; - } - if (previous == null) { - previous = tv; - continue; - } - - if (previous.id() + 1000 < tv.id()) { - int nextId = previous.id(); - do { - nextId = (nextId + 1000) / 1000 * 1000; - missingVersions.add(nextId); - } while (nextId + 1000 < tv.id()); - } - previous = tv; - } - if (missingVersions.isEmpty() == false) { - StringBuilder msg = new StringBuilder("Missing transport versions:\n"); - for (Integer id : missingVersions) { - msg.append(" " + id + "\n"); - } - fail(msg.toString()); - } - } } diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java index c99c671c69148..fa57431cc582a 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.test.TransportVersionUtils; -import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -37,7 +36,6 @@ import static org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponseTests.randomMappingHashToIndices; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.nullValue; public class FieldCapabilitiesNodeResponseTests extends AbstractWireSerializingTestCase { @@ -145,48 +143,6 @@ public void testSerializeNodeResponseBetweenNewNodes() throws Exception { } } - public void testSerializeNodeResponseBetweenOldNodes() throws IOException { - final TransportVersion minCompactVersion = TransportVersions.MINIMUM_COMPATIBLE; - assertTrue("Remove this test once minCompactVersion >= 8.2.0", minCompactVersion.before(TransportVersions.V_8_2_0)); - List indexResponses = CollectionUtils.concatLists( - 
randomIndexResponsesWithMappingHash(randomMappingHashToIndices()), - randomIndexResponsesWithoutMappingHash() - ); - Randomness.shuffle(indexResponses); - FieldCapabilitiesNodeResponse inResponse = randomNodeResponse(indexResponses); - TransportVersion version = TransportVersionUtils.randomVersionBetween( - random(), - minCompactVersion, - TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_2_0) - ); - final FieldCapabilitiesNodeResponse outResponse = copyInstance(inResponse, version); - assertThat(outResponse.getFailures().keySet(), equalTo(inResponse.getFailures().keySet())); - assertThat(outResponse.getUnmatchedShardIds(), equalTo(inResponse.getUnmatchedShardIds())); - final List inList = inResponse.getIndexResponses(); - final List outList = outResponse.getIndexResponses(); - assertThat(outList, hasSize(inList.size())); - for (int i = 0; i < inList.size(); i++) { - assertThat("Responses between old nodes don't have mapping hash", outList.get(i).getIndexMappingHash(), nullValue()); - assertThat(outList.get(i).getIndexName(), equalTo(inList.get(i).getIndexName())); - assertThat(outList.get(i).canMatch(), equalTo(inList.get(i).canMatch())); - Map outCap = outList.get(i).get(); - Map inCap = inList.get(i).get(); - if (version.onOrAfter(TransportVersions.V_8_0_0)) { - assertThat(outCap, equalTo(inCap)); - } else { - // Exclude metric types which was introduced in 8.0 - assertThat(outCap.keySet(), equalTo(inCap.keySet())); - for (String field : outCap.keySet()) { - assertThat(outCap.get(field).name(), equalTo(inCap.get(field).name())); - assertThat(outCap.get(field).type(), equalTo(inCap.get(field).type())); - assertThat(outCap.get(field).isSearchable(), equalTo(inCap.get(field).isSearchable())); - assertThat(outCap.get(field).isAggregatable(), equalTo(inCap.get(field).isAggregatable())); - assertThat(outCap.get(field).meta(), equalTo(inCap.get(field).meta())); - } - } - } - } - private static FieldCapabilitiesNodeResponse randomNodeResponse(List indexResponses) { int numUnmatched = randomIntBetween(0, 3); final Set unmatchedShardIds = new HashSet<>(); diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java index 6ea4a1d3dc46b..ceb84e4b2a0d9 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java @@ -40,7 +40,6 @@ import static org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponseTests.randomMappingHashToIndices; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.nullValue; public class FieldCapabilitiesResponseTests extends AbstractWireSerializingTestCase { @@ -198,48 +197,4 @@ public void testSerializeCCSResponseBetweenNewClusters() throws Exception { } } } - - public void testSerializeCCSResponseBetweenOldClusters() throws IOException { - TransportVersion minCompactVersion = TransportVersions.MINIMUM_COMPATIBLE; - assertTrue("Remove this test once minCompactVersion >= 8.2.0", minCompactVersion.before(TransportVersions.V_8_2_0)); - List indexResponses = CollectionUtils.concatLists( - randomIndexResponsesWithMappingHash(randomMappingHashToIndices()), - randomIndexResponsesWithoutMappingHash() - ); - Randomness.shuffle(indexResponses); - FieldCapabilitiesResponse inResponse = randomCCSResponse(indexResponses); 
- TransportVersion version = TransportVersionUtils.randomVersionBetween( - random(), - minCompactVersion, - TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_2_0) - ); - final FieldCapabilitiesResponse outResponse = copyInstance(inResponse, version); - assertThat( - outResponse.getFailures().stream().flatMap(f -> Arrays.stream(f.getIndices())).toList(), - equalTo(inResponse.getFailures().stream().flatMap(f -> Arrays.stream(f.getIndices())).toList()) - ); - final List inList = inResponse.getIndexResponses(); - final List outList = outResponse.getIndexResponses(); - assertThat(outList, hasSize(inList.size())); - for (int i = 0; i < inList.size(); i++) { - assertThat("Responses between old clusters don't have mapping hash", outList.get(i).getIndexMappingHash(), nullValue()); - assertThat(outList.get(i).getIndexName(), equalTo(inList.get(i).getIndexName())); - assertThat(outList.get(i).canMatch(), equalTo(inList.get(i).canMatch())); - Map outCap = outList.get(i).get(); - Map inCap = inList.get(i).get(); - if (version.onOrAfter(TransportVersions.V_8_0_0)) { - assertThat(outCap, equalTo(inCap)); - } else { - // Exclude metric types which was introduced in 8.0 - assertThat(outCap.keySet(), equalTo(inCap.keySet())); - for (String field : outCap.keySet()) { - assertThat(outCap.get(field).name(), equalTo(inCap.get(field).name())); - assertThat(outCap.get(field).type(), equalTo(inCap.get(field).type())); - assertThat(outCap.get(field).isSearchable(), equalTo(inCap.get(field).isSearchable())); - assertThat(outCap.get(field).isAggregatable(), equalTo(inCap.get(field).isAggregatable())); - assertThat(outCap.get(field).meta(), equalTo(inCap.get(field).meta())); - } - } - } - } } diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index 3c680d891ff13..75cc99e4c280e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -612,11 +612,7 @@ public void testStartedShardEntrySerializationWithOlderTransportVersion() throws final String allocationId = randomRealisticUnicodeOfCodepointLengthBetween(10, 100); final long primaryTerm = randomIntBetween(0, 100); final String message = randomRealisticUnicodeOfCodepointLengthBetween(10, 100); - final TransportVersion version = randomFrom( - getFirstVersion(), - getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE), - getPreviousVersion(TransportVersions.V_8_15_0) - ); + final TransportVersion version = randomFrom(getFirstVersion(), getPreviousVersion(TransportVersions.V_8_15_0)); final ShardLongFieldRange timestampRange = ShardLongFieldRangeWireTests.randomRange(); final ShardLongFieldRange eventIngestedRange = ShardLongFieldRangeWireTests.randomRange(); var startedShardEntry = new StartedShardEntry(shardId, allocationId, primaryTerm, message, timestampRange, eventIngestedRange); diff --git a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java index a91cef576df33..744a12d5ab6e0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java @@ -31,8 +31,6 @@ import static java.util.Collections.emptySet; import static org.elasticsearch.test.NodeRoles.nonRemoteClusterClientNode; 
import static org.elasticsearch.test.NodeRoles.remoteClusterClientNode; -import static org.elasticsearch.test.TransportVersionUtils.getPreviousVersion; -import static org.elasticsearch.test.TransportVersionUtils.randomVersionBetween; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -274,39 +272,5 @@ public void testDiscoveryNodeMinReadOnlyVersionSerialization() throws Exception } } } - - { - var oldVersion = randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - getPreviousVersion(TransportVersions.NODE_VERSION_INFORMATION_WITH_MIN_READ_ONLY_INDEX_VERSION) - ); - try (var out = new BytesStreamOutput()) { - out.setTransportVersion(oldVersion); - node.writeTo(out); - - try (var in = StreamInput.wrap(out.bytes().array())) { - in.setTransportVersion(oldVersion); - - var deserialized = new DiscoveryNode(in); - assertThat(deserialized.getId(), equalTo(node.getId())); - assertThat(deserialized.getAddress(), equalTo(node.getAddress())); - assertThat(deserialized.getMinIndexVersion(), equalTo(node.getMinIndexVersion())); - assertThat(deserialized.getMaxIndexVersion(), equalTo(node.getMaxIndexVersion())); - assertThat(deserialized.getMinReadOnlyIndexVersion(), equalTo(node.getMinIndexVersion())); - assertThat( - deserialized.getVersionInformation(), - equalTo( - new VersionInformation( - node.getBuildVersion(), - node.getMinIndexVersion(), - node.getMinIndexVersion(), - node.getMaxIndexVersion() - ) - ) - ); - } - } - } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java index b87ab09c530d6..4b674cf1985b2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.hamcrest.CoreMatchers; @@ -327,11 +326,7 @@ public void testBlankFieldNameBefore8_6_0() throws Exception { IndexVersions.MINIMUM_READONLY_COMPATIBLE, IndexVersions.V_8_5_0 ); - TransportVersion transportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - TransportVersions.V_8_5_0 - ); + TransportVersion transportVersion = TransportVersions.V_8_5_0; { XContentBuilder builder = mapping(b -> b.startObject(" ").field("type", randomFieldType()).endObject()); MappingParser mappingParser = createMappingParser(Settings.EMPTY, version, transportVersion); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java index 3ab5851815474..91070d5768f63 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java @@ -120,7 +120,7 @@ protected CoordinatedInferenceAction.Request mutateInstanceForVersion( 
instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.NONE); } - return new CoordinatedInferenceAction.Request( + var newInstance = new CoordinatedInferenceAction.Request( instance.getModelId(), instance.getInputs(), instance.getTaskSettings(), @@ -131,5 +131,7 @@ protected CoordinatedInferenceAction.Request mutateInstanceForVersion( instance.getHighPriority(), instance.getRequestModelType() ); + newInstance.setPrefixType(instance.getPrefixType()); + return newInstance; } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 185669a6a203b..c7632943b63b1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; @@ -3008,48 +3007,6 @@ public void testGetApiKeyMetadata() throws IOException { assertThat(e.getMessage(), containsString("authentication realm must be [_es_api_key]")); } - public void testMaybeRemoveRemoteIndicesPrivilegesWithUnsupportedVersion() { - final String apiKeyId = randomAlphaOfLengthBetween(5, 8); - final Set userRoleDescriptors = Set.copyOf( - randomList( - 2, - 5, - () -> RoleDescriptorTestHelper.builder() - .allowReservedMetadata(randomBoolean()) - .allowRemoteIndices(randomBoolean()) - .allowRestriction(randomBoolean()) - .allowRemoteClusters(false) - .build() - ) - ); - - // Selecting random unsupported version. - final TransportVersion minTransportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - TransportVersionUtils.getPreviousVersion(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY) - ); - - final Set result = ApiKeyService.maybeRemoveRemotePrivileges(userRoleDescriptors, minTransportVersion, apiKeyId); - assertThat(result.stream().anyMatch(RoleDescriptor::hasRemoteIndicesPrivileges), equalTo(false)); - assertThat(result.size(), equalTo(userRoleDescriptors.size())); - - // Roles for which warning headers are added. - final List userRoleNamesWithRemoteIndicesPrivileges = userRoleDescriptors.stream() - .filter(RoleDescriptor::hasRemoteIndicesPrivileges) - .map(RoleDescriptor::getName) - .sorted() - .toList(); - - if (false == userRoleNamesWithRemoteIndicesPrivileges.isEmpty()) { - assertWarnings( - "Removed API key's remote indices privileges from role(s) " - + userRoleNamesWithRemoteIndicesPrivileges - + ". Remote indices are not supported by all nodes in the cluster. 
" - ); - } - } - public void testMaybeRemoveRemoteClusterPrivilegesWithUnsupportedVersion() { final String apiKeyId = randomAlphaOfLengthBetween(5, 8); final Set userRoleDescriptors = Set.copyOf( @@ -3124,52 +3081,6 @@ public void testBuildDelimitedStringWithLimit() { assertThat(e.getMessage(), equalTo("limit must be positive number")); } - public void testCreateCrossClusterApiKeyMinVersionConstraint() { - final Authentication authentication = randomValueOtherThanMany( - Authentication::isApiKey, - () -> AuthenticationTestHelper.builder().build() - ); - final AbstractCreateApiKeyRequest request = mock(AbstractCreateApiKeyRequest.class); - when(request.getType()).thenReturn(ApiKey.Type.CROSS_CLUSTER); - - final ClusterService clusterService = mock(ClusterService.class); - when(clusterService.getClusterSettings()).thenReturn( - new ClusterSettings(Settings.EMPTY, Set.of(ApiKeyService.DELETE_RETENTION_PERIOD, ApiKeyService.DELETE_INTERVAL)) - ); - final ClusterState clusterState = mock(ClusterState.class); - when(clusterService.state()).thenReturn(clusterState); - final TransportVersion minTransportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - TransportVersionUtils.getPreviousVersion(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY) - ); - when(clusterState.getMinTransportVersion()).thenReturn(minTransportVersion); - - final ApiKeyService service = new ApiKeyService( - Settings.EMPTY, - clock, - client, - securityIndex, - clusterService, - cacheInvalidatorRegistry, - threadPool, - MeterRegistry.NOOP - ); - - final PlainActionFuture future = new PlainActionFuture<>(); - service.createApiKey(authentication, request, Set.of(), future); - final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, future::actionGet); - - assertThat( - e.getMessage(), - containsString( - "all nodes must have version [" - + TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY.toReleaseVersion() - + "] or higher to support creating cross cluster API keys" - ) - ); - } - public void testAuthenticationFailureWithApiKeyTypeMismatch() throws Exception { final Settings settings = Settings.builder().put(XPackSettings.API_KEY_SERVICE_ENABLED_SETTING.getKey(), true).build(); final ApiKeyService service = spy(createApiKeyService(settings)); @@ -3268,73 +3179,6 @@ public void testValidateApiKeyTypeAndExpiration() throws IOException { assertThat(auth3.getMetadata(), hasEntry(API_KEY_TYPE_KEY, apiKeyDoc3.type.value())); } - public void testCreateOrUpdateApiKeyWithWorkflowsRestrictionForUnsupportedVersion() { - final Authentication authentication = AuthenticationTestHelper.builder().build(); - final ClusterService clusterService = mock(ClusterService.class); - when(clusterService.getClusterSettings()).thenReturn( - new ClusterSettings(Settings.EMPTY, Set.of(ApiKeyService.DELETE_RETENTION_PERIOD, ApiKeyService.DELETE_INTERVAL)) - ); - final ClusterState clusterState = mock(ClusterState.class); - when(clusterService.state()).thenReturn(clusterState); - final TransportVersion minTransportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - TransportVersionUtils.getPreviousVersion(WORKFLOWS_RESTRICTION_VERSION) - ); - when(clusterState.getMinTransportVersion()).thenReturn(minTransportVersion); - - final ApiKeyService service = new ApiKeyService( - Settings.EMPTY, - clock, - client, - securityIndex, - clusterService, - cacheInvalidatorRegistry, - threadPool, - MeterRegistry.NOOP - ); - - final List 
roleDescriptorsWithWorkflowsRestriction = randomList( - 1, - 3, - () -> randomRoleDescriptorWithWorkflowsRestriction() - ); - - final AbstractCreateApiKeyRequest createRequest = mock(AbstractCreateApiKeyRequest.class); - when(createRequest.getType()).thenReturn(ApiKey.Type.REST); - when(createRequest.getRoleDescriptors()).thenReturn(roleDescriptorsWithWorkflowsRestriction); - - final PlainActionFuture createFuture = new PlainActionFuture<>(); - service.createApiKey(authentication, createRequest, Set.of(), createFuture); - final IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, createFuture::actionGet); - assertThat( - e1.getMessage(), - containsString( - "all nodes must have version [" - + WORKFLOWS_RESTRICTION_VERSION.toReleaseVersion() - + "] or higher to support restrictions for API keys" - ) - ); - - final BulkUpdateApiKeyRequest updateRequest = new BulkUpdateApiKeyRequest( - randomList(1, 3, () -> randomAlphaOfLengthBetween(3, 5)), - roleDescriptorsWithWorkflowsRestriction, - Map.of(), - ApiKeyTests.randomFutureExpirationTime() - ); - final PlainActionFuture updateFuture = new PlainActionFuture<>(); - service.updateApiKeys(authentication, updateRequest, Set.of(), updateFuture); - final IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, createFuture::actionGet); - assertThat( - e2.getMessage(), - containsString( - "all nodes must have version [" - + WORKFLOWS_RESTRICTION_VERSION.toReleaseVersion() - + "] or higher to support restrictions for API keys" - ) - ); - } - public void testValidateOwnerUserRoleDescriptorsWithWorkflowsRestriction() { final Authentication authentication = AuthenticationTestHelper.builder().build(); final ClusterService clusterService = mock(ClusterService.class); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java index aed39b24f217d..31c6d6f0c2341 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.service.ClusterService; @@ -17,7 +16,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.action.apikey.ApiKey; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -36,7 +34,6 @@ import java.io.IOException; import java.util.concurrent.ExecutionException; -import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -75,55 +72,6 @@ public void init() throws Exception { ); } - public void 
testAuthenticateThrowsOnUnsupportedMinVersions() throws IOException { - when(clusterService.state().getMinTransportVersion()).thenReturn( - TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - TransportVersionUtils.getPreviousVersion(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY) - ) - ); - final var authcContext = mock(Authenticator.Context.class, Mockito.RETURNS_DEEP_STUBS); - when(authcContext.getThreadContext()).thenReturn(threadContext); - final var crossClusterAccessHeaders = new CrossClusterAccessHeaders( - CrossClusterAccessHeadersTests.randomEncodedApiKeyHeader(), - AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo() - ); - crossClusterAccessHeaders.writeToContext(threadContext); - final AuthenticationService.AuditableRequest auditableRequest = mock(AuthenticationService.AuditableRequest.class); - when(authcContext.getRequest()).thenReturn(auditableRequest); - when(auditableRequest.exceptionProcessingRequest(any(), any())).thenAnswer( - i -> new ElasticsearchSecurityException("potato", (Exception) i.getArguments()[0]) - ); - doAnswer( - invocationOnMock -> new Authenticator.Context( - threadContext, - auditableRequest, - mock(Realms.class), - (AuthenticationToken) invocationOnMock.getArguments()[2] - ) - ).when(authenticationService).newContext(anyString(), any(), any()); - - final PlainActionFuture future = new PlainActionFuture<>(); - crossClusterAccessAuthenticationService.authenticate("action", mock(TransportRequest.class), future); - final ExecutionException actual = expectThrows(ExecutionException.class, future::get); - - assertThat(actual.getCause().getCause(), instanceOf(IllegalArgumentException.class)); - assertThat( - actual.getCause().getCause().getMessage(), - equalTo( - "all nodes must have version [" - + TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY.toReleaseVersion() - + "] or higher to support cross cluster requests through the dedicated remote cluster port" - ) - ); - verify(auditableRequest).exceptionProcessingRequest( - any(Exception.class), - credentialsArgMatches(crossClusterAccessHeaders.credentials()) - ); - verifyNoMoreInteractions(auditableRequest); - } - public void testAuthenticationSuccessOnSuccessfulAuthentication() throws IOException, ExecutionException, InterruptedException { final var crossClusterAccessHeaders = new CrossClusterAccessHeaders( CrossClusterAccessHeadersTests.randomEncodedApiKeyHeader(), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index 2b8a77d63588a..89f32e59f6bad 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.bulk.BulkRequest; @@ -51,7 +50,6 @@ import org.elasticsearch.license.TestUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TransportVersionUtils; import 
org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -64,8 +62,6 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.RoleRestrictionTests; -import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; -import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; @@ -85,20 +81,17 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_FORMAT_SETTING; import static org.elasticsearch.indices.SystemIndexDescriptor.VERSION_META_KEY; -import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; import static org.elasticsearch.xpack.core.security.SecurityField.DOCUMENT_LEVEL_SECURITY_FEATURE; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomClusterPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteIndicesPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRoleDescriptorMetadata; -import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; @@ -465,101 +458,6 @@ public void testPutOfRoleWithFlsDlsUnlicensed() throws IOException { assertThat(e.getMessage(), containsString("field and document level security")); } - public void testPutRoleWithRemotePrivsUnsupportedMinNodeVersion() throws IOException { - // Init for validation - new ReservedRolesStore(Set.of("superuser")); - enum TEST_MODE { - REMOTE_INDICES_PRIVS, - REMOTE_CLUSTER_PRIVS, - REMOTE_INDICES_AND_CLUSTER_PRIVS - } - for (TEST_MODE testMode : TEST_MODE.values()) { - // default to both remote indices and cluster privileges and use the switch below to remove one or the other - TransportVersion transportVersionBeforeAdvancedRemoteClusterSecurity = TransportVersionUtils.getPreviousVersion( - TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY - ); - RoleDescriptor.RemoteIndicesPrivileges[] remoteIndicesPrivileges = new RoleDescriptor.RemoteIndicesPrivileges[] { - RoleDescriptor.RemoteIndicesPrivileges.builder("remote").privileges("read").indices("index").build() }; - RemoteClusterPermissions remoteClusterPermissions = new RemoteClusterPermissions().addGroup( - new RemoteClusterPermissionGroup( - RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), - new String[] { "remote" } - ) - ); - switch (testMode) { - case REMOTE_CLUSTER_PRIVS -> { - transportVersionBeforeAdvancedRemoteClusterSecurity = 
TransportVersionUtils.getPreviousVersion( - ROLE_REMOTE_CLUSTER_PRIVS - ); - remoteIndicesPrivileges = null; - } - case REMOTE_INDICES_PRIVS -> remoteClusterPermissions = null; - } - final Client client = mock(Client.class); - - final TransportVersion minTransportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.MINIMUM_COMPATIBLE, - transportVersionBeforeAdvancedRemoteClusterSecurity - ); - final ClusterService clusterService = mockClusterServiceWithMinNodeVersion(minTransportVersion); - - final XPackLicenseState licenseState = mock(XPackLicenseState.class); - - final SecuritySystemIndices systemIndices = new SecuritySystemIndices(clusterService.getSettings()); - final FeatureService featureService = mock(FeatureService.class); - systemIndices.init(client, featureService, clusterService); - final SecurityIndexManager securityIndex = systemIndices.getMainIndexManager(); - - final NativeRolesStore rolesStore = new NativeRolesStore( - Settings.EMPTY, - client, - licenseState, - securityIndex, - clusterService, - mock(FeatureService.class), - mock(ReservedRoleNameChecker.class), - mock(NamedXContentRegistry.class) - ); - // setup the roles store so the security index exists - securityIndex.clusterChanged(new ClusterChangedEvent("source", getClusterStateWithSecurityIndex(), getEmptyClusterState())); - - RoleDescriptor remoteIndicesRole = new RoleDescriptor( - "remote", - null, - null, - null, - null, - null, - null, - null, - remoteIndicesPrivileges, - remoteClusterPermissions, - null, - null - ); - PlainActionFuture future = new PlainActionFuture<>(); - putRole(rolesStore, remoteIndicesRole, future); - IllegalStateException e = expectThrows( - IllegalStateException.class, - String.format(Locale.ROOT, "expected IllegalStateException, but not thrown for mode [%s]", testMode), - future::actionGet - ); - assertThat( - e.getMessage(), - containsString( - "all nodes must have version [" - + (TEST_MODE.REMOTE_CLUSTER_PRIVS.equals(testMode) - ? ROLE_REMOTE_CLUSTER_PRIVS.toReleaseVersion() - : TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY.toReleaseVersion()) - + "] or higher to support remote " - + (remoteIndicesPrivileges != null ? "indices" : "cluster") - + " privileges" - ) - ); - } - } - public void testGetRoleWhenDisabled() throws Exception { final Settings settings = Settings.builder().put(NativeRolesStore.NATIVE_ROLES_ENABLED, "false").build(); NativeRolesStore store = createRoleStoreForTest(settings); From 85f5222d6927b13914e75465a9c6b3d765a527e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 3 Feb 2025 10:58:41 +0100 Subject: [PATCH 375/383] Revert "WIP (#121463)" This reverts commit fd1bd79b85d46ccd34931dc28ce9c9c4a50f949f. PR was merged by a mistake, still needs to get reviewed. 
--- .../test/AbstractXContentTestCase.java | 19 +-------- .../test/AbstractXContentTestCaseTests.java | 40 ------------------- 2 files changed, 2 insertions(+), 57 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java index 24b853c8f6ddb..cc35f63d289eb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java @@ -145,21 +145,8 @@ private XContentTester( public void test() throws IOException { for (int runs = 0; runs < numberOfTestRuns; runs++) { XContentType xContentType = randomFrom(XContentType.values()).canonical(); - T testInstance = null; + T testInstance = instanceSupplier.apply(xContentType); try { - if (xContentType.equals(XContentType.YAML)) { - testInstance = randomValueOtherThanMany(instance -> { - // unicode character U+0085 (NEXT LINE (NEL)) doesn't survive YAML round trip tests (see #97716) - // get a new random instance if we detect this character in the xContent output - try { - return toXContent.apply(instance, xContentType).utf8ToString().contains("\u0085"); - } catch (IOException e) { - throw new RuntimeException(e); - } - }, () -> instanceSupplier.apply(xContentType)); - } else { - testInstance = instanceSupplier.apply(xContentType); - } BytesReference originalXContent = toXContent.apply(testInstance, xContentType); BytesReference shuffledContent = insertRandomFieldsAndShuffle( originalXContent, @@ -186,9 +173,7 @@ public void test() throws IOException { dispose.accept(parsed); } } finally { - if (testInstance != null) { - dispose.accept(testInstance); - } + dispose.accept(testInstance); } } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java b/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java index e3cc3bba94a5c..b8f4dcb399ec7 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java @@ -12,13 +12,11 @@ import com.carrotsearch.randomizedtesting.RandomizedContext; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; -import java.io.IOException; import java.util.Map; import static org.hamcrest.Matchers.equalTo; @@ -51,42 +49,4 @@ public void testInsertRandomFieldsAndShuffle() throws Exception { assertThat(mapOrdered.keySet().iterator().next(), not(equalTo("field"))); } } - - private record TestToXContent(String field, String value) implements ToXContentFragment { - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.field(field, value); - } - } - - public void testYamlXContentRoundtripSanitization() throws Exception { - var test = new AbstractXContentTestCase() { - - @Override - protected TestToXContent createTestInstance() { - // we need to randomly create both a "problematic" and an okay version in order to ensure that the sanitization code - // can draw at least one okay version if polled often enough - return randomBoolean() ? 
new TestToXContent("a\u0085b", "def") : new TestToXContent("a b", "def"); - } - - @Override - protected TestToXContent doParseInstance(XContentParser parser) throws IOException { - assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); - assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); - String name = parser.currentName(); - assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); - String value = parser.text(); - assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); - return new TestToXContent(name, value); - }; - - @Override - protected boolean supportsUnknownFields() { - return false; - } - }; - // testFromXContent runs 20 repetitions, enough to hit a YAML xcontent version very likely - test.testFromXContent(); - } } From 106b66682ea6fcaddb8add39179932ab84a4f735 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 3 Feb 2025 12:05:30 +0100 Subject: [PATCH 376/383] [Test] Remove ASYNC translog durability in N-2 bwc upgrade tests (#121278) When adding support for upgrading closed indices in N-2 version, I randomized the Translog.Durability setting of the closed index with the aim to test the 2 phases closing process. This caused at least 1 test failure on Windows with the index being closed and the cluster upgraded before the synchronization of the translog had a chance to be executed. I think this cause the engine to be reset on the replica that is promoted as a primary, causing the loss of the operations that were not yet persisted. Closes #121257 --- muted-tests.yml | 2 -- .../FullClusterRestartLuceneIndexCompatibilityIT.java | 3 --- .../RollingUpgradeLuceneIndexCompatibilityTestCase.java | 8 +------- 3 files changed, 1 insertion(+), 12 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 094233d48b88b..9c3d121f97c58 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -332,8 +332,6 @@ tests: - class: org.elasticsearch.upgrades.VectorSearchIT method: testBBQVectorSearch {upgradedNodes=0} issue: https://github.com/elastic/elasticsearch/issues/121253 -- class: org.elasticsearch.lucene.FullClusterRestartLuceneIndexCompatibilityIT - issue: https://github.com/elastic/elasticsearch/issues/121257 - class: org.elasticsearch.upgrades.VectorSearchIT method: testBBQVectorSearch {upgradedNodes=1} issue: https://github.com/elastic/elasticsearch/issues/121271 diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java index f37fca16a4b78..501a46deca9d1 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/FullClusterRestartLuceneIndexCompatibilityIT.java @@ -11,8 +11,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.translog.Translog; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.test.cluster.util.Version; @@ -184,7 +182,6 @@ public void testClosedIndexUpgrade() throws Exception { Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2)) - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), 
randomFrom(Translog.Durability.values())) .build() ); indexDocs(index, numDocs); diff --git a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java index 12374cf623a8c..7b9e2d64bbae4 100644 --- a/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java +++ b/qa/lucene-index-compatibility/src/javaRestTest/java/org/elasticsearch/lucene/RollingUpgradeLuceneIndexCompatibilityTestCase.java @@ -13,8 +13,6 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.translog.Translog; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.test.cluster.util.Version; @@ -189,11 +187,7 @@ public void testClosedIndexUpgrade() throws Exception { createIndex( client(), index, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), randomFrom(Translog.Durability.values())) - .build() + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() ); indexDocs(index, numDocs); return; From 68c8fa6b38e9242be010204cae5fe4250a22f217 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Mon, 3 Feb 2025 11:10:58 +0000 Subject: [PATCH 377/383] Update transport and index version id numbers to S_PP (#121380) We need more patch numbers for longer-lived branches --- docs/internal/Versioning.md | 12 +- .../org/elasticsearch/TransportVersion.java | 20 +- .../org/elasticsearch/TransportVersions.java | 174 +++++++++--------- .../elasticsearch/index/IndexVersions.java | 82 ++++----- .../elasticsearch/TransportVersionTests.java | 31 +++- 5 files changed, 166 insertions(+), 153 deletions(-) diff --git a/docs/internal/Versioning.md b/docs/internal/Versioning.md index f0f730f618259..474278e873922 100644 --- a/docs/internal/Versioning.md +++ b/docs/internal/Versioning.md @@ -35,19 +35,19 @@ Every change to the transport protocol is represented by a new transport version higher than all previous transport versions, which then becomes the highest version recognized by that build of Elasticsearch. The version ids are stored as constants in the `TransportVersions` class. -Each id has a standard pattern `M_NNN_SS_P`, where: +Each id has a standard pattern `M_NNN_S_PP`, where: * `M` is the major version * `NNN` is an incrementing id -* `SS` is used in subsidiary repos amending the default transport protocol -* `P` is used for patches and backports +* `S` is used in subsidiary repos amending the default transport protocol +* `PP` is used for patches and backports When you make a change to the serialization form of any object, you need to create a new sequential constant in `TransportVersions`, introduced in the same PR that adds the change, that increments the `NNN` component from the previous highest version, with other components set to zero. -For example, if the previous version number is `8_413_00_1`, -the next version number should be `8_414_00_0`. +For example, if the previous version number is `8_413_0_01`, +the next version number should be `8_414_0_00`. 
Once you have defined your constant, you then need to use it in serialization code. If the transport version is at or above the new id, @@ -166,7 +166,7 @@ also has that change, and knows about the patch backport ids and what they mean. Index version is a single incrementing version number for the index data format, metadata, and associated mappings. It is declared the same way as the -transport version - with the pattern `M_NNN_SS_P`, for the major version, version id, +transport version - with the pattern `M_NNN_S_PP`, for the major version, version id, subsidiary version id, and patch number respectively. Index version is stored in index metadata when an index is created, diff --git a/server/src/main/java/org/elasticsearch/TransportVersion.java b/server/src/main/java/org/elasticsearch/TransportVersion.java index 64d1c0535a561..032b10f0a30d6 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersion.java +++ b/server/src/main/java/org/elasticsearch/TransportVersion.java @@ -130,20 +130,20 @@ public static TransportVersion fromString(String str) { * When a patch version of an existing transport version is created, {@code transportVersion.isPatchFrom(patchVersion)} * will match any transport version at or above {@code patchVersion} that is also of the same base version. *
      * <p>
-     * For example, {@code version.isPatchFrom(8_800_00_4)} will return the following for the given {@code version}:
+     * For example, {@code version.isPatchFrom(8_800_0_04)} will return the following for the given {@code version}:
      * <ul>
-     *     <li>{@code 8_799_00_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_799_00_9.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_800_00_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_800_00_3.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_800_00_4.isPatchFrom(8_800_00_4)}: {@code true}</li>
-     *     <li>{@code 8_800_00_9.isPatchFrom(8_800_00_4)}: {@code true}</li>
-     *     <li>{@code 8_800_01_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
-     *     <li>{@code 8_801_00_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
+     *     <li>{@code 8_799_0_00.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_799_0_09.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_800_0_00.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_800_0_03.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_800_0_04.isPatchFrom(8_800_0_04)}: {@code true}</li>
+     *     <li>{@code 8_800_0_49.isPatchFrom(8_800_0_04)}: {@code true}</li>
+     *     <li>{@code 8_800_1_00.isPatchFrom(8_800_0_04)}: {@code false}</li>
+     *     <li>{@code 8_801_0_00.isPatchFrom(8_800_0_04)}: {@code false}</li>
      * </ul>
*/ public boolean isPatchFrom(TransportVersion version) { - return onOrAfter(version) && id < version.id + 10 - (version.id % 10); + return onOrAfter(version) && id < version.id + 100 - (version.id % 100); } /** diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 30d0301bf9517..efcebbec31c92 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -90,89 +90,89 @@ static TransportVersion def(int id) { */ public static final TransportVersion V_8_9_X = def(8_500_020); public static final TransportVersion V_8_10_X = def(8_500_061); - public static final TransportVersion V_8_11_X = def(8_512_00_1); - public static final TransportVersion V_8_12_0 = def(8_560_00_0); - public static final TransportVersion V_8_12_1 = def(8_560_00_1); - public static final TransportVersion V_8_13_0 = def(8_595_00_0); - public static final TransportVersion V_8_13_4 = def(8_595_00_1); - public static final TransportVersion V_8_14_0 = def(8_636_00_1); - public static final TransportVersion V_8_15_0 = def(8_702_00_2); - public static final TransportVersion V_8_15_2 = def(8_702_00_3); - public static final TransportVersion V_8_16_0 = def(8_772_00_1); - public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO_BACKPORT_8_16 = def(8_772_00_2); - public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE_BACKPORT_8_16 = def(8_772_00_3); - public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16 = def(8_772_00_4); - public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_00_0); - public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_00_0); - public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_00_0); - public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_00_0); - public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_00_0); - public static final TransportVersion INTRODUCE_ALL_APPLICABLE_SELECTOR = def(8_778_00_0); - public static final TransportVersion INDEX_MODE_LOOKUP = def(8_779_00_0); - public static final TransportVersion INDEX_REQUEST_REMOVE_METERING = def(8_780_00_0); - public static final TransportVersion CPU_STAT_STRING_PARSING = def(8_781_00_0); - public static final TransportVersion QUERY_RULES_RETRIEVER = def(8_782_00_0); - public static final TransportVersion ESQL_CCS_EXEC_INFO_WITH_FAILURES = def(8_783_00_0); - public static final TransportVersion LOGSDB_TELEMETRY = def(8_784_00_0); - public static final TransportVersion LOGSDB_TELEMETRY_STATS = def(8_785_00_0); - public static final TransportVersion KQL_QUERY_ADDED = def(8_786_00_0); - public static final TransportVersion ROLE_MONITOR_STATS = def(8_787_00_0); - public static final TransportVersion DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK = def(8_788_00_0); - public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO = def(8_789_00_0); - public static final TransportVersion VERTEX_AI_INPUT_TYPE_ADDED = def(8_790_00_0); - public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE = def(8_791_00_0); - public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES = def(8_792_00_0); - public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_00_0); - public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = 
def(8_794_00_0); - public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_00_0); - public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_00_0); - public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_00_0); - public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17 = def(8_797_00_1); - public static final TransportVersion SOURCE_MODE_TELEMETRY_FIX_8_17 = def(8_797_00_2); - public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_00_0); - public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_00_0); - public static final TransportVersion ESQL_REMOVE_NODE_LEVEL_PLAN = def(8_800_00_0); - public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE = def(8_801_00_0); - public static final TransportVersion SOURCE_MODE_TELEMETRY = def(8_802_00_0); - public static final TransportVersion NEW_REFRESH_CLUSTER_BLOCK = def(8_803_00_0); - public static final TransportVersion RETRIES_AND_OPERATIONS_IN_BLOBSTORE_STATS = def(8_804_00_0); - public static final TransportVersion ADD_DATA_STREAM_OPTIONS_TO_TEMPLATES = def(8_805_00_0); - public static final TransportVersion KNN_QUERY_RESCORE_OVERSAMPLE = def(8_806_00_0); - public static final TransportVersion SEMANTIC_QUERY_LENIENT = def(8_807_00_0); - public static final TransportVersion ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS = def(8_808_00_0); - public static final TransportVersion EQL_ALLOW_PARTIAL_SEARCH_RESULTS = def(8_809_00_0); - public static final TransportVersion NODE_VERSION_INFORMATION_WITH_MIN_READ_ONLY_INDEX_VERSION = def(8_810_00_0); - public static final TransportVersion ERROR_TRACE_IN_TRANSPORT_HEADER = def(8_811_00_0); - public static final TransportVersion FAILURE_STORE_ENABLED_BY_CLUSTER_SETTING = def(8_812_00_0); - public static final TransportVersion SIMULATE_IGNORED_FIELDS = def(8_813_00_0); - public static final TransportVersion TRANSFORMS_UPGRADE_MODE = def(8_814_00_0); - public static final TransportVersion NODE_SHUTDOWN_EPHEMERAL_ID_ADDED = def(8_815_00_0); - public static final TransportVersion ESQL_CCS_TELEMETRY_STATS = def(8_816_00_0); - public static final TransportVersion TEXT_EMBEDDING_QUERY_VECTOR_BUILDER_INFER_MODEL_ID = def(8_817_00_0); - public static final TransportVersion ESQL_ENABLE_NODE_LEVEL_REDUCTION = def(8_818_00_0); - public static final TransportVersion JINA_AI_INTEGRATION_ADDED = def(8_819_00_0); - public static final TransportVersion TRACK_INDEX_FAILED_DUE_TO_VERSION_CONFLICT_METRIC = def(8_820_00_0); - public static final TransportVersion REPLACE_FAILURE_STORE_OPTIONS_WITH_SELECTOR_SYNTAX = def(8_821_00_0); - public static final TransportVersion ELASTIC_INFERENCE_SERVICE_UNIFIED_CHAT_COMPLETIONS_INTEGRATION = def(8_822_00_0); - public static final TransportVersion KQL_QUERY_TECH_PREVIEW = def(8_823_00_0); - public static final TransportVersion ESQL_PROFILE_ROWS_PROCESSED = def(8_824_00_0); - public static final TransportVersion BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_825_00_0); - public static final TransportVersion REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_826_00_0); - public static final TransportVersion ESQL_SKIP_ES_INDEX_SERIALIZATION = def(8_827_00_0); - public static final TransportVersion ADD_INDEX_BLOCK_TWO_PHASE = def(8_828_00_0); - public static final TransportVersion RESOLVE_CLUSTER_NO_INDEX_EXPRESSION = def(8_829_00_0); - public static final TransportVersion ML_ROLLOVER_LEGACY_INDICES = def(8_830_00_0); - public static final TransportVersion 
ADD_INCLUDE_FAILURE_INDICES_OPTION = def(8_831_00_0); - public static final TransportVersion ESQL_RESPONSE_PARTIAL = def(8_832_00_0); - public static final TransportVersion RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN = def(8_833_00_0); - public static final TransportVersion ILM_ADD_SEARCHABLE_SNAPSHOT_ADD_REPLICATE_FOR = def(8_834_00_0); - public static final TransportVersion INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR = def(8_835_00_0); - public static final TransportVersion RESOURCE_DEPRECATION_CHECKS = def(8_836_00_0); - public static final TransportVersion LINEAR_RETRIEVER_SUPPORT = def(8_837_00_0); - public static final TransportVersion TIMEOUT_GET_PARAM_FOR_RESOLVE_CLUSTER = def(8_838_00_0); - public static final TransportVersion INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING = def(8_839_00_0); - public static final TransportVersion ML_INFERENCE_IBM_WATSONX_RERANK_ADDED = def(8_840_00_0); - public static final TransportVersion ELASTICSEARCH_9_0 = def(9_000_00_0); + public static final TransportVersion V_8_11_X = def(8_512_0_01); + public static final TransportVersion V_8_12_0 = def(8_560_0_00); + public static final TransportVersion V_8_12_1 = def(8_560_0_01); + public static final TransportVersion V_8_13_0 = def(8_595_0_00); + public static final TransportVersion V_8_13_4 = def(8_595_0_01); + public static final TransportVersion V_8_14_0 = def(8_636_0_01); + public static final TransportVersion V_8_15_0 = def(8_702_0_02); + public static final TransportVersion V_8_15_2 = def(8_702_0_03); + public static final TransportVersion V_8_16_0 = def(8_772_0_01); + public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO_BACKPORT_8_16 = def(8_772_0_02); + public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE_BACKPORT_8_16 = def(8_772_0_03); + public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16 = def(8_772_0_04); + public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_0_00); + public static final TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_0_00); + public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_0_00); + public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_0_00); + public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_0_00); + public static final TransportVersion INTRODUCE_ALL_APPLICABLE_SELECTOR = def(8_778_0_00); + public static final TransportVersion INDEX_MODE_LOOKUP = def(8_779_0_00); + public static final TransportVersion INDEX_REQUEST_REMOVE_METERING = def(8_780_0_00); + public static final TransportVersion CPU_STAT_STRING_PARSING = def(8_781_0_00); + public static final TransportVersion QUERY_RULES_RETRIEVER = def(8_782_0_00); + public static final TransportVersion ESQL_CCS_EXEC_INFO_WITH_FAILURES = def(8_783_0_00); + public static final TransportVersion LOGSDB_TELEMETRY = def(8_784_0_00); + public static final TransportVersion LOGSDB_TELEMETRY_STATS = def(8_785_0_00); + public static final TransportVersion KQL_QUERY_ADDED = def(8_786_0_00); + public static final TransportVersion ROLE_MONITOR_STATS = def(8_787_0_00); + public static final TransportVersion DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK = def(8_788_0_00); + public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO = def(8_789_0_00); + public static final TransportVersion VERTEX_AI_INPUT_TYPE_ADDED = def(8_790_0_00); + public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE = def(8_791_0_00); + public static 
final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES = def(8_792_0_00); + public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_0_00); + public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = def(8_794_0_00); + public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_0_00); + public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_0_00); + public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_0_00); + public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17 = def(8_797_0_01); + public static final TransportVersion SOURCE_MODE_TELEMETRY_FIX_8_17 = def(8_797_0_02); + public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_0_00); + public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_0_00); + public static final TransportVersion ESQL_REMOVE_NODE_LEVEL_PLAN = def(8_800_0_00); + public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE = def(8_801_0_00); + public static final TransportVersion SOURCE_MODE_TELEMETRY = def(8_802_0_00); + public static final TransportVersion NEW_REFRESH_CLUSTER_BLOCK = def(8_803_0_00); + public static final TransportVersion RETRIES_AND_OPERATIONS_IN_BLOBSTORE_STATS = def(8_804_0_00); + public static final TransportVersion ADD_DATA_STREAM_OPTIONS_TO_TEMPLATES = def(8_805_0_00); + public static final TransportVersion KNN_QUERY_RESCORE_OVERSAMPLE = def(8_806_0_00); + public static final TransportVersion SEMANTIC_QUERY_LENIENT = def(8_807_0_00); + public static final TransportVersion ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS = def(8_808_0_00); + public static final TransportVersion EQL_ALLOW_PARTIAL_SEARCH_RESULTS = def(8_809_0_00); + public static final TransportVersion NODE_VERSION_INFORMATION_WITH_MIN_READ_ONLY_INDEX_VERSION = def(8_810_0_00); + public static final TransportVersion ERROR_TRACE_IN_TRANSPORT_HEADER = def(8_811_0_00); + public static final TransportVersion FAILURE_STORE_ENABLED_BY_CLUSTER_SETTING = def(8_812_0_00); + public static final TransportVersion SIMULATE_IGNORED_FIELDS = def(8_813_0_00); + public static final TransportVersion TRANSFORMS_UPGRADE_MODE = def(8_814_0_00); + public static final TransportVersion NODE_SHUTDOWN_EPHEMERAL_ID_ADDED = def(8_815_0_00); + public static final TransportVersion ESQL_CCS_TELEMETRY_STATS = def(8_816_0_00); + public static final TransportVersion TEXT_EMBEDDING_QUERY_VECTOR_BUILDER_INFER_MODEL_ID = def(8_817_0_00); + public static final TransportVersion ESQL_ENABLE_NODE_LEVEL_REDUCTION = def(8_818_0_00); + public static final TransportVersion JINA_AI_INTEGRATION_ADDED = def(8_819_0_00); + public static final TransportVersion TRACK_INDEX_FAILED_DUE_TO_VERSION_CONFLICT_METRIC = def(8_820_0_00); + public static final TransportVersion REPLACE_FAILURE_STORE_OPTIONS_WITH_SELECTOR_SYNTAX = def(8_821_0_00); + public static final TransportVersion ELASTIC_INFERENCE_SERVICE_UNIFIED_CHAT_COMPLETIONS_INTEGRATION = def(8_822_0_00); + public static final TransportVersion KQL_QUERY_TECH_PREVIEW = def(8_823_0_00); + public static final TransportVersion ESQL_PROFILE_ROWS_PROCESSED = def(8_824_0_00); + public static final TransportVersion BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_825_0_00); + public static final TransportVersion REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_826_0_00); + public static final TransportVersion ESQL_SKIP_ES_INDEX_SERIALIZATION = def(8_827_0_00); + public static final TransportVersion 
ADD_INDEX_BLOCK_TWO_PHASE = def(8_828_0_00); + public static final TransportVersion RESOLVE_CLUSTER_NO_INDEX_EXPRESSION = def(8_829_0_00); + public static final TransportVersion ML_ROLLOVER_LEGACY_INDICES = def(8_830_0_00); + public static final TransportVersion ADD_INCLUDE_FAILURE_INDICES_OPTION = def(8_831_0_00); + public static final TransportVersion ESQL_RESPONSE_PARTIAL = def(8_832_0_00); + public static final TransportVersion RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN = def(8_833_0_00); + public static final TransportVersion ILM_ADD_SEARCHABLE_SNAPSHOT_ADD_REPLICATE_FOR = def(8_834_0_00); + public static final TransportVersion INGEST_REQUEST_INCLUDE_SOURCE_ON_ERROR = def(8_835_0_00); + public static final TransportVersion RESOURCE_DEPRECATION_CHECKS = def(8_836_0_00); + public static final TransportVersion LINEAR_RETRIEVER_SUPPORT = def(8_837_0_00); + public static final TransportVersion TIMEOUT_GET_PARAM_FOR_RESOLVE_CLUSTER = def(8_838_0_00); + public static final TransportVersion INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING = def(8_839_0_00); + public static final TransportVersion ML_INFERENCE_IBM_WATSONX_RERANK_ADDED = def(8_840_0_00); + public static final TransportVersion ELASTICSEARCH_9_0 = def(9_000_0_00); /* * STOP! READ THIS FIRST! No, really, @@ -189,17 +189,17 @@ static TransportVersion def(int id) { * To add a new transport version, add a new constant at the bottom of the list, above this comment. Don't add other lines, * comments, etc. The version id has the following layout: * - * M_NNN_SS_P + * M_NNN_S_PP * * M - The major version of Elasticsearch * NNN - The server version part - * SS - The serverless version part. It should always be 00 here, it is used by serverless only. - * P - The patch version part + * S - The subsidiary version part. It should always be 0 here, it is only used in subsidiary repositories. + * PP - The patch version part * * To determine the id of the next TransportVersion constant, do the following: * - Use the same major version, unless bumping majors * - Bump the server version part by 1, unless creating a patch version - * - Leave the serverless part as 00 + * - Leave the subsidiary part as 0 * - Bump the patch part if creating a patch version * * If a patch version is created, it should be placed sorted among the other existing constants. 
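As a side note, a minimal standalone sketch of the widened patch-range check shown in the `isPatchFrom` hunk above; plain ints stand in for `TransportVersion` objects, and the class and method here are illustrative only, not the real implementation:

```java
// Standalone illustration of the updated patch-range arithmetic; the real check
// lives in TransportVersion#isPatchFrom and operates on TransportVersion objects.
class IsPatchFromSketch {
    static boolean isPatchFrom(int id, int patchId) {
        // Matches ids at or above patchId that share the same M_NNN_S base,
        // i.e. anything below the next non-patch id (base + 100).
        return id >= patchId && id < patchId + 100 - (patchId % 100);
    }

    public static void main(String[] args) {
        int patch = 8_800_0_04;
        System.out.println(isPatchFrom(8_800_0_03, patch)); // false: older patch of the same base
        System.out.println(isPatchFrom(8_800_0_49, patch)); // true: later patch of the same base
        System.out.println(isPatchFrom(8_800_1_00, patch)); // false: different subsidiary version
        System.out.println(isPatchFrom(8_801_0_00, patch)); // false: next server version
    }
}
```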
diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 2470bfb7e5c56..f489c80185ef9 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -108,43 +108,43 @@ private static Version parseUnchecked(String version) { public static final IndexVersion UPGRADE_LUCENE_9_9_1 = def(8_500_008, Version.LUCENE_9_9_1); public static final IndexVersion ES_VERSION_8_12_1 = def(8_500_009, Version.LUCENE_9_9_1); public static final IndexVersion UPGRADE_8_12_1_LUCENE_9_9_2 = def(8_500_010, Version.LUCENE_9_9_2); - public static final IndexVersion NEW_INDEXVERSION_FORMAT = def(8_501_00_0, Version.LUCENE_9_9_1); - public static final IndexVersion UPGRADE_LUCENE_9_9_2 = def(8_502_00_0, Version.LUCENE_9_9_2); - public static final IndexVersion TIME_SERIES_ID_HASHING = def(8_502_00_1, Version.LUCENE_9_9_2); - public static final IndexVersion UPGRADE_TO_LUCENE_9_10 = def(8_503_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion TIME_SERIES_ROUTING_HASH_IN_ID = def(8_504_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion DEFAULT_DENSE_VECTOR_TO_INT8_HNSW = def(8_505_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion DOC_VALUES_FOR_IGNORED_META_FIELD = def(8_505_00_1, Version.LUCENE_9_10_0); - public static final IndexVersion SOURCE_MAPPER_LOSSY_PARAMS_CHECK = def(8_506_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion SEMANTIC_TEXT_FIELD_TYPE = def(8_507_00_0, Version.LUCENE_9_10_0); - public static final IndexVersion UPGRADE_TO_LUCENE_9_11 = def(8_508_00_0, Version.LUCENE_9_11_0); - public static final IndexVersion UNIQUE_TOKEN_FILTER_POS_FIX = def(8_509_00_0, Version.LUCENE_9_11_0); - public static final IndexVersion ADD_SECURITY_MIGRATION = def(8_510_00_0, Version.LUCENE_9_11_0); - public static final IndexVersion UPGRADE_TO_LUCENE_9_11_1 = def(8_511_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion INDEX_SORTING_ON_NESTED = def(8_512_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion LENIENT_UPDATEABLE_SYNONYMS = def(8_513_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion ENABLE_IGNORE_MALFORMED_LOGSDB = def(8_514_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion MERGE_ON_RECOVERY_VERSION = def(8_515_00_0, Version.LUCENE_9_11_1); - public static final IndexVersion UPGRADE_TO_LUCENE_9_12 = def(8_516_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion ENABLE_IGNORE_ABOVE_LOGSDB = def(8_517_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion ADD_ROLE_MAPPING_CLEANUP_MIGRATION = def(8_518_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT_BACKPORT = def(8_519_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID_BACKPORT = def(8_520_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion V8_DEPRECATE_SOURCE_MODE_MAPPER = def(8_521_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BACKPORT = def(8_522_00_0, Version.LUCENE_9_12_0); - public static final IndexVersion UPGRADE_TO_LUCENE_9_12_1 = def(8_523_00_0, parseUnchecked("9.12.1")); - public static final IndexVersion INFERENCE_METADATA_FIELDS_BACKPORT = def(8_524_00_0, parseUnchecked("9.12.1")); - public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME_BACKPORT = 
def(8_525_00_0, parseUnchecked("9.12.1")); - public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion DEPRECATE_SOURCE_MODE_MAPPER = def(9_003_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY = def(9_004_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion INFERENCE_METADATA_FIELDS = def(9_005_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME = def(9_006_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP = def(9_007_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion HOSTNAME_DOC_VALUES_SPARSE_INDEX = def(9_008_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion UPGRADE_TO_LUCENE_10_1_0 = def(9_009_00_0, Version.LUCENE_10_1_0); + public static final IndexVersion NEW_INDEXVERSION_FORMAT = def(8_501_0_00, Version.LUCENE_9_9_1); + public static final IndexVersion UPGRADE_LUCENE_9_9_2 = def(8_502_0_00, Version.LUCENE_9_9_2); + public static final IndexVersion TIME_SERIES_ID_HASHING = def(8_502_0_01, Version.LUCENE_9_9_2); + public static final IndexVersion UPGRADE_TO_LUCENE_9_10 = def(8_503_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion TIME_SERIES_ROUTING_HASH_IN_ID = def(8_504_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion DEFAULT_DENSE_VECTOR_TO_INT8_HNSW = def(8_505_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion DOC_VALUES_FOR_IGNORED_META_FIELD = def(8_505_0_01, Version.LUCENE_9_10_0); + public static final IndexVersion SOURCE_MAPPER_LOSSY_PARAMS_CHECK = def(8_506_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion SEMANTIC_TEXT_FIELD_TYPE = def(8_507_0_00, Version.LUCENE_9_10_0); + public static final IndexVersion UPGRADE_TO_LUCENE_9_11 = def(8_508_0_00, Version.LUCENE_9_11_0); + public static final IndexVersion UNIQUE_TOKEN_FILTER_POS_FIX = def(8_509_0_00, Version.LUCENE_9_11_0); + public static final IndexVersion ADD_SECURITY_MIGRATION = def(8_510_0_00, Version.LUCENE_9_11_0); + public static final IndexVersion UPGRADE_TO_LUCENE_9_11_1 = def(8_511_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion INDEX_SORTING_ON_NESTED = def(8_512_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion LENIENT_UPDATEABLE_SYNONYMS = def(8_513_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion ENABLE_IGNORE_MALFORMED_LOGSDB = def(8_514_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion MERGE_ON_RECOVERY_VERSION = def(8_515_0_00, Version.LUCENE_9_11_1); + public static final IndexVersion UPGRADE_TO_LUCENE_9_12 = def(8_516_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion ENABLE_IGNORE_ABOVE_LOGSDB = def(8_517_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion ADD_ROLE_MAPPING_CLEANUP_MIGRATION = def(8_518_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT_BACKPORT = def(8_519_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID_BACKPORT = def(8_520_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion V8_DEPRECATE_SOURCE_MODE_MAPPER = def(8_521_0_00, 
Version.LUCENE_9_12_0); + public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BACKPORT = def(8_522_0_00, Version.LUCENE_9_12_0); + public static final IndexVersion UPGRADE_TO_LUCENE_9_12_1 = def(8_523_0_00, parseUnchecked("9.12.1")); + public static final IndexVersion INFERENCE_METADATA_FIELDS_BACKPORT = def(8_524_0_00, parseUnchecked("9.12.1")); + public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME_BACKPORT = def(8_525_0_00, parseUnchecked("9.12.1")); + public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion DEPRECATE_SOURCE_MODE_MAPPER = def(9_003_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY = def(9_004_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion INFERENCE_METADATA_FIELDS = def(9_005_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME = def(9_006_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP = def(9_007_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion HOSTNAME_DOC_VALUES_SPARSE_INDEX = def(9_008_0_00, Version.LUCENE_10_0_0); + public static final IndexVersion UPGRADE_TO_LUCENE_10_1_0 = def(9_009_0_00, Version.LUCENE_10_1_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ @@ -160,17 +160,17 @@ private static Version parseUnchecked(String version) { * To add a new index version, add a new constant at the bottom of the list, above this comment. Don't add other lines, * comments, etc. The version id has the following layout: * - * M_NNN_SS_P + * M_NNN_S_PP * * M - The major version of Elasticsearch * NNN - The server version part - * SS - The serverless version part. It should always be 00 here, it is used by serverless only. - * P - The patch version part + * S - The subsidiary version part. It should always be 0 here, it is only used in subsidiary repositories. + * PP - The patch version part * * To determine the id of the next IndexVersion constant, do the following: * - Use the same major version, unless bumping majors * - Bump the server version part by 1, unless creating a patch version - * - Leave the serverless part as 00 + * - Leave the subsidiary part as 0 * - Bump the patch part if creating a patch version * * If a patch version is created, it should be placed sorted among the other existing constants. 
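For completeness, a small sketch, using hypothetical ids rather than actual `IndexVersions` constants, of why a patch id sorts between its base and the next server version and how much headroom the two-digit `PP` part leaves:

```java
// Hypothetical ids for illustration only; the real constants live in IndexVersions.
class PatchIdOrderingSketch {
    public static void main(String[] args) {
        int base = 8_521_0_00;   // an existing server version id
        int patch = 8_521_0_01;  // a patch created later for the same base
        int next = 8_522_0_00;   // the following server version id
        // Plain integer ordering keeps the patch sorted between its base and the next version.
        System.out.println(base < patch && patch < next); // true
        // The two-digit PP part allows at most 100 patch ids per base; here 98 are still free.
        System.out.println(99 - (patch % 100));           // 98 (ids 8_521_0_02 .. 8_521_0_99)
    }
}
```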
diff --git a/server/src/test/java/org/elasticsearch/TransportVersionTests.java b/server/src/test/java/org/elasticsearch/TransportVersionTests.java index 8ffc2eae4d7b4..e0b78e09d8998 100644 --- a/server/src/test/java/org/elasticsearch/TransportVersionTests.java +++ b/server/src/test/java/org/elasticsearch/TransportVersionTests.java @@ -161,15 +161,15 @@ public void testMax() { } public void testIsPatchFrom() { - TransportVersion patchVersion = TransportVersion.fromId(8_800_00_4); - assertThat(TransportVersion.fromId(8_799_00_0).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_799_00_9).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_800_00_0).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_800_00_3).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_800_00_4).isPatchFrom(patchVersion), is(true)); - assertThat(TransportVersion.fromId(8_800_00_9).isPatchFrom(patchVersion), is(true)); - assertThat(TransportVersion.fromId(8_800_01_0).isPatchFrom(patchVersion), is(false)); - assertThat(TransportVersion.fromId(8_801_00_0).isPatchFrom(patchVersion), is(false)); + TransportVersion patchVersion = TransportVersion.fromId(8_800_0_04); + assertThat(TransportVersion.fromId(8_799_0_00).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_799_0_09).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_0_00).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_0_03).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_0_04).isPatchFrom(patchVersion), is(true)); + assertThat(TransportVersion.fromId(8_800_0_49).isPatchFrom(patchVersion), is(true)); + assertThat(TransportVersion.fromId(8_800_1_00).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_801_0_00).isPatchFrom(patchVersion), is(false)); } public void testVersionConstantPresent() { @@ -187,6 +187,19 @@ public void testCURRENTIsLatest() { assertThat(Collections.max(TransportVersion.getAllVersions()), is(TransportVersion.current())); } + public void testPatchVersionsStillAvailable() { + for (TransportVersion tv : TransportVersion.getAllVersions()) { + if (tv.onOrAfter(TransportVersions.V_8_9_X) && (tv.id() % 100) > 90) { + fail( + "Transport version " + + tv + + " is nearing the limit of available patch numbers." 
+ + " Please inform the Core/Infra team that isPatchFrom may need to be modified" + ); + } + } + } + public void testToReleaseVersion() { assertThat(TransportVersion.current().toReleaseVersion(), endsWith(Version.CURRENT.toString())); } From c288684fdbf85fb032510967bb2a4b5f47885b40 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Mon, 3 Feb 2025 14:01:57 +0000 Subject: [PATCH 378/383] Use NavigableSet for representing test version sets, rather than List (#121266) --- .../org/elasticsearch/ReleaseVersions.java | 16 ++-- .../elasticsearch/index/IndexVersions.java | 5 -- .../internal/VersionExtension.java | 6 +- .../elasticsearch/TransportVersionTests.java | 18 ++-- .../index/KnownIndexVersions.java | 10 ++- .../AbstractBWCSerializationTestCase.java | 4 +- .../org/elasticsearch/test/BWCVersions.java | 11 +-- .../test/TransportVersionUtils.java | 69 +++++++-------- .../org/elasticsearch/test/VersionUtils.java | 83 ++++++++++--------- .../test/index/IndexVersionUtils.java | 69 +++++++-------- .../elasticsearch/test/VersionUtilsTests.java | 11 +-- .../AbstractBWCWireSerializationTestCase.java | 4 +- ...stractChunkedBWCSerializationTestCase.java | 4 +- .../eql/AbstractBWCSerializationTestCase.java | 12 ++- .../AbstractBWCWireSerializingTestCase.java | 12 ++- 15 files changed, 149 insertions(+), 185 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/ReleaseVersions.java b/server/src/main/java/org/elasticsearch/ReleaseVersions.java index 22cd18c7b4ac3..5e6986a5bf924 100644 --- a/server/src/main/java/org/elasticsearch/ReleaseVersions.java +++ b/server/src/main/java/org/elasticsearch/ReleaseVersions.java @@ -78,10 +78,10 @@ public static IntFunction generateVersionsLookup(Class versionContain // replace all version lists with the smallest & greatest versions versions.replaceAll((k, v) -> { if (v.size() == 1) { - return List.of(v.get(0)); + return List.of(v.getFirst()); } else { v.sort(Comparator.naturalOrder()); - return List.of(v.get(0), v.get(v.size() - 1)); + return List.of(v.getFirst(), v.getLast()); } }); @@ -100,14 +100,14 @@ private static IntFunction lookupFunction(NavigableMap lookupFunction(NavigableMap lookupFunction(NavigableMap T lastItem(List list) { - return list.get(list.size() - 1); - } - private static Version nextVersion(Version version) { return new Version(version.id + 100); // +1 to revision } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index f489c80185ef9..3b173ace0ac7b 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -24,7 +24,6 @@ import java.util.TreeMap; import java.util.TreeSet; import java.util.function.IntFunction; -import java.util.stream.Collectors; @SuppressWarnings("deprecation") public class IndexVersions { @@ -250,10 +249,6 @@ static NavigableMap getAllVersionIds(Class cls) { return Collections.unmodifiableNavigableMap(builder); } - static Collection getAllWriteVersions() { - return VERSION_IDS.values().stream().filter(v -> v.onOrAfter(IndexVersions.MINIMUM_COMPATIBLE)).collect(Collectors.toSet()); - } - static Collection getAllVersions() { return VERSION_IDS.values(); } diff --git a/server/src/main/java/org/elasticsearch/internal/VersionExtension.java b/server/src/main/java/org/elasticsearch/internal/VersionExtension.java index 5a6c7c1f3671d..fc947738c9e33 100644 --- a/server/src/main/java/org/elasticsearch/internal/VersionExtension.java +++ 
b/server/src/main/java/org/elasticsearch/internal/VersionExtension.java @@ -12,16 +12,16 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.index.IndexVersion; -import java.util.List; +import java.util.Collection; /** * Allows plugging in current version elements. */ public interface VersionExtension { /** - * Returns list of {@link TransportVersion} defined by extension + * Returns additional {@link TransportVersion} defined by extension */ - List getTransportVersions(); + Collection getTransportVersions(); /** * Returns the {@link IndexVersion} that Elasticsearch should use. diff --git a/server/src/test/java/org/elasticsearch/TransportVersionTests.java b/server/src/test/java/org/elasticsearch/TransportVersionTests.java index e0b78e09d8998..9b02b66583e78 100644 --- a/server/src/test/java/org/elasticsearch/TransportVersionTests.java +++ b/server/src/test/java/org/elasticsearch/TransportVersionTests.java @@ -13,15 +13,13 @@ import org.elasticsearch.test.TransportVersionUtils; import java.lang.reflect.Modifier; -import java.util.Collections; -import java.util.List; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; @@ -69,13 +67,11 @@ public static class DuplicatedIdFakeVersion { public void testStaticTransportVersionChecks() { assertThat( TransportVersions.collectAllVersionIdsDefinedInClass(CorrectFakeVersion.class), - equalTo( - List.of( - CorrectFakeVersion.V_0_000_002, - CorrectFakeVersion.V_0_000_003, - CorrectFakeVersion.V_0_000_004, - CorrectFakeVersion.V_0_00_01 - ) + contains( + CorrectFakeVersion.V_0_000_002, + CorrectFakeVersion.V_0_000_003, + CorrectFakeVersion.V_0_000_004, + CorrectFakeVersion.V_0_00_01 ) ); AssertionError e = expectThrows( @@ -184,7 +180,7 @@ public void testVersionConstantPresent() { } public void testCURRENTIsLatest() { - assertThat(Collections.max(TransportVersion.getAllVersions()), is(TransportVersion.current())); + assertThat(TransportVersion.getAllVersions().getLast(), is(TransportVersion.current())); } public void testPatchVersionsStillAvailable() { diff --git a/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java b/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java index 4f559a5f3eaef..8aea7a5713cf1 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java +++ b/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java @@ -9,7 +9,9 @@ package org.elasticsearch.index; -import java.util.List; +import java.util.Collections; +import java.util.NavigableSet; +import java.util.TreeSet; /** * Provides access to all known index versions @@ -18,10 +20,12 @@ public class KnownIndexVersions { /** * A sorted list of all known index versions */ - public static final List ALL_VERSIONS = List.copyOf(IndexVersions.getAllVersions()); + public static final NavigableSet ALL_VERSIONS = Collections.unmodifiableNavigableSet( + new TreeSet<>(IndexVersions.getAllVersions()) + ); /** * A sorted list of all known index versions that can be written to */ - public static final List ALL_WRITE_VERSIONS = List.copyOf(IndexVersions.getAllWriteVersions()); + public static final NavigableSet ALL_WRITE_VERSIONS = 
ALL_VERSIONS.tailSet(IndexVersions.MINIMUM_COMPATIBLE, true); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBWCSerializationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBWCSerializationTestCase.java index d931340365cd6..22044e079018b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBWCSerializationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBWCSerializationTestCase.java @@ -14,7 +14,7 @@ import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; -import java.util.List; +import java.util.Collection; import static org.elasticsearch.test.BWCVersions.DEFAULT_BWC_VERSIONS; @@ -28,7 +28,7 @@ public abstract class AbstractBWCSerializationTestCase bwcVersions() { + protected Collection bwcVersions() { return DEFAULT_BWC_VERSIONS; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java b/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java index 49859071b03cf..1cd0d0ddc4cd2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/BWCVersions.java @@ -12,17 +12,14 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; public final class BWCVersions { private BWCVersions() {} - public static List getAllBWCVersions() { - List allVersions = TransportVersion.getAllVersions(); - int minCompatVersion = Collections.binarySearch(allVersions, TransportVersions.MINIMUM_COMPATIBLE); - return allVersions.subList(minCompatVersion, allVersions.size()); + public static NavigableSet getAllBWCVersions() { + return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersions.MINIMUM_COMPATIBLE, true); } - public static final List DEFAULT_BWC_VERSIONS = getAllBWCVersions(); + public static final NavigableSet DEFAULT_BWC_VERSIONS = getAllBWCVersions(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java index 0c7274a36b49a..9c7114425b8db 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TransportVersionUtils.java @@ -14,15 +14,23 @@ import org.elasticsearch.core.Nullable; import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; import java.util.Random; import java.util.Set; +import java.util.TreeSet; import java.util.stream.Collectors; +import static org.apache.lucene.tests.util.LuceneTestCase.random; + public class TransportVersionUtils { + + private static final NavigableSet RELEASED_VERSIONS = Collections.unmodifiableNavigableSet( + new TreeSet<>(TransportVersion.getAllVersions()) + ); + /** Returns all released versions */ - public static List allReleasedVersions() { - return TransportVersion.getAllVersions(); + public static NavigableSet allReleasedVersions() { + return RELEASED_VERSIONS; } /** Returns the oldest known {@link TransportVersion} */ @@ -32,7 +40,7 @@ public static TransportVersion getFirstVersion() { /** Returns a random {@link TransportVersion} from all available versions. 
*/ public static TransportVersion randomVersion() { - return ESTestCase.randomFrom(allReleasedVersions()); + return VersionUtils.randomFrom(random(), allReleasedVersions(), TransportVersion::fromId); } /** Returns a random {@link TransportVersion} from all available versions without the ignore set */ @@ -42,7 +50,7 @@ public static TransportVersion randomVersion(Set ignore) { /** Returns a random {@link TransportVersion} from all available versions. */ public static TransportVersion randomVersion(Random random) { - return allReleasedVersions().get(random.nextInt(allReleasedVersions().size())); + return VersionUtils.randomFrom(random, allReleasedVersions(), TransportVersion::fromId); } /** Returns a random {@link TransportVersion} between minVersion and maxVersion (inclusive). */ @@ -55,24 +63,21 @@ public static TransportVersion randomVersionBetween( throw new IllegalArgumentException("maxVersion [" + maxVersion + "] cannot be less than minVersion [" + minVersion + "]"); } - int minVersionIndex = 0; - List allReleasedVersions = allReleasedVersions(); + NavigableSet versions = allReleasedVersions(); if (minVersion != null) { - minVersionIndex = Collections.binarySearch(allReleasedVersions, minVersion); + if (versions.contains(minVersion) == false) { + throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); + } + versions = versions.tailSet(minVersion, true); } - int maxVersionIndex = allReleasedVersions.size() - 1; if (maxVersion != null) { - maxVersionIndex = Collections.binarySearch(allReleasedVersions, maxVersion); - } - if (minVersionIndex < 0) { - throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); - } else if (maxVersionIndex < 0) { - throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); - } else { - // minVersionIndex is inclusive so need to add 1 to this index - int range = maxVersionIndex + 1 - minVersionIndex; - return allReleasedVersions.get(minVersionIndex + random.nextInt(range)); + if (versions.contains(maxVersion) == false) { + throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); + } + versions = versions.headSet(maxVersion, true); } + + return VersionUtils.randomFrom(random, versions, TransportVersion::fromId); } public static TransportVersion getPreviousVersion() { @@ -82,16 +87,11 @@ public static TransportVersion getPreviousVersion() { } public static TransportVersion getPreviousVersion(TransportVersion version) { - int place = Collections.binarySearch(allReleasedVersions(), version); - if (place < 0) { - // version does not exist - need the item before the index this version should be inserted - place = -(place + 1); - } - - if (place < 1) { + TransportVersion lower = allReleasedVersions().lower(version); + if (lower == null) { throw new IllegalArgumentException("couldn't find any released versions before [" + version + "]"); } - return allReleasedVersions().get(place - 1); + return lower; } public static TransportVersion getNextVersion(TransportVersion version) { @@ -99,17 +99,8 @@ public static TransportVersion getNextVersion(TransportVersion version) { } public static TransportVersion getNextVersion(TransportVersion version, boolean createIfNecessary) { - List allReleasedVersions = allReleasedVersions(); - int place = Collections.binarySearch(allReleasedVersions, version); - if (place < 0) { - // version does not exist - need the item at the index this version should be inserted - place = -(place + 1); - } else { - // need the *next* 
version - place++; - } - - if (place < 0 || place >= allReleasedVersions.size()) { + TransportVersion higher = allReleasedVersions().higher(version); + if (higher == null) { if (createIfNecessary) { // create a new transport version one greater than specified return new TransportVersion(version.id() + 1); @@ -117,7 +108,7 @@ public static TransportVersion getNextVersion(TransportVersion version, boolean throw new IllegalArgumentException("couldn't find any released versions after [" + version + "]"); } } - return allReleasedVersions.get(place); + return higher; } /** Returns a random {@code TransportVersion} that is compatible with {@link TransportVersion#current()} */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java index 8b7ab620774b9..311f032088f74 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java @@ -9,23 +9,31 @@ package org.elasticsearch.test; +import com.carrotsearch.randomizedtesting.generators.RandomNumbers; + import org.elasticsearch.Build; import org.elasticsearch.Version; +import org.elasticsearch.common.VersionId; import org.elasticsearch.core.Nullable; +import java.util.Collections; import java.util.List; -import java.util.Optional; +import java.util.NavigableSet; import java.util.Random; +import java.util.TreeSet; +import java.util.function.IntFunction; /** Utilities for selecting versions in tests */ public class VersionUtils { - private static final List ALL_VERSIONS = Version.getDeclaredVersions(Version.class); + private static final NavigableSet ALL_VERSIONS = Collections.unmodifiableNavigableSet( + new TreeSet<>(Version.getDeclaredVersions(Version.class)) + ); /** * Returns an immutable, sorted list containing all versions, both released and unreleased. */ - public static List allVersions() { + public static NavigableSet allVersions() { return ALL_VERSIONS; } @@ -33,13 +41,11 @@ public static List allVersions() { * Get the version before {@code version}. */ public static Version getPreviousVersion(Version version) { - for (int i = ALL_VERSIONS.size() - 1; i >= 0; i--) { - Version v = ALL_VERSIONS.get(i); - if (v.before(version)) { - return v; - } + var versions = ALL_VERSIONS.headSet(version, false); + if (versions.isEmpty()) { + throw new IllegalArgumentException("couldn't find any versions before [" + version + "]"); } - throw new IllegalArgumentException("couldn't find any versions before [" + version + "]"); + return versions.getLast(); } /** @@ -56,8 +62,7 @@ public static Version getPreviousVersion() { * where the minor version is less than the currents minor version. */ public static Version getPreviousMinorVersion() { - for (int i = ALL_VERSIONS.size() - 1; i >= 0; i--) { - Version v = ALL_VERSIONS.get(i); + for (Version v : ALL_VERSIONS.descendingSet()) { if (v.minor < Version.CURRENT.minor || v.major < Version.CURRENT.major) { return v; } @@ -67,12 +72,12 @@ public static Version getPreviousMinorVersion() { /** Returns the oldest {@link Version} */ public static Version getFirstVersion() { - return ALL_VERSIONS.get(0); + return ALL_VERSIONS.getFirst(); } /** Returns a random {@link Version} from all available versions. 
*/ public static Version randomVersion(Random random) { - return ALL_VERSIONS.get(random.nextInt(ALL_VERSIONS.size())); + return randomFrom(random, ALL_VERSIONS, Version::fromId); } /** Returns a random {@link Version} from all available versions, that is compatible with the given version. */ @@ -83,38 +88,42 @@ public static Version randomCompatibleVersion(Random random, Version version) { /** Returns a random {@link Version} between minVersion and maxVersion (inclusive). */ public static Version randomVersionBetween(Random random, @Nullable Version minVersion, @Nullable Version maxVersion) { - int minVersionIndex = 0; + if (minVersion != null && maxVersion != null && maxVersion.before(minVersion)) { + throw new IllegalArgumentException("maxVersion [" + maxVersion + "] cannot be less than minVersion [" + minVersion + "]"); + } + + NavigableSet versions = ALL_VERSIONS; if (minVersion != null) { - minVersionIndex = ALL_VERSIONS.indexOf(minVersion); + if (versions.contains(minVersion) == false) { + throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); + } + versions = versions.tailSet(minVersion, true); } - int maxVersionIndex = ALL_VERSIONS.size() - 1; if (maxVersion != null) { - maxVersionIndex = ALL_VERSIONS.indexOf(maxVersion); - } - if (minVersionIndex == -1) { - throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); - } else if (maxVersionIndex == -1) { - throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); - } else if (minVersionIndex > maxVersionIndex) { - throw new IllegalArgumentException("maxVersion [" + maxVersion + "] cannot be less than minVersion [" + minVersion + "]"); - } else { - // minVersionIndex is inclusive so need to add 1 to this index - int range = maxVersionIndex + 1 - minVersionIndex; - return ALL_VERSIONS.get(minVersionIndex + random.nextInt(range)); + if (versions.contains(maxVersion) == false) { + throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); + } + versions = versions.headSet(maxVersion, true); } - } - /** returns the first future compatible version */ - public static Version compatibleFutureVersion(Version version) { - final Optional opt = ALL_VERSIONS.stream().filter(version::before).filter(v -> v.isCompatible(version)).findAny(); - assert opt.isPresent() : "no future compatible version for " + version; - return opt.get(); + return randomFrom(random, versions, Version::fromId); } /** Returns the maximum {@link Version} that is compatible with the given version. */ public static Version maxCompatibleVersion(Version version) { - final List compatible = ALL_VERSIONS.stream().filter(version::isCompatible).filter(version::onOrBefore).toList(); - assert compatible.size() > 0; - return compatible.get(compatible.size() - 1); + return ALL_VERSIONS.tailSet(version, true).descendingSet().stream().filter(version::isCompatible).findFirst().orElseThrow(); + } + + public static > T randomFrom(Random random, NavigableSet set, IntFunction ctor) { + // get the first and last id, pick a random id in the middle, then find that id in the set in O(nlogn) time + // this assumes the id numbers are reasonably evenly distributed in the set + assert set.isEmpty() == false; + int lowest = set.getFirst().id(); + int highest = set.getLast().id(); + + T randomId = ctor.apply(RandomNumbers.randomIntBetween(random, lowest, highest)); + // try to find the id below, then the id above. 
We're just looking for *some* item in the set that is close to randomId + T found = set.floor(randomId); + return found != null ? found : set.ceiling(randomId); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java index 667149e4bdd3e..5bf20b18abc72 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java @@ -14,41 +14,43 @@ import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.KnownIndexVersions; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; -import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; import java.util.Random; import java.util.Set; import java.util.stream.Collectors; +import static org.apache.lucene.tests.util.LuceneTestCase.random; + public class IndexVersionUtils { - private static final List ALL_VERSIONS = KnownIndexVersions.ALL_VERSIONS; - private static final List ALL_WRITE_VERSIONS = KnownIndexVersions.ALL_WRITE_VERSIONS; + private static final NavigableSet ALL_VERSIONS = KnownIndexVersions.ALL_VERSIONS; + private static final NavigableSet ALL_WRITE_VERSIONS = KnownIndexVersions.ALL_WRITE_VERSIONS; /** Returns all released versions */ - public static List allReleasedVersions() { + public static NavigableSet allReleasedVersions() { return ALL_VERSIONS; } /** Returns the oldest known {@link IndexVersion}. This version can only be read from and not written to */ public static IndexVersion getLowestReadCompatibleVersion() { - return ALL_VERSIONS.get(0); + return ALL_VERSIONS.getFirst(); } /** Returns the oldest known {@link IndexVersion} that can be written to */ public static IndexVersion getLowestWriteCompatibleVersion() { - return ALL_WRITE_VERSIONS.get(0); + return ALL_WRITE_VERSIONS.getFirst(); } /** Returns a random {@link IndexVersion} from all available versions. */ public static IndexVersion randomVersion() { - return ESTestCase.randomFrom(ALL_VERSIONS); + return VersionUtils.randomFrom(random(), ALL_VERSIONS, IndexVersion::fromId); } /** Returns a random {@link IndexVersion} from all versions that can be written to. 
*/ public static IndexVersion randomWriteVersion() { - return ESTestCase.randomFrom(ALL_WRITE_VERSIONS); + return VersionUtils.randomFrom(random(), ALL_WRITE_VERSIONS, IndexVersion::fromId); } /** Returns a random {@link IndexVersion} from all available versions without the ignore set */ @@ -62,23 +64,21 @@ public static IndexVersion randomVersionBetween(Random random, @Nullable IndexVe throw new IllegalArgumentException("maxVersion [" + maxVersion + "] cannot be less than minVersion [" + minVersion + "]"); } - int minVersionIndex = 0; + NavigableSet versions = allReleasedVersions(); if (minVersion != null) { - minVersionIndex = Collections.binarySearch(ALL_VERSIONS, minVersion); + if (versions.contains(minVersion) == false) { + throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); + } + versions = versions.tailSet(minVersion, true); } - int maxVersionIndex = ALL_VERSIONS.size() - 1; if (maxVersion != null) { - maxVersionIndex = Collections.binarySearch(ALL_VERSIONS, maxVersion); - } - if (minVersionIndex < 0) { - throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist."); - } else if (maxVersionIndex < 0) { - throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); - } else { - // minVersionIndex is inclusive so need to add 1 to this index - int range = maxVersionIndex + 1 - minVersionIndex; - return ALL_VERSIONS.get(minVersionIndex + random.nextInt(range)); + if (versions.contains(maxVersion) == false) { + throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist."); + } + versions = versions.headSet(maxVersion, true); } + + return VersionUtils.randomFrom(random, versions, IndexVersion::fromId); } public static IndexVersion getPreviousVersion() { @@ -88,16 +88,11 @@ public static IndexVersion getPreviousVersion() { } public static IndexVersion getPreviousVersion(IndexVersion version) { - int place = Collections.binarySearch(ALL_VERSIONS, version); - if (place < 0) { - // version does not exist - need the item before the index this version should be inserted - place = -(place + 1); - } - - if (place < 1) { + IndexVersion lower = allReleasedVersions().lower(version); + if (lower == null) { throw new IllegalArgumentException("couldn't find any released versions before [" + version + "]"); } - return ALL_VERSIONS.get(place - 1); + return lower; } public static IndexVersion getPreviousMajorVersion(IndexVersion version) { @@ -105,19 +100,11 @@ public static IndexVersion getPreviousMajorVersion(IndexVersion version) { } public static IndexVersion getNextVersion(IndexVersion version) { - int place = Collections.binarySearch(ALL_VERSIONS, version); - if (place < 0) { - // version does not exist - need the item at the index this version should be inserted - place = -(place + 1); - } else { - // need the *next* version - place++; - } - - if (place < 0 || place >= ALL_VERSIONS.size()) { + IndexVersion higher = allReleasedVersions().higher(version); + if (higher == null) { throw new IllegalArgumentException("couldn't find any released versions after [" + version + "]"); } - return ALL_VERSIONS.get(place); + return higher; } /** Returns a random {@code IndexVersion} that is compatible with {@link IndexVersion#current()} */ diff --git a/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java index 5ae7e5640fc91..9951878289d48 100644 --- 
a/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java @@ -21,13 +21,6 @@ */ public class VersionUtilsTests extends ESTestCase { - public void testAllVersionsSorted() { - List allVersions = VersionUtils.allVersions(); - for (int i = 0, j = 1; j < allVersions.size(); ++i, ++j) { - assertTrue(allVersions.get(i).before(allVersions.get(j))); - } - } - public void testRandomVersionBetween() { // TODO: rework this test to use a dummy Version class so these don't need to change with each release // full range @@ -50,9 +43,9 @@ public void testRandomVersionBetween() { got = VersionUtils.randomVersionBetween(random(), null, fromId(7000099)); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); assertTrue(got.onOrBefore(fromId(7000099))); - got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allVersions().get(0)); + got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allVersions().getFirst()); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); - assertTrue(got.onOrBefore(VersionUtils.allVersions().get(0))); + assertTrue(got.onOrBefore(VersionUtils.allVersions().getFirst())); // unbounded upper got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), null); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractBWCWireSerializationTestCase.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractBWCWireSerializationTestCase.java index 451c85936f3cb..d0dff954dec13 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractBWCWireSerializationTestCase.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractBWCWireSerializationTestCase.java @@ -12,7 +12,7 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import java.io.IOException; -import java.util.List; +import java.util.Collection; import static org.elasticsearch.test.BWCVersions.DEFAULT_BWC_VERSIONS; @@ -26,7 +26,7 @@ public abstract class AbstractBWCWireSerializationTestCase /** * The bwc versions to test serialization against */ - protected List bwcVersions() { + protected Collection bwcVersions() { return DEFAULT_BWC_VERSIONS; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractChunkedBWCSerializationTestCase.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractChunkedBWCSerializationTestCase.java index 0254406a2c8ec..e6b6ef3e3a06a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractChunkedBWCSerializationTestCase.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractChunkedBWCSerializationTestCase.java @@ -13,7 +13,7 @@ import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import java.io.IOException; -import java.util.List; +import java.util.Collection; import static org.elasticsearch.test.BWCVersions.DEFAULT_BWC_VERSIONS; @@ -28,7 +28,7 @@ public abstract class AbstractChunkedBWCSerializationTestCase bwcVersions() { + protected Collection bwcVersions() { return DEFAULT_BWC_VERSIONS; } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java index fc41bdd627c95..2e8b8578b5056 100644 --- 
a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCSerializationTestCase.java @@ -10,23 +10,21 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; -import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; import static org.hamcrest.Matchers.equalTo; public abstract class AbstractBWCSerializationTestCase extends AbstractXContentSerializingTestCase { - private static List getAllBWCVersions() { - List allVersions = TransportVersion.getAllVersions(); - int minCompatVersion = Collections.binarySearch(allVersions, TransportVersions.MINIMUM_COMPATIBLE); - return allVersions.subList(minCompatVersion, allVersions.size()); + private static NavigableSet getAllBWCVersions() { + return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersions.MINIMUM_COMPATIBLE, true); } - private static final List DEFAULT_BWC_VERSIONS = getAllBWCVersions(); + private static final NavigableSet DEFAULT_BWC_VERSIONS = getAllBWCVersions(); protected abstract T mutateInstanceForVersion(T instance, TransportVersion version); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java index 30777f43597c8..76c2b3355e236 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/AbstractBWCWireSerializingTestCase.java @@ -10,22 +10,20 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.TransportVersionUtils; import java.io.IOException; -import java.util.Collections; -import java.util.List; +import java.util.NavigableSet; import static org.hamcrest.Matchers.equalTo; public abstract class AbstractBWCWireSerializingTestCase extends AbstractWireSerializingTestCase { - private static List getAllBWCVersions() { - List allVersions = TransportVersion.getAllVersions(); - int minCompatVersion = Collections.binarySearch(allVersions, TransportVersions.MINIMUM_COMPATIBLE); - return allVersions.subList(minCompatVersion, allVersions.size()); + private static NavigableSet getAllBWCVersions() { + return TransportVersionUtils.allReleasedVersions().tailSet(TransportVersions.MINIMUM_COMPATIBLE, true); } - private static final List DEFAULT_BWC_VERSIONS = getAllBWCVersions(); + private static final NavigableSet DEFAULT_BWC_VERSIONS = getAllBWCVersions(); protected abstract T mutateInstanceForVersion(T instance, TransportVersion version); From 274fb738a1a008482cc725f78063b9c7737bcd87 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 4 Feb 2025 01:52:42 +1100 Subject: [PATCH 379/383] Mute org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT test {yaml=snapshot.create/10_basic/Create a snapshot for missing index} #121536 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 
9c3d121f97c58..ccea465bc68f2 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -449,6 +449,9 @@ tests: - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT method: test {yaml=cat.aliases/10_basic/Complex alias} issue: https://github.com/elastic/elasticsearch/issues/121513 +- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT + method: test {yaml=snapshot.create/10_basic/Create a snapshot for missing index} + issue: https://github.com/elastic/elasticsearch/issues/121536 # Examples: # From aa28d84792d3749010c50d87eb9933a487358bc9 Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Mon, 3 Feb 2025 10:10:01 -0500 Subject: [PATCH 380/383] Fix PolicyManager: plugin resolver overrides agent (#121456) --- .../runtime/policy/PolicyManager.java | 4 +++- .../runtime/policy/PolicyManagerTests.java | 18 +++++++++++++++++- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index 092f5ce8455cb..da2191f601110 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -398,7 +398,9 @@ private ModuleEntitlements computeEntitlements(Class requestingClass) { var pluginName = pluginResolver.apply(requestingClass); if (pluginName != null) { var pluginEntitlements = pluginsEntitlements.get(pluginName); - if (pluginEntitlements != null) { + if (pluginEntitlements == null) { + return ModuleEntitlements.NONE; + } else { final String scopeName; if (requestingModule.isNamed() == false) { scopeName = ALL_UNNAMED; diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index 24be0f6f43a4c..f6dca079f9202 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -271,7 +271,7 @@ public void testAgentsEntitlements() throws IOException, ClassNotFoundException createEmptyTestServerPolicy(), List.of(new CreateClassLoaderEntitlement()), Map.of(), - c -> "test", + c -> c.getPackageName().startsWith(TEST_AGENTS_PACKAGE_NAME) ? null : "test", TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE ); @@ -357,6 +357,22 @@ public void testDuplicateFlagEntitlements() { ); } + /** + * If the plugin resolver tells us a class is in a plugin, don't conclude that it's in an agent. 
+ */ + public void testPluginResolverOverridesAgents() { + var policyManager = new PolicyManager( + createEmptyTestServerPolicy(), + List.of(new CreateClassLoaderEntitlement()), + Map.of(), + c -> "test", // Insist that the class is in a plugin + TEST_AGENTS_PACKAGE_NAME, + NO_ENTITLEMENTS_MODULE + ); + ModuleEntitlements notAgentsEntitlements = policyManager.getEntitlements(TestAgent.class); + assertThat(notAgentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(false)); + } + private static Class makeClassInItsOwnModule() throws IOException, ClassNotFoundException { final Path home = createTempDir(); Path jar = createMockPluginJar(home); From 623a6afd12755e4ff47f6f8309cb1463e68f6ab7 Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Mon, 3 Feb 2025 10:31:21 -0500 Subject: [PATCH 381/383] Introduce AllocationBalancingRoundSummaryService (#120957) This service is added to the desired balance allocator to track and report on balancer round activity. It is a WIP and currently only tracks the number of shard moves caused by a balancing round. Reporting balancer round summary results will provide information with which to do cost-benefit analyses of the work that shard allocation rebalancing executes. It is disabled by default. Relates ES-10341 --- docs/changelog/120957.yaml | 5 + ...llocationBalancingRoundSummaryService.java | 194 +++++++++++++ .../allocator/BalancingRoundSummary.java | 24 ++ .../CombinedBalancingRoundSummary.java | 45 +++ .../allocation/allocator/DesiredBalance.java | 3 +- .../DesiredBalanceShardsAllocator.java | 13 + .../common/settings/ClusterSettings.java | 3 + ...tionBalancingRoundSummaryServiceTests.java | 256 ++++++++++++++++++ 8 files changed, 541 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/120957.yaml create mode 100644 server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryService.java create mode 100644 server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummary.java create mode 100644 server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/CombinedBalancingRoundSummary.java create mode 100644 server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryServiceTests.java diff --git a/docs/changelog/120957.yaml b/docs/changelog/120957.yaml new file mode 100644 index 0000000000000..841ef945ce7ef --- /dev/null +++ b/docs/changelog/120957.yaml @@ -0,0 +1,5 @@ +pr: 120957 +summary: Introduce `AllocationBalancingRoundSummaryService` +area: Allocation +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryService.java new file mode 100644 index 0000000000000..2e45938f3d2c0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryService.java @@ -0,0 +1,194 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.cluster.routing.allocation.allocator; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.ArrayList; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.atomic.AtomicReference; + +/** + * Manages the lifecycle of a series of {@link BalancingRoundSummary} results from allocation balancing rounds and creates reports thereof. + * Reporting balancer round summary results will provide information with which to do cost-benefit analyses of the work that shard + * allocation rebalancing executes. + * + * Any successfully added summary via {@link #addBalancerRoundSummary(BalancingRoundSummary)} will eventually be collected/drained and + * reported. This should still be done in the event of the node stepping down from master, on the assumption that all summaries are only + * added while master and should be drained for reporting. There is no need to start/stop this service with master election/stepdown because + * balancer rounds will no longer be supplied when not master. It will simply drain the last summaries and then have nothing more to do. + * This does have the tradeoff that non-master nodes will run a task to check for summaries to report every + * {@link #BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING} seconds. + */ +public class AllocationBalancingRoundSummaryService { + + /** Turns on or off balancing round summary reporting. */ + public static final Setting ENABLE_BALANCER_ROUND_SUMMARIES_SETTING = Setting.boolSetting( + "cluster.routing.allocation.desired_balance.enable_balancer_round_summaries", + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** Controls how frequently in time balancer round summaries are logged. */ + public static final Setting BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING = Setting.timeSetting( + "cluster.routing.allocation.desired_balance.balanace_round_summaries_interval", + TimeValue.timeValueSeconds(10), + TimeValue.ZERO, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + private static final Logger logger = LogManager.getLogger(AllocationBalancingRoundSummaryService.class); + private final ThreadPool threadPool; + private volatile boolean enableBalancerRoundSummaries; + private volatile TimeValue summaryReportInterval; + + /** + * A concurrency-safe list of balancing round summaries. Balancer rounds are run and added here serially, so the queue will naturally + * progress from newer to older results. + */ + private final ConcurrentLinkedQueue summaries = new ConcurrentLinkedQueue<>(); + + /** This reference is set when reporting is scheduled. If it is null, then reporting is inactive. 
*/ + private final AtomicReference scheduledReportFuture = new AtomicReference<>(); + + public AllocationBalancingRoundSummaryService(ThreadPool threadPool, ClusterSettings clusterSettings) { + this.threadPool = threadPool; + // Initialize the local setting values to avoid a null access when ClusterSettings#initializeAndWatch is called on each setting: + // updating enableBalancerRoundSummaries accesses summaryReportInterval. + this.enableBalancerRoundSummaries = clusterSettings.get(ENABLE_BALANCER_ROUND_SUMMARIES_SETTING); + this.summaryReportInterval = clusterSettings.get(BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING); + + clusterSettings.initializeAndWatch(ENABLE_BALANCER_ROUND_SUMMARIES_SETTING, value -> { + this.enableBalancerRoundSummaries = value; + updateBalancingRoundSummaryReporting(); + }); + clusterSettings.initializeAndWatch(BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING, value -> { + // The new value will get picked up the next time that the summary report task reschedules itself on the thread pool. + this.summaryReportInterval = value; + }); + } + + /** + * Adds the summary of a balancing round. If summaries are enabled, this will eventually be reported (logging, etc.). If balancer round + * summaries are not enabled in the cluster, then the summary is immediately discarded (so as not to fill up a data structure that will + * never be drained). + */ + public void addBalancerRoundSummary(BalancingRoundSummary summary) { + if (enableBalancerRoundSummaries == false) { + return; + } + + summaries.add(summary); + } + + /** + * Reports on all the balancer round summaries added since the last call to this method, if there are any. Then reschedules itself per + * the {@link #ENABLE_BALANCER_ROUND_SUMMARIES_SETTING} and {@link #BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING} settings. + */ + private void reportSummariesAndThenReschedule() { + drainAndReportSummaries(); + rescheduleReporting(); + } + + /** + * Drains all the waiting balancer round summaries (if there are any) and reports them. + */ + private void drainAndReportSummaries() { + var combinedSummaries = drainSummaries(); + if (combinedSummaries == CombinedBalancingRoundSummary.EMPTY_RESULTS) { + return; + } + + logger.info("Balancing round summaries: " + combinedSummaries); + } + + /** + * Returns a combined summary of all unreported allocation round summaries: may summarize a single balancer round, multiple, or none. + * + * @return {@link CombinedBalancingRoundSummary#EMPTY_RESULTS} if there are no balancing round summaries waiting to be reported. + */ + private CombinedBalancingRoundSummary drainSummaries() { + ArrayList batchOfSummaries = new ArrayList<>(); + while (summaries.isEmpty() == false) { + batchOfSummaries.add(summaries.poll()); + } + return CombinedBalancingRoundSummary.combine(batchOfSummaries); + } + + /** + * Schedules a periodic task to drain and report the latest balancer round summaries, or cancels the already running task, if the latest + * setting values dictate a change to enable or disable reporting. A change to {@link #BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING} + * will only take effect when the periodic task completes and reschedules itself. + */ + private void updateBalancingRoundSummaryReporting() { + if (this.enableBalancerRoundSummaries) { + startReporting(this.summaryReportInterval); + } else { + cancelReporting(); + // Clear the data structure so that we don't retain unnecessary memory. + drainSummaries(); + } + } + + /** + * Schedules a reporting task, if one is not already scheduled. 
The reporting task will reschedule itself going forward. + */ + private void startReporting(TimeValue intervalValue) { + if (scheduledReportFuture.get() == null) { + scheduleReporting(intervalValue); + } + } + + /** + * Cancels the future reporting task and resets {@link #scheduledReportFuture} to null. + * + * Note that this is best-effort: cancellation can race with {@link #rescheduleReporting}. But that is okay because the subsequent + * {@link #rescheduleReporting} will use the latest settings and choose to cancel reporting if appropriate. + */ + private void cancelReporting() { + var future = scheduledReportFuture.getAndSet(null); + if (future != null) { + future.cancel(); + } + } + + private void scheduleReporting(TimeValue intervalValue) { + scheduledReportFuture.set( + threadPool.schedule(this::reportSummariesAndThenReschedule, intervalValue, threadPool.executor(ThreadPool.Names.GENERIC)) + ); + } + + /** + * Looks at the given setting values and decides whether to schedule another reporting task or cancel reporting now. + */ + private void rescheduleReporting() { + if (this.enableBalancerRoundSummaries) { + // It's possible that this races with a concurrent call to cancel reporting, but that's okay. The next rescheduleReporting call + // will check the latest settings and cancel. + scheduleReporting(this.summaryReportInterval); + } else { + cancelReporting(); + } + } + + // @VisibleForTesting + protected void verifyNumberOfSummaries(int numberOfSummaries) { + assert numberOfSummaries == summaries.size(); + } + +} diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummary.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummary.java new file mode 100644 index 0000000000000..2662825eff48e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancingRoundSummary.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.cluster.routing.allocation.allocator; + +/** + * Summarizes the impact to the cluster as a result of a rebalancing round. + * + * @param numberOfShardsToMove The number of shard moves required to move from the previous desired balance to the new one. + */ +public record BalancingRoundSummary(long numberOfShardsToMove) { + + @Override + public String toString() { + return "BalancingRoundSummary{" + "numberOfShardsToMove=" + numberOfShardsToMove + '}'; + } + +} diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/CombinedBalancingRoundSummary.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/CombinedBalancingRoundSummary.java new file mode 100644 index 0000000000000..78fa1f6c5f5f5 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/CombinedBalancingRoundSummary.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.cluster.routing.allocation.allocator; + +import java.util.List; + +/** + * Holds combined {@link BalancingRoundSummary} results. Essentially holds a list of the balancing events and the summed up changes + * across all those events: what allocation work was done across some period of time. + * TODO: WIP ES-10341 + * + * Note that each balancing round summary is the difference between, at the time, latest desired balance and the previous desired balance. + * Each summary represents a step towards the next desired balance, which is based on presuming the previous desired balance is reached. So + * combining them is roughly the difference between the first summary's previous desired balance and the last summary's latest desired + * balance. + * + * @param numberOfBalancingRounds How many balancing round summaries are combined in this report. + * @param numberOfShardMoves The sum of shard moves for each balancing round being combined into a single summary. + */ +public record CombinedBalancingRoundSummary(int numberOfBalancingRounds, long numberOfShardMoves) { + + public static final CombinedBalancingRoundSummary EMPTY_RESULTS = new CombinedBalancingRoundSummary(0, 0); + + public static CombinedBalancingRoundSummary combine(List summaries) { + if (summaries.isEmpty()) { + return EMPTY_RESULTS; + } + + int numSummaries = 0; + long numberOfShardMoves = 0; + for (BalancingRoundSummary summary : summaries) { + ++numSummaries; + numberOfShardMoves += summary.numberOfShardsToMove(); + } + return new CombinedBalancingRoundSummary(numSummaries, numberOfShardMoves); + } + +} diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java index 16cbf41ee1bfa..202582839f1d9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java @@ -24,8 +24,7 @@ * strictly increasing sequence number. A new master term restarts the index values from zero. The balancer, * which runs async to reroute, uses the latest request's data to compute the desired balance. 
* @param assignments a set of the (persistent) node IDs to which each {@link ShardId} should be allocated - * @param weightsPerNode The node weights calculated based on - * {@link org.elasticsearch.cluster.routing.allocation.allocator.WeightFunction#calculateNodeWeight} + * @param weightsPerNode The node weights calculated based on {@link WeightFunction#calculateNodeWeight} */ public record DesiredBalance( long lastConvergedIndex, diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java index 2c73a27ad3418..d9fba492fb9d0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java @@ -88,6 +88,10 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator { private volatile boolean resetCurrentDesiredBalance = false; private final Set processedNodeShutdowns = new HashSet<>(); private final DesiredBalanceMetrics desiredBalanceMetrics; + /** + * Manages balancer round results in order to report on the balancer activity in a configurable manner. + */ + private final AllocationBalancingRoundSummaryService balancerRoundSummaryService; // stats protected final CounterMetric computationsSubmitted = new CounterMetric(); @@ -132,6 +136,7 @@ public DesiredBalanceShardsAllocator( NodeAllocationStatsAndWeightsCalculator nodeAllocationStatsAndWeightsCalculator ) { this.desiredBalanceMetrics = new DesiredBalanceMetrics(telemetryProvider.getMeterRegistry()); + this.balancerRoundSummaryService = new AllocationBalancingRoundSummaryService(threadPool, clusterService.getClusterSettings()); this.delegateAllocator = delegateAllocator; this.threadPool = threadPool; this.reconciler = reconciler; @@ -320,6 +325,7 @@ private void setCurrentDesiredBalance(DesiredBalance newDesiredBalance) { } if (currentDesiredBalanceRef.compareAndSet(oldDesiredBalance, newDesiredBalance)) { + balancerRoundSummaryService.addBalancerRoundSummary(calculateBalancingRoundSummary(oldDesiredBalance, newDesiredBalance)); if (logger.isTraceEnabled()) { var diff = DesiredBalance.hasChanges(oldDesiredBalance, newDesiredBalance) ? "Diff: " + DesiredBalance.humanReadableDiff(oldDesiredBalance, newDesiredBalance) @@ -334,6 +340,13 @@ private void setCurrentDesiredBalance(DesiredBalance newDesiredBalance) { } } + /** + * Summarizes the work required to move from an old to new desired balance shard allocation. 
+ */ + private BalancingRoundSummary calculateBalancingRoundSummary(DesiredBalance oldDesiredBalance, DesiredBalance newDesiredBalance) { + return new BalancingRoundSummary(DesiredBalance.shardMovements(oldDesiredBalance, newDesiredBalance)); + } + protected void submitReconcileTask(DesiredBalance desiredBalance) { masterServiceTaskQueue.submitTask("reconcile-desired-balance", new ReconcileDesiredBalanceTask(desiredBalance), null); } diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index e9b9a5ea4ab9e..7397382866388 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -46,6 +46,7 @@ import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; +import org.elasticsearch.cluster.routing.allocation.allocator.AllocationBalancingRoundSummaryService; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceComputer; import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceReconciler; @@ -212,6 +213,8 @@ public void apply(Settings value, Settings current, Settings previous) { } public static final Set> BUILT_IN_CLUSTER_SETTINGS = Stream.of( + AllocationBalancingRoundSummaryService.ENABLE_BALANCER_ROUND_SUMMARIES_SETTING, + AllocationBalancingRoundSummaryService.BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING, AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING, BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryServiceTests.java new file mode 100644 index 0000000000000..337fad01f905b --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/AllocationBalancingRoundSummaryServiceTests.java @@ -0,0 +1,256 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.cluster.routing.allocation.allocator; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLog; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.Before; + +public class AllocationBalancingRoundSummaryServiceTests extends ESTestCase { + private static final Logger logger = LogManager.getLogger(AllocationBalancingRoundSummaryServiceTests.class); + + private static final String BALANCING_SUMMARY_MSG_PREFIX = "Balancing round summaries:*"; + + final Settings enabledSummariesSettings = Settings.builder() + .put(AllocationBalancingRoundSummaryService.ENABLE_BALANCER_ROUND_SUMMARIES_SETTING.getKey(), true) + .build(); + final Settings disabledDefaultEmptySettings = Settings.builder().build(); + final Settings enabledButNegativeIntervalSettings = Settings.builder() + .put(AllocationBalancingRoundSummaryService.ENABLE_BALANCER_ROUND_SUMMARIES_SETTING.getKey(), true) + .put(AllocationBalancingRoundSummaryService.BALANCER_ROUND_SUMMARIES_LOG_INTERVAL_SETTING.getKey(), TimeValue.MINUS_ONE) + .build(); + + ClusterSettings enabledClusterSettings = new ClusterSettings(enabledSummariesSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + ClusterSettings disabledDefaultEmptyClusterSettings = new ClusterSettings( + disabledDefaultEmptySettings, + ClusterSettings.BUILT_IN_CLUSTER_SETTINGS + ); + ClusterSettings enabledButNegativeIntervalClusterSettings = new ClusterSettings( + enabledButNegativeIntervalSettings, + ClusterSettings.BUILT_IN_CLUSTER_SETTINGS + ); + + // Construction parameters for the service. + + DeterministicTaskQueue deterministicTaskQueue; + ThreadPool testThreadPool; + + @Before + public void setUpThreadPool() { + deterministicTaskQueue = new DeterministicTaskQueue(); + testThreadPool = deterministicTaskQueue.getThreadPool(); + } + + /** + * Test that the service is disabled and no logging occurs when + * {@link AllocationBalancingRoundSummaryService#ENABLE_BALANCER_ROUND_SUMMARIES_SETTING} defaults to false. + */ + public void testServiceDisabledByDefault() { + var service = new AllocationBalancingRoundSummaryService(testThreadPool, disabledDefaultEmptyClusterSettings); + + try (var mockLog = MockLog.capture(AllocationBalancingRoundSummaryService.class)) { + /** + * Add a summary and check it is not logged. + */ + + service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.verifyNumberOfSummaries(0); // when summaries are disabled, summaries are not retained when added. 
+ mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "Running balancer summary logging", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + "*" + ) + ); + + if (deterministicTaskQueue.hasDeferredTasks()) { + deterministicTaskQueue.advanceTime(); + } + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + } + } + + public void testEnabledService() { + var service = new AllocationBalancingRoundSummaryService(testThreadPool, enabledClusterSettings); + + try (var mockLog = MockLog.capture(AllocationBalancingRoundSummaryService.class)) { + /** + * Add a summary and check the service logs a report on it. + */ + + service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.verifyNumberOfSummaries(1); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "Running balancer summary logging", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + BALANCING_SUMMARY_MSG_PREFIX + ) + ); + + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + + /** + * Add a second summary, check for more logging. + */ + + service.addBalancerRoundSummary(new BalancingRoundSummary(200)); + service.verifyNumberOfSummaries(1); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "Running balancer summary logging a second time", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + BALANCING_SUMMARY_MSG_PREFIX + ) + ); + + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + } + } + + /** + * The service should combine multiple summaries together into a single report when multiple summaries were added since the last report. + */ + public void testCombinedSummary() { + var service = new AllocationBalancingRoundSummaryService(testThreadPool, enabledClusterSettings); + + try (var mockLog = MockLog.capture(AllocationBalancingRoundSummaryService.class)) { + service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.addBalancerRoundSummary(new BalancingRoundSummary(100)); + service.verifyNumberOfSummaries(2); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "Running balancer summary logging of combined summaries", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + "*150*" + ) + ); + + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + } + } + + /** + * The service shouldn't log anything when there haven't been any summaries added since the last report. + */ + public void testNoSummariesToReport() { + var service = new AllocationBalancingRoundSummaryService(testThreadPool, enabledClusterSettings); + + try (var mockLog = MockLog.capture(AllocationBalancingRoundSummaryService.class)) { + /** + * First add some summaries to report, ensuring that the logging is active. 
+ */ + + service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.verifyNumberOfSummaries(1); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "Running balancer summary logging of combined summaries", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + BALANCING_SUMMARY_MSG_PREFIX + ) + ); + + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + + /** + * Now check that there are no further log messages because there were no further summaries added. + */ + + mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "No balancer round summary to log", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + "*" + ) + ); + + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + } + } + + /** + * Test that the service is disabled by setting {@link AllocationBalancingRoundSummaryService#ENABLE_BALANCER_ROUND_SUMMARIES_SETTING} + * to false. + */ + public void testEnableAndThenDisableService() { + var disabledSettingsUpdate = Settings.builder() + .put(AllocationBalancingRoundSummaryService.ENABLE_BALANCER_ROUND_SUMMARIES_SETTING.getKey(), false) + .build(); + ClusterSettings clusterSettings = new ClusterSettings(enabledSummariesSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + var service = new AllocationBalancingRoundSummaryService(testThreadPool, clusterSettings); + + try (var mockLog = MockLog.capture(AllocationBalancingRoundSummaryService.class)) { + /** + * Add some summaries, but then disable the service before logging occurs. Disabling the service should drain and discard any + * summaries waiting to be reported. + */ + + service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.verifyNumberOfSummaries(1); + + clusterSettings.applySettings(disabledSettingsUpdate); + service.verifyNumberOfSummaries(0); + + /** + * Verify that any additional summaries are not retained, since the service is disabled. + */ + + service.addBalancerRoundSummary(new BalancingRoundSummary(50)); + service.verifyNumberOfSummaries(0); + + // Check that the service never logged anything. + mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "Running balancer summary logging", + AllocationBalancingRoundSummaryService.class.getName(), + Level.INFO, + "*" + ) + ); + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + service.verifyNumberOfSummaries(0); + } + } + +} From bb67a7c4abad248e28661d3d03bec4e8e51948c2 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 3 Feb 2025 12:15:59 -0500 Subject: [PATCH 382/383] GEO: Comment that we are not removing warning (#121459) Change the warning message when you send a multifield under a geo field. We *still* ignore the multifields but we do not plan to remove them. It isn't worth breaking anyone. 
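For readers unfamiliar with multifields on spatial types, the sketch below is illustrative only and is not part of this patch: the standalone class, the method name, and the "location"/"raw" field names are invented for the example, and it assumes the XContentBuilder helpers from the Elasticsearch x-content library. It shows the kind of mapping the warning refers to: a geo_shape field that declares a "fields" section. Such a sub-field is accepted but ignored by the mapper, and the only observable effect is the deprecation warning whose wording this commit shortens.

import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;

public class GeoShapeMultiFieldMappingExample {
    /**
     * Builds a mapping for a hypothetical "location" geo_shape field that declares a
     * "raw" keyword multifield. The multifield is accepted but ignored; the only
     * effect is the "Adding multifields to [geo_shape] mappers has no effect" warning.
     */
    public static XContentBuilder mappingWithIgnoredMultiField() throws Exception {
        return XContentFactory.jsonBuilder()
            .startObject()
            .startObject("properties")
            .startObject("location")
            .field("type", "geo_shape")
            .startObject("fields")
            .startObject("raw")
            .field("type", "keyword")
            .endObject() // raw
            .endObject() // fields
            .endObject() // location
            .endObject() // properties
            .endObject();
    }
}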
---
 .../index/mapper/GeoShapeWithDocValuesFieldMapper.java | 6 +++++-
 .../xpack/spatial/index/mapper/PointFieldMapper.java   | 6 +++++-
 .../xpack/spatial/index/mapper/ShapeFieldMapper.java   | 6 +++++-
 .../mapper/GeoShapeWithDocValuesFieldMapperTests.java  | 2 +-
 .../spatial/index/mapper/PointFieldMapperTests.java    | 2 +-
 .../spatial/index/mapper/ShapeFieldMapperTests.java    | 2 +-
 6 files changed, 18 insertions(+), 6 deletions(-)

diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java
index f7c5f1b8072f3..62e68bfdb425c 100644
--- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java
+++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java
@@ -172,10 +172,14 @@ private FieldValues scriptValues() {
         @Override
         public GeoShapeWithDocValuesFieldMapper build(MapperBuilderContext context) {
             if (multiFieldsBuilder.hasMultiFields()) {
+                /*
+                 * We have no plans to fail on multifields because it isn't worth breaking
+                 * even the tiny fraction of users.
+                 */
                 DEPRECATION_LOGGER.warn(
                     DeprecationCategory.MAPPINGS,
                     "geo_shape_multifields",
-                    "Adding multifields to [geo_shape] mappers has no effect and will be forbidden in future"
+                    "Adding multifields to [geo_shape] mappers has no effect"
                 );
             }
             GeometryParser geometryParser = new GeometryParser(
diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java
index 65e54513e8c9e..fcfeca6301950 100644
--- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java
+++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java
@@ -100,10 +100,14 @@ private static CartesianPoint parseNullValue(Object nullValue, boolean ignoreZVa
         @Override
         public FieldMapper build(MapperBuilderContext context) {
             if (multiFieldsBuilder.hasMultiFields()) {
+                /*
+                 * We have no plans to fail on multifields because it isn't worth breaking
+                 * even the tiny fraction of users.
+                 */
                 DEPRECATION_LOGGER.warn(
                     DeprecationCategory.MAPPINGS,
                     "point_multifields",
-                    "Adding multifields to [point] mappers has no effect and will be forbidden in future"
+                    "Adding multifields to [point] mappers has no effect"
                 );
             }
             CartesianPointParser parser = new CartesianPointParser(
diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java
index 198e0ba3011bf..3c8127b6c6036 100644
--- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java
+++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java
@@ -104,10 +104,14 @@ protected Parameter[] getParameters() {
         @Override
         public ShapeFieldMapper build(MapperBuilderContext context) {
             if (multiFieldsBuilder.hasMultiFields()) {
+                /*
+                 * We have no plans to fail on multifields because it isn't worth breaking
+                 * even the tiny fraction of users.
+                 */
                 DEPRECATION_LOGGER.warn(
                     DeprecationCategory.MAPPINGS,
                     "shape_multifields",
-                    "Adding multifields to [shape] mappers has no effect and will be forbidden in future"
+                    "Adding multifields to [shape] mappers has no effect"
                 );
             }
             GeometryParser geometryParser = new GeometryParser(
diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java
index 712113b1960ef..a34f0ba2eae3e 100644
--- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java
+++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java
@@ -519,7 +519,7 @@ public void testMultiFieldsDeprecationWarning() throws Exception {
             b.startObject("keyword").field("type", "keyword").endObject();
             b.endObject();
         }));
-        assertWarnings("Adding multifields to [" + getFieldName() + "] mappers has no effect and will be forbidden in future");
+        assertWarnings("Adding multifields to [" + getFieldName() + "] mappers has no effect");
     }

     public void testSelfIntersectPolygon() throws IOException {
diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java
index 30a30bde51528..8f28b462afca4 100644
--- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java
+++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java
@@ -368,7 +368,7 @@ public void testMultiFieldsDeprecationWarning() throws Exception {
             b.startObject("keyword").field("type", "keyword").endObject();
             b.endObject();
         }));
-        assertWarnings("Adding multifields to [point] mappers has no effect and will be forbidden in future");
+        assertWarnings("Adding multifields to [point] mappers has no effect");
     }

     @Override
diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java
index 5d2624735bebe..61491b88a8a0b 100644
--- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java
+++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java
@@ -338,7 +338,7 @@ public void testMultiFieldsDeprecationWarning() throws Exception {
             b.startObject("keyword").field("type", "keyword").endObject();
             b.endObject();
         }));
-        assertWarnings("Adding multifields to [" + getFieldName() + "] mappers has no effect and will be forbidden in future");
+        assertWarnings("Adding multifields to [" + getFieldName() + "] mappers has no effect");
     }

     public void testSelfIntersectPolygon() throws IOException {

From 92d1d31eea496a014bd400dea727fe572f74a521 Mon Sep 17 00:00:00 2001
From: Mark Vieira
Date: Mon, 3 Feb 2025 09:59:20 -0800
Subject: [PATCH 383/383] Add Java 24 to testing matrix

---
 .buildkite/pipelines/periodic.template.yml | 1 +
 .buildkite/pipelines/periodic.yml          | 1 +
 2 files changed, 2 insertions(+)

diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml
index afde6bdf8e65d..4ebe8460c72d1 100644
--- a/.buildkite/pipelines/periodic.template.yml
+++ b/.buildkite/pipelines/periodic.template.yml
@@ -86,6 +86,7 @@ steps:
           ES_RUNTIME_JAVA:
             - openjdk21
             - openjdk23
+            - openjdk24
           GRADLE_TASK:
             - checkPart1
             - checkPart2
diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml
index d925f7e2bffbf..ff771061950e7 100644
--- a/.buildkite/pipelines/periodic.yml
+++ b/.buildkite/pipelines/periodic.yml
@@ -505,6 +505,7 @@ steps:
           ES_RUNTIME_JAVA:
             - openjdk21
             - openjdk23
+            - openjdk24
           GRADLE_TASK:
             - checkPart1
             - checkPart2