diff --git a/.gitmodules b/.gitmodules index 144499df..57b96344 100644 --- a/.gitmodules +++ b/.gitmodules @@ -50,3 +50,9 @@ [submodule "deeplearning4j/deeplearning4j"] path = deeplearning4j/deeplearning4j url = https://github.com/deeplearning4j/deeplearning4j.git +[submodule "vespa/vespa"] + path = vespa/vespa + url = https://github.com/vespa-engine/vespa.git +[submodule "elasticsearch/elasticsearch"] + path = elasticsearch/elasticsearch + url = https://github.com/elastic/elasticsearch.git diff --git a/elasticsearch/classes.csv b/elasticsearch/classes.csv new file mode 100755 index 00000000..cc047f1f --- /dev/null +++ b/elasticsearch/classes.csv @@ -0,0 +1,299 @@ +id,name,file,offset +0,org.elasticsearch.cluster.node.DiscoveryNode,server\src\main\java\org\elasticsearch\cluster\node\DiscoveryNode.java,1528 +1,org.elasticsearch.xpack.core.security.authc.support.mapper.TemplateRoleName,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\authc\support\mapper\TemplateRoleName.java,2019 +2,org.elasticsearch.common.transport.BoundTransportAddress,server\src\main\java\org\elasticsearch\common\transport\BoundTransportAddress.java,1030 +3,org.elasticsearch.index.seqno.GlobalCheckpointSyncAction,server\src\main\java\org\elasticsearch\index\seqno\GlobalCheckpointSyncAction.java,2115 +4,org.elasticsearch.search.SearchModule,server\src\main\java\org\elasticsearch\search\SearchModule.java,19088 +5,org.elasticsearch.xpack.core.ccr.action.FollowParameters,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ccr\action\FollowParameters.java,1033 +6,org.elasticsearch.action.termvectors.MultiTermVectorsRequest,server\src\main\java\org\elasticsearch\action\termvectors\MultiTermVectorsRequest.java,1608 +7,org.elasticsearch.cluster.routing.allocation.command.AllocationCommands,server\src\main\java\org\elasticsearch\cluster\routing\allocation\command\AllocationCommands.java,1511 +8,org.elasticsearch.index.get.ShardGetService,server\src\main\java\org\elasticsearch\index\get\ShardGetService.java,2753 +9,org.elasticsearch.geo.geometry.Polygon,libs\geo\src\main\java\org\elasticsearch\geo\geometry\Polygon.java,911 +10,org.elasticsearch.xpack.security.authc.TokenService,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authc\TokenService.java,6925 +11,org.elasticsearch.xpack.sql.analysis.analyzer.PreAnalyzer,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\analysis\analyzer\PreAnalyzer.java,528 +12,org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\watcher\execution\WatchExecutionContext.java,1334 +13,org.elasticsearch.gateway.MetaStateService,server\src\main\java\org\elasticsearch\gateway\MetaStateService.java,1474 +14,org.elasticsearch.cluster.metadata.RepositoryMetaData,server\src\main\java\org\elasticsearch\cluster\metadata\RepositoryMetaData.java,1026 +15,org.elasticsearch.index.analysis.NamedAnalyzer,server\src\main\java\org\elasticsearch\index\analysis\NamedAnalyzer.java,1072 +16,org.elasticsearch.cluster.metadata.IndexNameExpressionResolver,server\src\main\java\org\elasticsearch\cluster\metadata\IndexNameExpressionResolver.java,2237 +17,org.elasticsearch.script.mustache.SearchTemplateRequest,modules\lang-mustache\src\main\java\org\elasticsearch\script\mustache\SearchTemplateRequest.java,1789 +18,org.elasticsearch.analysis.common.WordDelimiterTokenFilterFactory,modules\analysis-common\src\main\java\org\elasticsearch\analysis\common\WordDelimiterTokenFilterFactory.java,2558 
+19,org.elasticsearch.painless.node.AExpression,modules\lang-painless\src\main\java\org\elasticsearch\painless\node\AExpression.java,1115 +20,org.elasticsearch.xpack.ml.action.TransportDeleteJobAction,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\action\TransportDeleteJobAction.java,4696 +21,org.elasticsearch.xpack.security.authc.ApiKeyService,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authc\ApiKeyService.java,4941 +22,org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\action\user\HasPrivilegesRequest.java,879 +23,org.elasticsearch.cluster.routing.allocation.RoutingAllocation,server\src\main\java\org\elasticsearch\cluster\routing\allocation\RoutingAllocation.java,1801 +24,org.elasticsearch.analysis.common.WordDelimiterGraphTokenFilterFactory,modules\analysis-common\src\main\java\org\elasticsearch\analysis\common\WordDelimiterGraphTokenFilterFactory.java,2531 +25,org.elasticsearch.cluster.block.ClusterBlock,server\src\main\java\org\elasticsearch\cluster\block\ClusterBlock.java,1388 +26,org.elasticsearch.common.lucene.search.XMoreLikeThis,server\src\main\java\org\elasticsearch\common\lucene\search\XMoreLikeThis.java,2803 +27,org.elasticsearch.common.bytes.BytesReference,server\src\main\java\org\elasticsearch\common\bytes\BytesReference.java,1400 +28,org.elasticsearch.index.shard.IndexShard,server\src\main\java\org\elasticsearch\index\shard\IndexShard.java,8533 +29,org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource,x-pack\plugin\monitoring\src\main\java\org\elasticsearch\xpack\monitoring\exporter\http\PublishableHttpResource.java,1263 +30,org.elasticsearch.index.Index,server\src\main\java\org\elasticsearch\index\Index.java,1364 +31,org.elasticsearch.search.fetch.subphase.highlight.UnifiedHighlighter,server\src\main\java\org\elasticsearch\search\fetch\subphase\highlight\UnifiedHighlighter.java,2394 +32,org.elasticsearch.indices.mapper.MapperRegistry,server\src\main\java\org\elasticsearch\indices\mapper\MapperRegistry.java,1232 +33,org.elasticsearch.common.compress.CompressedXContent,server\src\main\java\org\elasticsearch\common\compress\CompressedXContent.java,1528 +34,org.elasticsearch.xpack.sql.cli.command.CliSession,x-pack\plugin\sql\sql-cli\src\main\java\org\elasticsearch\xpack\sql\cli\command\CliSession.java,594 +35,org.elasticsearch.search.aggregations.Aggregator,server\src\main\java\org\elasticsearch\search\aggregations\Aggregator.java,1425 +36,org.elasticsearch.search.sort.SortOrder,server\src\main\java\org\elasticsearch\search\sort\SortOrder.java,1049 +37,org.elasticsearch.search.internal.ShardSearchLocalRequest,server\src\main\java\org\elasticsearch\search\internal\ShardSearchLocalRequest.java,1583 +38,org.elasticsearch.common.transport.TransportAddress,server\src\main\java\org\elasticsearch\common\transport\TransportAddress.java,1306 +39,org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse,server\src\main\java\org\elasticsearch\action\admin\indices\alias\get\GetAliasesResponse.java,1317 +40,org.elasticsearch.index.shard.RemoveCorruptedShardDataCommand,server\src\main\java\org\elasticsearch\index\shard\RemoveCorruptedShardDataCommand.java,3177 +41,org.elasticsearch.common.unit.ByteSizeValue,server\src\main\java\org\elasticsearch\common\unit\ByteSizeValue.java,1392 +42,org.elasticsearch.client.Response,client\rest\src\main\java\org\elasticsearch\client\Response.java,1175 
+43,org.elasticsearch.xpack.core.monitoring.MonitoredSystem,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\monitoring\MonitoredSystem.java,375 +44,org.elasticsearch.action.search.MultiSearchRequest,server\src\main\java\org\elasticsearch\action\search\MultiSearchRequest.java,2200 +45,org.elasticsearch.index.analysis.AnalysisMode,server\src\main\java\org\elasticsearch\index\analysis\AnalysisMode.java,833 +46,org.elasticsearch.xpack.core.dataframe.transforms.SourceConfig,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\dataframe\transforms\SourceConfig.java,1155 +47,org.elasticsearch.action.update.UpdateRequest,server\src\main\java\org\elasticsearch\action\update\UpdateRequest.java,2704 +48,org.elasticsearch.action.search.SearchRequest,server\src\main\java\org\elasticsearch\action\search\SearchRequest.java,1786 +49,org.elasticsearch.common.geo.GeoDistance,server\src\main\java\org\elasticsearch\common\geo\GeoDistance.java,1099 +50,org.elasticsearch.common.io.stream.StreamInput,server\src\main\java\org\elasticsearch\common\io\stream\StreamInput.java,2977 +51,org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\DiskThresholdDecider.java,2214 +52,org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\datafeed\DatafeedUpdate.java,1560 +53,org.elasticsearch.xpack.ml.MachineLearningFeatureSet,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\MachineLearningFeatureSet.java,2224 +54,org.elasticsearch.cli.Terminal,libs\cli\src\main\java\org\elasticsearch\cli\Terminal.java,1026 +55,org.elasticsearch.action.admin.indices.get.GetIndexResponse,server\src\main\java\org\elasticsearch\action\admin\indices\get\GetIndexResponse.java,2112 +56,org.elasticsearch.cluster.service.ClusterService,server\src\main\java\org\elasticsearch\cluster\service\ClusterService.java,1912 +57,org.elasticsearch.indices.analysis.HunspellService,server\src\main\java\org\elasticsearch\indices\analysis\HunspellService.java,1843 +58,org.elasticsearch.client.node.NodeClient,server\src\main\java\org\elasticsearch\client\node\NodeClient.java,1517 +59,org.elasticsearch.painless.Location,modules\lang-painless\src\main\java\org\elasticsearch\painless\Location.java,854 +60,org.elasticsearch.xpack.core.security.authc.Realm,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\authc\Realm.java,841 +61,org.elasticsearch.ElasticsearchException,server\src\main\java\org\elasticsearch\ElasticsearchException.java,2502 +62,org.elasticsearch.xpack.watcher.notification.email.attachment.ReportingAttachment,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\notification\email\attachment\ReportingAttachment.java,686 +63,org.elasticsearch.xpack.core.ml.job.config.JobUpdate,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\config\JobUpdate.java,1155 +64,org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest,server\src\main\java\org\elasticsearch\action\admin\cluster\configuration\AddVotingConfigExclusionsRequest.java,1500 +65,org.elasticsearch.search.SearchService,server\src\main\java\org\elasticsearch\search\SearchService.java,6111 +66,org.elasticsearch.cluster.routing.allocation.decider.Decision,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\Decision.java,1452 
+67,org.elasticsearch.action.admin.cluster.state.ClusterStateRequest,server\src\main\java\org\elasticsearch\action\admin\cluster\state\ClusterStateRequest.java,1315 +68,org.elasticsearch.index.mapper.ObjectMapper,server\src\main\java\org\elasticsearch\index\mapper\ObjectMapper.java,1743 +69,org.elasticsearch.repositories.RepositoriesService,server\src\main\java\org\elasticsearch\repositories\RepositoriesService.java,2315 +70,org.elasticsearch.action.termvectors.TermVectorsRequest,server\src\main\java\org\elasticsearch\action\termvectors\TermVectorsRequest.java,2314 +71,org.elasticsearch.cluster.coordination.Join,server\src\main\java\org\elasticsearch\cluster\coordination\Join.java,1085 +72,org.elasticsearch.action.admin.cluster.snapshots.get.TransportGetSnapshotsAction,server\src\main\java\org\elasticsearch\action\admin\cluster\snapshots\get\TransportGetSnapshotsAction.java,2090 +73,org.elasticsearch.client.Request,client\rest\src\main\java\org\elasticsearch\client\Request.java,1082 +74,org.elasticsearch.cluster.routing.RoutingTable,server\src\main\java\org\elasticsearch\cluster\routing\RoutingTable.java,2094 +75,org.elasticsearch.client.ml.datafeed.DatafeedUpdate,client\rest-high-level\src\main\java\org\elasticsearch\client\ml\datafeed\DatafeedUpdate.java,1942 +76,org.elasticsearch.xpack.watcher.notification.email.attachment.ReportingAttachmentParser,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\notification\email\attachment\ReportingAttachmentParser.java,2068 +77,org.elasticsearch.indices.recovery.RecoveryState,server\src\main\java\org\elasticsearch\indices\recovery\RecoveryState.java,1809 +78,org.elasticsearch.protocol.xpack.graph.Hop,x-pack\plugin\core\src\main\java\org\elasticsearch\protocol\xpack\graph\Hop.java,850 +79,org.elasticsearch.transport.TransportRequestOptions,server\src\main\java\org\elasticsearch\transport\TransportRequestOptions.java,877 +80,org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\process\autodetect\state\DataCounts.java,965 +81,org.elasticsearch.index.mapper.MappedFieldType,server\src\main\java\org\elasticsearch\index\mapper\MappedFieldType.java,2492 +82,org.elasticsearch.action.admin.indices.shrink.ResizeRequest,server\src\main\java\org\elasticsearch\action\admin\indices\shrink\ResizeRequest.java,1846 +83,org.elasticsearch.grok.Grok,libs\grok\src\main\java\org\elasticsearch\grok\Grok.java,1438 +84,org.elasticsearch.rest.RestRequest,server\src\main\java\org\elasticsearch\rest\RestRequest.java,2149 +85,org.elasticsearch.index.analysis.IcuTokenizerFactory,plugins\analysis-icu\src\main\java\org\elasticsearch\index\analysis\IcuTokenizerFactory.java,1663 +86,org.elasticsearch.persistent.PersistentTasksCustomMetaData,server\src\main\java\org\elasticsearch\persistent\PersistentTasksCustomMetaData.java,2388 +87,org.elasticsearch.cluster.routing.ShardRoutingState,server\src\main\java\org\elasticsearch\cluster\routing\ShardRoutingState.java,835 +88,org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest,server\src\main\java\org\elasticsearch\action\admin\indices\template\put\PutIndexTemplateRequest.java,3059 +89,org.elasticsearch.index.mapper.CompletionFieldMapper,server\src\main\java\org\elasticsearch\index\mapper\CompletionFieldMapper.java,3005 +90,org.elasticsearch.protocol.xpack.graph.Connection,x-pack\plugin\core\src\main\java\org\elasticsearch\protocol\xpack\graph\Connection.java,990 
+91,org.elasticsearch.xpack.ml.MlConfigMigrationEligibilityCheck,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\MlConfigMigrationEligibilityCheck.java,878 +92,org.elasticsearch.Version,server\src\main\java\org\elasticsearch\Version.java,1523 +93,org.elasticsearch.xpack.monitoring.exporter.http.ClusterAlertHttpResource,x-pack\plugin\monitoring\src\main\java\org\elasticsearch\xpack\monitoring\exporter\http\ClusterAlertHttpResource.java,1190 +94,org.elasticsearch.cluster.routing.allocation.command.AbstractAllocateAllocationCommand,server\src\main\java\org\elasticsearch\cluster\routing\allocation\command\AbstractAllocateAllocationCommand.java,1873 +95,org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\ThrottlingAllocationDecider.java,1618 +96,org.elasticsearch.common.blobstore.url.URLBlobStore,modules\repository-url\src\main\java\org\elasticsearch\common\blobstore\url\URLBlobStore.java,1279 +97,org.elasticsearch.action.search.SearchPhaseController,server\src\main\java\org\elasticsearch\action\search\SearchPhaseController.java,2958 +98,org.elasticsearch.indices.flush.SyncedFlushService,server\src\main\java\org\elasticsearch\indices\flush\SyncedFlushService.java,3358 +99,org.elasticsearch.xpack.security.authz.AuthorizationService,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authz\AuthorizationService.java,4981 +100,org.elasticsearch.action.admin.indices.create.CreateIndexRequest,server\src\main\java\org\elasticsearch\action\admin\indices\create\CreateIndexRequest.java,2866 +101,org.elasticsearch.xpack.ml.filestructurefinder.TimeoutChecker,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\filestructurefinder\TimeoutChecker.java,877 +102,org.elasticsearch.script.ScriptService,server\src\main\java\org\elasticsearch\script\ScriptService.java,2680 +103,org.elasticsearch.search.fetch.FetchSearchResult,server\src\main\java\org\elasticsearch\search\fetch\FetchSearchResult.java,1218 +104,org.elasticsearch.xpack.watcher.notification.email.Authentication,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\notification\email\Authentication.java,397 +105,org.elasticsearch.common.xcontent.XContentType,libs\x-content\src\main\java\org\elasticsearch\common\xcontent\XContentType.java,1129 +106,org.elasticsearch.cluster.routing.IndexRoutingTable,server\src\main\java\org\elasticsearch\cluster\routing\IndexRoutingTable.java,2110 +107,org.elasticsearch.cluster.service.MasterService,server\src\main\java\org\elasticsearch\cluster\service\MasterService.java,3302 +108,org.elasticsearch.xpack.sql.cli.Cli,x-pack\plugin\sql\sql-cli\src\main\java\org\elasticsearch\xpack\sql\cli\Cli.java,1587 +109,org.elasticsearch.cluster.metadata.MetaData,server\src\main\java\org\elasticsearch\cluster\metadata\MetaData.java,3612 +110,org.elasticsearch.xpack.security.authc.support.DelegatedAuthorizationSupport,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authc\support\DelegatedAuthorizationSupport.java,1325 +111,org.elasticsearch.painless.Globals,modules\lang-painless\src\main\java\org\elasticsearch\painless\Globals.java,952 +112,org.elasticsearch.cluster.routing.allocation.RerouteExplanation,server\src\main\java\org\elasticsearch\cluster\routing\allocation\RerouteExplanation.java,1312 +113,org.elasticsearch.rest.RestController,server\src\main\java\org\elasticsearch\rest\RestController.java,2645 
+114,org.elasticsearch.xpack.dataframe.transforms.pivot.Pivot,x-pack\plugin\data-frame\src\main\java\org\elasticsearch\xpack\dataframe\transforms\pivot\Pivot.java,1894 +115,org.elasticsearch.xpack.deprecation.RestDeprecationInfoAction,x-pack\plugin\deprecation\src\main\java\org\elasticsearch\xpack\deprecation\RestDeprecationInfoAction.java,949 +116,org.elasticsearch.xpack.monitoring.exporter.http.WatcherExistsHttpResource,x-pack\plugin\monitoring\src\main\java\org\elasticsearch\xpack\monitoring\exporter\http\WatcherExistsHttpResource.java,1007 +117,org.elasticsearch.index.reindex.BulkByScrollResponse,server\src\main\java\org\elasticsearch\index\reindex\BulkByScrollResponse.java,2122 +118,org.elasticsearch.xpack.ml.job.JobManager,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\JobManager.java,3655 +119,org.elasticsearch.xpack.graph.rest.action.RestGraphAction,x-pack\plugin\graph\src\main\java\org\elasticsearch\xpack\graph\rest\action\RestGraphAction.java,1665 +120,org.elasticsearch.search.fetch.subphase.FetchSourceContext,server\src\main\java\org\elasticsearch\search\fetch\subphase\FetchSourceContext.java,1630 +121,org.elasticsearch.index.mapper.DocumentMapper,server\src\main\java\org\elasticsearch\index\mapper\DocumentMapper.java,2243 +122,org.elasticsearch.xpack.security.action.realm.TransportClearRealmCacheAction,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\action\realm\TransportClearRealmCacheAction.java,1248 +123,org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService,server\src\main\java\org\elasticsearch\cluster\metadata\MetaDataIndexUpgradeService.java,1942 +124,org.elasticsearch.cluster.metadata.MetaDataUpdateSettingsService,server\src\main\java\org\elasticsearch\cluster\metadata\MetaDataUpdateSettingsService.java,2579 +125,org.elasticsearch.action.termvectors.TermVectorsFields,server\src\main\java\org\elasticsearch\action\termvectors\TermVectorsFields.java,1712 +126,org.elasticsearch.xpack.core.security.user.User,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\user\User.java,757 +127,org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\SameShardAllocationDecider.java,1293 +128,org.elasticsearch.client.indices.CreateIndexRequest,client\rest-high-level\src\main\java\org\elasticsearch\client\indices\CreateIndexRequest.java,2128 +129,org.elasticsearch.action.admin.indices.alias.Alias,server\src\main\java\org\elasticsearch\action\admin\indices\alias\Alias.java,1719 +130,org.elasticsearch.xpack.security.transport.nio.SSLDriver,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\transport\nio\SSLDriver.java,770 +131,org.elasticsearch.xpack.watcher.execution.ExecutionService,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\execution\ExecutionService.java,4188 +132,org.elasticsearch.node.Node,server\src\main\java\org\elasticsearch\node\Node.java,8851 +133,org.elasticsearch.cluster.metadata.IndexMetaData,server\src\main\java\org\elasticsearch\cluster\metadata\IndexMetaData.java,3657 +134,org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\persistence\JobDataCountsPersister.java,1256 +135,org.elasticsearch.xpack.core.ml.job.results.Bucket,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\results\Bucket.java,1126 
+136,org.elasticsearch.env.NodeRepurposeCommand,server\src\main\java\org\elasticsearch\env\NodeRepurposeCommand.java,1662 +137,org.elasticsearch.xpack.core.dataframe.transforms.pivot.DateHistogramGroupSource,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\dataframe\transforms\pivot\DateHistogramGroupSource.java,1203 +138,org.elasticsearch.xpack.ccr.CcrLicenseChecker,x-pack\plugin\ccr\src\main\java\org\elasticsearch\xpack\ccr\CcrLicenseChecker.java,2941 +139,org.elasticsearch.index.shard.RemoveCorruptedLuceneSegmentsAction,server\src\main\java\org\elasticsearch\index\shard\RemoveCorruptedLuceneSegmentsAction.java,1144 +140,org.elasticsearch.xpack.core.ml.job.config.Job,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\config\Job.java,1877 +141,org.elasticsearch.xpack.sql.analysis.index.IndexResolver,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\analysis\index\IndexResolver.java,2616 +142,org.elasticsearch.license.PutLicenseRequest,x-pack\plugin\core\src\main\java\org\elasticsearch\license\PutLicenseRequest.java,712 +143,org.elasticsearch.index.store.Store,server\src\main\java\org\elasticsearch\index\store\Store.java,4799 +144,org.elasticsearch.xpack.ml.job.persistence.JobConfigProvider,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\persistence\JobConfigProvider.java,4023 +145,org.elasticsearch.action.support.ActiveShardCount,server\src\main\java\org\elasticsearch\action\support\ActiveShardCount.java,1407 +146,org.elasticsearch.action.index.IndexRequest,server\src\main\java\org\elasticsearch\action\index\IndexRequest.java,2652 +147,org.elasticsearch.index.reindex.ReindexRequest,server\src\main\java\org\elasticsearch\index\reindex\ReindexRequest.java,1743 +148,org.elasticsearch.index.reindex.UpdateByQueryRequest,server\src\main\java\org\elasticsearch\index\reindex\UpdateByQueryRequest.java,1338 +149,org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService,server\src\main\java\org\elasticsearch\indices\breaker\HierarchyCircuitBreakerService.java,1752 +150,org.elasticsearch.common.util.concurrent.ThreadContext,server\src\main\java\org\elasticsearch\common\util\concurrent\ThreadContext.java,2224 +151,org.elasticsearch.xpack.watcher.trigger.schedule.support.MonthTimes,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\trigger\schedule\support\MonthTimes.java,922 +152,org.elasticsearch.search.internal.SearchContext,server\src\main\java\org\elasticsearch\search\internal\SearchContext.java,3267 +153,org.elasticsearch.cluster.routing.ShardRouting,server\src\main\java\org\elasticsearch\cluster\routing\ShardRouting.java,1572 +154,org.elasticsearch.common.text.Text,server\src\main\java\org\elasticsearch\common\text\Text.java,1164 +155,org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\results\AnomalyRecord.java,1277 +156,org.elasticsearch.index.shard.ShardId,server\src\main\java\org\elasticsearch\index\shard\ShardId.java,1238 +157,org.elasticsearch.xpack.monitoring.exporter.Exporters,x-pack\plugin\monitoring\src\main\java\org\elasticsearch\xpack\monitoring\exporter\Exporters.java,1725 +158,org.elasticsearch.cluster.routing.RecoverySource,server\src\main\java\org\elasticsearch\cluster\routing\RecoverySource.java,1303 +159,org.elasticsearch.common.inject.internal.Errors,server\src\main\java\org\elasticsearch\common\inject\internal\Errors.java,1963 
+160,org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkRequest,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\monitoring\action\MonitoringBulkRequest.java,968 +161,org.elasticsearch.index.mapper.MapperService,server\src\main\java\org\elasticsearch\index\mapper\MapperService.java,3218 +162,org.elasticsearch.cluster.SnapshotsInProgress,server\src\main\java\org\elasticsearch\cluster\SnapshotsInProgress.java,1786 +163,org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\process\autodetect\state\ModelSizeStats.java,1121 +164,org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\watcher\support\xcontent\XContentSource.java,1167 +165,org.elasticsearch.index.query.QueryShardContext,server\src\main\java\org\elasticsearch\index\query\QueryShardContext.java,2969 +166,org.elasticsearch.xpack.core.ssl.SSLService,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ssl\SSLService.java,2116 +167,org.elasticsearch.rest.RestStatus,server\src\main\java\org\elasticsearch\rest\RestStatus.java,1129 +168,org.elasticsearch.script.ScriptType,server\src\main\java\org\elasticsearch\script\ScriptType.java,1088 +169,org.elasticsearch.cluster.service.ClusterApplierService,server\src\main\java\org\elasticsearch\cluster\service\ClusterApplierService.java,3103 +170,org.elasticsearch.cluster.ClusterState,server\src\main\java\org\elasticsearch\cluster\ClusterState.java,3373 +171,org.elasticsearch.xpack.watcher.notification.email.EmailService,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\notification\email\EmailService.java,1062 +172,org.elasticsearch.index.IndexSettings,server\src\main\java\org\elasticsearch\index\IndexSettings.java,1743 +173,org.elasticsearch.action.bulk.BulkRequest,server\src\main\java\org\elasticsearch\action\bulk\BulkRequest.java,2145 +174,org.elasticsearch.xpack.sql.expression.Attribute,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\expression\Attribute.java,625 +175,org.elasticsearch.xpack.watcher.trigger.schedule.support.DayTimes,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\trigger\schedule\support\DayTimes.java,794 +176,org.elasticsearch.index.mapper.ParseContext,server\src\main\java\org\elasticsearch\index\mapper\ParseContext.java,1408 +177,org.elasticsearch.index.mapper.RootObjectMapper,server\src\main\java\org\elasticsearch\index\mapper\RootObjectMapper.java,1599 +178,org.elasticsearch.xpack.ml.job.process.normalizer.output.NormalizerResultHandler,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\process\normalizer\output\NormalizerResultHandler.java,1013 +179,org.elasticsearch.cluster.metadata.IndexGraveyard,server\src\main\java\org\elasticsearch\cluster\metadata\IndexGraveyard.java,1883 +180,org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectRealm,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authc\oidc\OpenIdConnectRealm.java,4822 +181,org.elasticsearch.xpack.core.security.authc.Authentication,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\authc\Authentication.java,1008 +182,org.elasticsearch.gateway.TransportNodesListGatewayMetaState,server\src\main\java\org\elasticsearch\gateway\TransportNodesListGatewayMetaState.java,1973 
+183,org.elasticsearch.action.admin.indices.validate.query.TransportValidateQueryAction,server\src\main\java\org\elasticsearch\action\admin\indices\validate\query\TransportValidateQueryAction.java,2745 +184,org.elasticsearch.common.inject.spi.Message,server\src\main\java\org\elasticsearch\common\inject\spi\Message.java,890 +185,org.elasticsearch.transport.RemoteClusterService,server\src\main\java\org\elasticsearch\transport\RemoteClusterService.java,2418 +186,org.elasticsearch.cluster.routing.UnassignedInfo,server\src\main\java\org\elasticsearch\cluster\routing\UnassignedInfo.java,1767 +187,org.elasticsearch.painless.node.SFunction,modules\lang-painless\src\main\java\org\elasticsearch\painless\node\SFunction.java,1709 +188,org.elasticsearch.cluster.block.ClusterBlocks,server\src\main\java\org\elasticsearch\cluster\block\ClusterBlocks.java,1742 +189,org.elasticsearch.index.VersionType,server\src\main\java\org\elasticsearch\index\VersionType.java,1095 +190,org.elasticsearch.example.CustomAuthorizationEngine,plugins\examples\security-authorization-engine\src\main\java\org\elasticsearch\example\CustomAuthorizationEngine.java,2576 +191,org.elasticsearch.xpack.ml.action.TransportGetDatafeedsAction,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\action\TransportGetDatafeedsAction.java,1515 +192,org.elasticsearch.search.aggregations.LeafBucketCollector,server\src\main\java\org\elasticsearch\search\aggregations\LeafBucketCollector.java,1028 +193,org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\FilterAllocationDecider.java,1711 +194,org.elasticsearch.client.ParentTaskAssigningClient,server\src\main\java\org\elasticsearch\client\ParentTaskAssigningClient.java,1138 +195,org.elasticsearch.xpack.core.security.action.CreateApiKeyRequest,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\action\CreateApiKeyRequest.java,959 +196,org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\common\http\HttpRequestTemplate.java,1335 +197,org.elasticsearch.client.security.support.expressiondsl.parser.RoleMapperExpressionParser,client\rest-high-level\src\main\java\org\elasticsearch\client\security\support\expressiondsl\parser\RoleMapperExpressionParser.java,1753 +198,org.elasticsearch.xpack.core.watcher.transform.chain.ExecutableChainTransform,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\watcher\transform\chain\ExecutableChainTransform.java,1030 +199,org.elasticsearch.repositories.RepositoryData,server\src\main\java\org\elasticsearch\repositories\RepositoryData.java,1623 +200,org.elasticsearch.xpack.sql.querydsl.container.Sort,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\querydsl\container\Sort.java,483 +201,org.elasticsearch.index.analysis.AnalysisRegistry,server\src\main\java\org\elasticsearch\index\analysis\AnalysisRegistry.java,1813 +202,org.elasticsearch.common.settings.Settings,server\src\main\java\org\elasticsearch\common\settings\Settings.java,3108 +203,org.elasticsearch.env.Environment,server\src\main\java\org\elasticsearch\env\Environment.java,1544 +204,org.elasticsearch.search.SearchHits,server\src\main\java\org\elasticsearch\search\SearchHits.java,1703 +205,org.elasticsearch.common.inject.internal.ProviderMethodsModule,server\src\main\java\org\elasticsearch\common\inject\internal\ProviderMethodsModule.java,1382 
+206,org.elasticsearch.painless.lookup.PainlessMethod,modules\lang-painless\src\main\java\org\elasticsearch\painless\lookup\PainlessMethod.java,1021 +207,org.elasticsearch.nio.InboundChannelBuffer,libs\nio\src\main\java\org\elasticsearch\nio\InboundChannelBuffer.java,1101 +208,org.elasticsearch.index.search.MatchQuery,server\src\main\java\org\elasticsearch\index\search\MatchQuery.java,3227 +209,org.elasticsearch.xpack.watcher.WatcherLifeCycleService,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\WatcherLifeCycleService.java,1557 +210,org.elasticsearch.search.aggregations.bucket.BucketsAggregator,server\src\main\java\org\elasticsearch\search\aggregations\bucket\BucketsAggregator.java,1641 +211,org.elasticsearch.xpack.security.authc.saml.SamlMetadataCommand,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authc\saml\SamlMetadataCommand.java,2820 +212,org.elasticsearch.index.reindex.BulkIndexByScrollResponseContentListener,modules\reindex\src\main\java\org\elasticsearch\index\reindex\BulkIndexByScrollResponseContentListener.java,1383 +213,org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\NodeVersionAllocationDecider.java,1226 +214,org.elasticsearch.xpack.sql.cli.command.AbstractServerCliCommand,x-pack\plugin\sql\sql-cli\src\main\java\org\elasticsearch\xpack\sql\cli\command\AbstractServerCliCommand.java,345 +215,org.elasticsearch.painless.antlr.Walker,modules\lang-painless\src\main\java\org\elasticsearch\painless\antlr\Walker.java,9558 +216,org.elasticsearch.common.unit.DistanceUnit,server\src\main\java\org\elasticsearch\common\unit\DistanceUnit.java,1070 +217,org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\calendars\ScheduledEvent.java,1511 +218,org.elasticsearch.common.io.stream.StreamOutput,server\src\main\java\org\elasticsearch\common\io\stream\StreamOutput.java,2926 +219,org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\persistence\JobResultsProvider.java,7275 +220,org.elasticsearch.ingest.CompoundProcessor,server\src\main\java\org\elasticsearch\ingest\CompoundProcessor.java,1165 +221,org.elasticsearch.common.geo.GeoPoint,server\src\main\java\org\elasticsearch\common\geo\GeoPoint.java,1465 +222,org.elasticsearch.xpack.sql.querydsl.container.QueryContainer,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\querydsl\container\QueryContainer.java,2471 +223,org.elasticsearch.action.admin.indices.upgrade.post.TransportUpgradeAction,server\src\main\java\org\elasticsearch\action\admin\indices\upgrade\post\TransportUpgradeAction.java,2374 +224,org.elasticsearch.indices.recovery.RecoveriesCollection,server\src\main\java\org\elasticsearch\indices\recovery\RecoveriesCollection.java,1611 +225,org.elasticsearch.ingest.IngestDocument,server\src\main\java\org\elasticsearch\ingest\IngestDocument.java,1668 +226,org.elasticsearch.common.util.concurrent.CountDown,server\src\main\java\org\elasticsearch\common\util\concurrent\CountDown.java,937 +227,org.elasticsearch.protocol.xpack.graph.Vertex,x-pack\plugin\core\src\main\java\org\elasticsearch\protocol\xpack\graph\Vertex.java,932 +228,org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\results\CategoryDefinition.java,961 
+229,org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionParser,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\authc\support\mapper\expressiondsl\ExpressionParser.java,920 +230,org.elasticsearch.xpack.security.action.saml.TransportSamlPrepareAuthenticationAction,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\action\saml\TransportSamlPrepareAuthenticationAction.java,1326 +231,org.elasticsearch.geo.geometry.LinearRing,libs\geo\src\main\java\org\elasticsearch\geo\geometry\LinearRing.java,831 +232,org.elasticsearch.xpack.watcher.notification.email.Profile,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\notification\email\Profile.java,637 +233,org.elasticsearch.index.engine.Segment,server\src\main\java\org\elasticsearch\index\engine\Segment.java,1677 +234,org.elasticsearch.index.reindex.DeleteByQueryRequest,server\src\main\java\org\elasticsearch\index\reindex\DeleteByQueryRequest.java,1424 +235,org.elasticsearch.xpack.ml.filestructurefinder.FileStructureFinderManager,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\filestructurefinder\FileStructureFinderManager.java,1131 +236,org.elasticsearch.xpack.security.authc.saml.SamlRealm,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authc\saml\SamlRealm.java,7589 +237,org.elasticsearch.common.lucene.search.MoreLikeThisQuery,server\src\main\java\org\elasticsearch\common\lucene\search\MoreLikeThisQuery.java,1799 +238,org.elasticsearch.cluster.routing.RoutingNodes,server\src\main\java\org\elasticsearch\cluster\routing\RoutingNodes.java,1984 +239,org.elasticsearch.protocol.xpack.graph.GraphExploreRequest,x-pack\plugin\core\src\main\java\org\elasticsearch\protocol\xpack\graph\GraphExploreRequest.java,1302 +240,org.elasticsearch.cluster.routing.RoutingNode,server\src\main\java\org\elasticsearch\cluster\routing\RoutingNode.java,1117 +241,org.elasticsearch.index.store.StoreFileMetaData,server\src\main\java\org\elasticsearch\index\store\StoreFileMetaData.java,1162 +242,org.elasticsearch.xpack.ccr.repository.CcrRepository,x-pack\plugin\ccr\src\main\java\org\elasticsearch\xpack\ccr\repository\CcrRepository.java,5176 +243,org.elasticsearch.index.shard.ShardPath,server\src\main\java\org\elasticsearch\index\shard\ShardPath.java,1392 +244,org.elasticsearch.persistent.PersistentTasksClusterService,server\src\main\java\org\elasticsearch\persistent\PersistentTasksClusterService.java,2057 +245,org.elasticsearch.action.bulk.Retry,server\src\main\java\org\elasticsearch\action\bulk\Retry.java,1421 +246,org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplanation,server\src\main\java\org\elasticsearch\action\admin\cluster\allocation\ClusterAllocationExplanation.java,1832 +247,org.elasticsearch.xpack.security.action.saml.TransportSamlInvalidateSessionAction,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\action\saml\TransportSamlInvalidateSessionAction.java,1839 +248,org.elasticsearch.action.ActionResponse,server\src\main\java\org\elasticsearch\action\ActionResponse.java,1020 +249,org.elasticsearch.action.bulk.BulkProcessor,server\src\main\java\org\elasticsearch\action\bulk\BulkProcessor.java,1887 +250,org.elasticsearch.xpack.sql.client.ConnectionConfiguration,x-pack\plugin\sql\sql-client\src\main\java\org\elasticsearch\xpack\sql\client\ConnectionConfiguration.java,660 +251,org.elasticsearch.env.NodeEnvironment,server\src\main\java\org\elasticsearch\env\NodeEnvironment.java,3476 
+252,org.elasticsearch.tasks.Task,server\src\main\java\org\elasticsearch\tasks\Task.java,1148 +253,org.elasticsearch.transport.ConnectionProfile,server\src\main\java\org\elasticsearch\transport\ConnectionProfile.java,1253 +254,org.elasticsearch.indices.recovery.PeerRecoveryTargetService,server\src\main\java\org\elasticsearch\indices\recovery\PeerRecoveryTargetService.java,3603 +255,org.elasticsearch.cluster.ClusterInfo,server\src\main\java\org\elasticsearch\cluster\ClusterInfo.java,1383 +256,org.elasticsearch.xpack.sql.plan.logical.LogicalPlan,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\plan\logical\LogicalPlan.java,538 +257,org.elasticsearch.action.support.IndicesOptions,server\src\main\java\org\elasticsearch\action\support\IndicesOptions.java,1528 +258,org.elasticsearch.cluster.DiskUsage,server\src\main\java\org\elasticsearch\cluster\DiskUsage.java,1258 +259,org.elasticsearch.xpack.monitoring.exporter.ExportBulk,x-pack\plugin\monitoring\src\main\java\org\elasticsearch\xpack\monitoring\exporter\ExportBulk.java,753 +260,org.elasticsearch.cluster.routing.allocation.AllocationService,server\src\main\java\org\elasticsearch\cluster\routing\allocation\AllocationService.java,2570 +261,org.elasticsearch.common.blobstore.BlobPath,server\src\main\java\org\elasticsearch\common\blobstore\BlobPath.java,944 +262,org.elasticsearch.xpack.core.watcher.history.WatchRecord,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\watcher\history\WatchRecord.java,1545 +263,org.elasticsearch.xpack.core.ml.job.results.AnomalyCause,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\results\AnomalyCause.java,755 +264,org.elasticsearch.client.indices.PutIndexTemplateRequest,client\rest-high-level\src\main\java\org\elasticsearch\client\indices\PutIndexTemplateRequest.java,2444 +265,org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\common\text\TextTemplateEngine.java,628 +266,org.elasticsearch.xpack.watcher.common.text.TextTemplate,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\common\text\TextTemplate.java,749 +267,org.elasticsearch.common.blobstore.fs.FsBlobStore,server\src\main\java\org\elasticsearch\common\blobstore\fs\FsBlobStore.java,1288 +268,org.elasticsearch.snapshots.Snapshot,server\src\main\java\org\elasticsearch\snapshots\Snapshot.java,1048 +269,org.elasticsearch.common.xcontent.NamedXContentRegistry,libs\x-content\src\main\java\org\elasticsearch\common\xcontent\NamedXContentRegistry.java,1229 +270,org.elasticsearch.rest.BaseRestHandler,server\src\main\java\org\elasticsearch\rest\BaseRestHandler.java,1756 +271,org.elasticsearch.search.SearchHit,server\src\main\java\org\elasticsearch\search\SearchHit.java,3591 +272,org.elasticsearch.cluster.node.DiscoveryNodes,server\src\main\java\org\elasticsearch\cluster\node\DiscoveryNodes.java,1722 +273,org.elasticsearch.script.mustache.MultiSearchTemplateRequest,modules\lang-mustache\src\main\java\org\elasticsearch\script\mustache\MultiSearchTemplateRequest.java,1721 +274,org.elasticsearch.transport.TransportService,server\src\main\java\org\elasticsearch\transport\TransportService.java,3049 +275,org.elasticsearch.indices.IndicesService,server\src\main\java\org\elasticsearch\indices\IndicesService.java,8097 +276,org.elasticsearch.analysis.common.MinHashTokenFilterFactory,modules\analysis-common\src\main\java\org\elasticsearch\analysis\common\MinHashTokenFilterFactory.java,1202 
+277,org.elasticsearch.gateway.GatewayMetaState,server\src\main\java\org\elasticsearch\gateway\GatewayMetaState.java,2698 +278,org.elasticsearch.cluster.metadata.Manifest,server\src\main\java\org\elasticsearch\cluster\metadata\Manifest.java,1488 +279,org.elasticsearch.xpack.security.action.oidc.TransportOpenIdConnectPrepareAuthenticationAction,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\action\oidc\TransportOpenIdConnectPrepareAuthenticationAction.java,1345 +280,org.elasticsearch.common.logging.DeprecationLogger,server\src\main\java\org\elasticsearch\common\logging\DeprecationLogger.java,1535 +281,org.elasticsearch.index.get.GetResult,server\src\main\java\org\elasticsearch\index\get\GetResult.java,2152 +282,org.elasticsearch.search.fetch.FetchPhase,server\src\main\java\org\elasticsearch\search\fetch\FetchPhase.java,3005 +283,org.elasticsearch.cluster.routing.allocation.RoutingExplanations,server\src\main\java\org\elasticsearch\cluster\routing\allocation\RoutingExplanations.java,1349 +284,org.elasticsearch.common.ParseField,libs\x-content\src\main\java\org\elasticsearch\common\ParseField.java,991 +285,org.elasticsearch.transport.TransportRequest,server\src\main\java\org\elasticsearch\transport\TransportRequest.java,1057 +286,org.elasticsearch.common.unit.TimeValue,libs\core\src\main\java\org\elasticsearch\common\unit\TimeValue.java,920 +287,org.elasticsearch.tasks.TaskResultsService,server\src\main\java\org\elasticsearch\tasks\TaskResultsService.java,2803 +288,org.elasticsearch.cluster.routing.IndexShardRoutingTable,server\src\main\java\org\elasticsearch\cluster\routing\IndexShardRoutingTable.java,1730 +289,org.elasticsearch.search.aggregations.support.AggregationPath,server\src\main\java\org\elasticsearch\search\aggregations\support\AggregationPath.java,1586 +290,org.elasticsearch.client.indices.rollover.RolloverRequest,client\rest-high-level\src\main\java\org\elasticsearch\client\indices\rollover\RolloverRequest.java,1526 +291,org.elasticsearch.painless.MethodWriter,modules\lang-painless\src\main\java\org\elasticsearch\painless\MethodWriter.java,5716 +292,org.elasticsearch.xpack.watcher.notification.email.Account,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\notification\email\Account.java,1257 +293,org.elasticsearch.xpack.sql.querydsl.agg.GroupByKey,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\querydsl\agg\GroupByKey.java,670 +294,org.elasticsearch.index.fieldvisitor.FieldsVisitor,server\src\main\java\org\elasticsearch\index\fieldvisitor\FieldsVisitor.java,1888 +295,org.elasticsearch.cluster.routing.OperationRouting,server\src\main\java\org\elasticsearch\cluster\routing\OperationRouting.java,1693 +296,org.elasticsearch.xpack.sql.querydsl.agg.Aggs,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\querydsl\agg\Aggs.java,1188 +297,org.elasticsearch.action.admin.indices.stats.CommonStatsFlags,server\src\main\java\org\elasticsearch\action\admin\indices\stats\CommonStatsFlags.java,1129 diff --git a/elasticsearch/elasticsearch b/elasticsearch/elasticsearch new file mode 160000 index 00000000..349d2ce1 --- /dev/null +++ b/elasticsearch/elasticsearch @@ -0,0 +1 @@ +Subproject commit 349d2ce153d4be78bf0d8ae2472bfc88e18530c1 diff --git a/elasticsearch/log b/elasticsearch/log new file mode 100755 index 00000000..83751d56 --- /dev/null +++ b/elasticsearch/log @@ -0,0 +1,26 @@ +2019-05-27 23:52:47,606 [INFO] Project C:/Dev/MoveMethodGenerator/../MoveMethodDataset/elasticsearch/elasticsearch is opened +2019-05-27 23:54:52,887 
[INFO] Total number of java files: 11094 +2019-05-27 23:54:52,887 [INFO] Total number of source java files: 11091 +2019-05-27 23:54:52,887 [INFO] Total number of classes: 4760 +2019-05-27 23:54:52,887 [INFO] Total number of methods: 39829 +2019-05-27 23:54:52,887 [INFO] StaticMethodsFilter filtered: 5781 +2019-05-27 23:54:52,887 [INFO] ConstructorsFilter filtered: 5795 +2019-05-27 23:54:52,887 [INFO] AbstractMethodsFilter filtered: 536 +2019-05-27 23:54:52,887 [INFO] GettersFilter filtered: 6040 +2019-05-27 23:54:52,887 [INFO] SettersFilter filtered: 744 +2019-05-27 23:54:52,887 [INFO] EmptyMethodsFilter filtered: 952 +2019-05-27 23:54:52,889 [INFO] ExceptionsThrowersFilter filtered: 411 +2019-05-27 23:54:52,889 [INFO] SingleMethodFilter filtered: 758 +2019-05-27 23:54:52,889 [INFO] SimpleDelegationsFilter filtered: 5997 +2019-05-27 23:54:52,889 [INFO] PrivateMethodsCallersFilter filtered: 3470 +2019-05-27 23:54:52,889 [INFO] PrivateFieldAccessorsFilter filtered: 4801 +2019-05-27 23:54:52,889 [INFO] OverridingMethodsFilter filtered: 3154 +2019-05-27 23:54:52,889 [INFO] OverriddenMethodsFilter filtered: 77 +2019-05-27 23:54:52,889 [INFO] MethodCallWithSuperFilter filtered: 3 +2019-05-27 23:54:52,889 [INFO] PrivateClassUserFilter filtered: 8 +2019-05-27 23:54:52,889 [INFO] GenericTypeUserFilter filtered: 0 +2019-05-27 23:54:52,889 [INFO] NoTargetsMethodsFilter filtered: 1017 +2019-05-27 23:54:52,889 [INFO] Number of methods after filtration: 285 +2019-05-27 23:56:51,454 [INFO] Project C:/Dev/MoveMethodGenerator/../MoveMethodDataset/elasticsearch/elasticsearch is opened +2019-05-27 23:59:28,125 [INFO] 344 potential moves found +2019-05-27 23:59:28,125 [INFO] 104 moves performed diff --git a/elasticsearch/methods.csv b/elasticsearch/methods.csv new file mode 100755 index 00000000..f4a26569 --- /dev/null +++ b/elasticsearch/methods.csv @@ -0,0 +1,286 @@ +id,name,file,offset,containing_class_id,target_ids +0,org.elasticsearch.geo.geometry.Polygon.checkRing,libs\geo\src\main\java\org\elasticsearch\geo\geometry\Polygon.java,2357,9,231 +1,org.elasticsearch.client.ml.datafeed.DatafeedUpdate.addOptionalField,client\rest-high-level\src\main\java\org\elasticsearch\client\ml\datafeed\DatafeedUpdate.java,7389,75,284 +2,org.elasticsearch.client.indices.rollover.RolloverRequest.addMaxIndexAgeCondition,client\rest-high-level\src\main\java\org\elasticsearch\client\indices\rollover\RolloverRequest.java,2941,290,286 +3,org.elasticsearch.client.indices.rollover.RolloverRequest.addMaxIndexSizeCondition,client\rest-high-level\src\main\java\org\elasticsearch\client\indices\rollover\RolloverRequest.java,3964,290,41 +4,org.elasticsearch.client.indices.CreateIndexRequest.aliases,client\rest-high-level\src\main\java\org\elasticsearch\client\indices\CreateIndexRequest.java,7923,128,105 27 +5,org.elasticsearch.client.indices.CreateIndexRequest.alias,client\rest-high-level\src\main\java\org\elasticsearch\client\indices\CreateIndexRequest.java,8727,128,129 +6,org.elasticsearch.client.indices.CreateIndexRequest.source,client\rest-high-level\src\main\java\org\elasticsearch\client\indices\CreateIndexRequest.java,9785,128,105 27 +7,org.elasticsearch.client.indices.PutIndexTemplateRequest.aliases,client\rest-high-level\src\main\java\org\elasticsearch\client\indices\PutIndexTemplateRequest.java,13399,264,27 +8,org.elasticsearch.client.indices.PutIndexTemplateRequest.alias,client\rest-high-level\src\main\java\org\elasticsearch\client\indices\PutIndexTemplateRequest.java,14170,264,129 
+9,org.elasticsearch.client.security.support.expressiondsl.parser.RoleMapperExpressionParser.parseArray,client\rest-high-level\src\main\java\org\elasticsearch\client\security\support\expressiondsl\parser\RoleMapperExpressionParser.java,7646,197,284 +10,org.elasticsearch.env.NodeEnvironment.isShardLocked,server\src\main\java\org\elasticsearch\env\NodeEnvironment.java,21161,251,156 +11,org.elasticsearch.env.NodeEnvironment.deleteIndexDirectorySafe,server\src\main\java\org\elasticsearch\env\NodeEnvironment.java,21411,251,172 30 +12,org.elasticsearch.env.NodeRepurposeCommand.outputHowToSeeVerboseInformation,server\src\main\java\org\elasticsearch\env\NodeRepurposeCommand.java,6977,136,54 +13,org.elasticsearch.env.NodeRepurposeCommand.rewriteManifest,server\src\main\java\org\elasticsearch\env\NodeRepurposeCommand.java,8323,136,278 54 +14,org.elasticsearch.env.NodeRepurposeCommand.removePaths,server\src\main\java\org\elasticsearch\env\NodeRepurposeCommand.java,9181,136,54 +15,org.elasticsearch.node.Node.writePortsFile,server\src\main\java\org\elasticsearch\node\Node.java,52531,132,2 +16,org.elasticsearch.rest.RestController.registerAsDeprecatedHandler,server\src\main\java\org\elasticsearch\rest\RestController.java,3789,113,280 +17,org.elasticsearch.rest.RestController.registerWithDeprecatedHandler,server\src\main\java\org\elasticsearch\rest\RestController.java,4619,113,280 +18,org.elasticsearch.rest.RestController.checkErrorTraceParameter,server\src\main\java\org\elasticsearch\rest\RestController.java,14702,113,84 +19,org.elasticsearch.rest.RestController.handleOptionsRequest,server\src\main\java\org\elasticsearch\rest\RestController.java,18531,113,84 +20,org.elasticsearch.rest.RestController.handleBadRequest,server\src\main\java\org\elasticsearch\rest\RestController.java,19792,113,84 +21,org.elasticsearch.rest.RestController.handleFavicon,server\src\main\java\org\elasticsearch\rest\RestController.java,21186,113,84 +22,org.elasticsearch.rest.BaseRestHandler.unrecognized,server\src\main\java\org\elasticsearch\rest\BaseRestHandler.java,4936,270,84 +23,org.elasticsearch.index.get.GetResult.readFields,server\src\main\java\org\elasticsearch\index\get\GetResult.java,13788,281,50 +24,org.elasticsearch.index.get.GetResult.writeFields,server\src\main\java\org\elasticsearch\index\get\GetResult.java,16513,281,218 +25,org.elasticsearch.index.get.ShardGetService.normalizeFetchSourceContent,server\src\main\java\org\elasticsearch\index\get\ShardGetService.java,6543,8,120 +26,org.elasticsearch.index.query.QueryShardContext.getSearchAnalyzer,server\src\main\java\org\elasticsearch\index\query\QueryShardContext.java,9489,165,81 +27,org.elasticsearch.index.query.QueryShardContext.getSearchQuoteAnalyzer,server\src\main\java\org\elasticsearch\index\query\QueryShardContext.java,9913,165,81 +28,org.elasticsearch.index.seqno.GlobalCheckpointSyncAction.maybeSyncTranslog,server\src\main\java\org\elasticsearch\index\seqno\GlobalCheckpointSyncAction.java,5104,3,28 +29,org.elasticsearch.index.shard.IndexShard.prepareDelete,server\src\main\java\org\elasticsearch\index\shard\IndexShard.java,48529,28,189 +30,org.elasticsearch.index.shard.IndexShard.startRecovery,server\src\main\java\org\elasticsearch\index\shard\IndexShard.java,112053,28,254 77 275 69 +31,org.elasticsearch.index.shard.RemoveCorruptedShardDataCommand.warnAboutESShouldBeStopped,server\src\main\java\org\elasticsearch\index\shard\RemoveCorruptedShardDataCommand.java,11502,40,54 
+32,org.elasticsearch.index.shard.RemoveCorruptedShardDataCommand.addNewHistoryCommit,server\src\main\java\org\elasticsearch\index\shard\RemoveCorruptedShardDataCommand.java,18831,40,54 +33,org.elasticsearch.index.shard.RemoveCorruptedShardDataCommand.getNodePath,server\src\main\java\org\elasticsearch\index\shard\RemoveCorruptedShardDataCommand.java,23392,40,243 +34,org.elasticsearch.index.shard.RemoveCorruptedLuceneSegmentsAction.getCleanStatus,server\src\main\java\org\elasticsearch\index\shard\RemoveCorruptedLuceneSegmentsAction.java,1251,139,243 +35,org.elasticsearch.index.shard.RemoveCorruptedLuceneSegmentsAction.execute,server\src\main\java\org\elasticsearch\index\shard\RemoveCorruptedLuceneSegmentsAction.java,2883,139,54 243 +36,org.elasticsearch.index.store.Store.createVerifyingOutput,server\src\main\java\org\elasticsearch\index\store\Store.java,23427,143,241 +37,org.elasticsearch.index.store.Store.openVerifyingInput,server\src\main\java\org\elasticsearch\index\store\Store.java,24544,143,241 +38,org.elasticsearch.index.engine.Segment.readSegmentSort,server\src\main\java\org\elasticsearch\index\engine\Segment.java,6143,233,50 +39,org.elasticsearch.index.engine.Segment.writeSegmentSort,server\src\main\java\org\elasticsearch\index\engine\Segment.java,8324,233,218 +40,org.elasticsearch.index.mapper.ObjectMapper.getParentObjectMapper,server\src\main\java\org\elasticsearch\index\mapper\ObjectMapper.java,15760,68,161 +41,org.elasticsearch.index.mapper.ObjectMapper.parentObjectMapperAreNested,server\src\main\java\org\elasticsearch\index\mapper\ObjectMapper.java,16296,68,161 +42,org.elasticsearch.index.mapper.MapperService.assertMappingVersion,server\src\main\java\org\elasticsearch\index\mapper\MapperService.java,12222,161,133 +43,org.elasticsearch.index.mapper.MapperService.assertSerialization,server\src\main\java\org\elasticsearch\index\mapper\MapperService.java,28389,161,121 +44,org.elasticsearch.index.mapper.DocumentMapper.typeFilter,server\src\main\java\org\elasticsearch\index\mapper\DocumentMapper.java,10271,121,165 +45,org.elasticsearch.index.mapper.DocumentMapper.findNestedObjectMapper,server\src\main\java\org\elasticsearch\index\mapper\DocumentMapper.java,11892,121,152 +46,org.elasticsearch.index.mapper.RootObjectMapper.findTemplateBuilder,server\src\main\java\org\elasticsearch\index\mapper\RootObjectMapper.java,11493,177,176 +47,org.elasticsearch.index.mapper.CompletionFieldMapper.isExternalValueOfClass,server\src\main\java\org\elasticsearch\index\mapper\CompletionFieldMapper.java,23583,89,176 +48,org.elasticsearch.index.mapper.CompletionFieldMapper.parse,server\src\main\java\org\elasticsearch\index\mapper\CompletionFieldMapper.java,23738,89,176 +49,org.elasticsearch.index.search.MatchQuery.hasPositions,server\src\main\java\org\elasticsearch\index\search\MatchQuery.java,12980,208,81 +50,org.elasticsearch.index.reindex.ReindexRequest.addSortField,server\src\main\java\org\elasticsearch\index\reindex\ReindexRequest.java,6593,147,36 +51,org.elasticsearch.index.reindex.ReindexRequest.setDestVersionType,server\src\main\java\org\elasticsearch\index\reindex\ReindexRequest.java,7605,147,189 +52,org.elasticsearch.index.reindex.DeleteByQueryRequest.setIndicesOptions,server\src\main\java\org\elasticsearch\index\reindex\DeleteByQueryRequest.java,4543,234,257 +53,org.elasticsearch.index.reindex.UpdateByQueryRequest.setIndicesOptions,server\src\main\java\org\elasticsearch\index\reindex\UpdateByQueryRequest.java,3938,148,257 
+54,org.elasticsearch.index.analysis.NamedAnalyzer.checkAllowedInMode,server\src\main\java\org\elasticsearch\index\analysis\NamedAnalyzer.java,3210,15,45 +55,org.elasticsearch.index.analysis.AnalysisRegistry.build,server\src\main\java\org\elasticsearch\index\analysis\AnalysisRegistry.java,7755,201,172 +56,org.elasticsearch.tasks.Task.result,server\src\main\java\org\elasticsearch\tasks\Task.java,5987,252,0 248 +57,org.elasticsearch.tasks.TaskResultsService.getTaskResultMappingVersion,server\src\main\java\org\elasticsearch\tasks\TaskResultsService.java,6586,287,133 +58,org.elasticsearch.action.bulk.Retry.withBackoff,server\src\main\java\org\elasticsearch\action\bulk\Retry.java,2563,245,173 +59,org.elasticsearch.action.bulk.BulkProcessor.startFlushTask,server\src\main\java\org\elasticsearch\action\bulk\BulkProcessor.java,15131,249,286 +60,org.elasticsearch.action.admin.cluster.snapshots.get.TransportGetSnapshotsAction.buildSimpleSnapshotInfos,server\src\main\java\org\elasticsearch\action\admin\cluster\snapshots\get\TransportGetSnapshotsAction.java,7611,72,199 +61,org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplanation.unassignedInfoToXContent,server\src\main\java\org\elasticsearch\action\admin\cluster\allocation\ClusterAllocationExplanation.java,7730,246,186 +62,org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest.resolveVotingConfigExclusions,server\src\main\java\org\elasticsearch\action\admin\cluster\configuration\AddVotingConfigExclusionsRequest.java,3386,64,170 +63,org.elasticsearch.action.admin.indices.create.CreateIndexRequest.mapping,server\src\main\java\org\elasticsearch\action\admin\indices\create\CreateIndexRequest.java,7260,100,105 27 +64,org.elasticsearch.action.admin.indices.create.CreateIndexRequest.aliases,server\src\main\java\org\elasticsearch\action\admin\indices\create\CreateIndexRequest.java,10765,100,27 +65,org.elasticsearch.action.admin.indices.create.CreateIndexRequest.alias,server\src\main\java\org\elasticsearch\action\admin\indices\create\CreateIndexRequest.java,11519,100,129 +66,org.elasticsearch.action.admin.indices.create.CreateIndexRequest.source,server\src\main\java\org\elasticsearch\action\admin\indices\create\CreateIndexRequest.java,12658,100,105 27 +67,org.elasticsearch.action.admin.indices.shrink.ResizeRequest.setWaitForActiveShards,server\src\main\java\org\elasticsearch\action\admin\indices\shrink\ResizeRequest.java,5442,82,145 +68,org.elasticsearch.action.admin.indices.upgrade.post.TransportUpgradeAction.indicesWithMissingPrimaries,server\src\main\java\org\elasticsearch\action\admin\indices\upgrade\post\TransportUpgradeAction.java,8350,223,170 +69,org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest.mapping,server\src\main\java\org\elasticsearch\action\admin\indices\template\put\PutIndexTemplateRequest.java,8018,88,105 27 +70,org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest.aliases,server\src\main\java\org\elasticsearch\action\admin\indices\template\put\PutIndexTemplateRequest.java,15270,88,27 +71,org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest.alias,server\src\main\java\org\elasticsearch\action\admin\indices\template\put\PutIndexTemplateRequest.java,16029,88,129 +72,org.elasticsearch.action.admin.indices.validate.query.TransportValidateQueryAction.explain,server\src\main\java\org\elasticsearch\action\admin\indices\validate\query\TransportValidateQueryAction.java,10350,183,152 
+73,org.elasticsearch.action.index.IndexRequest.source,server\src\main\java\org\elasticsearch\action\index\IndexRequest.java,12597,146,105 +74,org.elasticsearch.action.index.IndexRequest.source,server\src\main\java\org\elasticsearch\action\index\IndexRequest.java,14211,146,105 +75,org.elasticsearch.action.search.MultiSearchRequest.add,server\src\main\java\org\elasticsearch\action\search\MultiSearchRequest.java,2937,44,48 +76,org.elasticsearch.action.search.SearchPhaseController.resolveTrackTotalHits,server\src\main\java\org\elasticsearch\action\search\SearchPhaseController.java,36315,97,48 +77,org.elasticsearch.action.update.UpdateRequest.updateOrCreateScript,server\src\main\java\org\elasticsearch\action\update\UpdateRequest.java,13743,47,168 +78,org.elasticsearch.action.support.ActiveShardCount.enoughShardsActive,server\src\main\java\org\elasticsearch\action\support\ActiveShardCount.java,5314,145,170 +79,org.elasticsearch.action.termvectors.TermVectorsFields.readPotentiallyNegativeVInt,server\src\main\java\org\elasticsearch\action\termvectors\TermVectorsFields.java,18274,125,50 +80,org.elasticsearch.action.termvectors.TermVectorsFields.readPotentiallyNegativeVLong,server\src\main\java\org\elasticsearch\action\termvectors\TermVectorsFields.java,18623,125,50 +81,org.elasticsearch.action.termvectors.MultiTermVectorsRequest.add,server\src\main\java\org\elasticsearch\action\termvectors\MultiTermVectorsRequest.java,1893,6,70 +82,org.elasticsearch.common.io.stream.StreamOutput.writeBytesReference,server\src\main\java\org\elasticsearch\common\io\stream\StreamOutput.java,7224,218,27 +83,org.elasticsearch.common.io.stream.StreamOutput.writeOptionalBytesReference,server\src\main\java\org\elasticsearch\common\io\stream\StreamOutput.java,7547,218,27 +84,org.elasticsearch.common.io.stream.StreamOutput.writeOptionalText,server\src\main\java\org\elasticsearch\common\io\stream\StreamOutput.java,12447,218,154 +85,org.elasticsearch.common.io.stream.StreamOutput.writeGeoPoint,server\src\main\java\org\elasticsearch\common\io\stream\StreamOutput.java,39053,218,221 +86,org.elasticsearch.common.io.stream.StreamOutput.writeOptionalTimeValue,server\src\main\java\org\elasticsearch\common\io\stream\StreamOutput.java,43260,218,286 +87,org.elasticsearch.common.geo.GeoDistance.calculate,server\src\main\java\org\elasticsearch\common\geo\GeoDistance.java,2447,49,216 +88,org.elasticsearch.common.inject.internal.Errors.merge,server\src\main\java\org\elasticsearch\common\inject\internal\Errors.java,14884,159,184 +89,org.elasticsearch.common.inject.internal.ProviderMethodsModule.getKey,server\src\main\java\org\elasticsearch\common\inject\internal\ProviderMethodsModule.java,4824,205,159 +90,org.elasticsearch.common.lucene.search.MoreLikeThisQuery.createQuery,server\src\main\java\org\elasticsearch\common\lucene\search\MoreLikeThisQuery.java,5869,237,26 +91,org.elasticsearch.common.lucene.search.MoreLikeThisQuery.handleUnlike,server\src\main\java\org\elasticsearch\common\lucene\search\MoreLikeThisQuery.java,6875,237,26 +92,org.elasticsearch.common.compress.CompressedXContent.writeTo,server\src\main\java\org\elasticsearch\common\compress\CompressedXContent.java,5899,33,218 +93,org.elasticsearch.common.settings.Settings.getAsVersion,server\src\main\java\org\elasticsearch\common\settings\Settings.java,18575,202,92 +94,org.elasticsearch.common.blobstore.fs.FsBlobStore.buildPath,server\src\main\java\org\elasticsearch\common\blobstore\fs\FsBlobStore.java,2677,267,261 
+95,org.elasticsearch.ingest.CompoundProcessor.putFailureMetadata,server\src\main\java\org\elasticsearch\ingest\CompoundProcessor.java,6588,220,225 61 +96,org.elasticsearch.ingest.CompoundProcessor.removeFailureMetadata,server\src\main\java\org\elasticsearch\ingest\CompoundProcessor.java,7385,220,225 +97,org.elasticsearch.search.fetch.subphase.highlight.UnifiedHighlighter.getOffsetSource,server\src\main\java\org\elasticsearch\search\fetch\subphase\highlight\UnifiedHighlighter.java,10611,31,81 +98,org.elasticsearch.search.fetch.FetchPhase.findRootDocumentIfNested,server\src\main\java\org\elasticsearch\search\fetch\FetchPhase.java,8851,282,152 +99,org.elasticsearch.search.fetch.FetchPhase.getInternalNestedIdentity,server\src\main\java\org\elasticsearch\search\fetch\FetchPhase.java,17976,282,152 161 +100,org.elasticsearch.search.fetch.FetchPhase.loadStoredFields,server\src\main\java\org\elasticsearch\search\fetch\FetchPhase.java,22026,282,152 294 +101,org.elasticsearch.search.fetch.FetchSearchResult.assertNoSearchTarget,server\src\main\java\org\elasticsearch\search\fetch\FetchSearchResult.java,2022,103,204 +102,org.elasticsearch.search.internal.ShardSearchLocalRequest.innerWriteTo,server\src\main\java\org\elasticsearch\search\internal\ShardSearchLocalRequest.java,5640,37,218 +103,org.elasticsearch.search.aggregations.bucket.BucketsAggregator.collectBucket,server\src\main\java\org\elasticsearch\search\aggregations\bucket\BucketsAggregator.java,2817,210,192 +104,org.elasticsearch.search.aggregations.bucket.BucketsAggregator.collectExistingBucket,server\src\main\java\org\elasticsearch\search\aggregations\bucket\BucketsAggregator.java,3148,210,192 +105,org.elasticsearch.search.aggregations.support.AggregationPath.resolveAggregator,server\src\main\java\org\elasticsearch\search\aggregations\support\AggregationPath.java,10910,289,35 +106,org.elasticsearch.search.aggregations.support.AggregationPath.resolveTopmostAggregator,server\src\main\java\org\elasticsearch\search\aggregations\support\AggregationPath.java,11862,289,35 +107,org.elasticsearch.search.aggregations.support.AggregationPath.validate,server\src\main\java\org\elasticsearch\search\aggregations\support\AggregationPath.java,12596,289,35 +108,org.elasticsearch.search.SearchModule.setupHighlighters,server\src\main\java\org\elasticsearch\search\SearchModule.java,41898,4,202 +109,org.elasticsearch.search.SearchService.validateKeepAlives,server\src\main\java\org\elasticsearch\search\SearchService.java,11497,65,286 +110,org.elasticsearch.search.SearchService.fetchPhaseShouldFreeContext,server\src\main\java\org\elasticsearch\search\SearchService.java,20785,65,152 +111,org.elasticsearch.search.SearchService.cleanContext,server\src\main\java\org\elasticsearch\search\SearchService.java,31186,65,152 +112,org.elasticsearch.search.SearchService.shortcutDocIdsToLoad,server\src\main\java\org\elasticsearch\search\SearchService.java,40778,65,152 +113,org.elasticsearch.cluster.node.DiscoveryNodes.nodeExists,server\src\main\java\org\elasticsearch\cluster\node\DiscoveryNodes.java,7090,272,0 +114,org.elasticsearch.cluster.node.DiscoveryNodes.findByAddress,server\src\main\java\org\elasticsearch\cluster\node\DiscoveryNodes.java,8248,272,38 +115,org.elasticsearch.cluster.node.DiscoveryNodes.newNode,server\src\main\java\org\elasticsearch\cluster\node\DiscoveryNodes.java,15463,272,0 +116,org.elasticsearch.cluster.block.ClusterBlocks.hasGlobalBlockWithStatus,server\src\main\java\org\elasticsearch\cluster\block\ClusterBlocks.java,5032,188,167 
+117,org.elasticsearch.cluster.block.ClusterBlocks.hasIndexBlock,server\src\main\java\org\elasticsearch\cluster\block\ClusterBlocks.java,5365,188,25 +118,org.elasticsearch.cluster.routing.allocation.command.AllocationCommands.execute,server\src\main\java\org\elasticsearch\cluster\routing\allocation\command\AllocationCommands.java,2691,7,23 +119,org.elasticsearch.cluster.routing.allocation.command.AbstractAllocateAllocationCommand.explainOrThrowRejectedCommand,server\src\main\java\org\elasticsearch\cluster\routing\allocation\command\AbstractAllocateAllocationCommand.java,5494,94,23 +120,org.elasticsearch.cluster.routing.allocation.command.AbstractAllocateAllocationCommand.explainOrThrowRejectedCommand,server\src\main\java\org\elasticsearch\cluster\routing\allocation\command\AbstractAllocateAllocationCommand.java,5968,94,23 +121,org.elasticsearch.cluster.routing.allocation.command.AbstractAllocateAllocationCommand.initializeUnassignedShard,server\src\main\java\org\elasticsearch\cluster\routing\allocation\command\AbstractAllocateAllocationCommand.java,7050,94,238 153 186 240 23 158 +122,org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider.averageUsage,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\DiskThresholdDecider.java,20717,51,240 +123,org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider.freeDiskPercentageAfterShardAssigned,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\DiskThresholdDecider.java,21667,51,258 +124,org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider.shouldIndexFilter,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\FilterAllocationDecider.java,9056,193,133 240 23 +125,org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider.decideSameNode,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\SameShardAllocationDecider.java,6140,127,153 240 23 +126,org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider.initializingShard,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\ThrottlingAllocationDecider.java,10747,95,153 +127,org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider.isVersionCompatibleRelocatePrimary,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\NodeVersionAllocationDecider.java,3455,213,238 240 23 +128,org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider.isVersionCompatibleAllocatingReplica,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\NodeVersionAllocationDecider.java,4368,213,238 240 23 +129,org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider.isVersionCompatible,server\src\main\java\org\elasticsearch\cluster\routing\allocation\decider\NodeVersionAllocationDecider.java,5574,213,240 23 +130,org.elasticsearch.cluster.routing.allocation.AllocationService.buildResult,server\src\main\java\org\elasticsearch\cluster\routing\allocation\AllocationService.java,5955,260,170 23 +131,org.elasticsearch.cluster.routing.allocation.AllocationService.removeDelayMarkers,server\src\main\java\org\elasticsearch\cluster\routing\allocation\AllocationService.java,14913,260,23 +132,org.elasticsearch.cluster.routing.allocation.AllocationService.resetFailedAllocationCounter,server\src\main\java\org\elasticsearch\cluster\routing\allocation\AllocationService.java,16275,260,23 
+133,org.elasticsearch.cluster.routing.allocation.AllocationService.hasDeadNodes,server\src\main\java\org\elasticsearch\cluster\routing\allocation\AllocationService.java,21450,260,23 +134,org.elasticsearch.cluster.routing.allocation.AllocationService.getMutableRoutingNodes,server\src\main\java\org\elasticsearch\cluster\routing\allocation\AllocationService.java,25016,260,170 +135,org.elasticsearch.cluster.routing.allocation.RoutingAllocation.decision,server\src\main\java\org\elasticsearch\cluster\routing\allocation\RoutingAllocation.java,8757,23,66 +136,org.elasticsearch.cluster.routing.allocation.RoutingExplanations.add,server\src\main\java\org\elasticsearch\cluster\routing\allocation\RoutingExplanations.java,1658,283,112 +137,org.elasticsearch.cluster.routing.RoutingNodes.activePrimary,server\src\main\java\org\elasticsearch\cluster\routing\RoutingNodes.java,13674,238,156 +138,org.elasticsearch.cluster.routing.RoutingNodes.activeReplicaWithHighestVersion,server\src\main\java\org\elasticsearch\cluster\routing\RoutingNodes.java,14115,238,156 +139,org.elasticsearch.cluster.routing.RoutingNodes.allReplicasActive,server\src\main\java\org\elasticsearch\cluster\routing\RoutingNodes.java,15312,238,109 156 +140,org.elasticsearch.cluster.routing.RoutingNodes.assertInstanceNotInList,server\src\main\java\org\elasticsearch\cluster\routing\RoutingNodes.java,35078,238,153 +141,org.elasticsearch.cluster.routing.RoutingTable.hasIndex,server\src\main\java\org\elasticsearch\cluster\routing\RoutingTable.java,3884,74,30 +142,org.elasticsearch.cluster.routing.RoutingTable.shardRoutingTable,server\src\main\java\org\elasticsearch\cluster\routing\RoutingTable.java,5103,74,156 +143,org.elasticsearch.cluster.routing.RoutingTable.getByAllocationId,server\src\main\java\org\elasticsearch\cluster\routing\RoutingTable.java,5903,74,156 +144,org.elasticsearch.cluster.routing.RoutingTable.shardsWithState,server\src\main\java\org\elasticsearch\cluster\routing\RoutingTable.java,6620,74,87 +145,org.elasticsearch.cluster.routing.ShardRouting.writeToThin,server\src\main\java\org\elasticsearch\cluster\routing\ShardRouting.java,9858,153,218 +146,org.elasticsearch.cluster.routing.ShardRouting.updateUnassigned,server\src\main\java\org\elasticsearch\cluster\routing\ShardRouting.java,10894,153,186 158 +147,org.elasticsearch.cluster.routing.ShardRouting.moveToUnassigned,server\src\main\java\org\elasticsearch\cluster\routing\ShardRouting.java,11415,153,186 +148,org.elasticsearch.cluster.routing.UnassignedInfo.getRemainingDelay,server\src\main\java\org\elasticsearch\cluster\routing\UnassignedInfo.java,13267,186,202 +149,org.elasticsearch.cluster.routing.OperationRouting.indexRoutingTable,server\src\main\java\org\elasticsearch\cluster\routing\OperationRouting.java,11263,295,170 +150,org.elasticsearch.cluster.routing.OperationRouting.indexMetaData,server\src\main\java\org\elasticsearch\cluster\routing\OperationRouting.java,11574,295,170 +151,org.elasticsearch.cluster.routing.IndexRoutingTable.validate,server\src\main\java\org\elasticsearch\cluster\routing\IndexRoutingTable.java,4131,106,109 +152,org.elasticsearch.cluster.routing.IndexRoutingTable.shardsWithState,server\src\main\java\org\elasticsearch\cluster\routing\IndexRoutingTable.java,10946,106,87 +153,org.elasticsearch.cluster.routing.IndexShardRoutingTable.shardsWithState,server\src\main\java\org\elasticsearch\cluster\routing\IndexShardRoutingTable.java,27339,288,87 
+154,org.elasticsearch.cluster.service.MasterService.patchVersions,server\src\main\java\org\elasticsearch\cluster\service\MasterService.java,14786,107,170 +155,org.elasticsearch.cluster.service.ClusterService.setSlowTaskLoggingThreshold,server\src\main\java\org\elasticsearch\cluster\service\ClusterService.java,4138,56,286 +156,org.elasticsearch.cluster.metadata.MetaData.index,server\src\main\java\org\elasticsearch\cluster\metadata\MetaData.java,28566,109,30 +157,org.elasticsearch.cluster.metadata.MetaData.hasIndexMetaData,server\src\main\java\org\elasticsearch\cluster\metadata\MetaData.java,28819,109,133 +158,org.elasticsearch.cluster.metadata.MetaData.getIndexSafe,server\src\main\java\org\elasticsearch\cluster\metadata\MetaData.java,29067,109,30 +159,org.elasticsearch.cluster.metadata.IndexGraveyard.containsIndex,server\src\main\java\org\elasticsearch\cluster\metadata\IndexGraveyard.java,4887,179,30 +160,org.elasticsearch.cluster.metadata.RepositoryMetaData.writeTo,server\src\main\java\org\elasticsearch\cluster\metadata\RepositoryMetaData.java,2216,14,218 +161,org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.resolveSearchRoutingAllIndices,server\src\main\java\org\elasticsearch\cluster\metadata\IndexNameExpressionResolver.java,26505,16,109 +162,org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.isPatternMatchingAllIndices,server\src\main\java\org\elasticsearch\cluster\metadata\IndexNameExpressionResolver.java,28191,16,109 +163,org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService.isUpgraded,server\src\main\java\org\elasticsearch\cluster\metadata\MetaDataIndexUpgradeService.java,5000,123,133 +164,org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService.markAsUpgraded,server\src\main\java\org\elasticsearch\cluster\metadata\MetaDataIndexUpgradeService.java,10392,123,133 +165,org.elasticsearch.cluster.metadata.MetaDataUpdateSettingsService.getTotalNewShards,server\src\main\java\org\elasticsearch\cluster\metadata\MetaDataUpdateSettingsService.java,14309,124,170 30 +166,org.elasticsearch.cluster.coordination.Join.targetMatches,server\src\main\java\org\elasticsearch\cluster\coordination\Join.java,3009,71,0 +167,org.elasticsearch.cluster.ClusterInfo.getShardSize,server\src\main\java\org\elasticsearch\cluster\ClusterInfo.java,8097,255,153 +168,org.elasticsearch.cluster.SnapshotsInProgress.snapshot,server\src\main\java\org\elasticsearch\cluster\SnapshotsInProgress.java,13227,162,268 +169,org.elasticsearch.gateway.GatewayMetaState.getPersistedState,server\src\main\java\org\elasticsearch\gateway\GatewayMetaState.java,4869,277,169 202 +170,org.elasticsearch.gateway.MetaStateService.writeIndexAndUpdateManifest,server\src\main\java\org\elasticsearch\gateway\MetaStateService.java,12933,13,133 +171,org.elasticsearch.gateway.TransportNodesListGatewayMetaState.list,server\src\main\java\org\elasticsearch\gateway\TransportNodesListGatewayMetaState.java,3107,182,286 +172,org.elasticsearch.indices.flush.SyncedFlushService.reportSuccessWithExistingSyncId,server\src\main\java\org\elasticsearch\indices\flush\SyncedFlushService.java,14625,98,156 +173,org.elasticsearch.indices.flush.SyncedFlushService.getShardRoutingTable,server\src\main\java\org\elasticsearch\indices\flush\SyncedFlushService.java,15519,98,170 156 +174,org.elasticsearch.indices.flush.SyncedFlushService.countDownAndSendResponseIfDone,server\src\main\java\org\elasticsearch\indices\flush\SyncedFlushService.java,23669,98,226 156 
+175,org.elasticsearch.indices.mapper.MapperRegistry.isMetaDataField,server\src\main\java\org\elasticsearch\indices\mapper\MapperRegistry.java,3049,32,92 +176,org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService.validateTotalCircuitBreakerLimit,server\src\main\java\org\elasticsearch\indices\breaker\HierarchyCircuitBreakerService.java,11738,149,41 +177,org.elasticsearch.indices.analysis.HunspellService.resolveHunspellDirectory,server\src\main\java\org\elasticsearch\indices\analysis\HunspellService.java,5417,57,203 +178,org.elasticsearch.indices.recovery.RecoveriesCollection.getRecoverySafe,server\src\main\java\org\elasticsearch\indices\recovery\RecoveriesCollection.java,6684,224,156 +179,org.elasticsearch.indices.IndicesService.indexShardStats,server\src\main\java\org\elasticsearch\indices\IndicesService.java,20334,275,28 297 +180,org.elasticsearch.indices.IndicesService.canCache,server\src\main\java\org\elasticsearch\indices\IndicesService.java,60634,275,152 +181,org.elasticsearch.transport.TransportService.submitRequest,server\src\main\java\org\elasticsearch\transport\TransportService.java,21421,274,0 285 79 +182,org.elasticsearch.transport.TransportService.sendRequest,server\src\main\java\org\elasticsearch\transport\TransportService.java,22262,274,0 285 +183,org.elasticsearch.transport.TransportService.sendRequest,server\src\main\java\org\elasticsearch\transport\TransportService.java,22927,274,0 285 79 +184,org.elasticsearch.transport.TransportService.sendChildRequest,server\src\main\java\org\elasticsearch\transport\TransportService.java,24824,274,0 285 252 79 +185,org.elasticsearch.transport.TransportService.sendChildRequest,server\src\main\java\org\elasticsearch\transport\TransportService.java,26099,274,285 252 79 +186,org.elasticsearch.transport.TransportService.isLocalNode,server\src\main\java\org\elasticsearch\transport\TransportService.java,56193,274,0 +187,org.elasticsearch.transport.RemoteClusterService.connectionProfileChanged,server\src\main\java\org\elasticsearch\transport\RemoteClusterService.java,20883,185,253 +188,org.elasticsearch.persistent.PersistentTasksClusterService.isAnyTaskUnassigned,server\src\main\java\org\elasticsearch\persistent\PersistentTasksClusterService.java,18110,244,86 +189,org.elasticsearch.repositories.RepositoriesService.ensureRepositoryNotInUse,server\src\main\java\org\elasticsearch\repositories\RepositoriesService.java,22506,69,170 +190,org.elasticsearch.xpack.ml.job.process.normalizer.output.NormalizerResultHandler.parseResult,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\process\normalizer\output\NormalizerResultHandler.java,2939,178,27 +191,org.elasticsearch.xpack.ml.job.persistence.JobConfigProvider.parseJobLenientlyFromSource,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\persistence\JobConfigProvider.java,36284,144,27 +192,org.elasticsearch.xpack.ml.job.persistence.JobConfigProvider.parseJobLenientlyFromSource,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\persistence\JobConfigProvider.java,36807,144,27 +193,org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider.parseSearchHit,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\persistence\JobResultsProvider.java,30189,219,271 +194,org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider.bucketRecords,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\persistence\JobResultsProvider.java,36514,219,135 
+195,org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider.augmentWithGrokPattern,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\persistence\JobResultsProvider.java,41300,219,228 +196,org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider.handleLatestModelSizeStats,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\persistence\JobResultsProvider.java,77502,219,163 +197,org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister.serialiseCounts,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\persistence\JobDataCountsPersister.java,1605,134,80 +198,org.elasticsearch.xpack.ml.job.JobManager.expandJobsFromClusterState,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\JobManager.java,9384,118,170 +199,org.elasticsearch.xpack.ml.job.JobManager.isJobOpen,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\JobManager.java,22873,118,170 +200,org.elasticsearch.xpack.ml.job.JobManager.openJobIds,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\job\JobManager.java,23194,118,170 +201,org.elasticsearch.xpack.ml.action.TransportDeleteJobAction.deleteQuantiles,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\action\TransportDeleteJobAction.java,22548,20,194 +202,org.elasticsearch.xpack.ml.action.TransportDeleteJobAction.buildRemoveAliasesRequest,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\action\TransportDeleteJobAction.java,28658,20,39 +203,org.elasticsearch.xpack.ml.action.TransportDeleteJobAction.killProcess,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\action\TransportDeleteJobAction.java,31473,20,194 +204,org.elasticsearch.xpack.ml.action.TransportDeleteJobAction.checkJobIsNotOpen,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\action\TransportDeleteJobAction.java,32909,20,170 +205,org.elasticsearch.xpack.ml.action.TransportGetDatafeedsAction.expandClusterStateDatafeeds,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\action\TransportGetDatafeedsAction.java,4530,191,170 +206,org.elasticsearch.xpack.ml.filestructurefinder.TimeoutChecker.grokCaptures,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\filestructurefinder\TimeoutChecker.java,3916,101,83 +207,org.elasticsearch.xpack.ml.filestructurefinder.FileStructureFinderManager.sampleFile,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\filestructurefinder\FileStructureFinderManager.java,20293,235,101 +208,org.elasticsearch.xpack.ml.MachineLearningFeatureSet.mlNodeCount,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\MachineLearningFeatureSet.java,6032,53,170 +209,org.elasticsearch.xpack.ml.MlConfigMigrationEligibilityCheck.jobIsEligibleForMigration,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\MlConfigMigrationEligibilityCheck.java,3183,91,170 +210,org.elasticsearch.xpack.ml.MlConfigMigrationEligibilityCheck.datafeedIsEligibleForMigration,x-pack\plugin\ml\src\main\java\org\elasticsearch\xpack\ml\MlConfigMigrationEligibilityCheck.java,4517,91,170 +211,org.elasticsearch.xpack.ccr.repository.CcrRepository.createEmptyStore,x-pack\plugin\ccr\src\main\java\org\elasticsearch\xpack\ccr\repository\CcrRepository.java,18274,242,143 +212,org.elasticsearch.xpack.ccr.CcrLicenseChecker.checkRemoteClusterLicenseAndFetchClusterState,x-pack\plugin\ccr\src\main\java\org\elasticsearch\xpack\ccr\CcrLicenseChecker.java,8733,138,67 
+213,org.elasticsearch.xpack.ccr.CcrLicenseChecker.fetchLeaderHistoryUUIDs,x-pack\plugin\ccr\src\main\java\org\elasticsearch\xpack\ccr\CcrLicenseChecker.java,11706,138,133 +214,org.elasticsearch.xpack.sql.analysis.index.IndexResolver.filterResults,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\analysis\index\IndexResolver.java,9439,141,39 55 +215,org.elasticsearch.xpack.sql.analysis.analyzer.PreAnalyzer.doPreAnalyze,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\analysis\analyzer\PreAnalyzer.java,1201,11,256 +216,org.elasticsearch.xpack.sql.querydsl.agg.Aggs.updateGroup,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\querydsl\agg\Aggs.java,4863,296,293 +217,org.elasticsearch.xpack.sql.querydsl.container.QueryContainer.addSort,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\querydsl\container\QueryContainer.java,11324,222,200 +218,org.elasticsearch.xpack.sql.querydsl.container.QueryContainer.aliasName,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\querydsl\container\QueryContainer.java,11605,222,174 +219,org.elasticsearch.xpack.sql.querydsl.container.QueryContainer.addColumn,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\querydsl\container\QueryContainer.java,16890,222,174 +220,org.elasticsearch.xpack.sql.cli.command.AbstractServerCliCommand.handleExceptionWhileCommunicatingWithServer,x-pack\plugin\sql\sql-cli\src\main\java\org\elasticsearch\xpack\sql\cli\command\AbstractServerCliCommand.java,907,214,34 +221,org.elasticsearch.xpack.sql.cli.Cli.checkConnection,x-pack\plugin\sql\sql-cli\src\main\java\org\elasticsearch\xpack\sql\cli\Cli.java,5958,108,34 250 +222,org.elasticsearch.xpack.core.ml.job.config.Job.earliestValidTimestamp,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\config\Job.java,17158,140,80 +223,org.elasticsearch.xpack.core.ml.job.config.JobUpdate.mergeWithJob,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\config\JobUpdate.java,15139,63,41 140 +224,org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord.addCause,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\results\AnomalyRecord.java,19219,155,263 +225,org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate.addOptionalField,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\datafeed\DatafeedUpdate.java,10037,52,284 +226,org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent.toDetectionRule,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\calendars\ScheduledEvent.java,5388,217,286 +227,org.elasticsearch.xpack.core.ccr.action.FollowParameters.fromStreamInput,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ccr\action\FollowParameters.java,9025,5,50 +228,org.elasticsearch.xpack.core.ssl.SSLService.sslIOSessionStrategy,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ssl\SSLService.java,6696,166,202 +229,org.elasticsearch.xpack.core.ssl.SSLService.getHttpTransportSSLSettings,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ssl\SSLService.java,31968,166,202 +230,org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource.parser,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\watcher\support\xcontent\XContentSource.java,3683,164,269 +231,org.elasticsearch.xpack.core.watcher.transform.chain.ExecutableChainTransform.doExecute,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\watcher\transform\chain\ExecutableChainTransform.java,2174,198,12 
+232,org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionParser.parse,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\authc\support\mapper\expressiondsl\ExpressionParser.java,2068,229,164 +233,org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionParser.parseArray,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\authc\support\mapper\expressiondsl\ExpressionParser.java,7481,229,284 +234,org.elasticsearch.xpack.core.security.authc.support.mapper.TemplateRoleName.parseTemplate,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\authc\support\mapper\TemplateRoleName.java,5334,1,102 +235,org.elasticsearch.xpack.core.security.authc.Authentication.ensureContextDoesNotContainAuthentication,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\authc\Authentication.java,5259,181,150 +236,org.elasticsearch.xpack.core.dataframe.transforms.pivot.DateHistogramGroupSource.writeInterval,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\dataframe\transforms\pivot\DateHistogramGroupSource.java,5745,137,218 +237,org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkRequest.add,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\monitoring\action\MonitoringBulkRequest.java,2476,160,43 105 27 +238,org.elasticsearch.license.PutLicenseRequest.license,x-pack\plugin\core\src\main\java\org\elasticsearch\license\PutLicenseRequest.java,1092,142,105 27 +239,org.elasticsearch.protocol.xpack.graph.Vertex.writeTo,x-pack\plugin\core\src\main\java\org\elasticsearch\protocol\xpack\graph\Vertex.java,2380,227,218 +240,org.elasticsearch.protocol.xpack.graph.Connection.writeTo,x-pack\plugin\core\src\main\java\org\elasticsearch\protocol\xpack\graph\Connection.java,1900,90,218 +241,org.elasticsearch.xpack.graph.rest.action.RestGraphAction.parseVertices,x-pack\plugin\graph\src\main\java\org\elasticsearch\xpack\graph\rest\action\RestGraphAction.java,8208,119,78 +242,org.elasticsearch.xpack.graph.rest.action.RestGraphAction.parseControls,x-pack\plugin\graph\src\main\java\org\elasticsearch\xpack\graph\rest\action\RestGraphAction.java,16424,119,239 +243,org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate.render,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\common\http\HttpRequestTemplate.java,3621,196,265 +244,org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine.trimContentType,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\common\text\TextTemplateEngine.java,2192,265,266 +245,org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine.compileParams,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\common\text\TextTemplateEngine.java,3504,265,105 +246,org.elasticsearch.xpack.watcher.trigger.schedule.support.MonthTimes.contains,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\trigger\schedule\support\MonthTimes.java,3305,151,175 +247,org.elasticsearch.xpack.watcher.execution.ExecutionService.createWatchRecord,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\execution\ExecutionService.java,17961,131,262 12 +248,org.elasticsearch.xpack.watcher.notification.email.attachment.ReportingAttachmentParser.sleep,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\notification\email\attachment\ReportingAttachmentParser.java,8539,76,12 62 
+249,org.elasticsearch.xpack.watcher.notification.email.EmailService.send,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\notification\email\EmailService.java,9049,171,232 292 104 +250,org.elasticsearch.xpack.watcher.WatcherLifeCycleService.isWatcherStoppedManually,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\WatcherLifeCycleService.java,7604,209,170 +251,org.elasticsearch.xpack.security.authc.saml.SamlMetadataCommand.requireText,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authc\saml\SamlMetadataCommand.java,19399,211,54 +252,org.elasticsearch.xpack.security.authc.support.DelegatedAuthorizationSupport.checkForRealmChains,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authc\support\DelegatedAuthorizationSupport.java,5805,110,202 +253,org.elasticsearch.xpack.security.authc.TokenService.getFromHeader,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authc\TokenService.java,95702,10,150 +254,org.elasticsearch.xpack.security.authc.ApiKeyService.getApiKeyExpiration,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authc\ApiKeyService.java,30150,21,195 +255,org.elasticsearch.xpack.security.authz.AuthorizationService.isInternalUser,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authz\AuthorizationService.java,25273,99,126 +256,org.elasticsearch.xpack.security.action.oidc.TransportOpenIdConnectPrepareAuthenticationAction.prepareAuthenticationResponse,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\action\oidc\TransportOpenIdConnectPrepareAuthenticationAction.java,3679,279,180 +257,org.elasticsearch.xpack.security.action.saml.TransportSamlInvalidateSessionAction.buildLogoutResponseUrl,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\action\saml\TransportSamlInvalidateSessionAction.java,4053,247,236 +258,org.elasticsearch.xpack.security.action.saml.TransportSamlPrepareAuthenticationAction.prepareAuthentication,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\action\saml\TransportSamlPrepareAuthenticationAction.java,2684,230,236 +259,org.elasticsearch.xpack.security.action.realm.TransportClearRealmCacheAction.clearCache,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\action\realm\TransportClearRealmCacheAction.java,4328,122,60 +260,org.elasticsearch.xpack.security.transport.nio.SSLDriver.ensureApplicationBufferSize,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\transport\nio\SSLDriver.java,11548,130,207 +261,org.elasticsearch.xpack.dataframe.transforms.pivot.Pivot.buildSearchRequest,x-pack\plugin\data-frame\src\main\java\org\elasticsearch\xpack\dataframe\transforms\pivot\Pivot.java,4141,114,46 +262,org.elasticsearch.xpack.dataframe.transforms.pivot.Pivot.runTestQuery,x-pack\plugin\data-frame\src\main\java\org\elasticsearch\xpack\dataframe\transforms\pivot\Pivot.java,5827,114,46 +263,org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.shouldReplaceResource,x-pack\plugin\monitoring\src\main\java\org\elasticsearch\xpack\monitoring\exporter\http\PublishableHttpResource.java,21126,29,42 +264,org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.addParameters,x-pack\plugin\monitoring\src\main\java\org\elasticsearch\xpack\monitoring\exporter\http\PublishableHttpResource.java,23605,29,73 
+265,org.elasticsearch.xpack.monitoring.exporter.http.ClusterAlertHttpResource.shouldReplaceClusterAlert,x-pack\plugin\monitoring\src\main\java\org\elasticsearch\xpack\monitoring\exporter\http\ClusterAlertHttpResource.java,5405,93,42 +266,org.elasticsearch.xpack.monitoring.exporter.http.WatcherExistsHttpResource.canUseWatcher,x-pack\plugin\monitoring\src\main\java\org\elasticsearch\xpack\monitoring\exporter\http\WatcherExistsHttpResource.java,4903,116,42 +267,org.elasticsearch.xpack.monitoring.exporter.Exporters.doExport,x-pack\plugin\monitoring\src\main\java\org\elasticsearch\xpack\monitoring\exporter\Exporters.java,9319,157,259 +268,org.elasticsearch.xpack.deprecation.RestDeprecationInfoAction.handleGet,x-pack\plugin\deprecation\src\main\java\org\elasticsearch\xpack\deprecation\RestDeprecationInfoAction.java,2243,115,58 84 +269,org.elasticsearch.index.reindex.BulkIndexByScrollResponseContentListener.getStatus,modules\reindex\src\main\java\org\elasticsearch\index\reindex\BulkIndexByScrollResponseContentListener.java,2196,212,117 +270,org.elasticsearch.script.mustache.MultiSearchTemplateRequest.add,modules\lang-mustache\src\main\java\org\elasticsearch\script\mustache\MultiSearchTemplateRequest.java,2387,273,17 +271,org.elasticsearch.painless.antlr.Walker.buildPostfixChain,modules\lang-painless\src\main\java\org\elasticsearch\painless\antlr\Walker.java,35328,215,19 +272,org.elasticsearch.painless.Globals.addSyntheticMethod,modules\lang-painless\src\main\java\org\elasticsearch\painless\Globals.java,1550,111,187 +273,org.elasticsearch.painless.MethodWriter.writeDebugInfo,modules\lang-painless\src\main\java\org\elasticsearch\painless\MethodWriter.java,7037,291,59 +274,org.elasticsearch.painless.MethodWriter.writeLoopCounter,modules\lang-painless\src\main\java\org\elasticsearch\painless\MethodWriter.java,7458,291,59 +275,org.elasticsearch.painless.MethodWriter.writeDynamicBinaryInstruction,modules\lang-painless\src\main\java\org\elasticsearch\painless\MethodWriter.java,17985,291,59 +276,org.elasticsearch.painless.MethodWriter.writeBinaryInstruction,modules\lang-painless\src\main\java\org\elasticsearch\painless\MethodWriter.java,20431,291,59 +277,org.elasticsearch.painless.MethodWriter.invokeMethodCall,modules\lang-painless\src\main\java\org\elasticsearch\painless\MethodWriter.java,23948,291,206 +278,org.elasticsearch.common.blobstore.url.URLBlobStore.buildPath,modules\repository-url\src\main\java\org\elasticsearch\common\blobstore\url\URLBlobStore.java,2747,96,261 +279,org.elasticsearch.analysis.common.MinHashTokenFilterFactory.convertSettings,modules\analysis-common\src\main\java\org\elasticsearch\analysis\common\MinHashTokenFilterFactory.java,1808,276,202 +280,org.elasticsearch.analysis.common.WordDelimiterTokenFilterFactory.getFlag,modules\analysis-common\src\main\java\org\elasticsearch\analysis\common\WordDelimiterTokenFilterFactory.java,5827,18,202 +281,org.elasticsearch.analysis.common.WordDelimiterGraphTokenFilterFactory.getFlag,modules\analysis-common\src\main\java\org\elasticsearch\analysis\common\WordDelimiterGraphTokenFilterFactory.java,5888,24,202 +282,org.elasticsearch.example.CustomAuthorizationEngine.getHasPrivilegesResponse,plugins\examples\security-authorization-engine\src\main\java\org\elasticsearch\example\CustomAuthorizationEngine.java,7791,190,181 22 +283,org.elasticsearch.example.CustomAuthorizationEngine.isSuperuser,plugins\examples\security-authorization-engine\src\main\java\org\elasticsearch\example\CustomAuthorizationEngine.java,12377,190,126 
+284,org.elasticsearch.index.analysis.IcuTokenizerFactory.parseRules,plugins\analysis-icu\src\main\java\org\elasticsearch\index\analysis\IcuTokenizerFactory.java,4350,85,203 diff --git a/elasticsearch/moved-methods.csv b/elasticsearch/moved-methods.csv new file mode 100755 index 00000000..8fafd668 --- /dev/null +++ b/elasticsearch/moved-methods.csv @@ -0,0 +1,105 @@ +id,name,file,offset,original_class_id,target_class_id +0,org.elasticsearch.geo.geometry.LinearRing.checkRing,libs\geo\src\main\java\org\elasticsearch\geo\geometry\LinearRing.java,2300,9,231 +1,org.elasticsearch.common.ParseField.addOptionalField,libs\x-content\src\main\java\org\elasticsearch\common\ParseField.java,5566,75,284 +2,org.elasticsearch.common.unit.TimeValue.addMaxIndexAgeCondition,libs\core\src\main\java\org\elasticsearch\common\unit\TimeValue.java,10866,290,286 +4,org.elasticsearch.common.xcontent.XContentType.aliases,libs\x-content\src\main\java\org\elasticsearch\common\xcontent\XContentType.java,5757,128,105 +7,org.elasticsearch.common.bytes.BytesReference.aliases,server\src\main\java\org\elasticsearch\common\bytes\BytesReference.java,11267,264,27 +10,org.elasticsearch.index.shard.ShardId.isShardLocked,server\src\main\java\org\elasticsearch\index\shard\ShardId.java,4562,251,156 +12,org.elasticsearch.cli.Terminal.outputHowToSeeVerboseInformation,libs\cli\src\main\java\org\elasticsearch\cli\Terminal.java,1859,136,54 +15,org.elasticsearch.common.transport.BoundTransportAddress.writePortsFile,server\src\main\java\org\elasticsearch\common\transport\BoundTransportAddress.java,3528,132,2 +16,org.elasticsearch.common.logging.DeprecationLogger.registerAsDeprecatedHandler,server\src\main\java\org\elasticsearch\common\logging\DeprecationLogger.java,17670,113,280 +22,org.elasticsearch.rest.RestRequest.unrecognized,server\src\main\java\org\elasticsearch\rest\RestRequest.java,6255,270,84 +23,org.elasticsearch.common.io.stream.StreamInput.readFields,server\src\main\java\org\elasticsearch\common\io\stream\StreamInput.java,40285,281,50 +25,org.elasticsearch.search.fetch.subphase.FetchSourceContext.normalizeFetchSourceContent,server\src\main\java\org\elasticsearch\search\fetch\subphase\FetchSourceContext.java,10351,8,120 +26,org.elasticsearch.index.mapper.MappedFieldType.getSearchAnalyzer,server\src\main\java\org\elasticsearch\index\mapper\MappedFieldType.java,17008,165,81 +28,org.elasticsearch.index.shard.IndexShard.maybeSyncTranslog,server\src\main\java\org\elasticsearch\index\shard\IndexShard.java,118509,3,28 +33,org.elasticsearch.index.shard.ShardPath.getNodePath,server\src\main\java\org\elasticsearch\index\shard\ShardPath.java,14628,40,243 +36,org.elasticsearch.index.store.StoreFileMetaData.createVerifyingOutput,server\src\main\java\org\elasticsearch\index\store\StoreFileMetaData.java,4417,143,241 +39,org.elasticsearch.common.io.stream.StreamOutput.writeSegmentSort,server\src\main\java\org\elasticsearch\common\io\stream\StreamOutput.java,43643,233,218 +40,org.elasticsearch.index.mapper.MapperService.getParentObjectMapper,server\src\main\java\org\elasticsearch\index\mapper\MapperService.java,3304,68,161 +45,org.elasticsearch.search.internal.SearchContext.findNestedObjectMapper,server\src\main\java\org\elasticsearch\search\internal\SearchContext.java,14305,121,152 +46,org.elasticsearch.index.mapper.ParseContext.findTemplateBuilder,server\src\main\java\org\elasticsearch\index\mapper\ParseContext.java,1492,177,176 
+50,org.elasticsearch.search.sort.SortOrder.addSortField,server\src\main\java\org\elasticsearch\search\sort\SortOrder.java,1840,147,36 +52,org.elasticsearch.action.support.IndicesOptions.setIndicesOptions,server\src\main\java\org\elasticsearch\action\support\IndicesOptions.java,1900,234,257 +54,org.elasticsearch.index.analysis.AnalysisMode.checkAllowedInMode,server\src\main\java\org\elasticsearch\index\analysis\AnalysisMode.java,2885,15,45 +55,org.elasticsearch.index.IndexSettings.build,server\src\main\java\org\elasticsearch\index\IndexSettings.java,44138,201,172 +56,org.elasticsearch.cluster.node.DiscoveryNode.result,server\src\main\java\org\elasticsearch\cluster\node\DiscoveryNode.java,15081,252,0 +57,org.elasticsearch.cluster.metadata.IndexMetaData.getTaskResultMappingVersion,server\src\main\java\org\elasticsearch\cluster\metadata\IndexMetaData.java,4950,287,133 +58,org.elasticsearch.action.bulk.BulkRequest.withBackoff,server\src\main\java\org\elasticsearch\action\bulk\BulkRequest.java,16330,245,173 +60,org.elasticsearch.repositories.RepositoryData.buildSimpleSnapshotInfos,server\src\main\java\org\elasticsearch\repositories\RepositoryData.java,22780,72,199 +61,org.elasticsearch.cluster.routing.UnassignedInfo.unassignedInfoToXContent,server\src\main\java\org\elasticsearch\cluster\routing\UnassignedInfo.java,2475,246,186 +62,org.elasticsearch.cluster.ClusterState.resolveVotingConfigExclusions,server\src\main\java\org\elasticsearch\cluster\ClusterState.java,5429,64,170 +65,org.elasticsearch.action.admin.indices.alias.Alias.alias,server\src\main\java\org\elasticsearch\action\admin\indices\alias\Alias.java,9556,100,129 +67,org.elasticsearch.action.support.ActiveShardCount.setWaitForActiveShards,server\src\main\java\org\elasticsearch\action\support\ActiveShardCount.java,9111,82,145 +75,org.elasticsearch.action.search.SearchRequest.add,server\src\main\java\org\elasticsearch\action\search\SearchRequest.java,27056,44,48 +77,org.elasticsearch.script.ScriptType.updateOrCreateScript,server\src\main\java\org\elasticsearch\script\ScriptType.java,4328,47,168 +81,org.elasticsearch.action.termvectors.TermVectorsRequest.add,server\src\main\java\org\elasticsearch\action\termvectors\TermVectorsRequest.java,4276,6,70 +87,org.elasticsearch.common.unit.DistanceUnit.calculate,server\src\main\java\org\elasticsearch\common\unit\DistanceUnit.java,6896,49,216 +88,org.elasticsearch.common.inject.spi.Message.merge,server\src\main\java\org\elasticsearch\common\inject\spi\Message.java,3530,159,184 +90,org.elasticsearch.common.lucene.search.XMoreLikeThis.createQueryOther,server\src\main\java\org\elasticsearch\common\lucene\search\XMoreLikeThis.java,36129,237,26 +93,org.elasticsearch.Version.getAsVersion,server\src\main\java\org\elasticsearch\Version.java,14523,202,92 +94,org.elasticsearch.common.blobstore.BlobPath.buildPath,server\src\main\java\org\elasticsearch\common\blobstore\BlobPath.java,2349,267,261 +95,org.elasticsearch.ingest.IngestDocument.putFailureMetadata,server\src\main\java\org\elasticsearch\ingest\IngestDocument.java,31119,220,225 +100,org.elasticsearch.index.fieldvisitor.FieldsVisitor.loadStoredFields,server\src\main\java\org\elasticsearch\index\fieldvisitor\FieldsVisitor.java,7020,282,294 +101,org.elasticsearch.search.SearchHits.assertNoSearchTarget,server\src\main\java\org\elasticsearch\search\SearchHits.java,6043,103,204 +103,org.elasticsearch.search.aggregations.LeafBucketCollector.collectBucket,server\src\main\java\org\elasticsearch\search\aggregations\LeafBucketCollector.java,2962,210,192 
+105,org.elasticsearch.search.aggregations.Aggregator.resolveAggregator,server\src\main\java\org\elasticsearch\search\aggregations\Aggregator.java,1989,289,35 +114,org.elasticsearch.common.transport.TransportAddress.findByAddress,server\src\main\java\org\elasticsearch\common\transport\TransportAddress.java,4913,272,38 +116,org.elasticsearch.rest.RestStatus.hasGlobalBlockWithStatus,server\src\main\java\org\elasticsearch\rest\RestStatus.java,32362,188,167 +118,org.elasticsearch.cluster.routing.allocation.RoutingAllocation.execute,server\src\main\java\org\elasticsearch\cluster\routing\allocation\RoutingAllocation.java,10153,7,23 +121,org.elasticsearch.cluster.routing.RoutingNodes.initializeUnassignedShard,server\src\main\java\org\elasticsearch\cluster\routing\RoutingNodes.java,38882,94,238 +122,org.elasticsearch.cluster.routing.RoutingNode.averageUsage,server\src\main\java\org\elasticsearch\cluster\routing\RoutingNode.java,7712,51,240 +125,org.elasticsearch.cluster.routing.ShardRouting.decideSameNode,server\src\main\java\org\elasticsearch\cluster\routing\ShardRouting.java,27436,127,153 +136,org.elasticsearch.cluster.routing.allocation.RerouteExplanation.add,server\src\main\java\org\elasticsearch\cluster\routing\allocation\RerouteExplanation.java,2775,283,112 +141,org.elasticsearch.index.Index.hasIndex,server\src\main\java\org\elasticsearch\index\Index.java,4142,74,30 +151,org.elasticsearch.cluster.metadata.MetaData.validate,server\src\main\java\org\elasticsearch\cluster\metadata\MetaData.java,4141,106,109 +153,org.elasticsearch.cluster.routing.ShardRoutingState.shardsWithState,server\src\main\java\org\elasticsearch\cluster\routing\ShardRoutingState.java,2083,288,87 +168,org.elasticsearch.snapshots.Snapshot.snapshot,server\src\main\java\org\elasticsearch\snapshots\Snapshot.java,2978,162,268 +169,org.elasticsearch.cluster.service.ClusterApplierService.getPersistedState,server\src\main\java\org\elasticsearch\cluster\service\ClusterApplierService.java,6997,277,169 +174,org.elasticsearch.common.util.concurrent.CountDown.countDownAndSendResponseIfDone,server\src\main\java\org\elasticsearch\common\util\concurrent\CountDown.java,2944,98,226 +176,org.elasticsearch.common.unit.ByteSizeValue.validateTotalCircuitBreakerLimit,server\src\main\java\org\elasticsearch\common\unit\ByteSizeValue.java,10295,149,41 +177,org.elasticsearch.env.Environment.resolveHunspellDirectory,server\src\main\java\org\elasticsearch\env\Environment.java,12140,57,203 +179,org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.indexShardStats,server\src\main\java\org\elasticsearch\action\admin\indices\stats\CommonStatsFlags.java,7061,275,297 +181,org.elasticsearch.transport.TransportRequest.submitRequest,server\src\main\java\org\elasticsearch\transport\TransportRequest.java,1208,274,285 +187,org.elasticsearch.transport.ConnectionProfile.connectionProfileChanged,server\src\main\java\org\elasticsearch\transport\ConnectionProfile.java,7302,185,253 +188,org.elasticsearch.persistent.PersistentTasksCustomMetaData.isAnyTaskUnassigned,server\src\main\java\org\elasticsearch\persistent\PersistentTasksCustomMetaData.java,6497,244,86 +193,org.elasticsearch.search.SearchHit.parseSearchHit,server\src\main\java\org\elasticsearch\search\SearchHit.java,16592,219,271 +197,org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts.serialiseCounts,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\process\autodetect\state\DataCounts.java,25823,134,80 
+201,org.elasticsearch.client.ParentTaskAssigningClient.deleteQuantiles,server\src\main\java\org\elasticsearch\client\ParentTaskAssigningClient.java,2974,20,194 +206,org.elasticsearch.grok.Grok.grokCaptures,libs\grok\src\main\java\org\elasticsearch\grok\Grok.java,13483,101,83 +212,org.elasticsearch.action.admin.cluster.state.ClusterStateRequest.checkRemoteClusterLicenseAndFetchClusterState,server\src\main\java\org\elasticsearch\action\admin\cluster\state\ClusterStateRequest.java,6126,138,67 +214,org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse.filterResults,server\src\main\java\org\elasticsearch\action\admin\indices\alias\get\GetAliasesResponse.java,3504,141,39 +215,org.elasticsearch.xpack.sql.plan.logical.LogicalPlan.doPreAnalyze,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\plan\logical\LogicalPlan.java,935,11,256 +216,org.elasticsearch.xpack.sql.querydsl.agg.GroupByKey.updateGroup,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\querydsl\agg\GroupByKey.java,3359,296,293 +217,org.elasticsearch.xpack.sql.querydsl.container.Sort.addSort,x-pack\plugin\sql\src\main\java\org\elasticsearch\xpack\sql\querydsl\container\Sort.java,517,222,200 +220,org.elasticsearch.xpack.sql.cli.command.CliSession.handleExceptionWhileCommunicatingWithServer,x-pack\plugin\sql\sql-cli\src\main\java\org\elasticsearch\xpack\sql\cli\command\CliSession.java,2353,214,34 +221,org.elasticsearch.xpack.sql.client.ConnectionConfiguration.checkConnection,x-pack\plugin\sql\sql-client\src\main\java\org\elasticsearch\xpack\sql\client\ConnectionConfiguration.java,8704,108,250 +223,org.elasticsearch.xpack.core.ml.job.config.Job.mergeWithJob,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\config\Job.java,25718,63,140 +224,org.elasticsearch.xpack.core.ml.job.results.AnomalyCause.addCause,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\ml\job\results\AnomalyCause.java,12700,155,263 +230,org.elasticsearch.common.xcontent.NamedXContentRegistry.parser,libs\x-content\src\main\java\org\elasticsearch\common\xcontent\NamedXContentRegistry.java,1993,164,269 +231,org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext.doExecute,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\watcher\execution\WatchExecutionContext.java,9793,198,12 +234,org.elasticsearch.script.ScriptService.parseTemplate,server\src\main\java\org\elasticsearch\script\ScriptService.java,25600,1,102 +235,org.elasticsearch.common.util.concurrent.ThreadContext.ensureContextDoesNotContainAuthentication,server\src\main\java\org\elasticsearch\common\util\concurrent\ThreadContext.java,16976,181,150 +237,org.elasticsearch.xpack.core.monitoring.MonitoredSystem.add,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\monitoring\MonitoredSystem.java,2092,160,43 +241,org.elasticsearch.protocol.xpack.graph.Hop.parseVertices,x-pack\plugin\core\src\main\java\org\elasticsearch\protocol\xpack\graph\Hop.java,5515,119,78 +243,org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine.renderOther,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\common\text\TextTemplateEngine.java,3907,196,265 +246,org.elasticsearch.xpack.watcher.trigger.schedule.support.DayTimes.contains,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\trigger\schedule\support\DayTimes.java,11210,151,175 
+247,org.elasticsearch.xpack.core.watcher.history.WatchRecord.createWatchRecord,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\watcher\history\WatchRecord.java,8686,131,262 +248,org.elasticsearch.xpack.watcher.notification.email.attachment.ReportingAttachment.sleep,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\notification\email\attachment\ReportingAttachment.java,4225,76,62 +249,org.elasticsearch.xpack.watcher.notification.email.Profile.send,x-pack\plugin\watcher\src\main\java\org\elasticsearch\xpack\watcher\notification\email\Profile.java,7873,171,232 +254,org.elasticsearch.xpack.core.security.action.CreateApiKeyRequest.getApiKeyExpiration,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\action\CreateApiKeyRequest.java,5034,21,195 +255,org.elasticsearch.xpack.core.security.user.User.isInternalUser,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\user\User.java,8538,99,126 +256,org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectRealm.prepareAuthenticationResponse,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authc\oidc\OpenIdConnectRealm.java,20276,279,180 +257,org.elasticsearch.xpack.security.authc.saml.SamlRealm.buildLogoutResponseUrl,x-pack\plugin\security\src\main\java\org\elasticsearch\xpack\security\authc\saml\SamlRealm.java,30251,247,236 +259,org.elasticsearch.xpack.core.security.authc.Realm.clearCache,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\authc\Realm.java,6393,122,60 +260,org.elasticsearch.nio.InboundChannelBuffer.ensureApplicationBufferSize,libs\nio\src\main\java\org\elasticsearch\nio\InboundChannelBuffer.java,10750,130,207 +261,org.elasticsearch.xpack.core.dataframe.transforms.SourceConfig.buildSearchRequest,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\dataframe\transforms\SourceConfig.java,5773,114,46 +263,org.elasticsearch.client.Response.shouldReplaceResource,client\rest\src\main\java\org\elasticsearch\client\Response.java,5344,29,42 +267,org.elasticsearch.xpack.monitoring.exporter.ExportBulk.doExport,x-pack\plugin\monitoring\src\main\java\org\elasticsearch\xpack\monitoring\exporter\ExportBulk.java,2224,157,259 +268,org.elasticsearch.client.node.NodeClient.handleGet,server\src\main\java\org\elasticsearch\client\node\NodeClient.java,5403,115,58 +269,org.elasticsearch.index.reindex.BulkByScrollResponse.getStatusOther,server\src\main\java\org\elasticsearch\index\reindex\BulkByScrollResponse.java,11499,212,117 +270,org.elasticsearch.script.mustache.SearchTemplateRequest.add,modules\lang-mustache\src\main\java\org\elasticsearch\script\mustache\SearchTemplateRequest.java,9053,273,17 +271,org.elasticsearch.painless.node.AExpression.buildPostfixChain,modules\lang-painless\src\main\java\org\elasticsearch\painless\node\AExpression.java,8788,215,19 +272,org.elasticsearch.painless.node.SFunction.addSyntheticMethod,modules\lang-painless\src\main\java\org\elasticsearch\painless\node\SFunction.java,1810,111,187 +273,org.elasticsearch.painless.Location.writeDebugInfo,modules\lang-painless\src\main\java\org\elasticsearch\painless\Location.java,3506,291,59 +282,org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest.getHasPrivilegesResponse,x-pack\plugin\core\src\main\java\org\elasticsearch\xpack\core\security\action\user\HasPrivilegesRequest.java,5000,190,22 diff --git a/elasticsearch/patches/all.patch b/elasticsearch/patches/all.patch new file mode 100644 index 00000000..313a10ee --- /dev/null +++ 
b/elasticsearch/patches/all.patch @@ -0,0 +1,12373 @@ +diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/CreateIndexRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/CreateIndexRequest.java +old mode 100644 +new mode 100755 +index 1a018591dc7..52728bb69fc +--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/CreateIndexRequest.java ++++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/CreateIndexRequest.java +@@ -20,7 +20,6 @@ + package org.elasticsearch.client.indices; + + import org.elasticsearch.ElasticsearchGenerationException; +-import org.elasticsearch.ElasticsearchParseException; + import org.elasticsearch.action.admin.indices.alias.Alias; + import org.elasticsearch.action.support.ActiveShardCount; + import org.elasticsearch.client.TimedRequest; +@@ -31,12 +30,10 @@ import org.elasticsearch.common.bytes.BytesArray; + import org.elasticsearch.common.bytes.BytesReference; + import org.elasticsearch.common.settings.Settings; + import org.elasticsearch.common.xcontent.DeprecationHandler; +-import org.elasticsearch.common.xcontent.NamedXContentRegistry; + import org.elasticsearch.common.xcontent.ToXContentObject; + import org.elasticsearch.common.xcontent.XContentBuilder; + import org.elasticsearch.common.xcontent.XContentFactory; + import org.elasticsearch.common.xcontent.XContentHelper; +-import org.elasticsearch.common.xcontent.XContentParser; + import org.elasticsearch.common.xcontent.XContentType; + + import java.io.IOException; +@@ -211,7 +208,7 @@ public class CreateIndexRequest extends TimedRequest implements Validatable, ToX + try { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.map(source); +- return aliases(BytesReference.bytes(builder), builder.contentType()); ++ return builder.contentType().aliases(BytesReference.bytes(builder), this); + } catch (IOException e) { + throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); + } +@@ -221,32 +218,14 @@ public class CreateIndexRequest extends TimedRequest implements Validatable, ToX + * Sets the aliases that will be associated with the index when it gets created + */ + public CreateIndexRequest aliases(XContentBuilder source) { +- return aliases(BytesReference.bytes(source), source.contentType()); ++ return source.contentType().aliases(BytesReference.bytes(source), this); + } + + /** + * Sets the aliases that will be associated with the index when it gets created + */ + public CreateIndexRequest aliases(String source, XContentType contentType) { +- return aliases(new BytesArray(source), contentType); +- } +- +- /** +- * Sets the aliases that will be associated with the index when it gets created +- */ +- public CreateIndexRequest aliases(BytesReference source, XContentType contentType) { +- // EMPTY is safe here because we never call namedObject +- try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, +- DeprecationHandler.THROW_UNSUPPORTED_OPERATION, source, contentType)) { +- //move to the first alias +- parser.nextToken(); +- while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { +- alias(Alias.fromXContent(parser)); +- } +- return this; +- } catch(IOException e) { +- throw new ElasticsearchParseException("Failed to parse aliases", e); +- } ++ return contentType.aliases(new BytesArray(source), this); + } + + /** +diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutIndexTemplateRequest.java 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutIndexTemplateRequest.java +old mode 100644 +new mode 100755 +index 7008a719b7b..bd6b9f00da3 +--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutIndexTemplateRequest.java ++++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutIndexTemplateRequest.java +@@ -363,7 +363,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequestage old +- */ +- public RolloverRequest addMaxIndexAgeCondition(TimeValue age) { +- MaxAgeCondition maxAgeCondition = new MaxAgeCondition(age); +- if (this.conditions.containsKey(maxAgeCondition.name())) { +- throw new IllegalArgumentException(maxAgeCondition.name() + " condition is already set"); +- } +- this.conditions.put(maxAgeCondition.name(), maxAgeCondition); +- return this; +- } +- + /** + * Adds condition to check if the index has at least numDocs + */ +diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java +old mode 100644 +new mode 100755 +index 15598de91d4..225c933b035 +--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java ++++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java +@@ -19,7 +19,6 @@ + package org.elasticsearch.client.ml.datafeed; + + import org.elasticsearch.client.ml.job.config.Job; +-import org.elasticsearch.common.ParseField; + import org.elasticsearch.common.bytes.BytesArray; + import org.elasticsearch.common.bytes.BytesReference; + import org.elasticsearch.common.unit.TimeValue; +@@ -126,14 +125,14 @@ public class DatafeedUpdate implements ToXContentObject { + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(DatafeedConfig.ID.getPreferredName(), id); +- addOptionalField(builder, Job.ID, jobId); ++ Job.ID.addOptionalField(builder, jobId, this); + if (queryDelay != null) { + builder.field(DatafeedConfig.QUERY_DELAY.getPreferredName(), queryDelay.getStringRep()); + } + if (frequency != null) { + builder.field(DatafeedConfig.FREQUENCY.getPreferredName(), frequency.getStringRep()); + } +- addOptionalField(builder, DatafeedConfig.INDICES, indices); ++ DatafeedConfig.INDICES.addOptionalField(builder, indices, this); + if (query != null) { + builder.field(DatafeedConfig.QUERY.getPreferredName(), asMap(query)); + } +@@ -150,18 +149,12 @@ public class DatafeedUpdate implements ToXContentObject { + if (delayedDataCheckConfig != null) { + builder.field(DatafeedConfig.DELAYED_DATA_CHECK_CONFIG.getPreferredName(), delayedDataCheckConfig); + } +- addOptionalField(builder, DatafeedConfig.SCROLL_SIZE, scrollSize); +- addOptionalField(builder, DatafeedConfig.CHUNKING_CONFIG, chunkingConfig); ++ DatafeedConfig.SCROLL_SIZE.addOptionalField(builder, scrollSize, this); ++ DatafeedConfig.CHUNKING_CONFIG.addOptionalField(builder, chunkingConfig, this); + builder.endObject(); + return builder; + } + +- private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException { +- if (value != null) { +- builder.field(field.getPreferredName(), value); +- } +- } +- + public String getJobId() { + return jobId; + } +diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +old mode 100644 +new mode 100755 +index d9adf61782b..2ad2cbe033f +--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java ++++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +@@ -281,7 +281,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { + Alias alias = new Alias("alias_name"); + alias.filter("{\"term\":{\"year\":2016}}"); + alias.routing("1"); +- createIndexRequest.alias(alias); ++ alias.alias(createIndexRequest); + + Settings.Builder settings = Settings.builder(); + settings.put(SETTING_NUMBER_OF_REPLICAS, 2); +@@ -1067,9 +1067,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { + .put("index.number_of_replicas", 0) + .putNull("index.routing.allocation.require._name") + .build(); +- resizeRequest.setTargetIndex(new org.elasticsearch.action.admin.indices.create.CreateIndexRequest("target") +- .settings(targetSettings) +- .alias(new Alias("alias"))); ++ resizeRequest.setTargetIndex(new Alias("alias").alias(new org.elasticsearch.action.admin.indices.create.CreateIndexRequest("target") ++ .settings(targetSettings))); + ResizeResponse resizeResponse = execute(resizeRequest, highLevelClient().indices()::shrink, + highLevelClient().indices()::shrinkAsync); + assertTrue(resizeResponse.isAcknowledged()); +@@ -1092,9 +1091,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { + ResizeRequest resizeRequest = new ResizeRequest("target", "source"); + resizeRequest.setResizeType(ResizeType.SPLIT); + Settings targetSettings = Settings.builder().put("index.number_of_shards", 4).put("index.number_of_replicas", 0).build(); +- resizeRequest.setTargetIndex(new org.elasticsearch.action.admin.indices.create.CreateIndexRequest("target") +- .settings(targetSettings) +- .alias(new Alias("alias"))); ++ resizeRequest.setTargetIndex(new Alias("alias").alias(new org.elasticsearch.action.admin.indices.create.CreateIndexRequest("target") ++ .settings(targetSettings))); + ResizeResponse resizeResponse = execute(resizeRequest, highLevelClient().indices()::split, highLevelClient().indices()::splitAsync); + assertTrue(resizeResponse.isAcknowledged()); + assertTrue(resizeResponse.isShardsAcknowledged()); +@@ -1130,7 +1128,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { + //without the refresh the rollover may not happen as the number of docs seen may be off + + { +- rolloverRequest.addMaxIndexAgeCondition(new TimeValue(1)); ++ new TimeValue(1).addMaxIndexAgeCondition(rolloverRequest); + rolloverRequest.dryRun(true); + RolloverResponse rolloverResponse = execute(rolloverRequest, highLevelClient().indices()::rollover, + highLevelClient().indices()::rolloverAsync); +diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java +old mode 100644 +new mode 100755 +index f7d5ac51a73..54d2c492716 +--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java ++++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java +@@ -900,7 +900,7 @@ public class IndicesRequestConvertersTests extends ESTestCase { + } + } + if (ESTestCase.randomBoolean()) { +- rolloverRequest.addMaxIndexAgeCondition(new TimeValue(ESTestCase.randomNonNegativeLong())); ++ new 
TimeValue(ESTestCase.randomNonNegativeLong()).addMaxIndexAgeCondition(rolloverRequest); + } + if (ESTestCase.randomBoolean()) { + rolloverRequest.getCreateIndexRequest().mapping(randomMapping()); +diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +old mode 100644 +new mode 100755 +index 9c5137d5442..a6293ba1b5a +--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java ++++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +@@ -1233,7 +1233,7 @@ public class RequestConvertersTests extends ESTestCase { + randomlyGenerated.allowNoIndices(), randomlyGenerated.expandWildcardsOpen(), randomlyGenerated.expandWildcardsClosed(), + msearchDefault.allowAliasesToMultipleIndices(), msearchDefault.forbidClosedIndices(), msearchDefault.ignoreAliases(), + msearchDefault.ignoreThrottled())); +- multiSearchRequest.add(searchRequest); ++ searchRequest.add(multiSearchRequest); + } + + Map expectedParams = new HashMap<>(); +@@ -1379,7 +1379,7 @@ public class RequestConvertersTests extends ESTestCase { + scriptParams.put("value", randomAlphaOfLengthBetween(2, 5)); + searchTemplateRequest.setScriptParams(scriptParams); + +- multiSearchTemplateRequest.add(searchTemplateRequest); ++ searchTemplateRequest.add(multiSearchTemplateRequest); + } + + Request multiRequest = RequestConverters.multiSearchTemplate(multiSearchTemplateRequest); +diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +old mode 100644 +new mode 100755 +index 00d905aa140..46aee9114eb +--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java ++++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +@@ -708,13 +708,13 @@ public class SearchIT extends ESRestHighLevelClientTestCase { + MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + SearchRequest searchRequest1 = new SearchRequest("index1"); + searchRequest1.source().sort("_id", SortOrder.ASC); +- multiSearchRequest.add(searchRequest1); ++ searchRequest1.add(multiSearchRequest); + SearchRequest searchRequest2 = new SearchRequest("index2"); + searchRequest2.source().sort("_id", SortOrder.ASC); +- multiSearchRequest.add(searchRequest2); ++ searchRequest2.add(multiSearchRequest); + SearchRequest searchRequest3 = new SearchRequest("index3"); + searchRequest3.source().sort("_id", SortOrder.ASC); +- multiSearchRequest.add(searchRequest3); ++ searchRequest3.add(multiSearchRequest); + + MultiSearchResponse multiSearchResponse = + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); +@@ -748,15 +748,15 @@ public class SearchIT extends ESRestHighLevelClientTestCase { + SearchRequest searchRequest1 = new SearchRequest("index1"); + searchRequest1.source().size(0).aggregation(new TermsAggregationBuilder("name", ValueType.STRING).field("field.keyword") + .order(BucketOrder.key(true))); +- multiSearchRequest.add(searchRequest1); ++ searchRequest1.add(multiSearchRequest); + SearchRequest searchRequest2 = new SearchRequest("index2"); + searchRequest2.source().size(0).aggregation(new TermsAggregationBuilder("name", ValueType.STRING).field("field.keyword") + .order(BucketOrder.key(true))); +- multiSearchRequest.add(searchRequest2); ++ searchRequest2.add(multiSearchRequest); + 
SearchRequest searchRequest3 = new SearchRequest("index3"); + searchRequest3.source().size(0).aggregation(new TermsAggregationBuilder("name", ValueType.STRING).field("field.keyword") + .order(BucketOrder.key(true))); +- multiSearchRequest.add(searchRequest3); ++ searchRequest3.add(multiSearchRequest); + + MultiSearchResponse multiSearchResponse = + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); +@@ -798,13 +798,13 @@ public class SearchIT extends ESRestHighLevelClientTestCase { + MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + SearchRequest searchRequest1 = new SearchRequest("index1"); + searchRequest1.source().query(new TermsQueryBuilder("field", "value2")); +- multiSearchRequest.add(searchRequest1); ++ searchRequest1.add(multiSearchRequest); + SearchRequest searchRequest2 = new SearchRequest("index2"); + searchRequest2.source().query(new TermsQueryBuilder("field", "value2")); +- multiSearchRequest.add(searchRequest2); ++ searchRequest2.add(multiSearchRequest); + SearchRequest searchRequest3 = new SearchRequest("index3"); + searchRequest3.source().query(new TermsQueryBuilder("field", "value2")); +- multiSearchRequest.add(searchRequest3); ++ searchRequest3.add(multiSearchRequest); + + MultiSearchResponse multiSearchResponse = + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); +@@ -864,10 +864,10 @@ public class SearchIT extends ESRestHighLevelClientTestCase { + MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + SearchRequest searchRequest1 = new SearchRequest("index1"); + searchRequest1.source().query(new ScriptQueryBuilder(new Script(ScriptType.INLINE, "invalid", "code", Collections.emptyMap()))); +- multiSearchRequest.add(searchRequest1); ++ searchRequest1.add(multiSearchRequest); + SearchRequest searchRequest2 = new SearchRequest("index2"); + searchRequest2.source().query(new ScriptQueryBuilder(new Script(ScriptType.INLINE, "invalid", "code", Collections.emptyMap()))); +- multiSearchRequest.add(searchRequest2); ++ searchRequest2.add(multiSearchRequest); + + MultiSearchResponse multiSearchResponse = + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); +@@ -1000,7 +1000,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { + goodRequest.setScriptParams(scriptParams); + goodRequest.setExplain(true); + goodRequest.setProfile(true); +- multiSearchTemplateRequest.add(goodRequest); ++ goodRequest.add(multiSearchTemplateRequest); + + + SearchTemplateRequest badRequest = new SearchTemplateRequest(); +@@ -1011,7 +1011,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { + scriptParams.put("number", 10); + badRequest.setScriptParams(scriptParams); + +- multiSearchTemplateRequest.add(badRequest); ++ badRequest.add(multiSearchTemplateRequest); + + MultiSearchTemplateResponse multiSearchTemplateResponse = + execute(multiSearchTemplateRequest, highLevelClient()::msearchTemplate, +@@ -1057,7 +1057,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { + Map scriptParams = new HashMap<>(); + scriptParams.put("number", "BAD NUMBER"); + badRequest1.setScriptParams(scriptParams); +- multiSearchTemplateRequest.add(badRequest1); ++ badRequest1.add(multiSearchTemplateRequest); + + + SearchTemplateRequest badRequest2 = new SearchTemplateRequest(); +@@ -1068,7 +1068,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { + scriptParams.put("number", "BAD NUMBER"); + 
badRequest2.setScriptParams(scriptParams); + +- multiSearchTemplateRequest.add(badRequest2); ++ badRequest2.add(multiSearchTemplateRequest); + + // The whole HTTP request should fail if no nested search requests are valid + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, +diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +old mode 100644 +new mode 100755 +index fe003d691a8..46af3a08d4f +--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java ++++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +@@ -834,8 +834,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { + request.setDestPipeline("my_pipeline"); // <1> + // end::reindex-request-pipeline + // tag::reindex-request-sort +- request.addSortField("field1", SortOrder.DESC); // <1> +- request.addSortField("field2", SortOrder.ASC); // <2> ++ SortOrder.DESC.addSortField("field1", request); // <1> ++ SortOrder.ASC.addSortField("field2", request); // <2> + // end::reindex-request-sort + // tag::reindex-request-script + request.setScript( +@@ -1170,7 +1170,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { + request.setRouting("=cat"); // <1> + // end::delete-by-query-request-routing + // tag::delete-by-query-request-indicesOptions +- request.setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN); // <1> ++ IndicesOptions.LENIENT_EXPAND_OPEN.setIndicesOptions(request); // <1> + // end::delete-by-query-request-indicesOptions + + // tag::delete-by-query-execute +diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +old mode 100644 +new mode 100755 +index 14def60b277..988f0e5decd +--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java ++++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +@@ -1689,7 +1689,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase + // end::shrink-index-request-masterTimeout + // tag::shrink-index-request-waitForActiveShards + request.setWaitForActiveShards(2); // <1> +- request.setWaitForActiveShards(ActiveShardCount.DEFAULT); // <2> ++ ActiveShardCount.DEFAULT.setWaitForActiveShards(request); // <2> + // end::shrink-index-request-waitForActiveShards + // tag::shrink-index-request-settings + request.getTargetIndexRequest().settings(Settings.builder() +@@ -1697,7 +1697,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase + .putNull("index.routing.allocation.require._name")); // <2> + // end::shrink-index-request-settings + // tag::shrink-index-request-aliases +- request.getTargetIndexRequest().alias(new Alias("target_alias")); // <1> ++ new Alias("target_alias").alias(request.getTargetIndexRequest()); // <1> + // end::shrink-index-request-aliases + + // tag::shrink-index-execute +@@ -1760,14 +1760,14 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase + // end::split-index-request-masterTimeout + // tag::split-index-request-waitForActiveShards + 
request.setWaitForActiveShards(2); // <1> +- request.setWaitForActiveShards(ActiveShardCount.DEFAULT); // <2> ++ ActiveShardCount.DEFAULT.setWaitForActiveShards(request); // <2> + // end::split-index-request-waitForActiveShards + // tag::split-index-request-settings + request.getTargetIndexRequest().settings(Settings.builder() + .put("index.number_of_shards", 4)); // <1> + // end::split-index-request-settings + // tag::split-index-request-aliases +- request.getTargetIndexRequest().alias(new Alias("target_alias")); // <1> ++ new Alias("target_alias").alias(request.getTargetIndexRequest()); // <1> + // end::split-index-request-aliases + + // tag::split-index-execute +@@ -1815,7 +1815,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase + + // tag::rollover-index-request + RolloverRequest request = new RolloverRequest("alias", "index-2"); // <1> +- request.addMaxIndexAgeCondition(new TimeValue(7, TimeUnit.DAYS)); // <2> ++ new TimeValue(7, TimeUnit.DAYS).addMaxIndexAgeCondition(request); // <2> + request.addMaxIndexDocsCondition(1000); // <3> + request.addMaxIndexSizeCondition(new ByteSizeValue(5, ByteSizeUnit.GB)); // <4> + // end::rollover-index-request +diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +old mode 100644 +new mode 100755 +index ff5deb5cbdf..212166645dc +--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java ++++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +@@ -866,7 +866,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { + scriptParams.put("size", 5); + request.setScriptParams(scriptParams); + +- multiRequest.add(request); // <3> ++ request.add(multiRequest); // <3> + } + // end::multi-search-template-request-inline + +@@ -918,7 +918,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { + params.put("value", searchTerm); + params.put("size", 5); + request.setScriptParams(params); +- multiRequest.add(request); ++ request.add(multiRequest); + } + // end::multi-search-template-request-stored + +@@ -1201,12 +1201,12 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(QueryBuilders.matchQuery("user", "kimchy")); + firstSearchRequest.source(searchSourceBuilder); +- request.add(firstSearchRequest); // <3> ++ firstSearchRequest.add(request); // <3> + SearchRequest secondSearchRequest = new SearchRequest(); // <4> + searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.query(QueryBuilders.matchQuery("user", "luca")); + secondSearchRequest.source(searchSourceBuilder); +- request.add(secondSearchRequest); ++ secondSearchRequest.add(request); + // end::multi-search-request-basic + // tag::multi-search-execute + MultiSearchResponse response = client.msearch(request, RequestOptions.DEFAULT); +diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/rollover/RolloverRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/rollover/RolloverRequestTests.java +old mode 100644 +new mode 100755 +index 57798c393db..0ab199b3a4e +--- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/rollover/RolloverRequestTests.java
++++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/rollover/RolloverRequestTests.java
+@@ -50,7 +50,7 @@ public class RolloverRequestTests extends ESTestCase {
+         MaxSizeCondition maxSizeCondition = new MaxSizeCondition(new ByteSizeValue(2000));
+         MaxDocsCondition maxDocsCondition = new MaxDocsCondition(10000L);
+         Condition<?>[] expectedConditions = new Condition<?>[] {maxAgeCondition, maxSizeCondition, maxDocsCondition};
+-        rolloverRequest.addMaxIndexAgeCondition(maxAgeCondition.value());
++        maxAgeCondition.value().addMaxIndexAgeCondition(rolloverRequest);
+         rolloverRequest.addMaxIndexSizeCondition(maxSizeCondition.value());
+         rolloverRequest.addMaxIndexDocsCondition(maxDocsCondition.value());
+         List<Condition<?>> requestConditions = new ArrayList<>(rolloverRequest.getConditions().values());
+diff --git a/client/rest/src/main/java/org/elasticsearch/client/Response.java b/client/rest/src/main/java/org/elasticsearch/client/Response.java
+old mode 100644
+new mode 100755
+index e267a68bcbd..1f4cc341647
+--- a/client/rest/src/main/java/org/elasticsearch/client/Response.java
++++ b/client/rest/src/main/java/org/elasticsearch/client/Response.java
+@@ -25,6 +25,9 @@ import org.apache.http.HttpHost;
+ import org.apache.http.HttpResponse;
+ import org.apache.http.RequestLine;
+ import org.apache.http.StatusLine;
++import org.elasticsearch.common.xcontent.XContent;
++import org.elasticsearch.common.xcontent.XContentHelper;
++import org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource;
+ 
+ import java.util.ArrayList;
+ import java.util.List;
+@@ -152,4 +155,45 @@ public class Response {
+                 ", response=" + response.getStatusLine() +
+                 '}';
+     }
++
++    /**
++     * Determine if the current resource should replace the checked one based on its version (or lack thereof).
++     * <p>
++     * This expects a response like (where {@code resourceName} is replaced with its value):
++     * <pre><code>
++     * {
++     *   "resourceName": {
++     *     "version": 6000002
++     *   }
++     * }
++     * </code></pre>
++     *
++     * @param xContent The XContent parser to use
++     * @param resourceName The name of the looked up resource, which is expected to be the top-level key
++     * @param minimumVersion The minimum version allowed without being replaced (expected to be the last updated version).
++     * @param publishableHttpResource
++     * @return {@code true} represents that it should be replaced. {@code false} that it should be left alone.
++     * @throws IOException if any issue occurs while parsing the {@code xContent} {@code response}.
++     * @throws RuntimeException if the response format is changed.
++     */
++    public boolean shouldReplaceResource(final XContent xContent,
++                                         final String resourceName, final int minimumVersion, PublishableHttpResource publishableHttpResource)
++            throws IOException {
++        // no named content used; so EMPTY is fine
++        final Map<String, Object> resources = XContentHelper.convertToMap(xContent, getEntity().getContent(), false);
++
++        // if it's empty, then there's no version in the response thanks to filter_path
++        if (resources.isEmpty() == false) {
++            @SuppressWarnings("unchecked")
++            final Map<String, Object> resource = (Map<String, Object>) resources.get(resourceName);
++            final Object version = resource != null ? resource.get("version") : null;
++
++            // the version in the template is expected to include the alpha/beta/rc codes as well
++            if (version instanceof Number) {
++                return ((Number) version).intValue() < minimumVersion;
++            }
++        }
++
++        return true;
++    }
+ }
+diff --git a/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java b/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java
+old mode 100644
+new mode 100755
+index a0ebff5d670..8228c3b82ad
+--- a/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java
++++ b/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java
+@@ -19,6 +19,8 @@
+ 
+ package org.elasticsearch.cli;
+ 
++import org.elasticsearch.env.NodeRepurposeCommand;
++
+ import java.io.BufferedReader;
+ import java.io.Console;
+ import java.io.IOException;
+@@ -42,6 +44,12 @@ public abstract class Terminal {
+     /** The default terminal implementation, which will be a console if available, or stdout/stderr if not. */
+     public static final Terminal DEFAULT = ConsoleTerminal.isSupported() ? new ConsoleTerminal() : new SystemTerminal();
+ 
++    public void outputHowToSeeVerboseInformation(NodeRepurposeCommand nodeRepurposeCommand) {
++        if (isPrintable(Verbosity.VERBOSE) == false) {
++            println("Use -v to see list of paths and indices affected");
++        }
++    }
++
+     /** Defines the available verbosity levels of messages to be printed. 
*/ + public enum Verbosity { + SILENT, /* always printed */ +diff --git a/libs/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java b/libs/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java +old mode 100644 +new mode 100755 +index f40fbbe73a9..884a62fdd85 +--- a/libs/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java ++++ b/libs/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java +@@ -19,6 +19,9 @@ + + package org.elasticsearch.common.unit; + ++import org.elasticsearch.action.admin.indices.rollover.MaxAgeCondition; ++import org.elasticsearch.client.indices.rollover.RolloverRequest; ++ + import java.util.Locale; + import java.util.Objects; + import java.util.concurrent.TimeUnit; +@@ -348,4 +351,17 @@ public class TimeValue implements Comparable { + double otherValue = ((double) timeValue.duration) * timeValue.timeUnit.toNanos(1); + return Double.compare(thisValue, otherValue); + } ++ ++ /** ++ * Adds condition to check if the index is at least age old ++ * @param rolloverRequest ++ */ ++ public RolloverRequest addMaxIndexAgeCondition(RolloverRequest rolloverRequest) { ++ MaxAgeCondition maxAgeCondition = new MaxAgeCondition(this); ++ if (rolloverRequest.getConditions().containsKey(maxAgeCondition.name())) { ++ throw new IllegalArgumentException(maxAgeCondition.name() + " condition is already set"); ++ } ++ rolloverRequest.getConditions().put(maxAgeCondition.name(), maxAgeCondition); ++ return rolloverRequest; ++ } + } +diff --git a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/LinearRing.java b/libs/geo/src/main/java/org/elasticsearch/geo/geometry/LinearRing.java +old mode 100644 +new mode 100755 +index d27e512ef34..8fe258ed034 +--- a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/LinearRing.java ++++ b/libs/geo/src/main/java/org/elasticsearch/geo/geometry/LinearRing.java +@@ -57,4 +57,10 @@ public class LinearRing extends Line { + public T visit(GeometryVisitor visitor) throws E { + return visitor.visit(this); + } ++ ++ public void checkRing(Polygon polygon) { ++ if (length() < 4) { ++ throw new IllegalArgumentException("at least 4 polygon points required"); ++ } ++ } + } +diff --git a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Polygon.java b/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Polygon.java +old mode 100644 +new mode 100755 +index ec6f564774c..0384dfc9c9b +--- a/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Polygon.java ++++ b/libs/geo/src/main/java/org/elasticsearch/geo/geometry/Polygon.java +@@ -48,12 +48,12 @@ public final class Polygon implements Geometry { + throw new IllegalArgumentException("holes must not be null"); + } + boolean hasAlt = polygon.hasAlt(); +- checkRing(polygon); ++ polygon.checkRing(this); + for (LinearRing hole : holes) { + if (hole.hasAlt() != hasAlt) { + throw new IllegalArgumentException("holes must have the same number of dimensions as the polygon"); + } +- checkRing(hole); ++ hole.checkRing(this); + } + this.hasAlt = hasAlt; + } +@@ -70,12 +70,6 @@ public final class Polygon implements Geometry { + return ShapeType.POLYGON; + } + +- private void checkRing(LinearRing ring) { +- if (ring.length() < 4) { +- throw new IllegalArgumentException("at least 4 polygon points required"); +- } +- } +- + public int getNumberOfHoles() { + return holes.size(); + } +diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java +old mode 100644 +new mode 100755 +index 473e8626a4c..f0987a66dcf +--- 
a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java ++++ b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java +@@ -19,6 +19,8 @@ + + package org.elasticsearch.grok; + ++import org.elasticsearch.ElasticsearchTimeoutException; ++import org.elasticsearch.xpack.ml.filestructurefinder.TimeoutChecker; + import org.jcodings.specific.UTF8Encoding; + import org.joni.Matcher; + import org.joni.NameEntry; +@@ -312,5 +314,24 @@ public final class Grok { + } + } + ++ /** ++ * Wrapper around {@link Grok#captures} that translates any timeout exception ++ * to the style thrown by this class's {@link #check} method. ++ * @param text The text to match and extract values from. ++ * @param where Which stage of the operation is currently in progress? ++ * @param timeoutChecker ++ * @return A map containing field names and their respective coerced values that matched. ++ * @throws ElasticsearchTimeoutException If the operation is found to have taken longer than the permitted time. ++ */ ++ public Map grokCaptures(String text, String where, TimeoutChecker timeoutChecker) { ++ ++ try { ++ return captures(text); ++ } finally { ++ // If a timeout has occurred then this check will overwrite any timeout exception thrown by Grok.captures() and this ++ // is intentional - the exception from this class makes more sense in the context of the find file structure API ++ timeoutChecker.check(where); ++ } ++ } + } + +diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java b/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java +old mode 100644 +new mode 100755 +index 5c3b519e390..09eef168d65 +--- a/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java ++++ b/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java +@@ -20,6 +20,7 @@ + package org.elasticsearch.nio; + + import org.elasticsearch.nio.utils.ExceptionsHelper; ++import org.elasticsearch.xpack.security.transport.nio.SSLDriver; + + import java.nio.ByteBuffer; + import java.util.ArrayDeque; +@@ -264,4 +265,11 @@ public final class InboundChannelBuffer implements AutoCloseable { + private int indexInPage(long index) { + return (int) (index & PAGE_MASK); + } ++ ++ public void ensureApplicationBufferSize(SSLDriver sslDriver) { ++ int applicationBufferSize = sslDriver.getSSLEngine().getSession().getApplicationBufferSize(); ++ if (getRemaining() < applicationBufferSize) { ++ ensureCapacity(getIndex() + sslDriver.getSSLEngine().getSession().getApplicationBufferSize()); ++ } ++ } + } +diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/ParseField.java b/libs/x-content/src/main/java/org/elasticsearch/common/ParseField.java +old mode 100644 +new mode 100755 +index 084d82372c0..01a5d081e82 +--- a/libs/x-content/src/main/java/org/elasticsearch/common/ParseField.java ++++ b/libs/x-content/src/main/java/org/elasticsearch/common/ParseField.java +@@ -18,7 +18,9 @@ + */ + package org.elasticsearch.common; + ++import org.elasticsearch.client.ml.datafeed.DatafeedUpdate; + import org.elasticsearch.common.xcontent.DeprecationHandler; ++import org.elasticsearch.common.xcontent.XContentBuilder; + + import java.util.Collections; + import java.util.HashSet; +@@ -150,6 +152,12 @@ public class ParseField { + return deprecatedNames; + } + ++ public void addOptionalField(XContentBuilder builder, Object value, DatafeedUpdate datafeedUpdate) throws IOException { ++ if (value != null) { ++ builder.field(getPreferredName(), value); ++ } ++ } ++ + public static class CommonFields { + public static 
final ParseField FIELD = new ParseField("field"); + public static final ParseField FIELDS = new ParseField("fields"); +diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java +old mode 100644 +new mode 100755 +index 9135bf648a1..12d17fc5192 +--- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java ++++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java +@@ -21,6 +21,7 @@ package org.elasticsearch.common.xcontent; + + import org.elasticsearch.common.CheckedFunction; + import org.elasticsearch.common.ParseField; ++import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource; + + import java.io.IOException; + import java.util.ArrayList; +@@ -43,6 +44,10 @@ public class NamedXContentRegistry { + */ + public static final NamedXContentRegistry EMPTY = new NamedXContentRegistry(emptyList()); + ++ public XContentParser parser(InputStream stream, XContentSource xContentSource) throws IOException { ++ return xContentSource.getContentType().xContent().createParser(this, LoggingDeprecationHandler.INSTANCE, stream); ++ } ++ + /** + * An entry in the {@linkplain NamedXContentRegistry} containing the name of the object and the parser that can parse it. + */ +diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java +old mode 100644 +new mode 100755 +index 7196fdbf984..ae6c7fc3d31 +--- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java ++++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java +@@ -19,6 +19,10 @@ + + package org.elasticsearch.common.xcontent; + ++import org.elasticsearch.ElasticsearchParseException; ++import org.elasticsearch.action.admin.indices.alias.Alias; ++import org.elasticsearch.client.indices.CreateIndexRequest; ++import org.elasticsearch.common.bytes.BytesReference; + import org.elasticsearch.common.xcontent.cbor.CborXContent; + import org.elasticsearch.common.xcontent.json.JsonXContent; + import org.elasticsearch.common.xcontent.smile.SmileXContent; +@@ -179,4 +183,23 @@ public enum XContentType { + + public abstract String mediaTypeWithoutParameters(); + ++ /** ++ * Sets the aliases that will be associated with the index when it gets created ++ * @param source ++ * @param createIndexRequest ++ */ ++ public CreateIndexRequest aliases(BytesReference source, CreateIndexRequest createIndexRequest) { ++ // EMPTY is safe here because we never call namedObject ++ try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, ++ DeprecationHandler.THROW_UNSUPPORTED_OPERATION, source, this)) { ++ //move to the first alias ++ parser.nextToken(); ++ while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { ++ createIndexRequest.alias(Alias.fromXContent(parser)); ++ } ++ return createIndexRequest; ++ } catch(IOException e) { ++ throw new ElasticsearchParseException("Failed to parse aliases", e); ++ } ++ } + } +diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java +old mode 100644 +new mode 100755 +index 9a7bf5eb915..b829d41631c +--- 
a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java ++++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java +@@ -71,7 +71,7 @@ public class CompoundAnalysisTests extends ESTestCase { + private List analyze(Settings settings, String analyzerName, String text) throws IOException { + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); + AnalysisModule analysisModule = createAnalysisModule(settings); +- IndexAnalyzers indexAnalyzers = analysisModule.getAnalysisRegistry().build(idxSettings); ++ IndexAnalyzers indexAnalyzers = idxSettings.build(analysisModule.getAnalysisRegistry()); + Analyzer analyzer = indexAnalyzers.get(analyzerName).analyzer(); + + TokenStream stream = analyzer.tokenStream("" , text); +diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java +old mode 100644 +new mode 100755 +index c39fa05c26f..d6107a7e774 +--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java ++++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java +@@ -52,8 +52,8 @@ public class MultiplexerTokenFilterTests extends ESTokenStreamTestCase { + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); + +- IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings), +- Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry().build(idxSettings); ++ IndexAnalyzers indexAnalyzers = idxSettings.build(new AnalysisModule(TestEnvironment.newEnvironment(settings), ++ Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry()); + + try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) { + assertNotNull(analyzer); +@@ -89,8 +89,8 @@ public class MultiplexerTokenFilterTests extends ESTokenStreamTestCase { + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); + +- IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings), +- Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry().build(idxSettings); ++ IndexAnalyzers indexAnalyzers = idxSettings.build(new AnalysisModule(TestEnvironment.newEnvironment(settings), ++ Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry()); + + try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) { + assertNotNull(analyzer); +diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java +old mode 100644 +new mode 100755 +index 18afbdcecb3..6938cfdde5b +--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java ++++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java +@@ -75,7 +75,7 @@ public class PredicateTokenScriptFilterTests extends ESTokenStreamTestCase { + AnalysisModule module + = new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(plugin)); + +- IndexAnalyzers analyzers = 
module.getAnalysisRegistry().build(idxSettings); ++ IndexAnalyzers analyzers = idxSettings.build(module.getAnalysisRegistry()); + + try (NamedAnalyzer analyzer = analyzers.get("myAnalyzer")) { + assertNotNull(analyzer); +diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java +old mode 100644 +new mode 100755 +index 39134ef1f53..4089366f0d9 +--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java ++++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java +@@ -75,7 +75,7 @@ public class ScriptedConditionTokenFilterTests extends ESTokenStreamTestCase { + AnalysisModule module + = new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(plugin)); + +- IndexAnalyzers analyzers = module.getAnalysisRegistry().build(idxSettings); ++ IndexAnalyzers analyzers = idxSettings.build(module.getAnalysisRegistry()); + + try (NamedAnalyzer analyzer = analyzers.get("myAnalyzer")) { + assertNotNull(analyzer); +diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java +old mode 100644 +new mode 100755 +index c80f99484a9..cd80ec9b05f +--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java ++++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java +@@ -56,15 +56,6 @@ public class MultiSearchTemplateRequest extends ActionRequest implements Composi + return this; + } + +- /** +- * Add a search template request to execute. Note, the order is important, the search response will be returned in the +- * same order as the search requests. +- */ +- public MultiSearchTemplateRequest add(SearchTemplateRequest request) { +- requests.add(request); +- return this; +- } +- + /** + * Returns the amount of search requests specified in this multi search requests are allowed to be ran concurrently. 
+ */ +diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java +old mode 100644 +new mode 100755 +index 5195ce93963..0a082d8d977 +--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java ++++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java +@@ -86,7 +86,7 @@ public class RestMultiSearchTemplateAction extends BaseRestHandler { + SearchTemplateRequest searchTemplateRequest = SearchTemplateRequest.fromXContent(bytes); + if (searchTemplateRequest.getScript() != null) { + searchTemplateRequest.setRequest(searchRequest); +- multiRequest.add(searchTemplateRequest); ++ searchTemplateRequest.add(multiRequest); + } else { + throw new IllegalArgumentException("Malformed search template"); + } +diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java +old mode 100644 +new mode 100755 +index da3cc368814..257feff4e25 +--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java ++++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java +@@ -246,4 +246,14 @@ public class SearchTemplateRequest extends ActionRequest implements CompositeInd + out.writeMap(scriptParams); + } + } ++ ++ /** ++ * Add a search template request to execute. Note, the order is important, the search response will be returned in the ++ * same order as the search requests. ++ * @param multiSearchTemplateRequest ++ */ ++ public MultiSearchTemplateRequest add(MultiSearchTemplateRequest multiSearchTemplateRequest) { ++ multiSearchTemplateRequest.requests().add(this); ++ return multiSearchTemplateRequest; ++ } + } +diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java +old mode 100644 +new mode 100755 +index 7da78a449d7..fc59e789d83 +--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java ++++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java +@@ -74,7 +74,7 @@ public class TransportMultiSearchTemplateAction extends HandledTransportAction params5 = new HashMap<>(); + params5.put("groups", Arrays.asList(1, 2, 3)); + search5.setScriptParams(params5); +- multiRequest.add(search5); ++ search5.add(multiRequest); + + MultiSearchTemplateResponse response = client().execute(MultiSearchTemplateAction.INSTANCE, multiRequest).get(); + assertThat(response.getResponses(), arrayWithSize(5)); +diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java +old mode 100644 +new mode 100755 +index 39400197a38..8ef9757ebc2 +--- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java ++++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java +@@ -128,7 +128,7 @@ public class 
MultiSearchTemplateRequestTests extends ESTestCase { + scriptParams.put("value", randomAlphaOfLengthBetween(2, 5)); + searchTemplateRequest.setScriptParams(scriptParams); + +- multiSearchTemplateRequest.add(searchTemplateRequest); ++ searchTemplateRequest.add(multiSearchTemplateRequest); + } + + //Serialize the request +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java +old mode 100644 +new mode 100755 +index a6a15b8ce1e..ee3dbb83207 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java +@@ -39,17 +39,7 @@ public class Globals { + public Globals(BitSet statements) { + this.statements = statements; + } +- +- /** Adds a new synthetic method to be written. It must be analyzed! */ +- public void addSyntheticMethod(SFunction function) { +- if (!function.synthetic) { +- throw new IllegalStateException("method: " + function.name + " is not synthetic"); +- } +- if (syntheticMethods.put(function.name, function) != null) { +- throw new IllegalStateException("synthetic method: " + function.name + " already exists"); +- } +- } +- ++ + /** Adds a new constant initializer to be written */ + public void addConstantInitializer(Constant constant) { + if (constantInitializers.put(constant.name, constant) != null) { +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Location.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Location.java +old mode 100644 +new mode 100755 +index d90baa06551..f37c486a68a +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Location.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Location.java +@@ -96,4 +96,17 @@ public final class Location { + } + return fileName.toString(); + } ++ ++ /** ++ * Encodes the offset into the line number table as {@code offset + 1}. ++ *
++     * <p>
++ * This is invoked before instructions that can hit exceptions. ++ * @param methodWriter ++ */ ++ public void writeDebugInfo(MethodWriter methodWriter) { ++ // TODO: maybe track these in bitsets too? this is trickier... ++ Label label = new Label(); ++ visitLabel(label); ++ visitLineNumber(getOffset() + 1, label); ++ } + } +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java +old mode 100644 +new mode 100755 +index ed4cce5ddda..e25e7a03bab +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java +@@ -125,21 +125,9 @@ public final class MethodWriter extends GeneratorAdapter { + statements.set(offset); + } + +- /** +- * Encodes the offset into the line number table as {@code offset + 1}. +- *
+-     * <p>
+- * This is invoked before instructions that can hit exceptions. +- */ +- public void writeDebugInfo(Location location) { +- // TODO: maybe track these in bitsets too? this is trickier... +- Label label = new Label(); +- visitLabel(label); +- visitLineNumber(location.getOffset() + 1, label); +- } +- + public void writeLoopCounter(int slot, int count, Location location) { + assert slot != -1; +- writeDebugInfo(location); ++ location.writeDebugInfo(this); + final Label end = new Label(); + + iinc(slot, -count); +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +old mode 100644 +new mode 100755 +index 3efd700413d..1d604ad84a9 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +@@ -766,14 +766,14 @@ public final class Walker extends PainlessParserBaseVisitor { + public ANode visitDynamic(DynamicContext ctx) { + AExpression primary = (AExpression)visit(ctx.primary()); + +- return buildPostfixChain(primary, null, ctx.postfix()); ++ return primary.buildPostfixChain(null, ctx.postfix(), this); + } + + @Override + public ANode visitStatic(StaticContext ctx) { + String type = ctx.decltype().getText(); + +- return buildPostfixChain(new EStatic(location(ctx), type), ctx.postdot(), ctx.postfix()); ++ return new EStatic(location(ctx), type).buildPostfixChain(ctx.postdot(), ctx.postfix(), this); + } + + @Override +@@ -894,20 +894,6 @@ public final class Walker extends PainlessParserBaseVisitor { + return new ENewObj(location(ctx), type, arguments); + } + +- private AExpression buildPostfixChain(AExpression primary, PostdotContext postdot, List postfixes) { +- AExpression prefix = primary; +- +- if (postdot != null) { +- prefix = visitPostdot(postdot, prefix); +- } +- +- for (PostfixContext postfix : postfixes) { +- prefix = visitPostfix(postfix, prefix); +- } +- +- return prefix; +- } +- + @Override + public ANode visitPostfix(PostfixContext ctx) { + throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); +@@ -992,7 +978,7 @@ public final class Walker extends PainlessParserBaseVisitor { + expressions.add((AExpression)visit(expression)); + } + +- return buildPostfixChain(new ENewArray(location(ctx), type.toString(), expressions, false), ctx.postdot(), ctx.postfix()); ++ return new ENewArray(location(ctx), type.toString(), expressions, false).buildPostfixChain(ctx.postdot(), ctx.postfix(), this); + } + + @Override +@@ -1004,7 +990,7 @@ public final class Walker extends PainlessParserBaseVisitor { + expressions.add((AExpression)visit(expression)); + } + +- return buildPostfixChain(new ENewArray(location(ctx), type, expressions, true), null, ctx.postfix()); ++ return new ENewArray(location(ctx), type, expressions, true).buildPostfixChain(null, ctx.postfix(), this); + } + + @Override +@@ -1124,8 +1110,8 @@ public final class Walker extends PainlessParserBaseVisitor { + new ENewArray(location, arrayType, Arrays.asList( + new EVariable(location, "size")), false)); + String name = nextLambda(); +- globals.addSyntheticMethod(new SFunction(new FunctionReserved(), location, arrayType, name, +- Arrays.asList("int"), Arrays.asList("size"), Arrays.asList(code), true)); ++ new SFunction(new FunctionReserved(), location, arrayType, name, ++ Arrays.asList("int"), Arrays.asList("size"), Arrays.asList(code), true).addSyntheticMethod(globals); 
+ + return new EFunctionRef(location(ctx), "this", name); + } +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java +old mode 100644 +new mode 100755 +index ddf289564b1..fccc2ed60f8 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java +@@ -22,6 +22,8 @@ package org.elasticsearch.painless.node; + import org.elasticsearch.painless.AnalyzerCaster; + import org.elasticsearch.painless.Locals; + import org.elasticsearch.painless.Location; ++import org.elasticsearch.painless.antlr.PainlessParser; ++import org.elasticsearch.painless.antlr.Walker; + import org.elasticsearch.painless.lookup.PainlessCast; + import org.elasticsearch.painless.lookup.PainlessLookupUtility; + +@@ -214,4 +216,18 @@ public abstract class AExpression extends ANode { + } + } + } ++ ++ public AExpression buildPostfixChain(PainlessParser.PostdotContext postdot, List postfixes, Walker walker) { ++ AExpression prefix = this; ++ ++ if (postdot != null) { ++ prefix = walker.visitPostdot(postdot, prefix); ++ } ++ ++ for (PainlessParser.PostfixContext postfix : postfixes) { ++ prefix = walker.visitPostfix(postfix, prefix); ++ } ++ ++ return prefix; ++ } + } +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java +old mode 100644 +new mode 100755 +index 584e5df6342..185363b5977 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java +@@ -238,7 +238,7 @@ public final class EAssignment extends AExpression { + */ + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + // For the case where the assignment represents a String concatenation + // we must, depending on the Java version, write a StringBuilder or +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java +old mode 100644 +new mode 100755 +index 00abe788bf4..e16e003978e +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java +@@ -620,7 +620,7 @@ public final class EBinary extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + if (promote == String.class && operation == Operation.ADD) { + if (!cat) { +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +old mode 100644 +new mode 100755 +index ba150ea5f9e..663477fc4a1 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +@@ -147,7 +147,7 @@ public final class ECallLocal extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + if 
(localMethod != null) { + for (AExpression argument : arguments) { +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +old mode 100644 +new mode 100755 +index a649fa7611c..bebf9bb45fd +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +@@ -82,7 +82,7 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + if (defPointer != null) { + // dynamic interface: push captured parameter on stack + // TODO: don't do this: its just to cutover :) +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java +old mode 100644 +new mode 100755 +index 08236a965fe..5949cde8e3e +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java +@@ -57,7 +57,7 @@ final class ECast extends AExpression { + @Override + void write(MethodWriter writer, Globals globals) { + child.write(writer, globals); +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + writer.writeCast(cast); + } + +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java +old mode 100644 +new mode 100755 +index 4d8a71ae3eb..02eb91c9562 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java +@@ -429,7 +429,7 @@ public final class EComp extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + left.write(writer, globals); + +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java +old mode 100644 +new mode 100755 +index fee5cba1aac..eaaa985dd4b +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java +@@ -89,7 +89,7 @@ public final class EConditional extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + Label fals = new Label(); + Label end = new Label(); +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java +old mode 100644 +new mode 100755 +index cb4d9323962..80624bea7b2 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java +@@ -93,7 +93,7 @@ public class EElvis extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ 
location.writeDebugInfo(writer); + + Label end = new Label(); + +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +old mode 100644 +new mode 100755 +index c97cc66c7c7..a1db67a76e5 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +@@ -65,7 +65,7 @@ public final class EFunctionRef extends AExpression implements ILambda { + @Override + void write(MethodWriter writer, Globals globals) { + if (ref != null) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + writer.invokeLambdaCall(ref); + } else { + // TODO: don't do this: its just to cutover :) +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +old mode 100644 +new mode 100755 +index af906416ca7..2802068fa61 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +@@ -188,10 +188,10 @@ public final class ELambda extends AExpression implements ILambda { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + if (ref != null) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + // load captures + for (Variable capture : captures) { + writer.visitVarInsn(MethodWriter.getType(capture.clazz).getOpcode(Opcodes.ILOAD), capture.getSlot()); +@@ -208,7 +208,7 @@ public final class ELambda extends AExpression implements ILambda { + } + + // add synthetic method to the queue to be written +- globals.addSyntheticMethod(desugared); ++ desugared.addSyntheticMethod(globals); + } + + @Override +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +old mode 100644 +new mode 100755 +index 8c9154aaaf3..b1fbae5d043 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +@@ -90,7 +90,7 @@ public final class EListInit extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + writer.newInstance(MethodWriter.getType(actual)); + writer.dup(); +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +old mode 100644 +new mode 100755 +index 11c12b2cd0a..ef8f5f33220 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +@@ -109,7 +109,7 @@ public final class EMapInit extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + writer.newInstance(MethodWriter.getType(actual)); + writer.dup(); +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java +old mode 100644 +new mode 100755 +index cef005de9c3..c24c30c9fa2 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java +@@ -78,7 +78,7 @@ public final class ENewArray extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + if (initialize) { + writer.push(arguments.size()); +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +old mode 100644 +new mode 100755 +index 9423ed5d109..a8108269e55 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +@@ -96,7 +96,7 @@ public final class ENewObj extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + writer.newInstance(MethodWriter.getType(actual)); + +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java +old mode 100644 +new mode 100755 +index fa249b9df62..9ab1adc5668 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java +@@ -78,7 +78,7 @@ public final class ERegex extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + writer.getStatic(WriterConstants.CLASS_TYPE, constant.name, org.objectweb.asm.Type.getType(Pattern.class)); + globals.addConstantInitializer(constant); +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java +old mode 100644 +new mode 100755 +index 1c0fce81876..f847275b6e4 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java +@@ -187,7 +187,7 @@ public final class EUnary extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + if (operation == Operation.NOT) { + Label fals = new Label(); +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java +old mode 100644 +new mode 100755 +index 0148046f7ec..352f46b99b0 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java +@@ -62,7 +62,7 @@ final class PSubArrayLength extends AStoreable { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + writer.arrayLength(); + } + +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java +old mode 100644 +new mode 100755 +index abd7128a042..7f4be4ba2b3 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java +@@ -85,13 +85,13 @@ final class PSubBrace extends AStoreable { + + @Override + void load(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + writer.arrayLoad(MethodWriter.getType(actual)); + } + + @Override + void store(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + writer.arrayStore(MethodWriter.getType(actual)); + } + +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java +old mode 100644 +new mode 100755 +index fe2ae52603b..fbdc11e19ce +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java +@@ -68,7 +68,7 @@ final class PSubCallInvoke extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + if (box.isPrimitive()) { + writer.box(MethodWriter.getType(box)); +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java +old mode 100644 +new mode 100755 +index afad497dec7..a8ceb7f46ce +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java +@@ -89,7 +89,7 @@ final class PSubDefArray extends AStoreable { + + @Override + void load(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + Type methodType = + Type.getMethodType(MethodWriter.getType(actual), Type.getType(Object.class), MethodWriter.getType(index.actual)); +@@ -98,7 +98,7 @@ final class PSubDefArray extends AStoreable { + + @Override + void store(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + Type methodType = + Type.getMethodType( +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java +old mode 100644 +new mode 100755 +index 7f4e253b409..5094697c98d +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java +@@ -89,7 +89,7 @@ final class PSubDefCall extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + List parameterTypes = new ArrayList<>(); + +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java +old mode 100644 +new mode 100755 +index bf00d8d8599..9afdf5229e3 +--- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java +@@ -54,7 +54,7 @@ final class PSubDefField extends AStoreable { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + org.objectweb.asm.Type methodType = + org.objectweb.asm.Type.getMethodType(MethodWriter.getType(actual), org.objectweb.asm.Type.getType(Object.class)); +@@ -83,7 +83,7 @@ final class PSubDefField extends AStoreable { + + @Override + void load(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + org.objectweb.asm.Type methodType = + org.objectweb.asm.Type.getMethodType(MethodWriter.getType(actual), org.objectweb.asm.Type.getType(Object.class)); +@@ -92,7 +92,7 @@ final class PSubDefField extends AStoreable { + + @Override + void store(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + org.objectweb.asm.Type methodType = org.objectweb.asm.Type.getMethodType( + org.objectweb.asm.Type.getType(void.class), org.objectweb.asm.Type.getType(Object.class), MethodWriter.getType(actual)); +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java +old mode 100644 +new mode 100755 +index 9e09f810250..cb1e1d03622 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java +@@ -61,7 +61,7 @@ final class PSubField extends AStoreable { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) { + writer.getStatic(Type.getType( +@@ -94,7 +94,7 @@ final class PSubField extends AStoreable { + + @Override + void load(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) { + writer.getStatic(Type.getType( +@@ -107,7 +107,7 @@ final class PSubField extends AStoreable { + + @Override + void store(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) { + writer.putStatic(Type.getType( +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java +old mode 100644 +new mode 100755 +index 3bc4913fde9..33df4eaaa25 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java +@@ -116,7 +116,7 @@ final class PSubListShortcut extends AStoreable { + + @Override + void load(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + writer.invokeMethodCall(getter); + + if (getter.returnType == getter.javaMethod.getReturnType()) { +@@ -126,7 +126,7 @@ final class PSubListShortcut extends AStoreable { + + @Override + void store(MethodWriter 
writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + writer.invokeMethodCall(setter); + writer.writePop(MethodWriter.getType(setter.returnType).getSize()); + } +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java +old mode 100644 +new mode 100755 +index 0a0f099bd68..255c4f08954 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java +@@ -87,7 +87,7 @@ final class PSubMapShortcut extends AStoreable { + void write(MethodWriter writer, Globals globals) { + index.write(writer, globals); + +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + writer.invokeMethodCall(getter); + + if (getter.returnType != getter.javaMethod.getReturnType()) { +@@ -117,7 +117,7 @@ final class PSubMapShortcut extends AStoreable { + + @Override + void load(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + writer.invokeMethodCall(getter); + + if (getter.returnType != getter.javaMethod.getReturnType()) { +@@ -127,7 +127,7 @@ final class PSubMapShortcut extends AStoreable { + + @Override + void store(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + writer.invokeMethodCall(setter); + writer.writePop(MethodWriter.getType(setter.returnType).getSize()); + } +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java +old mode 100644 +new mode 100755 +index 43b0feb0009..72388e2017c +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java +@@ -59,7 +59,7 @@ public class PSubNullSafeCallInvoke extends AExpression { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + Label end = new Label(); + writer.dup(); +diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java +old mode 100644 +new mode 100755 +index 16975660477..3804df74882 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java +@@ -76,7 +76,7 @@ final class PSubShortcut extends AStoreable { + + @Override + void write(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + writer.invokeMethodCall(getter); + +@@ -107,7 +107,7 @@ final class PSubShortcut extends AStoreable { + + @Override + void load(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + writer.invokeMethodCall(getter); + +@@ -118,7 +118,7 @@ final class PSubShortcut extends AStoreable { + + @Override + void store(MethodWriter writer, Globals globals) { +- writer.writeDebugInfo(location); ++ location.writeDebugInfo(writer); + + writer.invokeMethodCall(setter); + +diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +old mode 100644 +new mode 100755 +index 9464a54ea2c..31f76561818 +--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java ++++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +@@ -47,6 +47,17 @@ import static java.util.Collections.unmodifiableSet; + * Represents a user-defined function. + */ + public final class SFunction extends AStatement { ++ /** Adds a new synthetic method to be written. It must be analyzed! ++ * @param globals*/ ++ public void addSyntheticMethod(Globals globals) { ++ if (!synthetic) { ++ throw new IllegalStateException("method: " + name + " is not synthetic"); ++ } ++ if (globals.getSyntheticMethods().put(name, this) != null) { ++ throw new IllegalStateException("synthetic method: " + name + " already exists"); ++ } ++ } ++ + public static final class FunctionReserved implements Reserved { + private final Set usedVariables = new HashSet<>(); + private int maxLoopCounter = 0; +diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java +old mode 100644 +new mode 100755 +index 46e358319a2..df95f99fba5 +--- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java ++++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java +@@ -159,7 +159,7 @@ public abstract class ParentJoinAggregator extends BucketsAggregator implements + int globalOrdinal = (int) globalOrdinals.nextOrd(); + assert globalOrdinal != -1 && globalOrdinals.nextOrd() == SortedSetDocValues.NO_MORE_ORDS; + if (existsGlobalOrdinal(globalOrdinal)) { +- collectBucket(sub, docId, 0); ++ sub.collectBucket(docId, 0, ParentJoinAggregator.this); + } + } + } +diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java +old mode 100644 +new mode 100755 +index b3c8f434f26..e125767eecb +--- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java ++++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java +@@ -129,7 +129,7 @@ public class TransportRankEvalAction extends HandledTransportAction status.getStatus()) { +- status = failure.getStatus(); +- } +- } +- for (SearchFailure failure: response.getSearchFailures()) { +- RestStatus failureStatus = ExceptionsHelper.status(failure.getReason()); +- if (failureStatus.getStatus() > status.getStatus()) { +- status = failureStatus; +- } +- } +- return status; +- } + } +diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +old mode 100644 +new mode 100755 +index 4a0813a6a74..5acaa6601b6 +--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java ++++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +@@ -181,7 +181,7 @@ public class RetryTests extends ESIntegTestCase { + } + + Retry retry = new Retry(BackoffPolicy.exponentialBackoff(), client().threadPool()); +- BulkResponse initialBulkResponse = 
retry.withBackoff(client()::bulk, bulk.request()).actionGet(); ++ BulkResponse initialBulkResponse = bulk.request().withBackoff(client()::bulk, retry).actionGet(); + assertFalse(initialBulkResponse.buildFailureMessage(), initialBulkResponse.hasFailures()); + client().admin().indices().prepareRefresh("source").get(); + +diff --git a/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java +old mode 100644 +new mode 100755 +index 9916eb5dfed..88670048866 +--- a/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java ++++ b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java +@@ -30,11 +30,9 @@ import org.elasticsearch.xpack.core.security.authc.Authentication; + import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; + import org.elasticsearch.xpack.core.security.authz.ResolvedIndices; + import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +-import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; + import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; + import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl.IndexAccessControl; + import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; +-import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivileges; + import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; + import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; + import org.elasticsearch.xpack.core.security.user.User; +@@ -134,9 +132,9 @@ public class CustomAuthorizationEngine implements AuthorizationEngine { + Collection applicationPrivilegeDescriptors, + ActionListener listener) { + if (isSuperuser(authentication.getUser())) { +- listener.onResponse(getHasPrivilegesResponse(authentication, hasPrivilegesRequest, true)); ++ listener.onResponse(hasPrivilegesRequest.getHasPrivilegesResponse(authentication, true, this)); + } else { +- listener.onResponse(getHasPrivilegesResponse(authentication, hasPrivilegesRequest, false)); ++ listener.onResponse(hasPrivilegesRequest.getHasPrivilegesResponse(authentication, false, this)); + } + } + +@@ -150,53 +148,6 @@ public class CustomAuthorizationEngine implements AuthorizationEngine { + } + } + +- private HasPrivilegesResponse getHasPrivilegesResponse(Authentication authentication, HasPrivilegesRequest hasPrivilegesRequest, +- boolean authorized) { +- Map clusterPrivMap = new HashMap<>(); +- for (String clusterPriv : hasPrivilegesRequest.clusterPrivileges()) { +- clusterPrivMap.put(clusterPriv, authorized); +- } +- final Map indices = new LinkedHashMap<>(); +- for (IndicesPrivileges check : hasPrivilegesRequest.indexPrivileges()) { +- for (String index : check.getIndices()) { +- final Map privileges = new HashMap<>(); +- final ResourcePrivileges existing = indices.get(index); +- if (existing != null) { +- privileges.putAll(existing.getPrivileges()); +- } +- for (String privilege : check.getPrivileges()) { +- privileges.put(privilege, authorized); +- } +- indices.put(index, ResourcePrivileges.builder(index).addPrivileges(privileges).build()); +- } +- } +- final Map> privilegesByApplication = new HashMap<>(); +- Set 
applicationNames = Arrays.stream(hasPrivilegesRequest.applicationPrivileges()) +- .map(RoleDescriptor.ApplicationResourcePrivileges::getApplication) +- .collect(Collectors.toSet()); +- for (String applicationName : applicationNames) { +- final Map appPrivilegesByResource = new LinkedHashMap<>(); +- for (RoleDescriptor.ApplicationResourcePrivileges p : hasPrivilegesRequest.applicationPrivileges()) { +- if (applicationName.equals(p.getApplication())) { +- for (String resource : p.getResources()) { +- final Map privileges = new HashMap<>(); +- final ResourcePrivileges existing = appPrivilegesByResource.get(resource); +- if (existing != null) { +- privileges.putAll(existing.getPrivileges()); +- } +- for (String privilege : p.getPrivileges()) { +- privileges.put(privilege, authorized); +- } +- appPrivilegesByResource.put(resource, ResourcePrivileges.builder(resource).addPrivileges(privileges).build()); +- } +- } +- } +- privilegesByApplication.put(applicationName, appPrivilegesByResource.values()); +- } +- return new HasPrivilegesResponse(authentication.getUser().principal(), authorized, clusterPrivMap, indices.values(), +- privilegesByApplication); +- } +- + private GetUserPrivilegesResponse getUserPrivilegesResponse(boolean isSuperuser) { + final Set cluster = isSuperuser ? Collections.singleton("ALL") : Collections.emptySet(); + final Set conditionalCluster = Collections.emptySet(); +diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java +old mode 100644 +new mode 100755 +index b609e28a2ac..acb3d75735d +--- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java ++++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java +@@ -70,8 +70,8 @@ public class TestDeprecationHeaderRestAction extends BaseRestHandler { + super(settings); + this.settings = settings; + +- controller.registerAsDeprecatedHandler(RestRequest.Method.GET, "/_test_cluster/deprecated_settings", this, +- DEPRECATED_ENDPOINT, deprecationLogger); ++ deprecationLogger.registerAsDeprecatedHandler(RestRequest.Method.GET, "/_test_cluster/deprecated_settings", this, ++ DEPRECATED_ENDPOINT, controller); + } + + @Override +diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java +old mode 100644 +new mode 100755 +index c685d39c756..ccea324efe6 +--- a/server/src/main/java/org/elasticsearch/Version.java ++++ b/server/src/main/java/org/elasticsearch/Version.java +@@ -25,6 +25,7 @@ import org.elasticsearch.common.SuppressForbidden; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.settings.Settings; ++import org.elasticsearch.common.settings.SettingsException; + import org.elasticsearch.common.xcontent.ToXContentFragment; + import org.elasticsearch.common.xcontent.XContentBuilder; + import org.elasticsearch.monitor.jvm.JvmInfo; +@@ -307,6 +308,23 @@ public class Version implements Comparable, ToXContentFragment { + return builder.value(toString()); + } + ++ /** ++ * Returns a parsed version. 
++ * @param setting ++ * @param settings ++ */ ++ public Version getAsVersion(String setting, Settings settings) throws SettingsException { ++ String sValue = settings.get(setting); ++ if (sValue == null) { ++ return this; ++ } ++ try { ++ return fromId(Integer.parseInt(sValue)); ++ } catch (Exception e) { ++ throw new SettingsException("Failed to parse version setting [" + setting + "] with value [" + sValue + "]", e); ++ } ++ } ++ + /* + * We need the declared versions when computing the minimum compatibility version. As computing the declared versions uses reflection it + * is not cheap. Since computing the minimum compatibility version can occur often, we use this holder to compute the declared versions +diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanation.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanation.java +old mode 100644 +new mode 100755 +index cdf122545c4..b8faee2cf50 +--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanation.java ++++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanation.java +@@ -24,7 +24,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; + import org.elasticsearch.cluster.routing.ShardRouting; + import org.elasticsearch.cluster.routing.ShardRoutingState; + import org.elasticsearch.cluster.routing.UnassignedInfo; +-import org.elasticsearch.cluster.routing.allocation.AllocationDecision; + import org.elasticsearch.cluster.routing.allocation.ShardAllocationDecision; + import org.elasticsearch.common.Nullable; + import org.elasticsearch.common.io.stream.StreamInput; +@@ -147,7 +146,7 @@ public final class ClusterAllocationExplanation implements ToXContentObject, Wri + builder.field("primary", shardRouting.primary()); + builder.field("current_state", shardRouting.state().toString().toLowerCase(Locale.ROOT)); + if (shardRouting.unassignedInfo() != null) { +- unassignedInfoToXContent(shardRouting.unassignedInfo(), builder); ++ shardRouting.unassignedInfo().unassignedInfoToXContent(builder, this); + } + if (currentNode != null) { + builder.startObject("current_node"); +@@ -185,22 +184,4 @@ public final class ClusterAllocationExplanation implements ToXContentObject, Wri + return builder; + } + +- private XContentBuilder unassignedInfoToXContent(UnassignedInfo unassignedInfo, XContentBuilder builder) +- throws IOException { +- +- builder.startObject("unassigned_info"); +- builder.field("reason", unassignedInfo.getReason()); +- builder.field("at", +- UnassignedInfo.DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(unassignedInfo.getUnassignedTimeInMillis()))); +- if (unassignedInfo.getNumFailedAllocations() > 0) { +- builder.field("failed_allocation_attempts", unassignedInfo.getNumFailedAllocations()); +- } +- String details = unassignedInfo.getDetails(); +- if (details != null) { +- builder.field("details", details); +- } +- builder.field("last_allocation_status", AllocationDecision.fromAllocationStatus(unassignedInfo.getLastAllocationStatus())); +- builder.endObject(); +- return builder; +- } + } +diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java +old mode 100644 +new mode 100755 +index f19a3b05b5b..75c25fb4a6a +--- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java ++++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java +@@ -22,7 +22,6 @@ import org.elasticsearch.action.ActionRequestValidationException; + import org.elasticsearch.action.support.master.MasterNodeRequest; + import org.elasticsearch.cluster.ClusterState; + import org.elasticsearch.cluster.coordination.CoordinationMetaData.VotingConfigExclusion; +-import org.elasticsearch.cluster.node.DiscoveryNode; + import org.elasticsearch.cluster.node.DiscoveryNodes; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; +@@ -70,23 +69,9 @@ public class AddVotingConfigExclusionsRequest extends MasterNodeRequest resolveVotingConfigExclusions(ClusterState currentState) { +- final DiscoveryNodes allNodes = currentState.nodes(); +- final Set resolvedNodes = Arrays.stream(allNodes.resolveNodes(nodeDescriptions)) +- .map(allNodes::get).filter(DiscoveryNode::isMasterNode).map(VotingConfigExclusion::new).collect(Collectors.toSet()); +- +- if (resolvedNodes.isEmpty()) { +- throw new IllegalArgumentException("add voting config exclusions request for " + Arrays.asList(nodeDescriptions) +- + " matched no master-eligible nodes"); +- } +- +- resolvedNodes.removeIf(n -> currentState.getVotingConfigExclusions().contains(n)); +- return resolvedNodes; +- } +- + Set resolveVotingConfigExclusionsAndCheckMaximum(ClusterState currentState, int maxExclusionsCount, + String maximumSettingKey) { +- final Set resolvedExclusions = resolveVotingConfigExclusions(currentState); ++ final Set resolvedExclusions = currentState.resolveVotingConfigExclusions(this); + + final int oldExclusionsCount = currentState.getVotingConfigExclusions().size(); + final int newExclusionsCount = resolvedExclusions.size(); +diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +old mode 100644 +new mode 100755 +index 23ffbd0dd1e..026de58c42b +--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java ++++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +@@ -30,7 +30,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; + import org.elasticsearch.cluster.service.ClusterService; + import org.elasticsearch.common.inject.Inject; + import org.elasticsearch.common.regex.Regex; +-import org.elasticsearch.repositories.IndexId; + import org.elasticsearch.repositories.RepositoryData; + import org.elasticsearch.snapshots.SnapshotId; + import org.elasticsearch.snapshots.SnapshotInfo; +@@ -137,7 +136,7 @@ public class TransportGetSnapshotsAction extends TransportMasterNodeAction buildSimpleSnapshotInfos(final Set toResolve, +- final RepositoryData repositoryData, +- final List currentSnapshots) { +- List snapshotInfos = new ArrayList<>(); +- for (SnapshotInfo snapshotInfo : currentSnapshots) { +- if (toResolve.remove(snapshotInfo.snapshotId())) { +- snapshotInfos.add(snapshotInfo.basic()); +- } +- } +- Map> snapshotsToIndices = new HashMap<>(); +- for (IndexId indexId : repositoryData.getIndices().values()) { +- for (SnapshotId snapshotId : repositoryData.getSnapshots(indexId)) { +- if (toResolve.contains(snapshotId)) { +- 
snapshotsToIndices.computeIfAbsent(snapshotId, (k) -> new ArrayList<>()) +- .add(indexId.getName()); +- } +- } +- } +- for (Map.Entry> entry : snapshotsToIndices.entrySet()) { +- final List indices = entry.getValue(); +- CollectionUtil.timSort(indices); +- final SnapshotId snapshotId = entry.getKey(); +- snapshotInfos.add(new SnapshotInfo(snapshotId, indices, repositoryData.getSnapshotState(snapshotId))); +- } +- CollectionUtil.timSort(snapshotInfos); +- return Collections.unmodifiableList(snapshotInfos); +- } + } +diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequest.java +old mode 100644 +new mode 100755 +index 7f07aeb3644..4ca98beae3f +--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequest.java ++++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequest.java +@@ -19,14 +19,20 @@ + + package org.elasticsearch.action.admin.cluster.state; + ++import org.elasticsearch.ElasticsearchStatusException; ++import org.elasticsearch.action.ActionListener; + import org.elasticsearch.action.ActionRequestValidationException; + import org.elasticsearch.action.IndicesRequest; + import org.elasticsearch.action.support.IndicesOptions; + import org.elasticsearch.action.support.master.MasterNodeReadRequest; ++import org.elasticsearch.client.Client; + import org.elasticsearch.common.Strings; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.unit.TimeValue; ++import org.elasticsearch.license.RemoteClusterLicenseChecker; ++import org.elasticsearch.license.XPackLicenseState; ++import org.elasticsearch.xpack.ccr.CcrLicenseChecker; + + import java.io.IOException; + +@@ -191,4 +197,51 @@ public class ClusterStateRequest extends MasterNodeReadRequest onFailure, ++ final Consumer leaderClusterStateConsumer, ++ final Function nonCompliantLicense, ++ final Function unknownLicense, CcrLicenseChecker ccrLicenseChecker) { ++ // we have to check the license on the remote cluster ++ new RemoteClusterLicenseChecker(client, XPackLicenseState::isCcrAllowedForOperationMode).checkRemoteClusterLicenses( ++ Collections.singletonList(clusterAlias), ++ new ActionListener() { ++ ++ @Override ++ public void onResponse(final RemoteClusterLicenseChecker.LicenseCheck licenseCheck) { ++ if (licenseCheck.isSuccess()) { ++ final ActionListener clusterStateListener = ++ ActionListener.wrap(leaderClusterStateConsumer::accept, onFailure); ++ // following an index in remote cluster, so use remote client to fetch leader index metadata ++ remoteClient.admin().cluster().state(ClusterStateRequest.this, clusterStateListener); ++ } else { ++ onFailure.accept(nonCompliantLicense.apply(licenseCheck)); ++ } ++ } ++ ++ @Override ++ public void onFailure(final Exception e) { ++ onFailure.accept(unknownLicense.apply(e)); ++ } ++ ++ }); ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java +old mode 100644 +new mode 100755 +index 9d9f4c61726..c731159efaf +--- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java ++++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java +@@ -20,6 +20,7 @@ + package org.elasticsearch.action.admin.indices.alias; + + import 
org.elasticsearch.ElasticsearchGenerationException;
++import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
+ import org.elasticsearch.common.Nullable;
+ import org.elasticsearch.common.ParseField;
+ import org.elasticsearch.common.Strings;
+@@ -296,4 +297,13 @@ public class Alias implements Streamable, ToXContentFragment {
+     public int hashCode() {
+         return name != null ? name.hashCode() : 0;
+     }
++
++    /**
++     * Adds an alias that will be associated with the index when it gets created
++     * @param createIndexRequest
++     */
++    public CreateIndexRequest alias(CreateIndexRequest createIndexRequest) {
++        createIndexRequest.aliases().add(this);
++        return createIndexRequest;
++    }
+ }
+diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesResponse.java
+old mode 100644
+new mode 100755
+index d0ad58b6e35..2e3f446b981
+--- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesResponse.java
++++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesResponse.java
+@@ -20,11 +20,15 @@
+ package org.elasticsearch.action.admin.indices.alias.get;
+ 
+ import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
++import org.elasticsearch.action.ActionListener;
+ import org.elasticsearch.action.ActionResponse;
++import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
+ import org.elasticsearch.cluster.metadata.AliasMetaData;
+ import org.elasticsearch.common.collect.ImmutableOpenMap;
+ import org.elasticsearch.common.io.stream.StreamInput;
+ import org.elasticsearch.common.io.stream.StreamOutput;
++import org.elasticsearch.index.IndexSettings;
++import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
+ 
+ import java.io.IOException;
+ import java.util.ArrayList;
+@@ -93,4 +97,42 @@ public class GetAliasesResponse extends ActionResponse {
+     public int hashCode() {
+         return Objects.hash(aliases);
+     }
++
++    public void filterResults(String javaRegex, GetIndexResponse indices,
++                              // these are needed to filter out the different results from the same index response
++                              boolean retrieveIndices,
++                              boolean retrieveFrozenIndices,
++                              ActionListener<Set<IndexResolver.IndexInfo>> listener, IndexResolver indexResolver) {
++
++        // since the index name does not support ?, filter the results manually
++        Pattern pattern = javaRegex != null ? Pattern.compile(javaRegex) : null;
++
++        Set<IndexResolver.IndexInfo> result = new TreeSet<>(Comparator.comparing(IndexResolver.IndexInfo::name));
++        // filter aliases (if present)
++        if (this != null) {
++            for (ObjectCursor<List<AliasMetaData>> cursor : getAliases().values()) {
++                for (AliasMetaData amd : cursor.value) {
++                    String alias = amd.alias();
++                    if (alias != null && (pattern == null || pattern.matcher(alias).matches())) {
++                        result.add(new IndexResolver.IndexInfo(alias, IndexResolver.IndexType.ALIAS));
++                    }
++                }
++            }
++        }
++
++        // filter indices (if present)
++        String[] indicesNames = indices != null ? indices.indices() : null;
++        if (indicesNames != null) {
++            for (String indexName : indicesNames) {
++                boolean isFrozen = retrieveFrozenIndices
++                        && IndexSettings.INDEX_SEARCH_THROTTLED.get(indices.getSettings().get(indexName)) == Boolean.TRUE;
++
++                if (pattern == null || pattern.matcher(indexName).matches()) {
++                    result.add(new IndexResolver.IndexInfo(indexName, isFrozen ?
IndexResolver.IndexType.FROZEN_INDEX : IndexResolver.IndexType.STANDARD_INDEX)); ++ } ++ } ++ } ++ ++ listener.onResponse(result); ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +old mode 100644 +new mode 100755 +index cc90eb1c32e..4a3cbd3b5f9 +--- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java ++++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +@@ -314,7 +314,7 @@ public class CreateIndexRequest extends AcknowledgedRequest + //move to the first alias + parser.nextToken(); + while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { +- alias(Alias.fromXContent(parser)); ++ Alias.fromXContent(parser).alias(this); + } + return this; + } catch(IOException e) { +@@ -322,14 +322,6 @@ public class CreateIndexRequest extends AcknowledgedRequest + } + } + +- /** +- * Adds an alias that will be associated with the index when it gets created +- */ +- public CreateIndexRequest alias(Alias alias) { +- this.aliases.add(alias); +- return this; +- } +- + /** + * Sets the settings and mappings as a single source. + */ +diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java +old mode 100644 +new mode 100755 +index 93b4184f958..f59a73635b9 +--- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java ++++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java +@@ -180,7 +180,7 @@ public class CreateIndexRequestBuilder + * Adds an alias that will be associated with the index when it gets created + */ + public CreateIndexRequestBuilder addAlias(Alias alias) { +- request.alias(alias); ++ alias.alias(request); + return this; + } + +diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestBuilder.java +old mode 100644 +new mode 100755 +index 85867b11ad6..b3b1119fef8 +--- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestBuilder.java ++++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestBuilder.java +@@ -70,7 +70,7 @@ public class RolloverRequestBuilder extends MasterNodeOperationRequestBuilder implements + } + + /** +- * Sets the number of shard copies that should be active for creation of the +- * new shrunken index to return. Defaults to {@link ActiveShardCount#DEFAULT}, which will +- * wait for one shard copy (the primary) to become active. Set this value to +- * {@link ActiveShardCount#ALL} to wait for all shards (primary and all replicas) to be active +- * before returning. Otherwise, use {@link ActiveShardCount#from(int)} to set this value to any +- * non-negative integer, up to the number of copies per shard (number of replicas + 1), +- * to wait for the desired amount of shard copies to become active before returning. +- * Index creation will only wait up until the timeout value for the number of shard copies +- * to be active before returning. Check {@link ResizeResponse#isShardsAcknowledged()} to +- * determine if the requisite shard copies were all started before returning or timing out. 
+- * +- * @param waitForActiveShards number of active shard copies to wait on +- */ +- public void setWaitForActiveShards(ActiveShardCount waitForActiveShards) { +- this.getTargetIndexRequest().waitForActiveShards(waitForActiveShards); +- } +- +- /** +- * A shortcut for {@link #setWaitForActiveShards(ActiveShardCount)} where the numerical ++ * A shortcut for {@link ActiveShardCount#setWaitForActiveShards(ResizeRequest)} where the numerical + * shard count is passed in, instead of having to first call {@link ActiveShardCount#from(int)} + * to get the ActiveShardCount. + */ + public void setWaitForActiveShards(final int waitForActiveShards) { +- setWaitForActiveShards(ActiveShardCount.from(waitForActiveShards)); ++ ActiveShardCount.from(waitForActiveShards).setWaitForActiveShards(this); + } + + /** +diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java +old mode 100644 +new mode 100755 +index e4b9a34b004..4e204bac334 +--- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java ++++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java +@@ -62,7 +62,7 @@ public class ResizeRequestBuilder extends AcknowledgedRequestBuilder withBackoff(BiConsumer> consumer, Retry retry) { ++ PlainActionFuture future = PlainActionFuture.newFuture(); ++ retry.withBackoff(consumer, this, future); ++ return future; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/action/bulk/Retry.java b/server/src/main/java/org/elasticsearch/action/bulk/Retry.java +old mode 100644 +new mode 100755 +index e2f90049759..37355d6eafd +--- a/server/src/main/java/org/elasticsearch/action/bulk/Retry.java ++++ b/server/src/main/java/org/elasticsearch/action/bulk/Retry.java +@@ -21,7 +21,6 @@ package org.elasticsearch.action.bulk; + import org.apache.logging.log4j.Logger; + import org.apache.logging.log4j.LogManager; + import org.elasticsearch.action.ActionListener; +-import org.elasticsearch.action.support.PlainActionFuture; + import org.elasticsearch.common.unit.TimeValue; + import org.elasticsearch.rest.RestStatus; + import org.elasticsearch.threadpool.Scheduler; +@@ -59,21 +58,6 @@ public class Retry { + r.execute(bulkRequest); + } + +- /** +- * Invokes #accept(BulkRequest, ActionListener). Backs off on the provided exception. Retries will be scheduled using +- * the class's thread pool. +- * +- * @param consumer The consumer to which apply the request and listener +- * @param bulkRequest The bulk request that should be executed. +- * @return a future representing the bulk response returned by the client. 
+- */ +- public PlainActionFuture withBackoff(BiConsumer> consumer, +- BulkRequest bulkRequest) { +- PlainActionFuture future = PlainActionFuture.newFuture(); +- withBackoff(consumer, bulkRequest, future); +- return future; +- } +- + static class RetryHandler implements ActionListener { + private static final RestStatus RETRY_STATUS = RestStatus.TOO_MANY_REQUESTS; + private static final Logger logger = LogManager.getLogger(RetryHandler.class); +diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +old mode 100644 +new mode 100755 +index afc81b21da4..588782022c9 +--- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java ++++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +@@ -93,7 +93,7 @@ final class ExpandSearchPhase extends SearchPhase { + .postFilter(searchRequest.source().postFilter()); + SearchRequest groupRequest = new SearchRequest(searchRequest); + groupRequest.source(sourceBuilder); +- multiRequest.add(groupRequest); ++ groupRequest.add(multiRequest); + } + } + context.getSearchTransport().sendExecuteMultiSearch(multiRequest, context.getTask(), +diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java +old mode 100644 +new mode 100755 +index 528be036916..24a5506cb6a +--- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java ++++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java +@@ -69,15 +69,6 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice + return this; + } + +- /** +- * Add a search request to execute. Note, the order is important, the search response will be returned in the +- * same order as the search requests. +- */ +- public MultiSearchRequest add(SearchRequest request) { +- requests.add(request); +- return this; +- } +- + /** + * Returns the amount of search requests specified in this multi search requests are allowed to be ran concurrently. 
+      */
+diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequestBuilder.java
+old mode 100644
+new mode 100755
+index ce43f47a497..6323008cf8e
+--- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequestBuilder.java
++++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequestBuilder.java
+@@ -46,7 +46,7 @@ public class MultiSearchRequestBuilder extends ActionRequestBuilder i
+ 
+     private FilterSettings filterSettings;
+ 
++    public MultiTermVectorsRequest add(MultiTermVectorsRequest multiTermVectorsRequest) {
++        multiTermVectorsRequest.getRequests().add(this);
++        return multiTermVectorsRequest;
++    }
+ 
+     public static final class FilterSettings {
+         public Integer maxNumTerms;
+         public Integer minTermFreq;
+diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java
+old mode 100644
+new mode 100755
+index aa9121a040e..e999c0cb148
+--- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java
++++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java
+@@ -314,7 +314,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
+      */
+     @Deprecated
+     public UpdateRequest script(String script, ScriptType scriptType) {
+-        updateOrCreateScript(script, scriptType, null, null);
++        scriptType.updateOrCreateScript(script, null, null, this);
+         return this;
+     }
+ 
+@@ -327,7 +327,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
+      */
+     @Deprecated
+     public UpdateRequest script(String script) {
+-        updateOrCreateScript(script, ScriptType.INLINE, null, null);
++        ScriptType.INLINE.updateOrCreateScript(script, null, null, this);
+         return this;
+     }
+ 
+@@ -338,7 +338,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
+      */
+     @Deprecated
+     public UpdateRequest scriptLang(String scriptLang) {
+-        updateOrCreateScript(null, null, scriptLang, null);
++        (script() == null ? ScriptType.INLINE : script().getType()).updateOrCreateScript(null, scriptLang, null, this);
+         return this;
+     }
+ 
+@@ -361,13 +361,13 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
+         if (script == null) {
+             HashMap<String, Object> scriptParams = new HashMap<>();
+             scriptParams.put(name, value);
+-            updateOrCreateScript(null, null, null, scriptParams);
++            ScriptType.INLINE.updateOrCreateScript(null, null, scriptParams, this);
+         } else {
+             Map<String, Object> scriptParams = script.getParams();
+             if (scriptParams == null) {
+                 scriptParams = new HashMap<>();
+                 scriptParams.put(name, value);
+-                updateOrCreateScript(null, null, null, scriptParams);
++                script.getType().updateOrCreateScript(null, null, scriptParams, this);
+             } else {
+                 scriptParams.put(name, value);
+             }
+@@ -382,24 +382,10 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
+      */
+     @Deprecated
+     public UpdateRequest scriptParams(Map<String, Object> scriptParams) {
+-        updateOrCreateScript(null, null, null, scriptParams);
++        (script() == null ? ScriptType.INLINE : script().getType()).updateOrCreateScript(null, null, scriptParams, this);
+         return this;
+     }
+ 
+-    private void updateOrCreateScript(String scriptContent, ScriptType type, String lang, Map<String, Object> params) {
+-        Script script = script();
+-        if (script == null) {
+-            script = new Script(type == null ? ScriptType.INLINE : type, lang, scriptContent == null ? "" : scriptContent, params);
+-        } else {
+-            String newScriptContent = scriptContent == null ? script.getIdOrCode() : scriptContent;
+-            ScriptType newScriptType = type == null ?
script.getType() : type; +- String newScriptLang = lang == null ? script.getLang() : lang; +- Map newScriptParams = params == null ? script.getParams() : params; +- script = new Script(newScriptType, newScriptLang, newScriptContent, newScriptParams); +- } +- script(script); +- } +- + /** + * The script to execute. Note, make sure not to send different script each + * times and instead use script params if possible with the same +diff --git a/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java b/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java +old mode 100644 +new mode 100755 +index 27de6619053..102f33e2dc7 +--- a/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java ++++ b/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java +@@ -23,9 +23,20 @@ import org.elasticsearch.action.Action; + import org.elasticsearch.action.ActionListener; + import org.elasticsearch.action.ActionRequest; + import org.elasticsearch.action.ActionResponse; ++import org.elasticsearch.action.support.IndicesOptions; + import org.elasticsearch.cluster.node.DiscoveryNode; ++import org.elasticsearch.index.IndexNotFoundException; ++import org.elasticsearch.index.query.IdsQueryBuilder; ++import org.elasticsearch.index.reindex.DeleteByQueryAction; ++import org.elasticsearch.index.reindex.DeleteByQueryRequest; + import org.elasticsearch.tasks.Task; + import org.elasticsearch.tasks.TaskId; ++import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; ++import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; ++import org.elasticsearch.xpack.ml.action.TransportDeleteJobAction; ++import org.elasticsearch.xpack.ml.utils.MlIndicesUtils; ++ ++import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; + + /** + * A {@linkplain Client} that sets the parent task on all requests that it makes. 
Use this to conveniently implement actions that cause +@@ -62,4 +73,26 @@ public class ParentTaskAssigningClient extends FilterClient { + request.setParentTask(parentTask); + super.doExecute(action, request, listener); + } ++ ++ public void deleteQuantiles(String jobId, ActionListener finishedHandler, TransportDeleteJobAction transportDeleteJobAction) { ++ // The quantiles type and doc ID changed in v5.5 so delete both the old and new format ++ DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexPattern()); ++ // Just use ID here, not type, as trying to delete different types spams the logs with an exception stack trace ++ IdsQueryBuilder query = new IdsQueryBuilder().addIds(Quantiles.documentId(jobId)); ++ request.setQuery(query); ++ MlIndicesUtils.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen()).setIndicesOptions(request); ++ request.setAbortOnVersionConflict(false); ++ request.setRefresh(true); ++ ++ executeAsyncWithOrigin(this, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, ActionListener.wrap( ++ response -> finishedHandler.onResponse(true), ++ e -> { ++ // It's not a problem for us if the index wasn't found - it's equivalent to document not found ++ if (e instanceof IndexNotFoundException) { ++ finishedHandler.onResponse(true); ++ } else { ++ finishedHandler.onFailure(e); ++ } ++ })); ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/client/node/NodeClient.java b/server/src/main/java/org/elasticsearch/client/node/NodeClient.java +old mode 100644 +new mode 100755 +index 0ad863c9367..f1bb50f103c +--- a/server/src/main/java/org/elasticsearch/client/node/NodeClient.java ++++ b/server/src/main/java/org/elasticsearch/client/node/NodeClient.java +@@ -27,11 +27,17 @@ import org.elasticsearch.action.support.TransportAction; + import org.elasticsearch.client.Client; + import org.elasticsearch.client.support.AbstractClient; + import org.elasticsearch.cluster.node.DiscoveryNode; ++import org.elasticsearch.common.Strings; + import org.elasticsearch.common.settings.Settings; ++import org.elasticsearch.rest.BaseRestHandler; ++import org.elasticsearch.rest.RestRequest; ++import org.elasticsearch.rest.action.RestToXContentListener; + import org.elasticsearch.tasks.Task; + import org.elasticsearch.tasks.TaskListener; + import org.elasticsearch.threadpool.ThreadPool; + import org.elasticsearch.transport.RemoteClusterService; ++import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction; ++import org.elasticsearch.xpack.deprecation.RestDeprecationInfoAction; + + import java.util.Map; + import java.util.function.Supplier; +@@ -122,4 +128,9 @@ public class NodeClient extends AbstractClient { + public Client getRemoteClusterClient(String clusterAlias) { + return remoteClusterService.getRemoteClusterClient(threadPool(), clusterAlias); + } ++ ++ public BaseRestHandler.RestChannelConsumer handleGet(final RestRequest request, RestDeprecationInfoAction restDeprecationInfoAction) { ++ DeprecationInfoAction.Request infoRequest = new DeprecationInfoAction.Request(Strings.splitStringByCommaToArray(request.param("index"))); ++ return channel -> execute(DeprecationInfoAction.INSTANCE, infoRequest, new RestToXContentListener<>(channel)); ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +old mode 100644 +new mode 100755 +index 6a5e2a32496..554ff10b7be +--- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java ++++ 
b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +@@ -23,6 +23,7 @@ import com.carrotsearch.hppc.cursors.IntObjectCursor; + import com.carrotsearch.hppc.cursors.ObjectCursor; + import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.elasticsearch.Version; ++import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; + import org.elasticsearch.client.transport.TransportClient; + import org.elasticsearch.cluster.block.ClusterBlock; + import org.elasticsearch.cluster.block.ClusterBlocks; +@@ -96,6 +97,20 @@ public class ClusterState implements ToXContentFragment, Diffable + + public static final ClusterState EMPTY_STATE = builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).build(); + ++ public Set resolveVotingConfigExclusions(AddVotingConfigExclusionsRequest addVotingConfigExclusionsRequest) { ++ final DiscoveryNodes allNodes = nodes(); ++ final Set resolvedNodes = Arrays.stream(allNodes.resolveNodes(addVotingConfigExclusionsRequest.getNodeDescriptions())) ++ .map(allNodes::get).filter(DiscoveryNode::isMasterNode).map(VotingConfigExclusion::new).collect(Collectors.toSet()); ++ ++ if (resolvedNodes.isEmpty()) { ++ throw new IllegalArgumentException("add voting config exclusions request for " + Arrays.asList(addVotingConfigExclusionsRequest.getNodeDescriptions()) ++ + " matched no master-eligible nodes"); ++ } ++ ++ resolvedNodes.removeIf(n -> getVotingConfigExclusions().contains(n)); ++ return resolvedNodes; ++ } ++ + /** + * An interface that implementors use when a class requires a client to maybe have a feature. + */ +diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +old mode 100644 +new mode 100755 +index ae9506706e3..7c48af60bd9 +--- a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java ++++ b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +@@ -374,16 +374,6 @@ public class SnapshotsInProgress extends AbstractNamedDiffable implement + return this.entries; + } + +- public Entry snapshot(final Snapshot snapshot) { +- for (Entry entry : entries) { +- final Snapshot curr = entry.snapshot(); +- if (curr.equals(snapshot)) { +- return entry; +- } +- } +- return null; +- } +- + @Override + public String getWriteableName() { + return TYPE; +diff --git a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java +old mode 100644 +new mode 100755 +index be65a2b4130..05e5fb823da +--- a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java ++++ b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java +@@ -28,7 +28,6 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.util.set.Sets; +-import org.elasticsearch.rest.RestStatus; + + import java.io.IOException; + import java.util.EnumMap; +@@ -130,18 +129,6 @@ public class ClusterBlocks extends AbstractDiffable { + return global(level).size() > 0; + } + +- /** +- * Is there a global block with the provided status? 
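For reference, the status check being removed here is only a linear scan of the global block set; the JoinTaskExecutor hunk further down keeps relying on it to turn a SERVICE_UNAVAILABLE block into a RED health status. A small self-contained sketch of that scan with stand-in types (Block, Status and the class name BlockScan are illustrative, not the Elasticsearch classes):

import java.util.Set;

final class BlockScan {

    enum Status { OK, FORBIDDEN, SERVICE_UNAVAILABLE }

    /** Stand-in for ClusterBlock: only the status matters for this check. */
    record Block(int id, String description, Status status) {}

    /** Returns true if any global block carries the given status. */
    static boolean hasGlobalBlockWithStatus(Set<Block> globalBlocks, Status status) {
        for (Block block : globalBlocks) {
            if (block.status() == status) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        Set<Block> global = Set.of(new Block(2, "no master", Status.SERVICE_UNAVAILABLE));
        // e.g. a no-master block reports as SERVICE_UNAVAILABLE, which callers map to cluster health RED
        System.out.println(hasGlobalBlockWithStatus(global, Status.SERVICE_UNAVAILABLE)); // true
    }
}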
+- */ +- public boolean hasGlobalBlockWithStatus(final RestStatus status) { +- for (ClusterBlock clusterBlock : global) { +- if (clusterBlock.status().equals(status)) { +- return true; +- } +- } +- return false; +- } +- + public boolean hasIndexBlock(String index, ClusterBlock block) { + return indicesBlocks.containsKey(index) && indicesBlocks.get(index).contains(block); + } +diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java +old mode 100644 +new mode 100755 +index 7434a246eed..2cc6d552eaa +--- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java ++++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java +@@ -171,7 +171,7 @@ public class JoinTaskExecutor implements ClusterStateTaskExecutor, W + } + } + +- if (clusterState.blocks().hasGlobalBlockWithStatus(RestStatus.SERVICE_UNAVAILABLE)) { ++ if (RestStatus.SERVICE_UNAVAILABLE.hasGlobalBlockWithStatus(clusterState.blocks())) { + computeStatus = ClusterHealthStatus.RED; + } + +diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +old mode 100644 +new mode 100755 +index 412e16e14a3..2978372cb83 +--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java ++++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +@@ -58,6 +58,7 @@ import org.elasticsearch.index.mapper.MapperService; + import org.elasticsearch.index.seqno.SequenceNumbers; + import org.elasticsearch.index.shard.ShardId; + import org.elasticsearch.rest.RestStatus; ++import org.elasticsearch.tasks.TaskResultsService; + + import java.io.IOException; + import java.time.Instant; +@@ -97,6 +98,18 @@ public class IndexMetaData implements Diffable, ToXContentFragmen + new ClusterBlock(12, "index read-only / allow delete (api)", false, false, + true, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.METADATA_WRITE, ClusterBlockLevel.WRITE)); + ++ public int getTaskResultMappingVersion(TaskResultsService taskResultsService) { ++ MappingMetaData mappingMetaData = getMappings().get(TaskResultsService.TASK_TYPE); ++ if (mappingMetaData == null) { ++ return 0; ++ } ++ @SuppressWarnings("unchecked") Map meta = (Map) mappingMetaData.sourceAsMap().get("_meta"); ++ if (meta == null || meta.containsKey(TaskResultsService.TASK_RESULT_MAPPING_VERSION_META_FIELD) == false) { ++ return 1; // The mapping was created before meta field was introduced ++ } ++ return (int) meta.get(TaskResultsService.TASK_RESULT_MAPPING_VERSION_META_FIELD); ++ } ++ + public enum State { + OPEN((byte) 0), + CLOSE((byte) 1); +@@ -1171,7 +1184,7 @@ public class IndexMetaData implements Diffable, ToXContentFragmen + initialRecoveryFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, initialRecoveryMap); + } + Version indexCreatedVersion = Version.indexCreated(settings); +- Version indexUpgradedVersion = settings.getAsVersion(IndexMetaData.SETTING_VERSION_UPGRADED, indexCreatedVersion); ++ Version indexUpgradedVersion = indexCreatedVersion.getAsVersion(IndexMetaData.SETTING_VERSION_UPGRADED, settings); + + if (primaryTerms == null) { + initializePrimaryTerms(); +@@ -1431,7 +1444,7 @@ public class IndexMetaData implements Diffable, ToXContentFragmen + if (version != Version.V_EMPTY) { + builder.put(SETTING_VERSION_CREATED_STRING, version.toString()); + } +- Version versionUpgraded = 
settings.getAsVersion(SETTING_VERSION_UPGRADED, null); ++ Version versionUpgraded = settings.getAsVersion(SETTING_VERSION_UPGRADED, null); + if (versionUpgraded != null) { + builder.put(SETTING_VERSION_UPGRADED_STRING, versionUpgraded.toString()); + } +diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +old mode 100644 +new mode 100755 +index a9f751453f6..a82fcdcc258 +--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java ++++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +@@ -36,6 +36,10 @@ import org.elasticsearch.cluster.NamedDiffableValueSerializer; + import org.elasticsearch.cluster.block.ClusterBlock; + import org.elasticsearch.cluster.block.ClusterBlockLevel; + import org.elasticsearch.cluster.coordination.CoordinationMetaData; ++import org.elasticsearch.cluster.routing.IndexRoutingTable; ++import org.elasticsearch.cluster.routing.IndexShardRoutingTable; ++import org.elasticsearch.cluster.routing.RecoverySource; ++import org.elasticsearch.cluster.routing.ShardRouting; + import org.elasticsearch.common.Nullable; + import org.elasticsearch.common.Strings; + import org.elasticsearch.common.UUIDs; +@@ -90,6 +94,67 @@ public class MetaData implements Iterable, Diffable, To + public static final String ALL = "_all"; + public static final String UNKNOWN_CLUSTER_UUID = "_na_"; + ++ public boolean validate(IndexRoutingTable indexRoutingTable) { ++ // check index exists ++ if (!hasIndex(indexRoutingTable.getIndex().getName())) { ++ throw new IllegalStateException(indexRoutingTable.getIndex() + " exists in routing does not exists in metadata"); ++ } ++ IndexMetaData indexMetaData = index(indexRoutingTable.getIndex().getName()); ++ if (indexMetaData.getIndexUUID().equals(indexRoutingTable.getIndex().getUUID()) == false) { ++ throw new IllegalStateException(indexRoutingTable.getIndex().getName() + " exists in routing does not exists in metadata with the same uuid"); ++ } ++ ++ // check the number of shards ++ if (indexMetaData.getNumberOfShards() != indexRoutingTable.shards().size()) { ++ Set<Integer> expected = new HashSet<>(); ++ for (int i = 0; i < indexMetaData.getNumberOfShards(); i++) { ++ expected.add(i); ++ } ++ for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { ++ expected.remove(indexShardRoutingTable.shardId().id()); ++ } ++ throw new IllegalStateException("Wrong number of shards in routing table, missing: " + expected); ++ } ++ ++ // check the replicas ++ for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { ++ int routingNumberOfReplicas = indexShardRoutingTable.size() - 1; ++ if (routingNumberOfReplicas != indexMetaData.getNumberOfReplicas()) { ++ throw new IllegalStateException("Shard [" + indexShardRoutingTable.shardId().id() + ++ "] routing table has wrong number of replicas, expected [" + indexMetaData.getNumberOfReplicas() + ++ "], got [" + routingNumberOfReplicas + "]"); ++ } ++ for (ShardRouting shardRouting : indexShardRoutingTable) { ++ if (!shardRouting.index().equals(indexRoutingTable.getIndex())) { ++ throw new IllegalStateException("shard routing has an index [" + shardRouting.index() + "] that is different " + ++ "from the routing table"); ++ } ++ final Set<String> inSyncAllocationIds = indexMetaData.inSyncAllocationIds(shardRouting.id()); ++ if (shardRouting.active() && ++ inSyncAllocationIds.contains(shardRouting.allocationId().getId()) == false) { ++ throw new IllegalStateException("active
shard routing " + shardRouting + " has no corresponding entry in the in-sync " + ++ "allocation set " + inSyncAllocationIds); ++ } ++ ++ if (shardRouting.primary() && shardRouting.initializing() && ++ shardRouting.recoverySource().getType() == RecoverySource.Type.EXISTING_STORE) { ++ if (inSyncAllocationIds.contains(RecoverySource.ExistingStoreRecoverySource.FORCED_ALLOCATION_ID)) { ++ if (inSyncAllocationIds.size() != 1) { ++ throw new IllegalStateException("a primary shard routing " + shardRouting ++ + " is a primary that is recovering from a stale primary has unexpected allocation ids in in-sync " + ++ "allocation set " + inSyncAllocationIds); ++ } ++ } else if (inSyncAllocationIds.contains(shardRouting.allocationId().getId()) == false) { ++ throw new IllegalStateException("a primary shard routing " + shardRouting ++ + " is a primary that is recovering from a known allocation id but has no corresponding entry in the in-sync " + ++ "allocation set " + inSyncAllocationIds); ++ } ++ } ++ } ++ } ++ return true; ++ } ++ + public enum XContentContext { + /* Custom metadata should be returns as part of API call */ + API, +diff --git a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +old mode 100644 +new mode 100755 +index afab4af3204..8a65053614d +--- a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java ++++ b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +@@ -20,15 +20,19 @@ + package org.elasticsearch.cluster.node; + + import org.elasticsearch.Version; ++import org.elasticsearch.action.ActionResponse; + import org.elasticsearch.common.UUIDs; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.io.stream.Writeable; + import org.elasticsearch.common.settings.Settings; + import org.elasticsearch.common.transport.TransportAddress; ++import org.elasticsearch.common.xcontent.ToXContent; + import org.elasticsearch.common.xcontent.ToXContentFragment; + import org.elasticsearch.common.xcontent.XContentBuilder; + import org.elasticsearch.node.Node; ++import org.elasticsearch.tasks.Task; ++import org.elasticsearch.tasks.TaskResult; + + import java.io.IOException; + import java.util.Collections; +@@ -391,6 +395,14 @@ public class DiscoveryNode implements Writeable, ToXContentFragment { + return builder; + } + ++ public TaskResult result(ActionResponse response, Task task) throws IOException { ++ if (response instanceof ToXContent) { ++ return new TaskResult(task.taskInfo(getId(), true), (ToXContent) response); ++ } else { ++ throw new IllegalStateException("response has to implement ToXContent to be able to store the results"); ++ } ++ } ++ + /** + * Enum that holds all the possible roles that that a node can fulfill in a cluster. + * Each role has its name and a corresponding abbreviation used by cat apis. 
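The validate(...) method moved into MetaData above boils down to a handful of set and count comparisons between what the routing table contains and what the index metadata promises. A compact stand-alone sketch of the first two checks, with plain collections standing in for the routing table (names such as IndexSpec, shardIdsInRoutingTable and copiesPerShard are illustrative only):

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

final class RoutingChecks {

    /** Stand-in for IndexMetaData: just the shard and replica counts. */
    record IndexSpec(int numberOfShards, int numberOfReplicas) {}

    /** Check 1: every expected shard id must be present in the routing table. */
    static void checkShardCount(IndexSpec spec, Set<Integer> shardIdsInRoutingTable) {
        if (spec.numberOfShards() != shardIdsInRoutingTable.size()) {
            Set<Integer> expected = new HashSet<>();
            for (int i = 0; i < spec.numberOfShards(); i++) {
                expected.add(i);
            }
            expected.removeAll(shardIdsInRoutingTable);   // what is left is what the routing table lost
            throw new IllegalStateException("Wrong number of shards in routing table, missing: " + expected);
        }
    }

    /** Check 2: each shard's copy count (primary + replicas) must match the configured replica count. */
    static void checkReplicas(IndexSpec spec, Map<Integer, List<String>> copiesPerShard) {
        copiesPerShard.forEach((shardId, copies) -> {
            int routingReplicas = copies.size() - 1;      // one copy is the primary
            if (routingReplicas != spec.numberOfReplicas()) {
                throw new IllegalStateException("Shard [" + shardId + "] routing table has wrong number of replicas, expected ["
                    + spec.numberOfReplicas() + "], got [" + routingReplicas + "]");
            }
        });
    }
}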
+diff --git a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +old mode 100644 +new mode 100755 +index 02f4d5d93bf..088e8d82362 +--- a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java ++++ b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +@@ -31,7 +31,6 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.regex.Regex; +-import org.elasticsearch.common.transport.TransportAddress; + + import java.io.IOException; + import java.util.ArrayList; +@@ -239,22 +238,6 @@ public class DiscoveryNodes extends AbstractDiffable implements + return null; + } + +- /** +- * Get a node by its address +- * +- * @param address {@link TransportAddress} of the wanted node +- * @return node identified by the given address or null if no such node exists +- */ +- public DiscoveryNode findByAddress(TransportAddress address) { +- for (ObjectCursor cursor : nodes.values()) { +- DiscoveryNode node = cursor.value; +- if (node.getAddress().equals(address)) { +- return node; +- } +- } +- return null; +- } +- + /** + * Returns the version of the node with the oldest version in the cluster that is not a client node + * +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java +old mode 100644 +new mode 100755 +index 195ae2cce25..db61601c4c1 +--- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java +@@ -26,7 +26,6 @@ import org.apache.lucene.util.CollectionUtil; + import org.elasticsearch.cluster.AbstractDiffable; + import org.elasticsearch.cluster.Diff; + import org.elasticsearch.cluster.metadata.IndexMetaData; +-import org.elasticsearch.cluster.metadata.MetaData; + import org.elasticsearch.cluster.routing.RecoverySource.EmptyStoreRecoverySource; + import org.elasticsearch.cluster.routing.RecoverySource.ExistingStoreRecoverySource; + import org.elasticsearch.cluster.routing.RecoverySource.LocalShardsRecoverySource; +@@ -97,67 +96,6 @@ public class IndexRoutingTable extends AbstractDiffable imple + return index; + } + +- boolean validate(MetaData metaData) { +- // check index exists +- if (!metaData.hasIndex(index.getName())) { +- throw new IllegalStateException(index + " exists in routing does not exists in metadata"); +- } +- IndexMetaData indexMetaData = metaData.index(index.getName()); +- if (indexMetaData.getIndexUUID().equals(index.getUUID()) == false) { +- throw new IllegalStateException(index.getName() + " exists in routing does not exists in metadata with the same uuid"); +- } +- +- // check the number of shards +- if (indexMetaData.getNumberOfShards() != shards().size()) { +- Set expected = new HashSet<>(); +- for (int i = 0; i < indexMetaData.getNumberOfShards(); i++) { +- expected.add(i); +- } +- for (IndexShardRoutingTable indexShardRoutingTable : this) { +- expected.remove(indexShardRoutingTable.shardId().id()); +- } +- throw new IllegalStateException("Wrong number of shards in routing table, missing: " + expected); +- } +- +- // check the replicas +- for (IndexShardRoutingTable indexShardRoutingTable : this) { +- int routingNumberOfReplicas = indexShardRoutingTable.size() - 1; +- if 
(routingNumberOfReplicas != indexMetaData.getNumberOfReplicas()) { +- throw new IllegalStateException("Shard [" + indexShardRoutingTable.shardId().id() + +- "] routing table has wrong number of replicas, expected [" + indexMetaData.getNumberOfReplicas() + +- "], got [" + routingNumberOfReplicas + "]"); +- } +- for (ShardRouting shardRouting : indexShardRoutingTable) { +- if (!shardRouting.index().equals(index)) { +- throw new IllegalStateException("shard routing has an index [" + shardRouting.index() + "] that is different " + +- "from the routing table"); +- } +- final Set inSyncAllocationIds = indexMetaData.inSyncAllocationIds(shardRouting.id()); +- if (shardRouting.active() && +- inSyncAllocationIds.contains(shardRouting.allocationId().getId()) == false) { +- throw new IllegalStateException("active shard routing " + shardRouting + " has no corresponding entry in the in-sync " + +- "allocation set " + inSyncAllocationIds); +- } +- +- if (shardRouting.primary() && shardRouting.initializing() && +- shardRouting.recoverySource().getType() == RecoverySource.Type.EXISTING_STORE) { +- if (inSyncAllocationIds.contains(RecoverySource.ExistingStoreRecoverySource.FORCED_ALLOCATION_ID)) { +- if (inSyncAllocationIds.size() != 1) { +- throw new IllegalStateException("a primary shard routing " + shardRouting +- + " is a primary that is recovering from a stale primary has unexpected allocation ids in in-sync " + +- "allocation set " + inSyncAllocationIds); +- } +- } else if (inSyncAllocationIds.contains(shardRouting.allocationId().getId()) == false) { +- throw new IllegalStateException("a primary shard routing " + shardRouting +- + " is a primary that is recovering from a known allocation id but has no corresponding entry in the in-sync " + +- "allocation set " + inSyncAllocationIds); +- } +- } +- } +- } +- return true; +- } +- + @Override + public Iterator iterator() { + return shards.valuesIt(); +@@ -260,7 +198,7 @@ public class IndexRoutingTable extends AbstractDiffable imple + public List shardsWithState(ShardRoutingState state) { + List shards = new ArrayList<>(); + for (IndexShardRoutingTable shardRoutingTable : this) { +- shards.addAll(shardRoutingTable.shardsWithState(state)); ++ shards.addAll(state.shardsWithState(shardRoutingTable)); + } + return shards; + } +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +old mode 100644 +new mode 100755 +index ca3661f3e6f..d7a852ec688 +--- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +@@ -650,19 +650,6 @@ public class IndexShardRoutingTable implements Iterable { + return shards; + } + +- public List shardsWithState(ShardRoutingState state) { +- if (state == ShardRoutingState.INITIALIZING) { +- return allInitializingShards; +- } +- List shards = new ArrayList<>(); +- for (ShardRouting shardEntry : this) { +- if (shardEntry.state() == state) { +- shards.add(shardEntry); +- } +- } +- return shards; +- } +- + public static class Builder { + + private ShardId shardId; +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java +old mode 100644 +new mode 100755 +index 995be24dd3f..7fbadad0afb +--- a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java ++++ 
b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java +@@ -19,8 +19,11 @@ + + package org.elasticsearch.cluster.routing; + ++import org.elasticsearch.cluster.DiskUsage; + import org.elasticsearch.cluster.node.DiscoveryNode; ++import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; + import org.elasticsearch.common.Nullable; ++import org.elasticsearch.common.collect.ImmutableOpenMap; + import org.elasticsearch.index.shard.ShardId; + + import java.util.ArrayList; +@@ -223,4 +226,24 @@ public class RoutingNode implements Iterable { + public boolean isEmpty() { + return shards.isEmpty(); + } ++ ++ /** ++ * Returns a {@link DiskUsage} for the {@link RoutingNode} using the ++ * average usage of other nodes in the disk usage map. ++ * @param usages Map of nodeId to DiskUsage for all known nodes ++ * @param diskThresholdDecider ++ * @return DiskUsage representing given node using the average disk usage ++ */ ++ public DiskUsage averageUsage(ImmutableOpenMap usages, DiskThresholdDecider diskThresholdDecider) { ++ if (usages.size() == 0) { ++ return new DiskUsage(nodeId(), node().getName(), "_na_", 0, 0); ++ } ++ long totalBytes = 0; ++ long freeBytes = 0; ++ for (ObjectCursor du : usages.values()) { ++ totalBytes += du.value.getTotalBytes(); ++ freeBytes += du.value.getFreeBytes(); ++ } ++ return new DiskUsage(nodeId(), node().getName(), "_na_", totalBytes / usages.size(), freeBytes / usages.size()); ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +old mode 100644 +new mode 100755 +index 4750476805d..1890695d849 +--- a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +@@ -29,6 +29,8 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; + import org.elasticsearch.cluster.metadata.MetaData; + import org.elasticsearch.cluster.node.DiscoveryNode; + import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus; ++import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; ++import org.elasticsearch.cluster.routing.allocation.command.AbstractAllocateAllocationCommand; + import org.elasticsearch.common.Nullable; + import org.elasticsearch.common.Randomness; + import org.elasticsearch.common.collect.Tuple; +@@ -797,6 +799,34 @@ public class RoutingNodes implements Iterable { + return nodesToShards.size(); + } + ++ /** ++ * Initializes an unassigned shard on a node and removes it from the unassigned ++ * @param allocation the allocation ++ * @param routingNode the node to initialize it to ++ * @param shardRouting the shard routing that is to be matched in unassigned shards ++ * @param unassignedInfo unassigned info to override ++ * @param recoverySource recovery source to override ++ * @param abstractAllocateAllocationCommand ++ */ ++ public void initializeUnassignedShard(RoutingAllocation allocation, RoutingNode routingNode, ++ ShardRouting shardRouting, @Nullable UnassignedInfo unassignedInfo, ++ @Nullable RecoverySource recoverySource, AbstractAllocateAllocationCommand abstractAllocateAllocationCommand) { ++ for (UnassignedShards.UnassignedIterator it = unassigned().iterator(); it.hasNext(); ) { ++ ShardRouting unassigned = it.next(); ++ if (!unassigned.equalsIgnoringMetaData(shardRouting)) { ++ continue; ++ } ++ if (unassignedInfo != null || recoverySource != null) { ++ unassigned = 
it.updateUnassigned(unassignedInfo != null ? unassignedInfo : unassigned.unassignedInfo(), ++ recoverySource != null ? recoverySource : unassigned.recoverySource(), allocation.changes()); ++ } ++ it.initialize(routingNode.nodeId(), null, ++ allocation.clusterInfo().getShardSize(unassigned, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE), allocation.changes()); ++ return; ++ } ++ assert false : "shard to initialize not found in list of unassigned shards"; ++ } ++ + public static final class UnassignedShards implements Iterable { + + private final RoutingNodes nodes; +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +old mode 100644 +new mode 100755 +index 3a495775639..271bbe6e3ea +--- a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +@@ -103,11 +103,6 @@ public class RoutingTable implements Iterable, Diffable, Diffable assignedShards, SameShardAllocationDecider sameShardAllocationDecider) { ++ for (ShardRouting assignedShard : assignedShards) { ++ if (node.nodeId().equals(assignedShard.currentNodeId())) { ++ if (assignedShard.isSameAllocation(this)) { ++ return allocation.decision(Decision.NO, SameShardAllocationDecider.NAME, ++ "the shard cannot be allocated to the node on which it already exists [%s]", ++ toString()); ++ } else { ++ return allocation.decision(Decision.NO, SameShardAllocationDecider.NAME, ++ "the shard cannot be allocated to the same node on which a copy of the shard already exists [%s]", ++ assignedShard.toString()); ++ } ++ } ++ } ++ return allocation.decision(Decision.YES, SameShardAllocationDecider.NAME, "the shard does not exist on the same node"); ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRoutingState.java b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRoutingState.java +old mode 100644 +new mode 100755 +index b36e1fcc88a..5abf964f556 +--- a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRoutingState.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRoutingState.java +@@ -71,4 +71,17 @@ public enum ShardRoutingState { + throw new IllegalStateException("No routing state mapped for [" + value + "]"); + } + } ++ ++ public List shardsWithState(IndexShardRoutingTable indexShardRoutingTable) { ++ if (this == INITIALIZING) { ++ return indexShardRoutingTable.getAllInitializingShards(); ++ } ++ List shards = new ArrayList<>(); ++ for (ShardRouting shardEntry : indexShardRoutingTable) { ++ if (shardEntry.state() == this) { ++ shards.add(shardEntry); ++ } ++ } ++ return shards; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java b/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java +old mode 100644 +new mode 100755 +index f83d2391b19..4106781acde +--- a/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java +@@ -20,8 +20,10 @@ + package org.elasticsearch.cluster.routing; + + import org.elasticsearch.ExceptionsHelper; ++import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplanation; + import org.elasticsearch.cluster.ClusterState; + import org.elasticsearch.cluster.metadata.MetaData; ++import org.elasticsearch.cluster.routing.allocation.AllocationDecision; + import 
org.elasticsearch.cluster.routing.allocation.decider.Decision; + import org.elasticsearch.common.Nullable; + import org.elasticsearch.common.io.stream.StreamInput; +@@ -51,6 +53,26 @@ public final class UnassignedInfo implements ToXContentFragment, Writeable { + public static final Setting INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING = + Setting.positiveTimeSetting("index.unassigned.node_left.delayed_timeout", TimeValue.timeValueMinutes(1), Property.Dynamic, + Property.IndexScope); ++ ++ public XContentBuilder unassignedInfoToXContent(XContentBuilder builder, ClusterAllocationExplanation clusterAllocationExplanation) ++ throws IOException { ++ ++ builder.startObject("unassigned_info"); ++ builder.field("reason", getReason()); ++ builder.field("at", ++ DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(getUnassignedTimeInMillis()))); ++ if (getNumFailedAllocations() > 0) { ++ builder.field("failed_allocation_attempts", getNumFailedAllocations()); ++ } ++ String details = getDetails(); ++ if (details != null) { ++ builder.field("details", details); ++ } ++ builder.field("last_allocation_status", AllocationDecision.fromAllocationStatus(getLastAllocationStatus())); ++ builder.endObject(); ++ return builder; ++ } ++ + /** + * Reason why the shard is in unassigned state. + *

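The unassignedInfoToXContent helper added above writes a fixed set of fields and skips the optional ones (failed allocation attempts, details) when they carry no information. A minimal sketch of that conditional-field pattern using an ordered map in place of an XContentBuilder (UnassignedSummary and its parameters mirror the hunk but are only illustrative):

import java.time.Instant;
import java.util.LinkedHashMap;
import java.util.Map;

final class UnassignedSummary {

    /** Builds the same field layout as the hunk above: required fields always, optional fields only when set. */
    static Map<String, Object> toMap(String reason, long unassignedTimeMillis,
                                     int failedAllocationAttempts, String details, String lastAllocationStatus) {
        Map<String, Object> unassignedInfo = new LinkedHashMap<>();
        unassignedInfo.put("reason", reason);
        unassignedInfo.put("at", Instant.ofEpochMilli(unassignedTimeMillis).toString());
        if (failedAllocationAttempts > 0) {                       // only meaningful after at least one failure
            unassignedInfo.put("failed_allocation_attempts", failedAllocationAttempts);
        }
        if (details != null) {                                    // details are optional
            unassignedInfo.put("details", details);
        }
        unassignedInfo.put("last_allocation_status", lastAllocationStatus);
        return unassignedInfo;
    }

    public static void main(String[] args) {
        System.out.println(toMap("NODE_LEFT", 0L, 0, null, "no_attempt"));
        // {reason=NODE_LEFT, at=1970-01-01T00:00:00Z, last_allocation_status=no_attempt}
    }
}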
+diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +old mode 100644 +new mode 100755 +index c688a120a8b..7982ad9e485 +--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +@@ -337,7 +337,7 @@ public class AllocationService { + allocation.debugDecision(true); + // we ignore disable allocation, because commands are explicit + allocation.ignoreDisable(true); +- RoutingExplanations explanations = commands.execute(allocation, explain); ++ RoutingExplanations explanations = allocation.execute(explain, commands); + // we revert the ignore disable flag, since when rerouting, we want the original setting to take place + allocation.ignoreDisable(false); + // the assumption is that commands will move / act on shards (or fail through exceptions) +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RerouteExplanation.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RerouteExplanation.java +old mode 100644 +new mode 100755 +index 761096907d7..7873c0c3152 +--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RerouteExplanation.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RerouteExplanation.java +@@ -73,4 +73,9 @@ public class RerouteExplanation implements ToXContentObject { + builder.endObject(); + return builder; + } ++ ++ public RoutingExplanations add(RoutingExplanations routingExplanations) { ++ routingExplanations.explanations().add(this); ++ return routingExplanations; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java +old mode 100644 +new mode 100755 +index d49869d030e..2b5cb282f0d +--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java +@@ -28,6 +28,8 @@ import org.elasticsearch.cluster.routing.RoutingChangesObserver; + import org.elasticsearch.cluster.routing.RoutingNodes; + import org.elasticsearch.cluster.routing.RoutingTable; + import org.elasticsearch.cluster.routing.ShardRouting; ++import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand; ++import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; + import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; + import org.elasticsearch.cluster.routing.allocation.decider.Decision; + import org.elasticsearch.common.collect.ImmutableOpenMap; +@@ -284,6 +286,21 @@ public class RoutingAllocation { + this.hasPendingAsyncFetch = true; + } + ++ /** ++ * Executes all wrapped commands on a given {@link RoutingAllocation} ++ * ++ * @param explain ++ * @param allocationCommands ++ * @throws org.elasticsearch.ElasticsearchException if something happens during execution ++ */ ++ public RoutingExplanations execute(boolean explain, AllocationCommands allocationCommands) { ++ RoutingExplanations explanations = new RoutingExplanations(); ++ for (AllocationCommand command : allocationCommands.commands()) { ++ command.execute(this, explain).add(explanations); ++ } ++ return explanations; ++ } ++ + public enum DebugMode { + /** + * debug 
mode is off +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingExplanations.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingExplanations.java +old mode 100644 +new mode 100755 +index fe97b524298..6f7067f5b1d +--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingExplanations.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingExplanations.java +@@ -22,7 +22,6 @@ package org.elasticsearch.cluster.routing.allocation; + import org.elasticsearch.cluster.routing.allocation.decider.Decision; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; +-import org.elasticsearch.common.xcontent.ToXContent.Params; + import org.elasticsearch.common.xcontent.ToXContentFragment; + import org.elasticsearch.common.xcontent.XContentBuilder; + +@@ -43,11 +42,6 @@ public class RoutingExplanations implements ToXContentFragment { + this.explanations = new ArrayList<>(); + } + +- public RoutingExplanations add(RerouteExplanation explanation) { +- this.explanations.add(explanation); +- return this; +- } +- + public List explanations() { + return this.explanations; + } +@@ -72,7 +66,7 @@ public class RoutingExplanations implements ToXContentFragment { + RoutingExplanations exp = new RoutingExplanations(); + for (int i = 0; i < exCount; i++) { + RerouteExplanation explanation = RerouteExplanation.readFrom(in); +- exp.add(explanation); ++ explanation.add(exp); + } + return exp; + } +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java +old mode 100644 +new mode 100755 +index 0e6ba4f051d..db4837616db +--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java +@@ -20,15 +20,12 @@ + package org.elasticsearch.cluster.routing.allocation.command; + + import org.elasticsearch.cluster.node.DiscoveryNode; +-import org.elasticsearch.cluster.routing.RecoverySource; + import org.elasticsearch.cluster.routing.RoutingNode; + import org.elasticsearch.cluster.routing.RoutingNodes; + import org.elasticsearch.cluster.routing.ShardRouting; +-import org.elasticsearch.cluster.routing.UnassignedInfo; + import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; + import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; + import org.elasticsearch.cluster.routing.allocation.decider.Decision; +-import org.elasticsearch.common.Nullable; + import org.elasticsearch.common.ParseField; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; +@@ -188,36 +185,7 @@ public abstract class AbstractAllocateAllocationCommand implements AllocationCom + */ + protected void initializeUnassignedShard(RoutingAllocation allocation, RoutingNodes routingNodes, + RoutingNode routingNode, ShardRouting shardRouting) { +- initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, null, null); +- } +- +- /** +- * Initializes an unassigned shard on a node and removes it from the unassigned +- * +- * @param allocation the allocation +- * @param routingNodes the routing nodes +- * @param routingNode the node to initialize 
it to +- * @param shardRouting the shard routing that is to be matched in unassigned shards +- * @param unassignedInfo unassigned info to override +- * @param recoverySource recovery source to override +- */ +- protected void initializeUnassignedShard(RoutingAllocation allocation, RoutingNodes routingNodes, RoutingNode routingNode, +- ShardRouting shardRouting, @Nullable UnassignedInfo unassignedInfo, +- @Nullable RecoverySource recoverySource) { +- for (RoutingNodes.UnassignedShards.UnassignedIterator it = routingNodes.unassigned().iterator(); it.hasNext(); ) { +- ShardRouting unassigned = it.next(); +- if (!unassigned.equalsIgnoringMetaData(shardRouting)) { +- continue; +- } +- if (unassignedInfo != null || recoverySource != null) { +- unassigned = it.updateUnassigned(unassignedInfo != null ? unassignedInfo : unassigned.unassignedInfo(), +- recoverySource != null ? recoverySource : unassigned.recoverySource(), allocation.changes()); +- } +- it.initialize(routingNode.nodeId(), null, +- allocation.clusterInfo().getShardSize(unassigned, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE), allocation.changes()); +- return; +- } +- assert false : "shard to initialize not found in list of unassigned shards"; ++ routingNodes.initializeUnassignedShard(allocation, routingNode, shardRouting, null, null, this); + } + + @Override +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java +old mode 100644 +new mode 100755 +index 4d037570dd2..f88c01fbcf2 +--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java +@@ -135,8 +135,8 @@ public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocation + shardRouting.unassignedInfo().getLastAllocationStatus()); + } + +- initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, unassignedInfoToUpdate, +- EmptyStoreRecoverySource.INSTANCE); ++ routingNodes.initializeUnassignedShard(allocation, routingNode, shardRouting, unassignedInfoToUpdate, ++ EmptyStoreRecoverySource.INSTANCE, this); + + return new RerouteExplanation(this, allocation.decision(Decision.YES, name() + " (allocation command)", "ignore deciders")); + } +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java +old mode 100644 +new mode 100755 +index f4c9aba17d7..848d422ebc2 +--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java +@@ -129,8 +129,8 @@ public class AllocateStalePrimaryAllocationCommand extends BasePrimaryAllocation + "trying to allocate an existing primary shard [" + index + "][" + shardId + "], while no such shard has ever been active"); + } + +- initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, null, +- RecoverySource.ExistingStoreRecoverySource.FORCE_STALE_PRIMARY_INSTANCE); ++ routingNodes.initializeUnassignedShard(allocation, routingNode, shardRouting, null, ++ 
RecoverySource.ExistingStoreRecoverySource.FORCE_STALE_PRIMARY_INSTANCE, this); + return new RerouteExplanation(this, allocation.decision(Decision.YES, name() + " (allocation command)", "ignore deciders")); + } + +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java +old mode 100644 +new mode 100755 +index 67122cb3ff1..b54c199b7d8 +--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java +@@ -20,8 +20,6 @@ + package org.elasticsearch.cluster.routing.allocation.command; + + import org.elasticsearch.ElasticsearchParseException; +-import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +-import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; + import org.elasticsearch.common.Strings; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; +@@ -73,19 +71,6 @@ public class AllocationCommands implements ToXContentFragment { + return this.commands; + } + +- /** +- * Executes all wrapped commands on a given {@link RoutingAllocation} +- * @param allocation {@link RoutingAllocation} to apply this command to +- * @throws org.elasticsearch.ElasticsearchException if something happens during execution +- */ +- public RoutingExplanations execute(RoutingAllocation allocation, boolean explain) { +- RoutingExplanations explanations = new RoutingExplanations(); +- for (AllocationCommand command : commands) { +- explanations.add(command.execute(allocation, explain)); +- } +- return explanations; +- } +- + /** + * Reads a {@link AllocationCommands} from a {@link StreamInput} + * @param in {@link StreamInput} to read from +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +old mode 100644 +new mode 100755 +index 1f048fca76c..2cf0e848e72 +--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +@@ -314,7 +314,7 @@ public class DiskThresholdDecider extends AllocationDecider { + if (usage == null) { + // If there is no usage, and we have other nodes in the cluster, + // use the average usage for all nodes as the usage for this node +- usage = averageUsage(node, usages); ++ usage = node.averageUsage(usages, this); + if (logger.isDebugEnabled()) { + logger.debug("unable to determine disk usage for {}, defaulting to average across nodes [{} total] [{} free] [{}% free]", + node.nodeId(), usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeDiskAsPercentage()); +@@ -334,26 +334,6 @@ public class DiskThresholdDecider extends AllocationDecider { + return usage; + } + +- /** +- * Returns a {@link DiskUsage} for the {@link RoutingNode} using the +- * average usage of other nodes in the disk usage map. 
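The averageUsage helper that this hunk now reaches through RoutingNode just averages total and free bytes over every known node, falling back to zeros when the usage map is empty. A small stand-alone sketch of that fallback (DiskUsage here is a stripped-down stand-in for the Elasticsearch class of the same name):

import java.util.Map;

final class AverageDiskUsage {

    /** Stand-in for DiskUsage: only the two byte counters matter for the average. */
    record DiskUsage(String nodeId, long totalBytes, long freeBytes) {}

    /** Average usage across all known nodes, used when the node itself has no entry in the map. */
    static DiskUsage averageUsage(String nodeId, Map<String, DiskUsage> usages) {
        if (usages.isEmpty()) {
            return new DiskUsage(nodeId, 0, 0);          // no information at all: report an empty usage
        }
        long totalBytes = 0;
        long freeBytes = 0;
        for (DiskUsage usage : usages.values()) {
            totalBytes += usage.totalBytes();
            freeBytes += usage.freeBytes();
        }
        return new DiskUsage(nodeId, totalBytes / usages.size(), freeBytes / usages.size());
    }

    public static void main(String[] args) {
        Map<String, DiskUsage> usages = Map.of(
            "node-1", new DiskUsage("node-1", 100, 40),
            "node-2", new DiskUsage("node-2", 100, 60));
        System.out.println(averageUsage("node-3", usages)); // DiskUsage[nodeId=node-3, totalBytes=100, freeBytes=50]
    }
}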
+- * @param node Node to return an averaged DiskUsage object for +- * @param usages Map of nodeId to DiskUsage for all known nodes +- * @return DiskUsage representing given node using the average disk usage +- */ +- DiskUsage averageUsage(RoutingNode node, ImmutableOpenMap usages) { +- if (usages.size() == 0) { +- return new DiskUsage(node.nodeId(), node.node().getName(), "_na_", 0, 0); +- } +- long totalBytes = 0; +- long freeBytes = 0; +- for (ObjectCursor du : usages.values()) { +- totalBytes += du.value.getTotalBytes(); +- freeBytes += du.value.getFreeBytes(); +- } +- return new DiskUsage(node.nodeId(), node.node().getName(), "_na_", totalBytes / usages.size(), freeBytes / usages.size()); +- } +- + /** + * Given the DiskUsage for a node and the size of the shard, return the + * percentage of free disk if the shard were to be allocated to the node. +diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java +old mode 100644 +new mode 100755 +index 2961b3faaf4..6a5146b306e +--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java ++++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java +@@ -69,7 +69,7 @@ public class SameShardAllocationDecider extends AllocationDecider { + @Override + public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + Iterable assignedShards = allocation.routingNodes().assignedShards(shardRouting.shardId()); +- Decision decision = decideSameNode(shardRouting, node, allocation, assignedShards); ++ Decision decision = shardRouting.decideSameNode(node, allocation, assignedShards, this); + if (decision.type() == Decision.Type.NO || sameHost == false) { + // if its already a NO decision looking at the node, or we aren't configured to look at the host, return the decision + return decision; +@@ -113,24 +113,7 @@ public class SameShardAllocationDecider extends AllocationDecider { + public Decision canForceAllocatePrimary(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + assert shardRouting.primary() : "must not call force allocate on a non-primary shard"; + Iterable assignedShards = allocation.routingNodes().assignedShards(shardRouting.shardId()); +- return decideSameNode(shardRouting, node, allocation, assignedShards); ++ return shardRouting.decideSameNode(node, allocation, assignedShards, this); + } + +- private Decision decideSameNode(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation, +- Iterable assignedShards) { +- for (ShardRouting assignedShard : assignedShards) { +- if (node.nodeId().equals(assignedShard.currentNodeId())) { +- if (assignedShard.isSameAllocation(shardRouting)) { +- return allocation.decision(Decision.NO, NAME, +- "the shard cannot be allocated to the node on which it already exists [%s]", +- shardRouting.toString()); +- } else { +- return allocation.decision(Decision.NO, NAME, +- "the shard cannot be allocated to the same node on which a copy of the shard already exists [%s]", +- assignedShard.toString()); +- } +- } +- } +- return allocation.decision(Decision.YES, NAME, "the shard does not exist on the same node"); +- } + } +diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java 
b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +old mode 100644 +new mode 100755 +index 11b6f451d59..158fc030a39 +--- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java ++++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +@@ -31,7 +31,10 @@ import org.elasticsearch.cluster.ClusterStateTaskConfig; + import org.elasticsearch.cluster.LocalNodeMasterListener; + import org.elasticsearch.cluster.NodeConnectionsService; + import org.elasticsearch.cluster.TimeoutClusterStateListener; ++import org.elasticsearch.cluster.coordination.CoordinationState; ++import org.elasticsearch.cluster.coordination.InMemoryPersistedState; + import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException; ++import org.elasticsearch.cluster.node.DiscoveryNode; + import org.elasticsearch.cluster.node.DiscoveryNodes; + import org.elasticsearch.common.Nullable; + import org.elasticsearch.common.Priority; +@@ -44,6 +47,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; + import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; + import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; + import org.elasticsearch.common.util.iterable.Iterables; ++import org.elasticsearch.gateway.GatewayMetaState; + import org.elasticsearch.threadpool.Scheduler; + import org.elasticsearch.threadpool.ThreadPool; + +@@ -144,6 +148,17 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements + threadPool.scheduler()); + } + ++ public CoordinationState.PersistedState getPersistedState(Settings settings, GatewayMetaState gatewayMetaState) { ++ gatewayMetaState.applyClusterStateUpdaters(); ++ if (DiscoveryNode.isMasterNode(settings) == false) { ++ // use Zen1 way of writing cluster state for non-master-eligible nodes ++ // this avoids concurrent manipulating of IndexMetadata with IndicesStore ++ addLowPriorityApplier(gatewayMetaState); ++ return new InMemoryPersistedState(gatewayMetaState.getCurrentTerm(), gatewayMetaState.getLastAcceptedState()); ++ } ++ return gatewayMetaState; ++ } ++ + class UpdateTask extends SourcePrioritizedRunnable implements Function { + final ClusterApplyListener listener; + final Function updateFunction; +diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java b/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java +old mode 100644 +new mode 100755 +index ea02aebb0aa..0148739e953 +--- a/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java ++++ b/server/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java +@@ -19,6 +19,8 @@ + + package org.elasticsearch.common.blobstore; + ++import org.elasticsearch.common.blobstore.fs.FsBlobStore; ++ + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; +@@ -76,4 +78,18 @@ public class BlobPath implements Iterable { + } + return sb.toString(); + } ++ ++ public Path buildPath(FsBlobStore fsBlobStore) { ++ String[] paths = toArray(); ++ if (paths.length == 0) { ++ return fsBlobStore.path(); ++ } ++ Path blobPath = fsBlobStore.path().resolve(paths[0]); ++ if (paths.length > 1) { ++ for (int i = 1; i < paths.length; i++) { ++ blobPath = blobPath.resolve(paths[i]); ++ } ++ } ++ return blobPath; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java +old mode 
100644 +new mode 100755 +index 60c39a48e09..c1e79f59ee1 +--- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java ++++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java +@@ -77,24 +77,11 @@ public class FsBlobStore implements BlobStore { + } + + private synchronized Path buildAndCreate(BlobPath path) throws IOException { +- Path f = buildPath(path); ++ Path f = path.buildPath(this); + if (readOnly == false) { + Files.createDirectories(f); + } + return f; + } + +- private Path buildPath(BlobPath path) { +- String[] paths = path.toArray(); +- if (paths.length == 0) { +- return path(); +- } +- Path blobPath = this.path.resolve(paths[0]); +- if (paths.length > 1) { +- for (int i = 1; i < paths.length; i++) { +- blobPath = blobPath.resolve(paths[i]); +- } +- } +- return blobPath; +- } + } +diff --git a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java +old mode 100644 +new mode 100755 +index 2c4867cbdfe..ab66f139beb +--- a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java ++++ b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java +@@ -20,10 +20,12 @@ package org.elasticsearch.common.bytes; + + import org.apache.lucene.util.BytesRef; + import org.apache.lucene.util.BytesRefIterator; ++import org.elasticsearch.ElasticsearchParseException; ++import org.elasticsearch.action.admin.indices.alias.Alias; ++import org.elasticsearch.client.indices.PutIndexTemplateRequest; + import org.elasticsearch.common.io.stream.BytesStream; + import org.elasticsearch.common.io.stream.StreamInput; +-import org.elasticsearch.common.xcontent.ToXContentFragment; +-import org.elasticsearch.common.xcontent.XContentBuilder; ++import org.elasticsearch.common.xcontent.*; + + import java.io.ByteArrayOutputStream; + import java.io.EOFException; +@@ -295,6 +297,25 @@ public abstract class BytesReference implements Comparable, ToXC + ref.offset += length; + } + ++ /** ++ * Sets the aliases that will be associated with the index when it gets created ++ * @param putIndexTemplateRequest ++ */ ++ public PutIndexTemplateRequest aliases(PutIndexTemplateRequest putIndexTemplateRequest) { ++ // EMPTY is safe here because we never call namedObject ++ try (XContentParser parser = XContentHelper ++ .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, this)) { ++ //move to the first alias ++ parser.nextToken(); ++ while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { ++ putIndexTemplateRequest.alias(Alias.fromXContent(parser)); ++ } ++ return putIndexTemplateRequest; ++ } catch(IOException e) { ++ throw new ElasticsearchParseException("Failed to parse aliases", e); ++ } ++ } ++ + /** + * Instead of adding the complexity of {@link InputStream#reset()} etc to the actual impl + * this wrapper builds it on top of the BytesReferenceStreamInput which is much simpler +diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoDistance.java b/server/src/main/java/org/elasticsearch/common/geo/GeoDistance.java +old mode 100644 +new mode 100755 +index b1767e73103..8ec48477614 +--- a/server/src/main/java/org/elasticsearch/common/geo/GeoDistance.java ++++ b/server/src/main/java/org/elasticsearch/common/geo/GeoDistance.java +@@ -22,7 +22,6 @@ package org.elasticsearch.common.geo; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import 
org.elasticsearch.common.io.stream.Writeable; +-import org.elasticsearch.common.unit.DistanceUnit; + + import java.io.IOException; + import java.util.Locale; +@@ -69,12 +68,4 @@ public enum GeoDistance implements Writeable { + throw new IllegalArgumentException("No geo distance for [" + name + "]"); + } + +- /** compute the distance between two points using the selected algorithm (PLANE, ARC) */ +- public double calculate(double srcLat, double srcLon, double dstLat, double dstLon, DistanceUnit unit) { +- if (this == PLANE) { +- return DistanceUnit.convert(GeoUtils.planeDistance(srcLat, srcLon, dstLat, dstLon), +- DistanceUnit.METERS, unit); +- } +- return DistanceUnit.convert(GeoUtils.arcDistance(srcLat, srcLon, dstLat, dstLon), DistanceUnit.METERS, unit); +- } + } +diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +old mode 100644 +new mode 100755 +index f990a9750e0..b067085f6de +--- a/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java ++++ b/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +@@ -661,7 +661,7 @@ public class GeoUtils { + public double doubleValue() throws IOException { + final GeoPoint from = fromPoints[0]; + final GeoPoint to = singleValues.geoPointValue(); +- return distance.calculate(from.lat(), from.lon(), to.lat(), to.lon(), unit); ++ return unit.calculate(from.lat(), from.lon(), to.lat(), to.lon(), distance); + } + + }); +@@ -675,7 +675,7 @@ public class GeoUtils { + for (int i = 0; i < geoPointValues.docValueCount(); ++i) { + final GeoPoint point = geoPointValues.nextValue(); + for (GeoPoint from : fromPoints) { +- values[v] = distance.calculate(from.lat(), from.lon(), point.lat(), point.lon(), unit); ++ values[v] = unit.calculate(from.lat(), from.lon(), point.lat(), point.lon(), distance); + v++; + } + } +diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/Errors.java b/server/src/main/java/org/elasticsearch/common/inject/internal/Errors.java +old mode 100644 +new mode 100755 +index 472eb425484..58b2d97956c +--- a/server/src/main/java/org/elasticsearch/common/inject/internal/Errors.java ++++ b/server/src/main/java/org/elasticsearch/common/inject/internal/Errors.java +@@ -377,16 +377,9 @@ public final class Errors { + throw new ProvisionException(getMessages()); + } + +- private Message merge(Message message) { +- List sources = new ArrayList<>(); +- sources.addAll(getSources()); +- sources.addAll(message.getSources()); +- return new Message(sources, message.getMessage(), message.getCause()); +- } +- + public Errors merge(Collection messages) { + for (Message message : messages) { +- addMessage(merge(message)); ++ addMessage(message.merge(this)); + } + return this; + } +diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/Message.java b/server/src/main/java/org/elasticsearch/common/inject/spi/Message.java +old mode 100644 +new mode 100755 +index 619feca805e..d7f04dd73ad +--- a/server/src/main/java/org/elasticsearch/common/inject/spi/Message.java ++++ b/server/src/main/java/org/elasticsearch/common/inject/spi/Message.java +@@ -128,4 +128,11 @@ public final class Message implements Element { + public void applyTo(Binder binder) { + binder.withSource(getSource()).addError(this); + } +-} +\ No newline at end of file ++ ++ public Message merge(Errors errors) { ++ List sources = new ArrayList<>(); ++ sources.addAll(errors.getSources()); ++ sources.addAll(getSources()); ++ return new Message(sources, getMessage(), 
getCause()); ++ } ++} +diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +old mode 100644 +new mode 100755 +index 7a763c5a049..9886e75678a +--- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java ++++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +@@ -34,10 +34,12 @@ import org.elasticsearch.common.Nullable; + import org.elasticsearch.common.Strings; + import org.elasticsearch.common.bytes.BytesArray; + import org.elasticsearch.common.bytes.BytesReference; ++import org.elasticsearch.common.document.DocumentField; + import org.elasticsearch.common.geo.GeoPoint; + import org.elasticsearch.common.text.Text; + import org.elasticsearch.common.unit.TimeValue; + import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; ++import org.elasticsearch.index.get.GetResult; + import org.joda.time.DateTime; + import org.joda.time.DateTimeZone; + +@@ -1103,4 +1105,19 @@ public abstract class StreamInput extends InputStream { + return null; + } + } ++ ++ public Map readFields(GetResult getResult) throws IOException { ++ Map fields = null; ++ int size = readVInt(); ++ if (size == 0) { ++ fields = new HashMap<>(); ++ } else { ++ fields = new HashMap<>(size); ++ for (int i = 0; i < size; i++) { ++ DocumentField field = DocumentField.readDocumentField(this); ++ fields.put(field.getName(), field); ++ } ++ } ++ return fields; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +old mode 100644 +new mode 100755 +index 431e00fcf06..9c33448c95f +--- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java ++++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +@@ -39,6 +39,7 @@ import org.elasticsearch.common.io.stream.Writeable.Writer; + import org.elasticsearch.common.text.Text; + import org.elasticsearch.common.unit.TimeValue; + import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; ++import org.elasticsearch.index.engine.Segment; + import org.elasticsearch.script.JodaCompatibleZonedDateTime; + import org.joda.time.DateTimeZone; + import org.joda.time.ReadableInstant; +@@ -1193,4 +1194,43 @@ public abstract class StreamOutput extends OutputStream { + } + } + ++ public void writeSegmentSort(Sort sort, Segment segment) throws IOException { ++ if (sort == null) { ++ writeVInt(0); ++ return; ++ } ++ writeVInt(sort.getSort().length); ++ for (SortField field : sort.getSort()) { ++ writeString(field.getField()); ++ if (field instanceof SortedSetSortField) { ++ writeByte((byte) 0); ++ writeOptionalBoolean(field.getMissingValue() == null ? 
++ null : field.getMissingValue() == SortField.STRING_FIRST); ++ writeBoolean(((SortedSetSortField) field).getSelector() == SortedSetSelector.Type.MAX); ++ writeBoolean(field.getReverse()); ++ } else if (field instanceof SortedNumericSortField) { ++ switch (((SortedNumericSortField) field).getNumericType()) { ++ case INT: ++ writeByte((byte) 1); ++ break; ++ case FLOAT: ++ writeByte((byte) 2); ++ break; ++ case DOUBLE: ++ writeByte((byte) 3); ++ break; ++ case LONG: ++ writeByte((byte) 4); ++ break; ++ default: ++ throw new IOException("invalid index sort field:" + field); ++ } ++ writeGenericValue(field.getMissingValue()); ++ writeBoolean(((SortedNumericSortField) field).getSelector() == SortedNumericSelector.Type.MAX); ++ writeBoolean(field.getReverse()); ++ } else { ++ throw new IOException("invalid index sort field:" + field); ++ } ++ } ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java b/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java +old mode 100644 +new mode 100755 +index d5d682f807a..c22ab72a914 +--- a/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java ++++ b/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java +@@ -25,6 +25,10 @@ import org.elasticsearch.Build; + import org.elasticsearch.Version; + import org.elasticsearch.common.SuppressLoggerChecks; + import org.elasticsearch.common.util.concurrent.ThreadContext; ++import org.elasticsearch.rest.DeprecationRestHandler; ++import org.elasticsearch.rest.RestController; ++import org.elasticsearch.rest.RestHandler; ++import org.elasticsearch.rest.RestRequest; + + import java.nio.charset.Charset; + import java.security.AccessController; +@@ -395,4 +399,18 @@ public class DeprecationLogger { + } + } + ++ /** ++ * Registers a REST handler to be executed when the provided {@code method} and {@code path} match the request. ++ * @param method GET, POST, etc. 
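Editorial illustration (not part of the patch): the relocated writeSegmentSort now lives on StreamOutput and takes the owning Segment as an extra argument, mirroring the call site out.writeSegmentSort(segmentSort, this) shown in Segment.writeTo; the Segment argument is not used by the body shown above. A minimal sketch of a caller exercising the new signature, using the byte tags from the hunk above (LONG is encoded as 4); the field name and segment name are invented for the example.

import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortedNumericSortField;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.index.engine.Segment;
import java.io.IOException;

class SegmentSortWireSketch {
    static void encode() throws IOException {
        // a numeric index sort on a hypothetical "@timestamp" field, descending
        Sort indexSort = new Sort(new SortedNumericSortField("@timestamp", SortField.Type.LONG, true));
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            // was: segment-private writeSegmentSort(out, indexSort)
            out.writeSegmentSort(indexSort, new Segment("_0"));
        }
    }
}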
++ * @param path Path to handle (e.g., "/{index}/{type}/_bulk") ++ * @param handler The handler to actually execute ++ * @param deprecationMessage The message to log and send as a header in the response ++ * @param restController ++ */ ++ public void registerAsDeprecatedHandler(RestRequest.Method method, String path, RestHandler handler, ++ String deprecationMessage, RestController restController) { ++ assert (handler instanceof DeprecationRestHandler) == false; ++ ++ restController.registerHandler(method, path, new DeprecationRestHandler(handler, deprecationMessage, this)); ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java +old mode 100644 +new mode 100755 +index 394b8bbe65d..186590e42d0 +--- a/server/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java ++++ b/server/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java +@@ -153,27 +153,7 @@ public class MoreLikeThisQuery extends Query { + handleUnlike(mlt, this.unlikeText, this.unlikeFields); + } + +- return createQuery(mlt); +- } +- +- private Query createQuery(XMoreLikeThis mlt) throws IOException { +- BooleanQuery.Builder bqBuilder = new BooleanQuery.Builder(); +- if (this.likeFields != null) { +- Query mltQuery = mlt.like(this.likeFields); +- mltQuery = Queries.applyMinimumShouldMatch((BooleanQuery) mltQuery, minimumShouldMatch); +- bqBuilder.add(mltQuery, BooleanClause.Occur.SHOULD); +- } +- if (this.likeText != null) { +- Reader[] readers = new Reader[likeText.length]; +- for (int i = 0; i < readers.length; i++) { +- readers[i] = new StringReader(likeText[i]); +- } +- //LUCENE 4 UPGRADE this mapps the 3.6 behavior (only use the first field) +- Query mltQuery = mlt.like(moreLikeFields[0], readers); +- mltQuery = Queries.applyMinimumShouldMatch((BooleanQuery) mltQuery, minimumShouldMatch); +- bqBuilder.add(mltQuery, BooleanClause.Occur.SHOULD); +- } +- return bqBuilder.build(); ++ return mlt.createQueryOther(this); + } + + private void handleUnlike(XMoreLikeThis mlt, String[] unlikeText, Fields[] unlikeFields) throws IOException { +diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java +old mode 100644 +new mode 100755 +index 1010c917eca..d92f62043b6 +--- a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java ++++ b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java +@@ -1012,6 +1012,26 @@ public final class XMoreLikeThis { + return al.toArray(res); + } + ++ public Query createQueryOther(MoreLikeThisQuery moreLikeThisQuery) throws IOException { ++ BooleanQuery.Builder bqBuilder = new BooleanQuery.Builder(); ++ if (moreLikeThisQuery.getLikeFields() != null) { ++ Query mltQuery = like(moreLikeThisQuery.getLikeFields()); ++ mltQuery = Queries.applyMinimumShouldMatch((BooleanQuery) mltQuery, moreLikeThisQuery.getMinimumShouldMatch()); ++ bqBuilder.add(mltQuery, BooleanClause.Occur.SHOULD); ++ } ++ if (moreLikeThisQuery.getLikeTexts() != null) { ++ Reader[] readers = new Reader[moreLikeThisQuery.getLikeTexts().length]; ++ for (int i = 0; i < readers.length; i++) { ++ readers[i] = new StringReader(moreLikeThisQuery.getLikeTexts()[i]); ++ } ++ //LUCENE 4 UPGRADE this mapps the 3.6 behavior (only use the first field) ++ Query mltQuery = like(moreLikeThisQuery.getMoreLikeFields()[0], 
readers); ++ mltQuery = Queries.applyMinimumShouldMatch((BooleanQuery) mltQuery, moreLikeThisQuery.getMinimumShouldMatch()); ++ bqBuilder.add(mltQuery, BooleanClause.Occur.SHOULD); ++ } ++ return bqBuilder.build(); ++ } ++ + /** + * PriorityQueue that orders words by score. + */ +diff --git a/server/src/main/java/org/elasticsearch/common/settings/Settings.java b/server/src/main/java/org/elasticsearch/common/settings/Settings.java +old mode 100644 +new mode 100755 +index 5789abf76d8..84d54b6362c +--- a/server/src/main/java/org/elasticsearch/common/settings/Settings.java ++++ b/server/src/main/java/org/elasticsearch/common/settings/Settings.java +@@ -462,21 +462,6 @@ public final class Settings implements ToXContentFragment { + return getGroupsInternal("", false); + } + +- /** +- * Returns a parsed version. +- */ +- public Version getAsVersion(String setting, Version defaultVersion) throws SettingsException { +- String sValue = get(setting); +- if (sValue == null) { +- return defaultVersion; +- } +- try { +- return Version.fromId(Integer.parseInt(sValue)); +- } catch (Exception e) { +- throw new SettingsException("Failed to parse version setting [" + setting + "] with value [" + sValue + "]", e); +- } +- } +- + /** + * @return The direct keys of this settings + */ +diff --git a/server/src/main/java/org/elasticsearch/common/transport/BoundTransportAddress.java b/server/src/main/java/org/elasticsearch/common/transport/BoundTransportAddress.java +old mode 100644 +new mode 100755 +index 336b9c536a1..a8546ea3c38 +--- a/server/src/main/java/org/elasticsearch/common/transport/BoundTransportAddress.java ++++ b/server/src/main/java/org/elasticsearch/common/transport/BoundTransportAddress.java +@@ -22,6 +22,8 @@ package org.elasticsearch.common.transport; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.io.stream.Streamable; ++import org.elasticsearch.common.network.NetworkAddress; ++import org.elasticsearch.node.Node; + + import java.io.IOException; + +@@ -99,4 +101,25 @@ public class BoundTransportAddress implements Streamable { + } + return builder.toString(); + } ++ ++ /** Writes a file to the logs dir containing the ports for the given transport type ++ * @param type ++ * @param node*/ ++ public void writePortsFile(String type, Node node) { ++ Path tmpPortsFile = node.getEnvironment().logsFile().resolve(type + ".ports.tmp"); ++ try (BufferedWriter writer = Files.newBufferedWriter(tmpPortsFile, Charset.forName("UTF-8"))) { ++ for (TransportAddress address : boundAddresses()) { ++ InetAddress inetAddress = InetAddress.getByName(address.getAddress()); ++ writer.write(NetworkAddress.format(new InetSocketAddress(inetAddress, address.getPort())) + "\n"); ++ } ++ } catch (IOException e) { ++ throw new RuntimeException("Failed to write ports file", e); ++ } ++ Path portsFile = node.getEnvironment().logsFile().resolve(type + ".ports"); ++ try { ++ Files.move(tmpPortsFile, portsFile, StandardCopyOption.ATOMIC_MOVE); ++ } catch (IOException e) { ++ throw new RuntimeException("Failed to rename ports file", e); ++ } ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/common/transport/TransportAddress.java b/server/src/main/java/org/elasticsearch/common/transport/TransportAddress.java +old mode 100644 +new mode 100755 +index 819eda5a4c4..8bb14e9d8eb +--- a/server/src/main/java/org/elasticsearch/common/transport/TransportAddress.java ++++ 
b/server/src/main/java/org/elasticsearch/common/transport/TransportAddress.java +@@ -19,6 +19,8 @@ + + package org.elasticsearch.common.transport; + ++import org.elasticsearch.cluster.node.DiscoveryNode; ++import org.elasticsearch.cluster.node.DiscoveryNodes; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.io.stream.Writeable; +@@ -135,4 +137,20 @@ public final class TransportAddress implements Writeable, ToXContentFragment { + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.value(toString()); + } ++ ++ /** ++ * Get a node by its address. ++ * ++ * @param discoveryNodes the discovery nodes to search ++ * @return the node bound to this address, or null if no such node exists ++ */ ++ public DiscoveryNode findByAddress(DiscoveryNodes discoveryNodes) { ++ for (ObjectCursor cursor : discoveryNodes.getNodes().values()) { ++ DiscoveryNode node = cursor.value; ++ if (node.getAddress().equals(this)) { ++ return node; ++ } ++ } ++ return null; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java b/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java +old mode 100644 +new mode 100755 +index 0b32091fb4f..20f11766d6f +--- a/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java ++++ b/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java +@@ -22,12 +22,15 @@ package org.elasticsearch.common.unit; + import org.apache.logging.log4j.LogManager; + import org.elasticsearch.ElasticsearchParseException; + import org.elasticsearch.common.Strings; ++import org.elasticsearch.common.breaker.CircuitBreaker; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.io.stream.Writeable; + import org.elasticsearch.common.logging.DeprecationLogger; + import org.elasticsearch.common.xcontent.ToXContentFragment; + import org.elasticsearch.common.xcontent.XContentBuilder; ++import org.elasticsearch.indices.breaker.BreakerSettings; ++import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; + + import java.io.IOException; + import java.util.Locale; +@@ -269,4 +272,11 @@ public class ByteSizeValue implements Writeable, Comparable, ToXC + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.value(toString()); + } ++ ++ public boolean validateTotalCircuitBreakerLimit(HierarchyCircuitBreakerService hierarchyCircuitBreakerService) { ++ BreakerSettings newParentSettings = new BreakerSettings(CircuitBreaker.PARENT, getBytes(), 1.0, ++ CircuitBreaker.Type.PARENT, null); ++ hierarchyCircuitBreakerService.validateSettings(new BreakerSettings[]{newParentSettings}); ++ return true; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/common/unit/DistanceUnit.java b/server/src/main/java/org/elasticsearch/common/unit/DistanceUnit.java +old mode 100644 +new mode 100755 +index b597725726c..db43a359375 +--- a/server/src/main/java/org/elasticsearch/common/unit/DistanceUnit.java ++++ b/server/src/main/java/org/elasticsearch/common/unit/DistanceUnit.java +@@ -19,6 +19,7 @@ + + package org.elasticsearch.common.unit; + ++import org.elasticsearch.common.geo.GeoDistance; + import org.elasticsearch.common.geo.GeoUtils; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; +@@ -210,6 +211,20 @@
public enum DistanceUnit implements Writeable { + return defaultUnit; + } + ++ /** compute the distance between two points using the selected algorithm (PLANE, ARC) ++ * @param srcLat ++ * @param srcLon ++ * @param dstLat ++ * @param dstLon ++ * @param geoDistance*/ ++ public double calculate(double srcLat, double srcLon, double dstLat, double dstLon, GeoDistance geoDistance) { ++ if (geoDistance == GeoDistance.PLANE) { ++ return convert(GeoUtils.planeDistance(srcLat, srcLon, dstLat, dstLon), ++ METERS, this); ++ } ++ return convert(GeoUtils.arcDistance(srcLat, srcLon, dstLat, dstLon), METERS, this); ++ } ++ + /** + * This class implements a value+unit tuple. + */ +diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/CountDown.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/CountDown.java +old mode 100644 +new mode 100755 +index b2a80fc68db..212206b1bbd +--- a/server/src/main/java/org/elasticsearch/common/util/concurrent/CountDown.java ++++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/CountDown.java +@@ -20,6 +20,12 @@ + package org.elasticsearch.common.util.concurrent; + + ++import org.elasticsearch.action.ActionListener; ++import org.elasticsearch.cluster.routing.ShardRouting; ++import org.elasticsearch.index.shard.ShardId; ++import org.elasticsearch.indices.flush.ShardsSyncedFlushResult; ++import org.elasticsearch.indices.flush.SyncedFlushService; ++ + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + +@@ -77,4 +83,16 @@ public final class CountDown { + assert countDown.get() >= 0; + return countDown.get() == 0; + } ++ ++ public void countDownAndSendResponseIfDone(String syncId, ++ List shards, ++ ShardId shardId, ++ int totalShards, ++ ActionListener listener, ++ Map results, SyncedFlushService syncedFlushService) { ++ if (countDown()) { ++ assert results.size() == shards.size(); ++ listener.onResponse(new ShardsSyncedFlushResult(shardId, syncId, totalShards, results)); ++ } ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +old mode 100644 +new mode 100755 +index b52bc2b199c..93bcac6a63c +--- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java ++++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +@@ -30,6 +30,8 @@ import org.elasticsearch.common.settings.Setting; + import org.elasticsearch.common.settings.Setting.Property; + import org.elasticsearch.common.settings.Settings; + import org.elasticsearch.http.HttpTransportSettings; ++import org.elasticsearch.xpack.core.security.authc.Authentication; ++import org.elasticsearch.xpack.core.security.authc.AuthenticationField; + + import java.io.Closeable; + import java.io.IOException; +@@ -406,6 +408,15 @@ public final class ThreadContext implements Closeable, Writeable { + return threadLocal.closed.get(); + } + ++ public void ensureContextDoesNotContainAuthentication(Authentication authentication) { ++ if (getTransient(AuthenticationField.AUTHENTICATION_KEY) != null) { ++ if (getHeader(AuthenticationField.AUTHENTICATION_KEY) == null) { ++ throw new IllegalStateException("authentication present as a transient but not a header"); ++ } ++ throw new IllegalStateException("authentication is already present in the context"); ++ } ++ } ++ + @FunctionalInterface + public interface StoredContext extends AutoCloseable { + @Override +diff --git 
a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +old mode 100644 +new mode 100755 +index a14def8fa86..7b9b4caeca5 +--- a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java ++++ b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +@@ -128,7 +128,7 @@ public class DiscoveryModule { + discovery = new Coordinator(NODE_NAME_SETTING.get(settings), + settings, clusterSettings, + transportService, namedWriteableRegistry, allocationService, masterService, +- () -> gatewayMetaState.getPersistedState(settings, (ClusterApplierService) clusterApplier), seedHostsProvider, ++ () -> ((ClusterApplierService) clusterApplier).getPersistedState(settings, gatewayMetaState), seedHostsProvider, + clusterApplier, joinValidators, new Random(Randomness.get().nextLong())); + } else { + throw new IllegalArgumentException("Unknown discovery type [" + discoveryType + "]"); +diff --git a/server/src/main/java/org/elasticsearch/env/Environment.java b/server/src/main/java/org/elasticsearch/env/Environment.java +old mode 100644 +new mode 100755 +index 57ed2f98643..67a76b7943b +--- a/server/src/main/java/org/elasticsearch/env/Environment.java ++++ b/server/src/main/java/org/elasticsearch/env/Environment.java +@@ -26,6 +26,7 @@ import org.elasticsearch.common.io.PathUtils; + import org.elasticsearch.common.settings.Setting; + import org.elasticsearch.common.settings.Setting.Property; + import org.elasticsearch.common.settings.Settings; ++import org.elasticsearch.indices.analysis.HunspellService; + + import java.io.FileNotFoundException; + import java.io.IOException; +@@ -328,4 +329,8 @@ public class Environment { + private static void assertEquals(Object actual, Object expected, String name) { + assert Objects.deepEquals(actual, expected) : "actual " + name + " [" + actual + "] is different than [ " + expected + "]"; + } ++ ++ public Path resolveHunspellDirectory(HunspellService hunspellService) { ++ return configFile().resolve("hunspell"); ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +old mode 100644 +new mode 100755 +index 4d19dd66732..335e08ca46c +--- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java ++++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +@@ -458,7 +458,7 @@ public final class NodeEnvironment implements Closeable { + */ + public void deleteShardDirectoryUnderLock(ShardLock lock, IndexSettings indexSettings) throws IOException { + final ShardId shardId = lock.getShardId(); +- assert isShardLocked(shardId) : "shard " + shardId + " is not locked"; ++ assert shardId.isShardLocked(this) : "shard " + shardId + " is not locked"; + final Path[] paths = availableShardPaths(shardId); + logger.trace("acquiring locks for {}, paths: [{}]", shardId, paths); + acquireFSLockForPaths(indexSettings, paths); +@@ -500,15 +500,6 @@ public final class NodeEnvironment implements Closeable { + return existingPaths.size() == 0; + } + +- private boolean isShardLocked(ShardId id) { +- try { +- shardLock(id, "checking if shard is locked").close(); +- return false; +- } catch (ShardLockObtainFailedException ex) { +- return true; +- } +- } +- + /** + * Deletes an indexes data directory recursively iff all of the indexes + * shards locks were successfully acquired. 
If any of the indexes shard directories can't be locked +diff --git a/server/src/main/java/org/elasticsearch/env/NodeRepurposeCommand.java b/server/src/main/java/org/elasticsearch/env/NodeRepurposeCommand.java +old mode 100644 +new mode 100755 +index 20b5552dfa8..2fcee847703 +--- a/server/src/main/java/org/elasticsearch/env/NodeRepurposeCommand.java ++++ b/server/src/main/java/org/elasticsearch/env/NodeRepurposeCommand.java +@@ -103,7 +103,7 @@ public class NodeRepurposeCommand extends ElasticsearchNodeCommand { + outputVerboseInformation(terminal, nodePaths, indexPaths, indexUUIDs); + + terminal.println(noMasterMessage(indexUUIDs.size(), shardDataPaths.size(), indexMetaDataPaths.size())); +- outputHowToSeeVerboseInformation(terminal); ++ terminal.outputHowToSeeVerboseInformation(this); + + final Manifest manifest = loadManifest(terminal, dataPaths); + +@@ -134,7 +134,7 @@ public class NodeRepurposeCommand extends ElasticsearchNodeCommand { + outputVerboseInformation(terminal, nodePaths, shardDataPaths, indexUUIDs); + + terminal.println(shardMessage(shardDataPaths.size(), indexUUIDs.size())); +- outputHowToSeeVerboseInformation(terminal); ++ terminal.outputHowToSeeVerboseInformation(this); + + terminal.println("Node is being re-purposed as master and no-data. Clean-up of shard data will be performed."); + confirm(terminal, "Do you want to proceed?"); +@@ -154,11 +154,6 @@ public class NodeRepurposeCommand extends ElasticsearchNodeCommand { + } + } + +- private void outputHowToSeeVerboseInformation(Terminal terminal) { +- if (terminal.isPrintable(Terminal.Verbosity.VERBOSE) == false) { +- terminal.println("Use -v to see list of paths and indices affected"); +- } +- } + private String toIndexName(NodeEnvironment.NodePath[] nodePaths, String uuid) { + Path[] indexPaths = new Path[nodePaths.length]; + for (int i = 0; i < nodePaths.length; i++) { +diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +old mode 100644 +new mode 100755 +index 91bcb68370e..9b8e657d81b +--- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java ++++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +@@ -29,8 +29,6 @@ import org.elasticsearch.cluster.ClusterName; + import org.elasticsearch.cluster.ClusterState; + import org.elasticsearch.cluster.ClusterStateApplier; + import org.elasticsearch.cluster.coordination.CoordinationState; +-import org.elasticsearch.cluster.coordination.CoordinationState.PersistedState; +-import org.elasticsearch.cluster.coordination.InMemoryPersistedState; + import org.elasticsearch.cluster.metadata.IndexMetaData; + import org.elasticsearch.cluster.metadata.Manifest; + import org.elasticsearch.cluster.metadata.MetaData; +@@ -38,7 +36,6 @@ import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; + import org.elasticsearch.cluster.node.DiscoveryNode; + import org.elasticsearch.cluster.routing.RoutingNode; + import org.elasticsearch.cluster.routing.ShardRouting; +-import org.elasticsearch.cluster.service.ClusterApplierService; + import org.elasticsearch.cluster.service.ClusterService; + import org.elasticsearch.common.collect.ImmutableOpenMap; + import org.elasticsearch.common.collect.Tuple; +@@ -97,17 +94,6 @@ public class GatewayMetaState implements ClusterStateApplier, CoordinationState. 
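Editorial sketch (not part of the patch): the new assertion above, shardId.isShardLocked(this), implies that the lock check removed from NodeEnvironment now lives on ShardId with a NodeEnvironment parameter; that counterpart is not shown in this excerpt. The same logic, expressed here as a standalone helper grounded in the removed body, under the assumption of a verbatim move.

import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLockObtainFailedException;
import org.elasticsearch.index.shard.ShardId;

class ShardLockCheckSketch {
    // acquiring (and immediately releasing) the lock succeeds only if nobody else holds it
    static boolean isShardLocked(ShardId shardId, NodeEnvironment nodeEnvironment) {
        try {
            nodeEnvironment.shardLock(shardId, "checking if shard is locked").close();
            return false;
        } catch (ShardLockObtainFailedException ex) {
            return true;
        }
    }
}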
+ incrementalWrite = false; + } + +- public PersistedState getPersistedState(Settings settings, ClusterApplierService clusterApplierService) { +- applyClusterStateUpdaters(); +- if (DiscoveryNode.isMasterNode(settings) == false) { +- // use Zen1 way of writing cluster state for non-master-eligible nodes +- // this avoids concurrent manipulating of IndexMetadata with IndicesStore +- clusterApplierService.addLowPriorityApplier(this); +- return new InMemoryPersistedState(getCurrentTerm(), getLastAcceptedState()); +- } +- return this; +- } +- + private void initializeClusterState(ClusterName clusterName) throws IOException { + long startNS = System.nanoTime(); + Tuple manifestAndMetaData = metaStateService.loadFullState(); +diff --git a/server/src/main/java/org/elasticsearch/index/Index.java b/server/src/main/java/org/elasticsearch/index/Index.java +old mode 100644 +new mode 100755 +index 9b6f4dbd98a..c6a004bfc0b +--- a/server/src/main/java/org/elasticsearch/index/Index.java ++++ b/server/src/main/java/org/elasticsearch/index/Index.java +@@ -20,6 +20,8 @@ + package org.elasticsearch.index; + + import org.elasticsearch.cluster.ClusterState; ++import org.elasticsearch.cluster.routing.IndexRoutingTable; ++import org.elasticsearch.cluster.routing.RoutingTable; + import org.elasticsearch.common.ParseField; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; +@@ -119,6 +121,11 @@ public class Index implements Writeable, ToXContentObject { + return INDEX_PARSER.parse(parser, null).build(); + } + ++ public boolean hasIndex(RoutingTable routingTable) { ++ IndexRoutingTable indexRouting = routingTable.index(getName()); ++ return indexRouting != null && indexRouting.getIndex().equals(this); ++ } ++ + /** + * Builder for Index objects. Used by ObjectParser instances only. 
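Editorial illustration (not part of the patch): with hasIndex moved onto Index, callers now hand the routing table to the index rather than the other way around. A minimal usage sketch, assuming a cluster state is already in scope.

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.index.Index;

class HasIndexSketch {
    // true only when the routing table has an entry whose Index equals this one (same name and UUID)
    static boolean indexIsRouted(Index index, ClusterState clusterState) {
        return index.hasIndex(clusterState.routingTable());
    }
}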
+ */ +diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java +old mode 100644 +new mode 100755 +index ca0f34803cc..0c01dadec63 +--- a/server/src/main/java/org/elasticsearch/index/IndexModule.java ++++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java +@@ -441,7 +441,7 @@ public final class IndexModule { + */ + public MapperService newIndexMapperService(NamedXContentRegistry xContentRegistry, MapperRegistry mapperRegistry, + ScriptService scriptService) throws IOException { +- return new MapperService(indexSettings, analysisRegistry.build(indexSettings), xContentRegistry, ++ return new MapperService(indexSettings, indexSettings.build(analysisRegistry), xContentRegistry, + new SimilarityService(indexSettings, scriptService, similarities), mapperRegistry, + () -> { throw new UnsupportedOperationException("no index query shard context available"); }); + } +diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java +old mode 100644 +new mode 100755 +index 2d86a2b436d..72395dfeac4 +--- a/server/src/main/java/org/elasticsearch/index/IndexService.java ++++ b/server/src/main/java/org/elasticsearch/index/IndexService.java +@@ -172,7 +172,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust + this.warmer = null; + this.indexCache = null; + } else { +- this.mapperService = new MapperService(indexSettings, registry.build(indexSettings), xContentRegistry, similarityService, ++ this.mapperService = new MapperService(indexSettings, indexSettings.build(registry), xContentRegistry, similarityService, + mapperRegistry, + // we parse all percolator queries as they would be parsed on shard 0 + () -> newQueryShardContext(0, null, System::currentTimeMillis, null)); +diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java +old mode 100644 +new mode 100755 +index d4cc38f0b95..30ca9dcfcc2 +--- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java ++++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java +@@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; + import org.elasticsearch.common.unit.ByteSizeUnit; + import org.elasticsearch.common.unit.ByteSizeValue; + import org.elasticsearch.common.unit.TimeValue; ++import org.elasticsearch.index.analysis.*; + import org.elasticsearch.index.translog.Translog; + import org.elasticsearch.ingest.IngestService; + import org.elasticsearch.node.Node; +@@ -962,4 +963,17 @@ public final class IndexSettings { + private void setSearchThrottled(boolean searchThrottled) { + this.searchThrottled = searchThrottled; + } ++ ++ /** ++ * Creates an index-level {@link IndexAnalyzers} from this registry using the given index settings ++ * @param analysisRegistry ++ */ ++ public IndexAnalyzers build(AnalysisRegistry analysisRegistry) throws IOException { ++ final Map charFilterFactories = analysisRegistry.buildCharFilterFactories(this); ++ final Map tokenizerFactories = analysisRegistry.buildTokenizerFactories(this); ++ final Map tokenFilterFactories = analysisRegistry.buildTokenFilterFactories(this); ++ final Map> analyzerFactories = analysisRegistry.buildAnalyzerFactories(this); ++ final Map> normalizerFactories = analysisRegistry.buildNormalizerFactories(this); ++ return analysisRegistry.build(this, analyzerFactories, normalizerFactories, 
tokenizerFactories, charFilterFactories, tokenFilterFactories); ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisMode.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisMode.java +old mode 100644 +new mode 100755 +index ea9e1e0c6aa..717fca166bc +--- a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisMode.java ++++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisMode.java +@@ -19,6 +19,8 @@ + + package org.elasticsearch.index.analysis; + ++import org.elasticsearch.index.mapper.MapperException; ++ + /** + * Enum representing the mode in which token filters and analyzers are allowed to operate. + * While most token filters are allowed both in index and search time analyzers, some are +@@ -79,4 +81,33 @@ public enum AnalysisMode { + * + */ + abstract AnalysisMode merge(AnalysisMode other); ++ ++ /** ++ * Checks the wrapped analyzer for the provided restricted {@link AnalysisMode} and throws ++ * an error if the analyzer is not allowed to run in that mode. The error contains more detailed information about ++ * the offending filters that caused the analyzer to not be allowed in this mode. ++ * @param namedAnalyzer ++ */ ++ public void checkAllowedInMode(NamedAnalyzer namedAnalyzer) { ++ Objects.requireNonNull(this); ++ if (namedAnalyzer.getAnalysisMode() == ALL) { ++ return; // everything allowed if this analyzer is in ALL mode ++ } ++ if (namedAnalyzer.getAnalysisMode() != this) { ++ if (namedAnalyzer.analyzer() instanceof CustomAnalyzer) { ++ TokenFilterFactory[] tokenFilters = ((CustomAnalyzer) namedAnalyzer.analyzer()).tokenFilters(); ++ List offendingFilters = new ArrayList<>(); ++ for (TokenFilterFactory tokenFilter : tokenFilters) { ++ if (tokenFilter.getAnalysisMode() != this) { ++ offendingFilters.add(tokenFilter.name()); ++ } ++ } ++ throw new MapperException("analyzer [" + namedAnalyzer.name() + "] contains filters " + offendingFilters ++ + " that are not allowed to run in " + getReadableName() + " mode."); ++ } else { ++ throw new MapperException( ++ "analyzer [" + namedAnalyzer.name() + "] contains components that are not allowed to run in " + getReadableName() + " mode."); ++ } ++ } ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +old mode 100644 +new mode 100755 +index 684d36c311f..d949e17c940 +--- a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java ++++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +@@ -45,7 +45,7 @@ import static java.util.Collections.unmodifiableMap; + + /** + * An internal registry for tokenizer, token filter, char filter and analyzer. 
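Editorial illustration (not part of the patch): after this move, an index-level IndexAnalyzers is obtained from the IndexSettings side, as the updated IndexModule and IndexService call sites above show. A minimal sketch, assuming both objects are already constructed.

import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import java.io.IOException;

class BuildAnalyzersSketch {
    static IndexAnalyzers buildFor(IndexSettings indexSettings, AnalysisRegistry registry) throws IOException {
        // was: registry.build(indexSettings)
        return indexSettings.build(registry);
    }
}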
+- * This class exists per node and allows to create per-index {@link IndexAnalyzers} via {@link #build(IndexSettings)} ++ * This class exists per node and allows to create per-index {@link IndexAnalyzers} via {@link IndexSettings#build(AnalysisRegistry)} + */ + public final class AnalysisRegistry implements Closeable { + public static final String INDEX_ANALYSIS_CHAR_FILTER = "index.analysis.char_filter"; +@@ -152,18 +152,6 @@ public final class AnalysisRegistry implements Closeable { + } + } + +- /** +- * Creates an index-level {@link IndexAnalyzers} from this registry using the given index settings +- */ +- public IndexAnalyzers build(IndexSettings indexSettings) throws IOException { +- final Map charFilterFactories = buildCharFilterFactories(indexSettings); +- final Map tokenizerFactories = buildTokenizerFactories(indexSettings); +- final Map tokenFilterFactories = buildTokenFilterFactories(indexSettings); +- final Map> analyzerFactories = buildAnalyzerFactories(indexSettings); +- final Map> normalizerFactories = buildNormalizerFactories(indexSettings); +- return build(indexSettings, analyzerFactories, normalizerFactories, tokenizerFactories, charFilterFactories, tokenFilterFactories); +- } +- + public Map buildTokenFilterFactories(IndexSettings indexSettings) throws IOException { + final Map tokenFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_FILTER); + return buildMapping(Component.FILTER, indexSettings, tokenFiltersSettings, this.tokenFilters, +@@ -441,7 +429,7 @@ public final class AnalysisRegistry implements Closeable { + if (defaultAnalyzer == null) { + throw new IllegalArgumentException("no default analyzer configured"); + } +- defaultAnalyzer.checkAllowedInMode(AnalysisMode.ALL); ++ AnalysisMode.ALL.checkAllowedInMode(defaultAnalyzer); + + if (analyzers.containsKey("default_index")) { + throw new IllegalArgumentException("setting [index.analysis.analyzer.default_index] is not supported anymore, use " + +diff --git a/server/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java b/server/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java +old mode 100644 +new mode 100755 +index 4831d88f3aa..66e99512eb3 +--- a/server/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java ++++ b/server/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java +@@ -21,7 +21,6 @@ package org.elasticsearch.index.analysis; + + import org.apache.lucene.analysis.Analyzer; + import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; +-import org.elasticsearch.index.mapper.MapperException; + + import java.util.ArrayList; + import java.util.List; +@@ -101,34 +100,6 @@ public class NamedAnalyzer extends DelegatingAnalyzerWrapper { + return super.getPositionIncrementGap(fieldName); + } + +- /** +- * Checks the wrapped analyzer for the provided restricted {@link AnalysisMode} and throws +- * an error if the analyzer is not allowed to run in that mode. The error contains more detailed information about +- * the offending filters that caused the analyzer to not be allowed in this mode. 
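Editorial illustration (not part of the patch): the analysis-mode check is now invoked on the AnalysisMode constant with the analyzer as the argument, which is the pattern used at the updated call sites in AnalysisRegistry and TypeParsers. A minimal before/after sketch with an analyzer assumed to be in scope.

import org.elasticsearch.index.analysis.AnalysisMode;
import org.elasticsearch.index.analysis.NamedAnalyzer;

class CheckModeSketch {
    static void check(NamedAnalyzer searchAnalyzer) {
        // before: searchAnalyzer.checkAllowedInMode(AnalysisMode.SEARCH_TIME);
        // throws MapperException, naming the offending token filters, if the analyzer is index-time only
        AnalysisMode.SEARCH_TIME.checkAllowedInMode(searchAnalyzer);
    }
}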
+- */ +- public void checkAllowedInMode(AnalysisMode mode) { +- Objects.requireNonNull(mode); +- if (this.getAnalysisMode() == AnalysisMode.ALL) { +- return; // everything allowed if this analyzer is in ALL mode +- } +- if (this.getAnalysisMode() != mode) { +- if (analyzer instanceof CustomAnalyzer) { +- TokenFilterFactory[] tokenFilters = ((CustomAnalyzer) analyzer).tokenFilters(); +- List offendingFilters = new ArrayList<>(); +- for (TokenFilterFactory tokenFilter : tokenFilters) { +- if (tokenFilter.getAnalysisMode() != mode) { +- offendingFilters.add(tokenFilter.name()); +- } +- } +- throw new MapperException("analyzer [" + name + "] contains filters " + offendingFilters +- + " that are not allowed to run in " + mode.getReadableName() + " mode."); +- } else { +- throw new MapperException( +- "analyzer [" + name + "] contains components that are not allowed to run in " + mode.getReadableName() + " mode."); +- } +- } +- } +- + @Override + public String toString() { + return "analyzer name[" + name + "], analyzer [" + analyzer + "], analysisMode [" + analysisMode + "]"; +diff --git a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +old mode 100644 +new mode 100755 +index b1812c40e03..b1ce4d685a9 +--- a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java ++++ b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +@@ -241,7 +241,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent + hasNested = true; + for (ObjectMapper objectMapper : docMapper.objectMappers().values()) { + if (objectMapper.nested().isNested()) { +- ObjectMapper parentObjectMapper = objectMapper.getParentObjectMapper(mapperService); ++ ObjectMapper parentObjectMapper = mapperService.getParentObjectMapper(objectMapper); + if (parentObjectMapper != null && parentObjectMapper.nested().isNested()) { + warmUp.add(parentObjectMapper.nestedTypeFilter()); + } +diff --git a/server/src/main/java/org/elasticsearch/index/engine/Segment.java b/server/src/main/java/org/elasticsearch/index/engine/Segment.java +old mode 100644 +new mode 100755 +index b1e6d09d897..a48eabd9a30 +--- a/server/src/main/java/org/elasticsearch/index/engine/Segment.java ++++ b/server/src/main/java/org/elasticsearch/index/engine/Segment.java +@@ -199,7 +199,7 @@ public class Segment implements Streamable { + if (verbose) { + writeRamTree(out, ramTree); + } +- writeSegmentSort(out, segmentSort); ++ out.writeSegmentSort(segmentSort, this); + boolean hasAttributes = attributes != null; + out.writeBoolean(hasAttributes); + if (hasAttributes) { +@@ -258,46 +258,6 @@ public class Segment implements Streamable { + return new Sort(fields); + } + +- private void writeSegmentSort(StreamOutput out, Sort sort) throws IOException { +- if (sort == null) { +- out.writeVInt(0); +- return; +- } +- out.writeVInt(sort.getSort().length); +- for (SortField field : sort.getSort()) { +- out.writeString(field.getField()); +- if (field instanceof SortedSetSortField) { +- out.writeByte((byte) 0); +- out.writeOptionalBoolean(field.getMissingValue() == null ? 
+- null : field.getMissingValue() == SortField.STRING_FIRST); +- out.writeBoolean(((SortedSetSortField) field).getSelector() == SortedSetSelector.Type.MAX); +- out.writeBoolean(field.getReverse()); +- } else if (field instanceof SortedNumericSortField) { +- switch (((SortedNumericSortField) field).getNumericType()) { +- case INT: +- out.writeByte((byte) 1); +- break; +- case FLOAT: +- out.writeByte((byte) 2); +- break; +- case DOUBLE: +- out.writeByte((byte) 3); +- break; +- case LONG: +- out.writeByte((byte) 4); +- break; +- default: +- throw new IOException("invalid index sort field:" + field); +- } +- out.writeGenericValue(field.getMissingValue()); +- out.writeBoolean(((SortedNumericSortField) field).getSelector() == SortedNumericSelector.Type.MAX); +- out.writeBoolean(field.getReverse()); +- } else { +- throw new IOException("invalid index sort field:" + field); +- } +- } +- } +- + private Accountable readRamTree(StreamInput in) throws IOException { + final String name = in.readString(); + final long bytes = in.readVLong(); +diff --git a/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java b/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java +old mode 100644 +new mode 100755 +index 462f8ce8e68..9571deae8a7 +--- a/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java ++++ b/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java +@@ -31,6 +31,9 @@ import org.elasticsearch.index.mapper.MapperService; + import org.elasticsearch.index.mapper.RoutingFieldMapper; + import org.elasticsearch.index.mapper.SourceFieldMapper; + import org.elasticsearch.index.mapper.Uid; ++import org.elasticsearch.search.fetch.FetchPhase; ++import org.elasticsearch.search.fetch.FetchPhaseExecutionException; ++import org.elasticsearch.search.internal.SearchContext; + + import java.io.IOException; + import java.nio.charset.StandardCharsets; +@@ -197,4 +200,13 @@ public class FieldsVisitor extends StoredFieldVisitor { + } + values.add(value); + } ++ ++ public void loadStoredFields(SearchContext searchContext, LeafReaderContext readerContext, int docId, FetchPhase fetchPhase) { ++ reset(); ++ try { ++ readerContext.reader().document(docId, this); ++ } catch (IOException e) { ++ throw new FetchPhaseExecutionException(searchContext, "Failed to fetch doc id [" + docId + "]", e); ++ } ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/index/get/GetResult.java b/server/src/main/java/org/elasticsearch/index/get/GetResult.java +old mode 100644 +new mode 100755 +index ffaa42ce0ad..fb5cf184056 +--- a/server/src/main/java/org/elasticsearch/index/get/GetResult.java ++++ b/server/src/main/java/org/elasticsearch/index/get/GetResult.java +@@ -382,21 +382,6 @@ public class GetResult implements Streamable, Iterable, ToXConten + return result; + } + +- private Map readFields(StreamInput in) throws IOException { +- Map fields = null; +- int size = in.readVInt(); +- if (size == 0) { +- fields = new HashMap<>(); +- } else { +- fields = new HashMap<>(size); +- for (int i = 0; i < size; i++) { +- DocumentField field = DocumentField.readDocumentField(in); +- fields.put(field.getName(), field); +- } +- } +- return fields; +- } +- + static void splitFieldsByMetadata(Map fields, Map outOther, + Map outMetadata) { + if (fields == null) { +@@ -426,10 +411,10 @@ public class GetResult implements Streamable, Iterable, ToXConten + source = null; + } + if (in.getVersion().onOrAfter(Version.V_7_3_0)) { +- documentFields = readFields(in); +- 
metaFields = readFields(in); ++ documentFields = in.readFields(this); ++ metaFields = in.readFields(this); + } else { +- Map fields = readFields(in); ++ Map fields = in.readFields(this); + documentFields = new HashMap<>(); + metaFields = new HashMap<>(); + splitFieldsByMetadata(fields, documentFields, metaFields); +diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +old mode 100644 +new mode 100755 +index f77fc072c70..f182868a481 +--- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java ++++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +@@ -21,7 +21,6 @@ package org.elasticsearch.index.get; + + import org.apache.lucene.index.Term; + import org.elasticsearch.ElasticsearchException; +-import org.elasticsearch.common.Nullable; + import org.elasticsearch.common.bytes.BytesReference; + import org.elasticsearch.common.collect.Tuple; + import org.elasticsearch.common.document.DocumentField; +@@ -44,7 +43,6 @@ import org.elasticsearch.index.mapper.IdFieldMapper; + import org.elasticsearch.index.mapper.Mapper; + import org.elasticsearch.index.mapper.MapperService; + import org.elasticsearch.index.mapper.RoutingFieldMapper; +-import org.elasticsearch.index.mapper.SourceFieldMapper; + import org.elasticsearch.index.mapper.Uid; + import org.elasticsearch.index.shard.AbstractIndexShardComponent; + import org.elasticsearch.index.shard.IndexShard; +@@ -124,7 +122,7 @@ public final class ShardGetService extends AbstractIndexShardComponent { + currentMetric.inc(); + try { + long now = System.nanoTime(); +- fetchSourceContext = normalizeFetchSourceContent(fetchSourceContext, fields); ++ fetchSourceContext = fetchSourceContext.normalizeFetchSourceContent(fields, this); + GetResult getResult = innerGetLoadFromStoredFields(type, id, fields, fetchSourceContext, engineGetResult, mapperService); + if (getResult.isExists()) { + existsMetric.inc(System.nanoTime() - now); +@@ -137,27 +135,9 @@ public final class ShardGetService extends AbstractIndexShardComponent { + } + } + +- /** +- * decides what needs to be done based on the request input and always returns a valid non-null FetchSourceContext +- */ +- private FetchSourceContext normalizeFetchSourceContent(@Nullable FetchSourceContext context, @Nullable String[] gFields) { +- if (context != null) { +- return context; +- } +- if (gFields == null) { +- return FetchSourceContext.FETCH_SOURCE; +- } +- for (String field : gFields) { +- if (SourceFieldMapper.NAME.equals(field)) { +- return FetchSourceContext.FETCH_SOURCE; +- } +- } +- return FetchSourceContext.DO_NOT_FETCH_SOURCE; +- } +- + private GetResult innerGet(String type, String id, String[] gFields, boolean realtime, long version, VersionType versionType, + long ifSeqNo, long ifPrimaryTerm, FetchSourceContext fetchSourceContext, boolean readFromTranslog) { +- fetchSourceContext = normalizeFetchSourceContent(fetchSourceContext, gFields); ++ fetchSourceContext = fetchSourceContext.normalizeFetchSourceContent(gFields, this); + if (type == null || type.equals("_all")) { + DocumentMapper mapper = mapperService.documentMapper(); + type = mapper == null ? 
null : mapper.type(); +diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +old mode 100644 +new mode 100755 +index 044e65c7ec6..a08f1d51511 +--- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java ++++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +@@ -40,7 +40,6 @@ import org.elasticsearch.index.IndexSettings; + import org.elasticsearch.index.analysis.IndexAnalyzers; + import org.elasticsearch.index.mapper.MetadataFieldMapper.TypeParser; + import org.elasticsearch.index.query.QueryShardContext; +-import org.elasticsearch.search.internal.SearchContext; + + import java.io.IOException; + import java.util.ArrayList; +@@ -282,41 +281,6 @@ public class DocumentMapper implements ToXContentFragment { + return parsedDoc; + } + +- /** +- * Returns the best nested {@link ObjectMapper} instances that is in the scope of the specified nested docId. +- */ +- public ObjectMapper findNestedObjectMapper(int nestedDocId, SearchContext sc, LeafReaderContext context) throws IOException { +- ObjectMapper nestedObjectMapper = null; +- for (ObjectMapper objectMapper : objectMappers().values()) { +- if (!objectMapper.nested().isNested()) { +- continue; +- } +- +- Query filter = objectMapper.nestedTypeFilter(); +- if (filter == null) { +- continue; +- } +- // We can pass down 'null' as acceptedDocs, because nestedDocId is a doc to be fetched and +- // therefor is guaranteed to be a live doc. +- final Weight nestedWeight = filter.createWeight(sc.searcher(), ScoreMode.COMPLETE_NO_SCORES, 1f); +- Scorer scorer = nestedWeight.scorer(context); +- if (scorer == null) { +- continue; +- } +- +- if (scorer.iterator().advance(nestedDocId) == nestedDocId) { +- if (nestedObjectMapper == null) { +- nestedObjectMapper = objectMapper; +- } else { +- if (nestedObjectMapper.fullPath().length() < objectMapper.fullPath().length()) { +- nestedObjectMapper = objectMapper; +- } +- } +- } +- } +- return nestedObjectMapper; +- } +- + public DocumentMapper merge(Mapping mapping) { + Mapping merged = this.mapping.merge(mapping); + return new DocumentMapper(mapperService, merged); +diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +old mode 100644 +new mode 100755 +index 9f5293e7da9..10c3a4ab6ca +--- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java ++++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +@@ -638,32 +638,32 @@ final class DocumentParser { + MappedFieldType fieldType, String currentFieldName) { + Mapper.Builder builder = null; + if (fieldType instanceof TextFieldType) { +- builder = context.root().findTemplateBuilder(context, currentFieldName, "text", XContentFieldType.STRING); ++ builder = context.findTemplateBuilder(currentFieldName, "text", XContentFieldType.STRING, context.root()); + if (builder == null) { + builder = new TextFieldMapper.Builder(currentFieldName) + .addMultiField(new KeywordFieldMapper.Builder("keyword").ignoreAbove(256)); + } + } else if (fieldType instanceof KeywordFieldType) { +- builder = context.root().findTemplateBuilder(context, currentFieldName, "keyword", XContentFieldType.STRING); ++ builder = context.findTemplateBuilder(currentFieldName, "keyword", XContentFieldType.STRING, context.root()); + } else { + switch (fieldType.typeName()) { + case DateFieldMapper.CONTENT_TYPE: + builder = 
context.root().findTemplateBuilder(context, currentFieldName, XContentFieldType.DATE); + break; + case "long": +- builder = context.root().findTemplateBuilder(context, currentFieldName, "long", XContentFieldType.LONG); ++ builder = context.findTemplateBuilder(currentFieldName, "long", XContentFieldType.LONG, context.root()); + break; + case "double": +- builder = context.root().findTemplateBuilder(context, currentFieldName, "double", XContentFieldType.DOUBLE); ++ builder = context.findTemplateBuilder(currentFieldName, "double", XContentFieldType.DOUBLE, context.root()); + break; + case "integer": +- builder = context.root().findTemplateBuilder(context, currentFieldName, "integer", XContentFieldType.LONG); ++ builder = context.findTemplateBuilder(currentFieldName, "integer", XContentFieldType.LONG, context.root()); + break; + case "float": +- builder = context.root().findTemplateBuilder(context, currentFieldName, "float", XContentFieldType.DOUBLE); ++ builder = context.findTemplateBuilder(currentFieldName, "float", XContentFieldType.DOUBLE, context.root()); + break; + case BooleanFieldMapper.CONTENT_TYPE: +- builder = context.root().findTemplateBuilder(context, currentFieldName, "boolean", XContentFieldType.BOOLEAN); ++ builder = context.findTemplateBuilder(currentFieldName, "boolean", XContentFieldType.BOOLEAN, context.root()); + break; + default: + break; +diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +old mode 100644 +new mode 100755 +index 5ef68970940..33f9340f156 +--- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java ++++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +@@ -395,6 +395,18 @@ public abstract class MappedFieldType extends FieldType { + + "] which is of type [" + typeName() + "]"); + } + ++ /** ++ * Gets the search analyzer for the given field, or the default if there is none present for the field ++ * TODO: remove this by moving defaults into mappers themselves ++ * @param queryShardContext ++ */ ++ public Analyzer getSearchAnalyzer(QueryShardContext queryShardContext) { ++ if (searchAnalyzer() != null) { ++ return searchAnalyzer(); ++ } ++ return queryShardContext.getMapperService().searchAnalyzer(); ++ } ++ + /** + * An enum used to describe the relation between the range of terms in a + * shard when compared with a query range +diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +old mode 100644 +new mode 100755 +index 487a6ac4789..627cff2a238 +--- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java ++++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +@@ -77,6 +77,21 @@ import static java.util.Collections.unmodifiableMap; + + public class MapperService extends AbstractIndexComponent implements Closeable { + ++ /** ++ * Returns the parent {@link ObjectMapper} instance of the specified object mapper or null if there ++ * isn't any. ++ * @param objectMapper ++ */ ++ public ObjectMapper getParentObjectMapper(ObjectMapper objectMapper) { ++ int indexOfLastDot = objectMapper.fullPath().lastIndexOf('.'); ++ if (indexOfLastDot != -1) { ++ String parentNestObjectPath = objectMapper.fullPath().substring(0, indexOfLastDot); ++ return getObjectMapper(parentNestObjectPath); ++ } else { ++ return null; ++ } ++ } ++ + /** + * The reason why a mapping is being merged. 
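Editorial illustration (not part of the patch): a minimal sketch of the two relocated lookups added above, assuming the mapper service, object mapper, field type, and query shard context are already in scope.

import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.query.QueryShardContext;

class MapperLookupSketch {
    static void lookups(MapperService mapperService, ObjectMapper objectMapper,
                        MappedFieldType fieldType, QueryShardContext context) {
        // null when the object mapper's full path has no dot, i.e. it is top-level
        ObjectMapper parent = mapperService.getParentObjectMapper(objectMapper);
        // falls back to the mapper service's default search analyzer when the field defines none
        Analyzer searchAnalyzer = fieldType.getSearchAnalyzer(context);
    }
}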
+ */ +diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +old mode 100644 +new mode 100755 +index fe60e1b62d9..93239e03df8 +--- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java ++++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +@@ -403,27 +403,13 @@ public class ObjectMapper extends Mapper implements Cloneable { + return dynamic; + } + +- /** +- * Returns the parent {@link ObjectMapper} instance of the specified object mapper or null if there +- * isn't any. +- */ +- public ObjectMapper getParentObjectMapper(MapperService mapperService) { +- int indexOfLastDot = fullPath().lastIndexOf('.'); +- if (indexOfLastDot != -1) { +- String parentNestObjectPath = fullPath().substring(0, indexOfLastDot); +- return mapperService.getObjectMapper(parentNestObjectPath); +- } else { +- return null; +- } +- } +- + /** + * Returns whether all parent objects fields are nested too. + */ + public boolean parentObjectMapperAreNested(MapperService mapperService) { +- for (ObjectMapper parent = getParentObjectMapper(mapperService); ++ for (ObjectMapper parent = mapperService.getParentObjectMapper(this); + parent != null; +- parent = parent.getParentObjectMapper(mapperService)) { ++ parent = mapperService.getParentObjectMapper(parent)) { + + if (parent.nested().isNested() == false) { + return false; +diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ParseContext.java b/server/src/main/java/org/elasticsearch/index/mapper/ParseContext.java +old mode 100644 +new mode 100755 +index 4cfd5be2afe..17798c4b282 +--- a/server/src/main/java/org/elasticsearch/index/mapper/ParseContext.java ++++ b/server/src/main/java/org/elasticsearch/index/mapper/ParseContext.java +@@ -39,6 +39,28 @@ import java.util.Set; + + public abstract class ParseContext implements Iterable{ + ++ /** ++ * Find a template. Returns {@code null} if no template could be found. ++ * @param name the field name ++ * @param dynamicType the field type to give the field if the template does not define one ++ * @param matchType the type of the field in the json document or null if unknown ++ * @param rootObjectMapper ++ * @return a mapper builder, or null if there is no template for such a field ++ */ ++ public Mapper.Builder findTemplateBuilder(String name, String dynamicType, DynamicTemplate.XContentFieldType matchType, RootObjectMapper rootObjectMapper) { ++ DynamicTemplate dynamicTemplate = rootObjectMapper.findTemplate(path(), name, matchType); ++ if (dynamicTemplate == null) { ++ return null; ++ } ++ Mapper.TypeParser.ParserContext parserContext = docMapperParser().parserContext(name); ++ String mappingType = dynamicTemplate.mappingType(dynamicType); ++ Mapper.TypeParser typeParser = parserContext.typeParser(mappingType); ++ if (typeParser == null) { ++ throw new MapperParsingException("failed to find type parsed [" + mappingType + "] for [" + name + "]"); ++ } ++ return typeParser.parse(name, dynamicTemplate.mappingForName(name, dynamicType), parserContext); ++ } ++ + /** Fork of {@link org.apache.lucene.document.Document} with additional functionality. 
*/ + public static class Document implements Iterable { + +diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +old mode 100644 +new mode 100755 +index 89b1810bf39..1d9d16dc800 +--- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java ++++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +@@ -232,28 +232,7 @@ public class RootObjectMapper extends ObjectMapper { + } + + public Mapper.Builder findTemplateBuilder(ParseContext context, String name, XContentFieldType matchType) { +- return findTemplateBuilder(context, name, matchType.defaultMappingType(), matchType); +- } +- +- /** +- * Find a template. Returns {@code null} if no template could be found. +- * @param name the field name +- * @param dynamicType the field type to give the field if the template does not define one +- * @param matchType the type of the field in the json document or null if unknown +- * @return a mapper builder, or null if there is no template for such a field +- */ +- public Mapper.Builder findTemplateBuilder(ParseContext context, String name, String dynamicType, XContentFieldType matchType) { +- DynamicTemplate dynamicTemplate = findTemplate(context.path(), name, matchType); +- if (dynamicTemplate == null) { +- return null; +- } +- Mapper.TypeParser.ParserContext parserContext = context.docMapperParser().parserContext(name); +- String mappingType = dynamicTemplate.mappingType(dynamicType); +- Mapper.TypeParser typeParser = parserContext.typeParser(mappingType); +- if (typeParser == null) { +- throw new MapperParsingException("failed to find type parsed [" + mappingType + "] for [" + name + "]"); +- } +- return typeParser.parse(name, dynamicTemplate.mappingForName(name, dynamicType), parserContext); ++ return context.findTemplateBuilder(name, matchType.defaultMappingType(), matchType, this); + } + + public DynamicTemplate findTemplate(ContentPath path, String name, XContentFieldType matchType) { +diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +old mode 100644 +new mode 100755 +index 9848a23cac1..2b5ad371c2b +--- a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java ++++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +@@ -84,7 +84,7 @@ public class TypeParsers { + if (analyzer == null) { + throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]"); + } +- analyzer.checkAllowedInMode(AnalysisMode.SEARCH_TIME); ++ AnalysisMode.SEARCH_TIME.checkAllowedInMode(analyzer); + searchAnalyzer = analyzer; + iterator.remove(); + } else if (propName.equals("search_quote_analyzer")) { +@@ -92,7 +92,7 @@ public class TypeParsers { + if (analyzer == null) { + throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]"); + } +- analyzer.checkAllowedInMode(AnalysisMode.SEARCH_TIME); ++ AnalysisMode.SEARCH_TIME.checkAllowedInMode(analyzer); + searchQuoteAnalyzer = analyzer; + iterator.remove(); + } +@@ -102,16 +102,16 @@ public class TypeParsers { + { + if (indexAnalyzer != null) { + if (searchAnalyzer == null) { +- indexAnalyzer.checkAllowedInMode(AnalysisMode.ALL); ++ AnalysisMode.ALL.checkAllowedInMode(indexAnalyzer); + } else { +- indexAnalyzer.checkAllowedInMode(AnalysisMode.INDEX_TIME); ++ 
AnalysisMode.INDEX_TIME.checkAllowedInMode(indexAnalyzer); + } + } + if (searchAnalyzer != null) { +- searchAnalyzer.checkAllowedInMode(AnalysisMode.SEARCH_TIME); ++ AnalysisMode.SEARCH_TIME.checkAllowedInMode(searchAnalyzer); + } + if (searchQuoteAnalyzer != null) { +- searchQuoteAnalyzer.checkAllowedInMode(AnalysisMode.SEARCH_TIME); ++ AnalysisMode.SEARCH_TIME.checkAllowedInMode(searchQuoteAnalyzer); + } + } + +diff --git a/server/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java +old mode 100644 +new mode 100755 +index d646dc4bb4b..391430bfa66 +--- a/server/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java ++++ b/server/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java +@@ -349,7 +349,7 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder + resize(n); + for (int i = 0; i < n; i++) { + GeoPoint other = geoPointValues.nextValue(); +- double distance = distFunction.calculate( +- origin.lat(), origin.lon(), other.lat(), other.lon(), DistanceUnit.METERS); ++ double distance = DistanceUnit.METERS.calculate( ++ origin.lat(), origin.lon(), other.lat(), other.lon(), distFunction); + values[i] = Math.max(0.0d, distance - offset); + } + sort(); +diff --git a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java +old mode 100644 +new mode 100755 +index 7fe60db2ddd..6181ef23ea3 +--- a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java ++++ b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java +@@ -21,6 +21,7 @@ package org.elasticsearch.index.reindex; + + import org.elasticsearch.ElasticsearchException; + import org.elasticsearch.ElasticsearchParseException; ++import org.elasticsearch.ExceptionsHelper; + import org.elasticsearch.action.ActionResponse; + import org.elasticsearch.action.bulk.BulkItemResponse.Failure; + import org.elasticsearch.common.xcontent.ObjectParser; +@@ -307,4 +308,27 @@ public class BulkByScrollResponse extends ActionResponse implements ToXContentFr + builder.append(",search_failures=").append(getSearchFailures().subList(0, min(3, getSearchFailures().size()))); + return builder.append(']').toString(); + } ++ ++ public RestStatus getStatusOther(BulkIndexByScrollResponseContentListener bulkIndexByScrollResponseContentListener) { ++ /* ++ * Return the highest numbered rest status under the assumption that higher numbered statuses are "more error" and thus more ++ * interesting to the user. 
++ */ ++ RestStatus status = RestStatus.OK; ++ if (isTimedOut()) { ++ status = RestStatus.REQUEST_TIMEOUT; ++ } ++ for (Failure failure : getBulkFailures()) { ++ if (failure.getStatus().getStatus() > status.getStatus()) { ++ status = failure.getStatus(); ++ } ++ } ++ for (SearchFailure failure: getSearchFailures()) { ++ RestStatus failureStatus = ExceptionsHelper.status(failure.getReason()); ++ if (failureStatus.getStatus() > status.getStatus()) { ++ status = failureStatus; ++ } ++ } ++ return status; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java +old mode 100644 +new mode 100755 +index 227ddd48977..6b297ec3437 +--- a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java ++++ b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java +@@ -118,14 +118,6 @@ public class DeleteByQueryRequest extends AbstractBulkByScrollRequest> listener) { + ActionListener.completeWith(listener, () -> { +- maybeSyncTranslog(indexShard); ++ indexShard.maybeSyncTranslog(this); + return new PrimaryResult<>(request, new ReplicationResponse()); + }); + } + + @Override + protected ReplicaResult shardOperationOnReplica(final Request request, final IndexShard indexShard) throws Exception { +- maybeSyncTranslog(indexShard); ++ indexShard.maybeSyncTranslog(this); + return new ReplicaResult(); + } + +- private void maybeSyncTranslog(final IndexShard indexShard) throws IOException { +- if (indexShard.getTranslogDurability() == Translog.Durability.REQUEST && +- indexShard.getLastSyncedGlobalCheckpoint() < indexShard.getGlobalCheckpoint()) { +- indexShard.sync(); +- } +- } +- + public static final class Request extends ReplicationRequest { + + private Request(StreamInput in) throws IOException { +diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +old mode 100644 +new mode 100755 +index fdd95614756..275401dd852 +--- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java ++++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +@@ -109,13 +109,7 @@ import org.elasticsearch.index.recovery.RecoveryStats; + import org.elasticsearch.index.refresh.RefreshStats; + import org.elasticsearch.index.search.stats.SearchStats; + import org.elasticsearch.index.search.stats.ShardSearchStats; +-import org.elasticsearch.index.seqno.ReplicationTracker; +-import org.elasticsearch.index.seqno.RetentionLease; +-import org.elasticsearch.index.seqno.RetentionLeaseStats; +-import org.elasticsearch.index.seqno.RetentionLeaseSyncer; +-import org.elasticsearch.index.seqno.RetentionLeases; +-import org.elasticsearch.index.seqno.SeqNoStats; +-import org.elasticsearch.index.seqno.SequenceNumbers; ++import org.elasticsearch.index.seqno.*; + import org.elasticsearch.index.shard.PrimaryReplicaSyncer.ResyncTask; + import org.elasticsearch.index.similarity.SimilarityService; + import org.elasticsearch.index.store.Store; +@@ -2415,6 +2409,13 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl + return replicationTracker.isRelocated(); + } + ++ public void maybeSyncTranslog(GlobalCheckpointSyncAction globalCheckpointSyncAction) throws IOException { ++ if (getTranslogDurability() == Translog.Durability.REQUEST && ++ getLastSyncedGlobalCheckpoint() < getGlobalCheckpoint()) { ++ sync(); ++ } ++ } ++ + class 
ShardEventListener implements Engine.EventListener { + private final CopyOnWriteArrayList> delegates = new CopyOnWriteArrayList<>(); + +diff --git a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java +old mode 100644 +new mode 100755 +index 16db596515b..d8944e1605e +--- a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java ++++ b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java +@@ -458,7 +458,7 @@ public class RemoveCorruptedShardDataCommand extends EnvironmentAwareCommand { + IndexMetaData.FORMAT.loadLatestState(logger, namedXContentRegistry, + shardPath.getDataPath().getParent()); + +- final Path nodePath = getNodePath(shardPath); ++ final Path nodePath = shardPath.getNodePath(this); + final NodeMetaData nodeMetaData = + NodeMetaData.FORMAT.loadLatestState(logger, namedXContentRegistry, nodePath); + +@@ -482,14 +482,6 @@ public class RemoveCorruptedShardDataCommand extends EnvironmentAwareCommand { + terminal.println(""); + } + +- private Path getNodePath(ShardPath shardPath) { +- final Path nodePath = shardPath.getDataPath().getParent().getParent().getParent(); +- if (Files.exists(nodePath) == false || Files.exists(nodePath.resolve(MetaDataStateFormat.STATE_DIR_NAME)) == false) { +- throw new ElasticsearchException("Unable to resolve node path for " + shardPath); +- } +- return nodePath; +- } +- + public enum CleanStatus { + CLEAN("clean"), + CLEAN_WITH_CORRUPTED_MARKER("marked corrupted, but no corruption detected"), +diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardId.java b/server/src/main/java/org/elasticsearch/index/shard/ShardId.java +old mode 100644 +new mode 100755 +index e3becbef7dd..a384fc900fb +--- a/server/src/main/java/org/elasticsearch/index/shard/ShardId.java ++++ b/server/src/main/java/org/elasticsearch/index/shard/ShardId.java +@@ -25,6 +25,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.io.stream.Writeable; + import org.elasticsearch.common.xcontent.ToXContentFragment; + import org.elasticsearch.common.xcontent.XContentBuilder; ++import org.elasticsearch.env.NodeEnvironment; ++import org.elasticsearch.env.ShardLockObtainFailedException; + import org.elasticsearch.index.Index; + + import java.io.IOException; +@@ -132,4 +134,13 @@ public class ShardId implements Comparable, ToXContentFragment, Writeab + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.value(toString()); + } ++ ++ public boolean isShardLocked(NodeEnvironment nodeEnvironment) { ++ try { ++ nodeEnvironment.shardLock(this, "checking if shard is locked").close(); ++ return false; ++ } catch (ShardLockObtainFailedException ex) { ++ return true; ++ } ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardPath.java b/server/src/main/java/org/elasticsearch/index/shard/ShardPath.java +old mode 100644 +new mode 100755 +index 32d38d98034..d468f50d89f +--- a/server/src/main/java/org/elasticsearch/index/shard/ShardPath.java ++++ b/server/src/main/java/org/elasticsearch/index/shard/ShardPath.java +@@ -19,11 +19,13 @@ + package org.elasticsearch.index.shard; + + import org.apache.logging.log4j.Logger; ++import org.elasticsearch.ElasticsearchException; + import org.elasticsearch.core.internal.io.IOUtils; + import org.elasticsearch.cluster.metadata.IndexMetaData; + import 
org.elasticsearch.common.xcontent.NamedXContentRegistry; + import org.elasticsearch.env.NodeEnvironment; + import org.elasticsearch.env.ShardLock; ++import org.elasticsearch.gateway.MetaDataStateFormat; + import org.elasticsearch.index.IndexSettings; + + import java.io.IOException; +@@ -307,4 +309,12 @@ public final class ShardPath { + ", shard=" + shardId + + '}'; + } ++ ++ public Path getNodePath(RemoveCorruptedShardDataCommand removeCorruptedShardDataCommand) { ++ final Path nodePath = getDataPath().getParent().getParent().getParent(); ++ if (Files.exists(nodePath) == false || Files.exists(nodePath.resolve(MetaDataStateFormat.STATE_DIR_NAME)) == false) { ++ throw new ElasticsearchException("Unable to resolve node path for " + this); ++ } ++ return nodePath; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java +old mode 100644 +new mode 100755 +index 5f1f7d23a8c..1104450550e +--- a/server/src/main/java/org/elasticsearch/index/store/Store.java ++++ b/server/src/main/java/org/elasticsearch/index/store/Store.java +@@ -70,7 +70,6 @@ import org.elasticsearch.common.unit.TimeValue; + import org.elasticsearch.common.util.concurrent.AbstractRefCounted; + import org.elasticsearch.common.util.concurrent.RefCounted; + import org.elasticsearch.common.util.iterable.Iterables; +-import org.elasticsearch.core.internal.io.IOUtils; + import org.elasticsearch.env.NodeEnvironment; + import org.elasticsearch.env.ShardLock; + import org.elasticsearch.env.ShardLockObtainFailedException; +@@ -477,28 +476,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref + } + } + +- /** +- * The returned IndexOutput validates the files checksum. +- *
<p>
+- * Note: Checksums are calculated by default since version 4.8.0. This method only adds the +- * verification against the checksum in the given metadata and does not add any significant overhead. +- */ +- public IndexOutput createVerifyingOutput(String fileName, final StoreFileMetaData metadata, +- final IOContext context) throws IOException { +- IndexOutput output = directory().createOutput(fileName, context); +- boolean success = false; +- try { +- assert metadata.writtenBy() != null; +- output = new LuceneVerifyingIndexOutput(metadata, output); +- success = true; +- } finally { +- if (success == false) { +- IOUtils.closeWhileHandlingException(output); +- } +- } +- return output; +- } +- + public static void verify(IndexOutput output) throws IOException { + if (output instanceof VerifyingIndexOutput) { + ((VerifyingIndexOutput) output).verify(); +diff --git a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java +old mode 100644 +new mode 100755 +index 59ad749f638..4968b922205 +--- a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java ++++ b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java +@@ -24,6 +24,7 @@ import org.apache.lucene.util.Version; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.io.stream.Writeable; ++import org.elasticsearch.core.internal.io.IOUtils; + + import java.io.IOException; + import java.text.ParseException; +@@ -130,4 +131,29 @@ public class StoreFileMetaData implements Writeable { + public BytesRef hash() { + return hash; + } ++ ++ /** ++ * The returned IndexOutput validates the files checksum. ++ *
<p>
++ * Note: Checksums are calculated by default since version 4.8.0. This method only adds the ++ * verification against the checksum in the given metadata and does not add any significant overhead. ++ * @param fileName ++ * @param context ++ * @param store ++ */ ++ public IndexOutput createVerifyingOutput(String fileName, ++ final IOContext context, Store store) throws IOException { ++ IndexOutput output = store.directory().createOutput(fileName, context); ++ boolean success = false; ++ try { ++ assert writtenBy() != null; ++ output = new Store.LuceneVerifyingIndexOutput(this, output); ++ success = true; ++ } finally { ++ if (success == false) { ++ IOUtils.closeWhileHandlingException(output); ++ } ++ } ++ return output; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java +old mode 100644 +new mode 100755 +index 16382d15cd3..cc79bc36032 +--- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java ++++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java +@@ -35,7 +35,6 @@ import org.elasticsearch.action.admin.indices.stats.CommonStats; + import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; + import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; + import org.elasticsearch.action.admin.indices.stats.IndexShardStats; +-import org.elasticsearch.action.admin.indices.stats.ShardStats; + import org.elasticsearch.action.search.SearchType; + import org.elasticsearch.client.Client; + import org.elasticsearch.cluster.ClusterState; +@@ -86,7 +85,6 @@ import org.elasticsearch.index.IndexService; + import org.elasticsearch.index.IndexSettings; + import org.elasticsearch.index.analysis.AnalysisRegistry; + import org.elasticsearch.index.cache.request.ShardRequestCache; +-import org.elasticsearch.index.engine.CommitStats; + import org.elasticsearch.index.engine.EngineFactory; + import org.elasticsearch.index.engine.InternalEngineFactory; + import org.elasticsearch.index.engine.NoOpEngine; +@@ -100,9 +98,7 @@ import org.elasticsearch.index.query.QueryRewriteContext; + import org.elasticsearch.index.recovery.RecoveryStats; + import org.elasticsearch.index.refresh.RefreshStats; + import org.elasticsearch.index.search.stats.SearchStats; +-import org.elasticsearch.index.seqno.RetentionLeaseStats; + import org.elasticsearch.index.seqno.RetentionLeaseSyncer; +-import org.elasticsearch.index.seqno.SeqNoStats; + import org.elasticsearch.index.shard.IllegalIndexShardStateException; + import org.elasticsearch.index.shard.IndexEventListener; + import org.elasticsearch.index.shard.IndexShard; +@@ -157,7 +153,6 @@ import java.util.stream.Collectors; + import static java.util.Collections.emptyList; + import static java.util.Collections.emptyMap; + import static java.util.Collections.unmodifiableMap; +-import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; + import static org.elasticsearch.index.IndexService.IndexCreationContext.CREATE_INDEX; + import static org.elasticsearch.index.IndexService.IndexCreationContext.META_DATA_VERIFICATION; + import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; +@@ -378,7 +373,7 @@ public class IndicesService extends AbstractLifecycleComponent + for (final IndexService indexService : indicesService) { + for (final IndexShard indexShard : indexService) { + try { +- final IndexShardStats indexShardStats = indicesService.indexShardStats(indicesService, indexShard, 
flags); ++ final IndexShardStats indexShardStats = flags.indexShardStats(indicesService, indexShard, indicesService); + + if (indexShardStats == null) { + continue; +@@ -399,38 +394,6 @@ public class IndicesService extends AbstractLifecycleComponent + return statsByShard; + } + +- IndexShardStats indexShardStats(final IndicesService indicesService, final IndexShard indexShard, final CommonStatsFlags flags) { +- if (indexShard.routingEntry() == null) { +- return null; +- } +- +- CommitStats commitStats; +- SeqNoStats seqNoStats; +- RetentionLeaseStats retentionLeaseStats; +- try { +- commitStats = indexShard.commitStats(); +- seqNoStats = indexShard.seqNoStats(); +- retentionLeaseStats = indexShard.getRetentionLeaseStats(); +- } catch (AlreadyClosedException e) { +- // shard is closed - no stats is fine +- commitStats = null; +- seqNoStats = null; +- retentionLeaseStats = null; +- } +- +- return new IndexShardStats( +- indexShard.shardId(), +- new ShardStats[]{ +- new ShardStats( +- indexShard.routingEntry(), +- indexShard.shardPath(), +- new CommonStats(indicesService.getIndicesQueryCache(), indexShard, flags), +- commitStats, +- seqNoStats, +- retentionLeaseStats) +- }); +- } +- + /** + * Checks if changes (adding / removing) indices, shards and so on are allowed. + * +diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +old mode 100644 +new mode 100755 +index 08232a5ef62..1e636cc5846 +--- a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java ++++ b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +@@ -94,7 +94,7 @@ public class HunspellService { + public HunspellService(final Settings settings, final Environment env, final Map knownDictionaries) + throws IOException { + this.knownDictionaries = Collections.unmodifiableMap(knownDictionaries); +- this.hunspellDir = resolveHunspellDirectory(env); ++ this.hunspellDir = env.resolveHunspellDirectory(this); + this.defaultIgnoreCase = HUNSPELL_IGNORE_CASE.get(settings); + this.loadingFunction = (locale) -> { + try { +@@ -122,10 +122,6 @@ public class HunspellService { + return dictionary; + } + +- private Path resolveHunspellDirectory(Environment env) { +- return env.configFile().resolve("hunspell"); +- } +- + /** + * Scans the hunspell directory and loads all found dictionaries + */ +diff --git a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +old mode 100644 +new mode 100755 +index 48045e21925..fcce69f8b5e +--- a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java ++++ b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +@@ -198,13 +198,6 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { + logger.info("Updated breaker settings for accounting requests: {}", newAccountingSettings); + } + +- private boolean validateTotalCircuitBreakerLimit(ByteSizeValue byteSizeValue) { +- BreakerSettings newParentSettings = new BreakerSettings(CircuitBreaker.PARENT, byteSizeValue.getBytes(), 1.0, +- CircuitBreaker.Type.PARENT, null); +- validateSettings(new BreakerSettings[]{newParentSettings}); +- return true; +- } +- + private void setTotalCircuitBreakerLimit(ByteSizeValue byteSizeValue) { + BreakerSettings newParentSettings = new 
BreakerSettings(CircuitBreaker.PARENT, byteSizeValue.getBytes(), 1.0, + CircuitBreaker.Type.PARENT, null); +diff --git a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +old mode 100644 +new mode 100755 +index fc8c6fcef98..9a9f2140a7c +--- a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java ++++ b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +@@ -375,7 +375,7 @@ public class SyncedFlushService implements IndexEventListener { + if (node == null) { + logger.trace("{} is assigned to an unknown node. skipping for sync id [{}]. shard routing {}", shardId, syncId, shard); + results.put(shard, new ShardSyncedFlushResponse("unknown node")); +- countDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); ++ countDown.countDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, results, this); + continue; + } + final PreSyncedFlushResponse preSyncedResponse = preSyncResponses.get(shard.currentNodeId()); +@@ -383,7 +383,7 @@ public class SyncedFlushService implements IndexEventListener { + logger.trace("{} can't resolve expected commit id for current node, skipping for sync id [{}]. shard routing {}", + shardId, syncId, shard); + results.put(shard, new ShardSyncedFlushResponse("no commit id from pre-sync flush")); +- countDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); ++ countDown.countDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, results, this); + continue; + } + if (preSyncedResponse.numDocs != numDocsOnPrimary && +@@ -393,7 +393,7 @@ public class SyncedFlushService implements IndexEventListener { + shardId, syncId, shard, preSyncedResponse.numDocs, numDocsOnPrimary); + results.put(shard, new ShardSyncedFlushResponse("out of sync replica; " + + "num docs on replica [" + preSyncedResponse.numDocs + "]; num docs on primary [" + numDocsOnPrimary + "]")); +- countDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); ++ countDown.countDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, results, this); + continue; + } + logger.trace("{} sending synced flush request to {}. 
sync id [{}].", shardId, shard, syncId); +@@ -412,7 +412,7 @@ public class SyncedFlushService implements IndexEventListener { + ShardSyncedFlushResponse existing = results.put(shard, response); + assert existing == null : "got two answers for node [" + node + "]"; + // count after the assert so we won't decrement twice in handleException +- countDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); ++ countDown.countDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, results, this); + } + + @Override +@@ -420,7 +420,7 @@ public class SyncedFlushService implements IndexEventListener { + logger.trace(() -> new ParameterizedMessage("{} error while performing synced flush on [{}], skipping", + shardId, shard), exp); + results.put(shard, new ShardSyncedFlushResponse(exp.getMessage())); +- countDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); ++ countDown.countDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, results, this); + } + + @Override +@@ -432,19 +432,6 @@ public class SyncedFlushService implements IndexEventListener { + + } + +- private void countDownAndSendResponseIfDone(String syncId, +- List shards, +- ShardId shardId, +- int totalShards, +- ActionListener listener, +- CountDown countDown, +- Map results) { +- if (countDown.countDown()) { +- assert results.size() == shards.size(); +- listener.onResponse(new ShardsSyncedFlushResult(shardId, syncId, totalShards, results)); +- } +- } +- + /** + * send presync requests to all started copies of the given shard + */ +diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/MultiFileWriter.java b/server/src/main/java/org/elasticsearch/indices/recovery/MultiFileWriter.java +old mode 100644 +new mode 100755 +index 87a6d18671a..47b427e1b1f +--- a/server/src/main/java/org/elasticsearch/indices/recovery/MultiFileWriter.java ++++ b/server/src/main/java/org/elasticsearch/indices/recovery/MultiFileWriter.java +@@ -99,7 +99,7 @@ public class MultiFileWriter implements Releasable { + } + // add first, before it's created + tempFileNames.put(tempFileName, fileName); +- IndexOutput indexOutput = store.createVerifyingOutput(tempFileName, metaData, IOContext.DEFAULT); ++ IndexOutput indexOutput = metaData.createVerifyingOutput(tempFileName, IOContext.DEFAULT, store); + openIndexOutputs.put(fileName, indexOutput); + return indexOutput; + } +diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java +old mode 100644 +new mode 100755 +index 230f5351575..bd5885d1d86 +--- a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java ++++ b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java +@@ -79,30 +79,27 @@ public class RemoteRecoveryTargetHandler implements RecoveryTargetHandler { + + @Override + public void prepareForTranslogOperations(boolean fileBasedRecovery, int totalTranslogOps, ActionListener listener) { +- transportService.submitRequest(targetNode, PeerRecoveryTargetService.Actions.PREPARE_TRANSLOG, +- new RecoveryPrepareForTranslogOperationsRequest(recoveryId, shardId, totalTranslogOps, fileBasedRecovery), ++ new RecoveryPrepareForTranslogOperationsRequest(recoveryId, shardId, totalTranslogOps, fileBasedRecovery).submitRequest(targetNode, PeerRecoveryTargetService.Actions.PREPARE_TRANSLOG, + 
TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(), + new ActionListenerResponseHandler<>(ActionListener.map(listener, r -> null), +- in -> TransportResponse.Empty.INSTANCE, ThreadPool.Names.GENERIC)); ++ in -> TransportResponse.Empty.INSTANCE, ThreadPool.Names.GENERIC), transportService); + } + + @Override + public void finalizeRecovery(final long globalCheckpoint, final ActionListener listener) { +- transportService.submitRequest(targetNode, PeerRecoveryTargetService.Actions.FINALIZE, +- new RecoveryFinalizeRecoveryRequest(recoveryId, shardId, globalCheckpoint), ++ new RecoveryFinalizeRecoveryRequest(recoveryId, shardId, globalCheckpoint).submitRequest(targetNode, PeerRecoveryTargetService.Actions.FINALIZE, + TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionLongTimeout()).build(), + new ActionListenerResponseHandler<>(ActionListener.map(listener, r -> null), +- in -> TransportResponse.Empty.INSTANCE, ThreadPool.Names.GENERIC)); ++ in -> TransportResponse.Empty.INSTANCE, ThreadPool.Names.GENERIC), transportService); + } + + @Override + public void handoffPrimaryContext(final ReplicationTracker.PrimaryContext primaryContext) { +- transportService.submitRequest( ++ new RecoveryHandoffPrimaryContextRequest(recoveryId, shardId, primaryContext).submitRequest( + targetNode, + PeerRecoveryTargetService.Actions.HANDOFF_PRIMARY_CONTEXT, +- new RecoveryHandoffPrimaryContextRequest(recoveryId, shardId, primaryContext), +- TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(), +- EmptyTransportResponseHandler.INSTANCE_SAME).txGet(); ++ TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(), ++ EmptyTransportResponseHandler.INSTANCE_SAME, transportService).txGet(); + } + + @Override +@@ -123,9 +120,9 @@ public class RemoteRecoveryTargetHandler implements RecoveryTargetHandler { + maxSeqNoOfDeletesOrUpdatesOnPrimary, + retentionLeases, + mappingVersionOnPrimary); +- transportService.submitRequest(targetNode, PeerRecoveryTargetService.Actions.TRANSLOG_OPS, request, translogOpsRequestOptions, ++ request.submitRequest(targetNode, PeerRecoveryTargetService.Actions.TRANSLOG_OPS, translogOpsRequestOptions, + new ActionListenerResponseHandler<>(ActionListener.map(listener, r -> r.localCheckpoint), +- RecoveryTranslogOperationsResponse::new, ThreadPool.Names.GENERIC)); ++ RecoveryTranslogOperationsResponse::new, ThreadPool.Names.GENERIC), transportService); + } + + @Override +@@ -134,18 +131,17 @@ public class RemoteRecoveryTargetHandler implements RecoveryTargetHandler { + + RecoveryFilesInfoRequest recoveryInfoFilesRequest = new RecoveryFilesInfoRequest(recoveryId, shardId, + phase1FileNames, phase1FileSizes, phase1ExistingFileNames, phase1ExistingFileSizes, totalTranslogOps); +- transportService.submitRequest(targetNode, PeerRecoveryTargetService.Actions.FILES_INFO, recoveryInfoFilesRequest, +- TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(), +- EmptyTransportResponseHandler.INSTANCE_SAME).txGet(); ++ recoveryInfoFilesRequest.submitRequest(targetNode, PeerRecoveryTargetService.Actions.FILES_INFO, ++ TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(), ++ EmptyTransportResponseHandler.INSTANCE_SAME, transportService).txGet(); + + } + + @Override + public void cleanFiles(int totalTranslogOps, long globalCheckpoint, Store.MetadataSnapshot sourceMetaData) throws 
IOException { +- transportService.submitRequest(targetNode, PeerRecoveryTargetService.Actions.CLEAN_FILES, +- new RecoveryCleanFilesRequest(recoveryId, shardId, sourceMetaData, totalTranslogOps, globalCheckpoint), +- TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(), +- EmptyTransportResponseHandler.INSTANCE_SAME).txGet(); ++ new RecoveryCleanFilesRequest(recoveryId, shardId, sourceMetaData, totalTranslogOps, globalCheckpoint).submitRequest(targetNode, PeerRecoveryTargetService.Actions.CLEAN_FILES, ++ TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(), ++ EmptyTransportResponseHandler.INSTANCE_SAME, transportService).txGet(); + } + + @Override +@@ -173,15 +169,15 @@ public class RemoteRecoveryTargetHandler implements RecoveryTargetHandler { + throttleTimeInNanos = 0; + } + +- transportService.submitRequest(targetNode, PeerRecoveryTargetService.Actions.FILE_CHUNK, +- new RecoveryFileChunkRequest(recoveryId, shardId, fileMetaData, position, content, lastChunk, +- totalTranslogOps, +- /* we send estimateTotalOperations with every request since we collect stats on the target and that way we can +- * see how many translog ops we accumulate while copying files across the network. A future optimization +- * would be in to restart file copy again (new deltas) if we have too many translog ops are piling up. +- */ +- throttleTimeInNanos), fileChunkRequestOptions, new ActionListenerResponseHandler<>( +- ActionListener.map(listener, r -> null), in -> TransportResponse.Empty.INSTANCE)); ++ new RecoveryFileChunkRequest(recoveryId, shardId, fileMetaData, position, content, lastChunk, ++ totalTranslogOps, ++ /* we send estimateTotalOperations with every request since we collect stats on the target and that way we can ++ * see how many translog ops we accumulate while copying files across the network. A future optimization ++ * would be in to restart file copy again (new deltas) if we have too many translog ops are piling up. ++ */ ++ throttleTimeInNanos).submitRequest(targetNode, PeerRecoveryTargetService.Actions.FILE_CHUNK, ++ fileChunkRequestOptions, new ActionListenerResponseHandler<>( ++ ActionListener.map(listener, r -> null), in -> TransportResponse.Empty.INSTANCE), transportService); + } + + } +diff --git a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java +old mode 100644 +new mode 100755 +index a095d7647d9..43cb99bd270 +--- a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java ++++ b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java +@@ -152,7 +152,7 @@ public class CompoundProcessor implements Processor { + */ + boolean executeOnFailure(IngestDocument ingestDocument, ElasticsearchException exception) throws Exception { + try { +- putFailureMetadata(ingestDocument, exception); ++ ingestDocument.putFailureMetadata(exception, this); + for (Processor processor : onFailureProcessors) { + try { + if (processor.execute(ingestDocument) == null) { +@@ -168,17 +168,6 @@ public class CompoundProcessor implements Processor { + return true; + } + +- private void putFailureMetadata(IngestDocument ingestDocument, ElasticsearchException cause) { +- List processorTypeHeader = cause.getHeader("processor_type"); +- List processorTagHeader = cause.getHeader("processor_tag"); +- String failedProcessorType = (processorTypeHeader != null) ? 
processorTypeHeader.get(0) : null; +- String failedProcessorTag = (processorTagHeader != null) ? processorTagHeader.get(0) : null; +- Map ingestMetadata = ingestDocument.getIngestMetadata(); +- ingestMetadata.put(ON_FAILURE_MESSAGE_FIELD, cause.getRootCause().getMessage()); +- ingestMetadata.put(ON_FAILURE_PROCESSOR_TYPE_FIELD, failedProcessorType); +- ingestMetadata.put(ON_FAILURE_PROCESSOR_TAG_FIELD, failedProcessorTag); +- } +- + private void removeFailureMetadata(IngestDocument ingestDocument) { + Map ingestMetadata = ingestDocument.getIngestMetadata(); + ingestMetadata.remove(ON_FAILURE_MESSAGE_FIELD); +diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +old mode 100644 +new mode 100755 +index 90ebc8e0741..fedfb94498b +--- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java ++++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +@@ -19,6 +19,7 @@ + + package org.elasticsearch.ingest; + ++import org.elasticsearch.ElasticsearchException; + import org.elasticsearch.common.Strings; + import org.elasticsearch.index.VersionType; + import org.elasticsearch.index.mapper.IdFieldMapper; +@@ -680,6 +681,17 @@ public final class IngestDocument { + '}'; + } + ++ public void putFailureMetadata(ElasticsearchException cause, CompoundProcessor compoundProcessor) { ++ List processorTypeHeader = cause.getHeader("processor_type"); ++ List processorTagHeader = cause.getHeader("processor_tag"); ++ String failedProcessorType = (processorTypeHeader != null) ? processorTypeHeader.get(0) : null; ++ String failedProcessorTag = (processorTagHeader != null) ? processorTagHeader.get(0) : null; ++ Map ingestMetadata = getIngestMetadata(); ++ ingestMetadata.put(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD, cause.getRootCause().getMessage()); ++ ingestMetadata.put(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD, failedProcessorType); ++ ingestMetadata.put(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD, failedProcessorTag); ++ } ++ + public enum MetaData { + INDEX(IndexFieldMapper.NAME), + TYPE(TypeFieldMapper.NAME), +diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java +old mode 100644 +new mode 100755 +index 6e592ba324f..9e502d6c007 +--- a/server/src/main/java/org/elasticsearch/node/Node.java ++++ b/server/src/main/java/org/elasticsearch/node/Node.java +@@ -70,7 +70,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + import org.elasticsearch.common.lease.Releasables; + import org.elasticsearch.common.logging.DeprecationLogger; + import org.elasticsearch.common.logging.NodeAndClusterIdStateListener; +-import org.elasticsearch.common.network.NetworkAddress; + import org.elasticsearch.common.network.NetworkModule; + import org.elasticsearch.common.network.NetworkService; + import org.elasticsearch.common.settings.ClusterSettings; +@@ -80,7 +79,6 @@ import org.elasticsearch.common.settings.SettingUpgrader; + import org.elasticsearch.common.settings.Settings; + import org.elasticsearch.common.settings.SettingsModule; + import org.elasticsearch.common.transport.BoundTransportAddress; +-import org.elasticsearch.common.transport.TransportAddress; + import org.elasticsearch.common.unit.TimeValue; + import org.elasticsearch.common.util.BigArrays; + import org.elasticsearch.common.util.PageCacheRecycler; +@@ -745,9 +743,9 @@ public class Node implements Closeable { + + if 
(WRITE_PORTS_FILE_SETTING.get(settings())) { + TransportService transport = injector.getInstance(TransportService.class); +- writePortsFile("transport", transport.boundAddress()); ++ transport.boundAddress().writePortsFile("transport", this); + HttpServerTransport http = injector.getInstance(HttpServerTransport.class); +- writePortsFile("http", http.boundAddress()); ++ http.boundAddress().writePortsFile("http", this); + } + + logger.info("started"); +@@ -924,25 +922,6 @@ public class Node implements Closeable { + final BoundTransportAddress boundTransportAddress, List bootstrapChecks) throws NodeValidationException { + } + +- /** Writes a file to the logs dir containing the ports for the given transport type */ +- private void writePortsFile(String type, BoundTransportAddress boundAddress) { +- Path tmpPortsFile = environment.logsFile().resolve(type + ".ports.tmp"); +- try (BufferedWriter writer = Files.newBufferedWriter(tmpPortsFile, Charset.forName("UTF-8"))) { +- for (TransportAddress address : boundAddress.boundAddresses()) { +- InetAddress inetAddress = InetAddress.getByName(address.getAddress()); +- writer.write(NetworkAddress.format(new InetSocketAddress(inetAddress, address.getPort())) + "\n"); +- } +- } catch (IOException e) { +- throw new RuntimeException("Failed to write ports file", e); +- } +- Path portsFile = environment.logsFile().resolve(type + ".ports"); +- try { +- Files.move(tmpPortsFile, portsFile, StandardCopyOption.ATOMIC_MOVE); +- } catch (IOException e) { +- throw new RuntimeException("Failed to rename ports file", e); +- } +- } +- + /** + * The {@link PluginsService} used to build this node's components. + */ +diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java +old mode 100644 +new mode 100755 +index fb914a34bfb..14528162c57 +--- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java ++++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java +@@ -340,7 +340,7 @@ public class PersistentTasksClusterService implements ClusterStateListener, Clos + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { +- if (isAnyTaskUnassigned(newState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE))) { ++ if (newState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE).isAnyTaskUnassigned(this)) { + periodicRechecker.rescheduleIfNecessary(); + } + } +@@ -378,13 +378,6 @@ public class PersistentTasksClusterService implements ClusterStateListener, Clos + return false; + } + +- /** +- * Returns true if any persistent task is unassigned. +- */ +- private boolean isAnyTaskUnassigned(final PersistentTasksCustomMetaData tasks) { +- return tasks != null && tasks.tasks().stream().anyMatch(task -> task.getAssignment().isAssigned() == false); +- } +- + /** + * Evaluates the cluster state and tries to assign tasks to nodes. 
+ * +@@ -466,7 +459,7 @@ public class PersistentTasksClusterService implements ClusterStateListener, Clos + if (clusterService.localNode().isMasterNode()) { + final ClusterState state = clusterService.state(); + logger.trace("periodic persistent task assignment check running for cluster state {}", state.getVersion()); +- if (isAnyTaskUnassigned(state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE))) { ++ if (state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE).isAnyTaskUnassigned(this)) { + reassignPersistentTasks(); + } + } +diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java +old mode 100644 +new mode 100755 +index 6c5aa741a79..482ab3ce1f9 +--- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java ++++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java +@@ -125,6 +125,14 @@ public final class PersistentTasksCustomMetaData extends AbstractNamedDiffable task.getAssignment().isAssigned() == false); ++ } ++ + /** + * Private builder used in XContent parser to build task-specific portion (params and state) + */ +diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +old mode 100644 +new mode 100755 +index 589e0432c03..fc8c582b648 +--- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java ++++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +@@ -21,12 +21,14 @@ package org.elasticsearch.repositories; + + import org.elasticsearch.ElasticsearchParseException; + import org.elasticsearch.ResourceNotFoundException; ++import org.elasticsearch.action.admin.cluster.snapshots.get.TransportGetSnapshotsAction; + import org.elasticsearch.common.Nullable; + import org.elasticsearch.common.UUIDs; + import org.elasticsearch.common.xcontent.ToXContent; + import org.elasticsearch.common.xcontent.XContentBuilder; + import org.elasticsearch.common.xcontent.XContentParser; + import org.elasticsearch.snapshots.SnapshotId; ++import org.elasticsearch.snapshots.SnapshotInfo; + import org.elasticsearch.snapshots.SnapshotState; + + import java.io.IOException; +@@ -481,4 +483,30 @@ public final class RepositoryData { + return new RepositoryData(this.genId, this.snapshotIds, this.snapshotStates, this.indexSnapshots, incompatibleSnapshotIds); + } + ++ public List buildSimpleSnapshotInfos(final Set toResolve, ++ final List currentSnapshots, TransportGetSnapshotsAction transportGetSnapshotsAction) { ++ List snapshotInfos = new ArrayList<>(); ++ for (SnapshotInfo snapshotInfo : currentSnapshots) { ++ if (toResolve.remove(snapshotInfo.snapshotId())) { ++ snapshotInfos.add(snapshotInfo.basic()); ++ } ++ } ++ Map> snapshotsToIndices = new HashMap<>(); ++ for (IndexId indexId : getIndices().values()) { ++ for (SnapshotId snapshotId : getSnapshots(indexId)) { ++ if (toResolve.contains(snapshotId)) { ++ snapshotsToIndices.computeIfAbsent(snapshotId, (k) -> new ArrayList<>()) ++ .add(indexId.getName()); ++ } ++ } ++ } ++ for (Map.Entry> entry : snapshotsToIndices.entrySet()) { ++ final List indices = entry.getValue(); ++ CollectionUtil.timSort(indices); ++ final SnapshotId snapshotId = entry.getKey(); ++ snapshotInfos.add(new SnapshotInfo(snapshotId, indices, getSnapshotState(snapshotId))); ++ } ++ CollectionUtil.timSort(snapshotInfos); ++ return 
Collections.unmodifiableList(snapshotInfos); ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/FileRestoreContext.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/FileRestoreContext.java +old mode 100644 +new mode 100755 +index 3abe4d7b507..cbcbe4327de +--- a/server/src/main/java/org/elasticsearch/repositories/blobstore/FileRestoreContext.java ++++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/FileRestoreContext.java +@@ -237,7 +237,7 @@ public abstract class FileRestoreContext { + boolean success = false; + + try (InputStream stream = fileInputStream(fileInfo)) { +- try (IndexOutput indexOutput = store.createVerifyingOutput(fileInfo.physicalName(), fileInfo.metadata(), IOContext.DEFAULT)) { ++ try (IndexOutput indexOutput = fileInfo.metadata().createVerifyingOutput(fileInfo.physicalName(), IOContext.DEFAULT, store)) { + final byte[] buffer = new byte[bufferSize]; + int length; + while ((length = stream.read(buffer)) > 0) { +diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +old mode 100644 +new mode 100755 +index 9e86b3a6f94..3348b6c9b1f +--- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java ++++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +@@ -24,7 +24,6 @@ import org.apache.lucene.search.spell.LevenshteinDistance; + import org.apache.lucene.util.CollectionUtil; + import org.elasticsearch.client.node.NodeClient; + import org.elasticsearch.common.CheckedConsumer; +-import org.elasticsearch.common.collect.Tuple; + import org.elasticsearch.common.settings.Setting; + import org.elasticsearch.common.settings.Setting.Property; + import org.elasticsearch.common.settings.Settings; +@@ -101,7 +100,7 @@ public abstract class BaseRestHandler implements RestHandler { + final Set candidateParams = new HashSet<>(); + candidateParams.addAll(request.consumedParams()); + candidateParams.addAll(responseParams()); +- throw new IllegalArgumentException(unrecognized(request, unconsumedParams, candidateParams, "parameter")); ++ throw new IllegalArgumentException(request.unrecognized(unconsumedParams, candidateParams, "parameter", this)); + } + + if (request.hasContent() && request.isContentConsumed() == false) { +@@ -113,53 +112,6 @@ public abstract class BaseRestHandler implements RestHandler { + action.accept(channel); + } + +- protected final String unrecognized( +- final RestRequest request, +- final Set invalids, +- final Set candidates, +- final String detail) { +- StringBuilder message = new StringBuilder(String.format( +- Locale.ROOT, +- "request [%s] contains unrecognized %s%s: ", +- request.path(), +- detail, +- invalids.size() > 1 ? 
"s" : "")); +- boolean first = true; +- for (final String invalid : invalids) { +- final LevenshteinDistance ld = new LevenshteinDistance(); +- final List> scoredParams = new ArrayList<>(); +- for (final String candidate : candidates) { +- final float distance = ld.getDistance(invalid, candidate); +- if (distance > 0.5f) { +- scoredParams.add(new Tuple<>(distance, candidate)); +- } +- } +- CollectionUtil.timSort(scoredParams, (a, b) -> { +- // sort by distance in reverse order, then parameter name for equal distances +- int compare = a.v1().compareTo(b.v1()); +- if (compare != 0) return -compare; +- else return a.v2().compareTo(b.v2()); +- }); +- if (first == false) { +- message.append(", "); +- } +- message.append("[").append(invalid).append("]"); +- final List keys = scoredParams.stream().map(Tuple::v2).collect(Collectors.toList()); +- if (keys.isEmpty() == false) { +- message.append(" -> did you mean "); +- if (keys.size() == 1) { +- message.append("[").append(keys.get(0)).append("]"); +- } else { +- message.append("any of ").append(keys.toString()); +- } +- message.append("?"); +- } +- first = false; +- } +- +- return message.toString(); +- } +- + /** + * REST requests are handled by preparing a channel consumer that represents the execution of + * the request against a channel. +diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java +old mode 100644 +new mode 100755 +index a55005454f6..929ef3aff0d +--- a/server/src/main/java/org/elasticsearch/rest/RestController.java ++++ b/server/src/main/java/org/elasticsearch/rest/RestController.java +@@ -89,22 +89,6 @@ public class RestController implements HttpServerTransport.Dispatcher { + this.circuitBreakerService = circuitBreakerService; + } + +- /** +- * Registers a REST handler to be executed when the provided {@code method} and {@code path} match the request. +- * +- * @param method GET, POST, etc. +- * @param path Path to handle (e.g., "/{index}/{type}/_bulk") +- * @param handler The handler to actually execute +- * @param deprecationMessage The message to log and send as a header in the response +- * @param logger The existing deprecation logger to use +- */ +- public void registerAsDeprecatedHandler(RestRequest.Method method, String path, RestHandler handler, +- String deprecationMessage, DeprecationLogger logger) { +- assert (handler instanceof DeprecationRestHandler) == false; +- +- registerHandler(method, path, new DeprecationRestHandler(handler, deprecationMessage, logger)); +- } +- + /** + * Registers a REST handler to be executed when the provided {@code method} and {@code path} match the request, or when provided + * with {@code deprecatedMethod} and {@code deprecatedPath}. Expected usage: +@@ -120,7 +104,7 @@ public class RestController implements HttpServerTransport.Dispatcher { + * replacing the deprecated REST handler ({@code deprecatedMethod} with {@code deprecatedPath}) that is using the same + * {@code handler}. + *
<p>
+- * Deprecated REST handlers without a direct replacement should be deprecated directly using {@link #registerAsDeprecatedHandler} ++ * Deprecated REST handlers without a direct replacement should be deprecated directly using {@link DeprecationLogger#registerAsDeprecatedHandler} + * and a specific message. + * + * @param method GET, POST, etc. +@@ -138,7 +122,7 @@ public class RestController implements HttpServerTransport.Dispatcher { + "[" + deprecatedMethod.name() + " " + deprecatedPath + "] is deprecated! Use [" + method.name() + " " + path + "] instead."; + + registerHandler(method, path, handler); +- registerAsDeprecatedHandler(deprecatedMethod, deprecatedPath, handler, deprecationMessage, logger); ++ logger.registerAsDeprecatedHandler(deprecatedMethod, deprecatedPath, handler, deprecationMessage, this); + } + + /** +diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java +old mode 100644 +new mode 100755 +index fe976ee4ddc..d85b151f5b0 +--- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java ++++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java +@@ -149,6 +149,52 @@ public class RestRequest implements ToXContent.Params { + return new RestRequest(xContentRegistry, params, httpRequest.uri(), httpRequest.getHeaders(), httpRequest, httpChannel); + } + ++ public final String unrecognized( ++ final Set invalids, ++ final Set candidates, ++ final String detail, BaseRestHandler baseRestHandler) { ++ StringBuilder message = new StringBuilder(String.format( ++ Locale.ROOT, ++ "request [%s] contains unrecognized %s%s: ", ++ path(), ++ detail, ++ invalids.size() > 1 ? "s" : "")); ++ boolean first = true; ++ for (final String invalid : invalids) { ++ final LevenshteinDistance ld = new LevenshteinDistance(); ++ final List> scoredParams = new ArrayList<>(); ++ for (final String candidate : candidates) { ++ final float distance = ld.getDistance(invalid, candidate); ++ if (distance > 0.5f) { ++ scoredParams.add(new Tuple<>(distance, candidate)); ++ } ++ } ++ CollectionUtil.timSort(scoredParams, (a, b) -> { ++ // sort by distance in reverse order, then parameter name for equal distances ++ int compare = a.v1().compareTo(b.v1()); ++ if (compare != 0) return -compare; ++ else return a.v2().compareTo(b.v2()); ++ }); ++ if (first == false) { ++ message.append(", "); ++ } ++ message.append("[").append(invalid).append("]"); ++ final List keys = scoredParams.stream().map(Tuple::v2).collect(Collectors.toList()); ++ if (keys.isEmpty() == false) { ++ message.append(" -> did you mean "); ++ if (keys.size() == 1) { ++ message.append("[").append(keys.get(0)).append("]"); ++ } else { ++ message.append("any of ").append(keys.toString()); ++ } ++ message.append("?"); ++ } ++ first = false; ++ } ++ ++ return message.toString(); ++ } ++ + public enum Method { + GET, POST, PUT, DELETE, OPTIONS, HEAD, PATCH, TRACE, CONNECT + } +diff --git a/server/src/main/java/org/elasticsearch/rest/RestStatus.java b/server/src/main/java/org/elasticsearch/rest/RestStatus.java +old mode 100644 +new mode 100755 +index e7c07f21147..10d5d8824ca +--- a/server/src/main/java/org/elasticsearch/rest/RestStatus.java ++++ b/server/src/main/java/org/elasticsearch/rest/RestStatus.java +@@ -20,6 +20,8 @@ + package org.elasticsearch.rest; + + import org.elasticsearch.action.ShardOperationFailedException; ++import org.elasticsearch.cluster.block.ClusterBlock; ++import org.elasticsearch.cluster.block.ClusterBlocks; + import 
org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + +@@ -535,4 +537,17 @@ public enum RestStatus { + public static RestStatus fromCode(int code) { + return CODE_TO_STATUS.get(code); + } ++ ++ /** ++ * Is there a global block with the provided status? ++ * @param clusterBlocks ++ */ ++ public boolean hasGlobalBlockWithStatus(ClusterBlocks clusterBlocks) { ++ for (ClusterBlock clusterBlock : clusterBlocks.global()) { ++ if (clusterBlock.status().equals(this)) { ++ return true; ++ } ++ } ++ return false; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java +old mode 100644 +new mode 100755 +index e74c42f3c39..13a94858061 +--- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java ++++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java +@@ -126,7 +126,7 @@ public class RestNodesStatsAction extends BaseRestHandler { + } + + if (!invalidMetrics.isEmpty()) { +- throw new IllegalArgumentException(unrecognized(request, invalidMetrics, METRICS.keySet(), "metric")); ++ throw new IllegalArgumentException(request.unrecognized(invalidMetrics, METRICS.keySet(), "metric", RestNodesStatsAction.this)); + } + + // check for index specific metrics +@@ -149,7 +149,7 @@ public class RestNodesStatsAction extends BaseRestHandler { + } + + if (!invalidIndexMetrics.isEmpty()) { +- throw new IllegalArgumentException(unrecognized(request, invalidIndexMetrics, FLAGS.keySet(), "index metric")); ++ throw new IllegalArgumentException(request.unrecognized(invalidIndexMetrics, FLAGS.keySet(), "index metric", RestNodesStatsAction.this)); + } + + nodesStatsRequest.indices(flags); +diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java +old mode 100644 +new mode 100755 +index c2d16ce5ac6..d1d21a8932d +--- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java ++++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java +@@ -102,7 +102,7 @@ public class RestIndicesStatsAction extends BaseRestHandler { + } + + if (!invalidMetrics.isEmpty()) { +- throw new IllegalArgumentException(unrecognized(request, invalidMetrics, METRICS.keySet(), "metric")); ++ throw new IllegalArgumentException(request.unrecognized(invalidMetrics, METRICS.keySet(), "metric", RestIndicesStatsAction.this)); + } + } + +diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java +old mode 100644 +new mode 100755 +index 3d0158cf95f..c7386277a78 +--- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java ++++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java +@@ -49,7 +49,7 @@ public abstract class RestResizeHandler extends BaseRestHandler { + request.applyContentParser(resizeRequest::fromXContent); + resizeRequest.timeout(request.paramAsTime("timeout", resizeRequest.timeout())); + resizeRequest.masterNodeTimeout(request.paramAsTime("master_timeout", resizeRequest.masterNodeTimeout())); +- 
resizeRequest.setWaitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards"))); ++ ActiveShardCount.parseString(request.param("wait_for_active_shards")).setWaitForActiveShards(resizeRequest); + return channel -> client.admin().indices().resizeIndex(resizeRequest, new RestToXContentListener<>(channel)); + } + +diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +old mode 100644 +new mode 100755 +index d9beba08985..6f2994991a5 +--- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java ++++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +@@ -106,7 +106,7 @@ public class RestMultiSearchAction extends BaseRestHandler { + parseMultiLineRequest(restRequest, multiRequest.indicesOptions(), allowExplicitIndex, (searchRequest, parser) -> { + searchRequest.source(SearchSourceBuilder.fromXContent(parser, false)); + RestSearchAction.checkRestTotalHits(restRequest, searchRequest); +- multiRequest.add(searchRequest); ++ searchRequest.add(multiRequest); + }); + List requests = multiRequest.requests(); + preFilterShardSize = Math.max(1, preFilterShardSize / (requests.size()+1)); +diff --git a/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java b/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java +old mode 100644 +new mode 100755 +index c7d6e889397..cd12b9ec3ae +--- a/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java ++++ b/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java +@@ -114,7 +114,7 @@ public final class ScoreScriptUtils { + } + + public double decayGeoLinear(GeoPoint docValue) { +- double distance = GeoDistance.ARC.calculate(originLat, originLon, docValue.lat(), docValue.lon(), DistanceUnit.METERS); ++ double distance = DistanceUnit.METERS.calculate(originLat, originLon, docValue.lat(), docValue.lon(), GeoDistance.ARC); + distance = Math.max(0.0d, distance - offset); + return Math.max(0.0, (scaling - distance) / scaling); + } +@@ -136,7 +136,7 @@ public final class ScoreScriptUtils { + } + + public double decayGeoExp(GeoPoint docValue) { +- double distance = GeoDistance.ARC.calculate(originLat, originLon, docValue.lat(), docValue.lon(), DistanceUnit.METERS); ++ double distance = DistanceUnit.METERS.calculate(originLat, originLon, docValue.lat(), docValue.lon(), GeoDistance.ARC); + distance = Math.max(0.0d, distance - offset); + return Math.exp(scaling * distance); + } +@@ -158,7 +158,7 @@ public final class ScoreScriptUtils { + } + + public double decayGeoGauss(GeoPoint docValue) { +- double distance = GeoDistance.ARC.calculate(originLat, originLon, docValue.lat(), docValue.lon(), DistanceUnit.METERS); ++ double distance = DistanceUnit.METERS.calculate(originLat, originLon, docValue.lat(), docValue.lon(), GeoDistance.ARC); + distance = Math.max(0.0d, distance - offset); + return Math.exp(0.5 * Math.pow(distance, 2.0) / scaling); + } +diff --git a/server/src/main/java/org/elasticsearch/script/ScriptService.java b/server/src/main/java/org/elasticsearch/script/ScriptService.java +old mode 100644 +new mode 100755 +index f1472afba93..31f6ed8aa73 +--- a/server/src/main/java/org/elasticsearch/script/ScriptService.java ++++ b/server/src/main/java/org/elasticsearch/script/ScriptService.java +@@ -46,7 +46,10 @@ import org.elasticsearch.common.settings.Setting; + import 
org.elasticsearch.common.settings.Setting.Property; + import org.elasticsearch.common.settings.Settings; + import org.elasticsearch.common.unit.TimeValue; ++import org.elasticsearch.common.xcontent.*; + import org.elasticsearch.core.internal.io.IOUtils; ++import org.elasticsearch.xpack.core.security.authc.support.mapper.TemplateRoleName; ++import org.elasticsearch.xpack.core.security.support.MustacheTemplateEvaluator; + + import java.io.Closeable; + import java.io.IOException; +@@ -547,6 +550,12 @@ public class ScriptService implements Closeable, ClusterStateApplier { + clusterState = event.state(); + } + ++ public String parseTemplate(Map parameters, TemplateRoleName templateRoleName) throws IOException { ++ final XContentParser parser = XContentHelper.createParser( ++ NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, templateRoleName.getTemplate(), XContentType.JSON); ++ return MustacheTemplateEvaluator.evaluate(this, parser, parameters); ++ } ++ + /** + * A small listener for the script cache that calls each + * {@code ScriptEngine}'s {@code scriptRemoved} method when the +diff --git a/server/src/main/java/org/elasticsearch/script/ScriptType.java b/server/src/main/java/org/elasticsearch/script/ScriptType.java +old mode 100644 +new mode 100755 +index 5d356bbd7cb..6f7e02df503 +--- a/server/src/main/java/org/elasticsearch/script/ScriptType.java ++++ b/server/src/main/java/org/elasticsearch/script/ScriptType.java +@@ -19,6 +19,7 @@ + + package org.elasticsearch.script; + ++import org.elasticsearch.action.update.UpdateRequest; + import org.elasticsearch.common.ParseField; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; +@@ -116,4 +117,18 @@ public enum ScriptType implements Writeable { + public String toString() { + return getName(); + } ++ ++ public void updateOrCreateScript(String scriptContent, String lang, Map params, UpdateRequest updateRequest) { ++ Script script = updateRequest.script(); ++ if (script == null) { ++ script = new Script(this == null ? INLINE : this, lang, scriptContent == null ? "" : scriptContent, params); ++ } else { ++ String newScriptContent = scriptContent == null ? script.getIdOrCode() : scriptContent; ++ ScriptType newScriptType = this == null ? script.getType() : this; ++ String newScriptLang = lang == null ? script.getLang() : lang; ++ Map newScriptParams = params == null ? 
script.getParams() : params; ++ script = new Script(newScriptType, newScriptLang, newScriptContent, newScriptParams); ++ } ++ updateRequest.script(script); ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java +old mode 100644 +new mode 100755 +index 27e735789f7..2ae8f8fdd48 +--- a/server/src/main/java/org/elasticsearch/search/SearchHit.java ++++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java +@@ -33,14 +33,8 @@ import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.io.stream.Writeable; + import org.elasticsearch.common.text.Text; +-import org.elasticsearch.common.xcontent.ConstructingObjectParser; +-import org.elasticsearch.common.xcontent.ObjectParser; ++import org.elasticsearch.common.xcontent.*; + import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +-import org.elasticsearch.common.xcontent.ToXContentFragment; +-import org.elasticsearch.common.xcontent.ToXContentObject; +-import org.elasticsearch.common.xcontent.XContentBuilder; +-import org.elasticsearch.common.xcontent.XContentHelper; +-import org.elasticsearch.common.xcontent.XContentParser; + import org.elasticsearch.common.xcontent.XContentParser.Token; + import org.elasticsearch.index.mapper.IgnoredFieldMapper; + import org.elasticsearch.index.mapper.MapperService; +@@ -50,6 +44,7 @@ import org.elasticsearch.index.shard.ShardId; + import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; + import org.elasticsearch.search.lookup.SourceLookup; + import org.elasticsearch.transport.RemoteClusterAware; ++import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; + + import java.io.IOException; + import java.util.ArrayList; +@@ -534,6 +529,19 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable T parseSearchHit(BiFunction objectParser, ++ Consumer errorHandler, JobResultsProvider jobResultsProvider) { ++ BytesReference source = getSourceRef(); ++ try (InputStream stream = source.streamInput(); ++ XContentParser parser = XContentFactory.xContent(XContentType.JSON) ++ .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { ++ return objectParser.apply(parser, null); ++ } catch (IOException e) { ++ errorHandler.accept(new ElasticsearchParseException("failed to parse " + getId(), e)); ++ return null; ++ } ++ } ++ + public static class Fields { + static final String _INDEX = "_index"; + static final String _TYPE = "_type"; +diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java +old mode 100644 +new mode 100755 +index e5021b47a3e..5624ee6c517 +--- a/server/src/main/java/org/elasticsearch/search/SearchHits.java ++++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java +@@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.ToXContentFragment; + import org.elasticsearch.common.xcontent.XContentBuilder; + import org.elasticsearch.common.xcontent.XContentParser; + import org.elasticsearch.rest.action.search.RestSearchAction; ++import org.elasticsearch.search.fetch.FetchSearchResult; + + import java.io.IOException; + import java.util.ArrayList; +@@ -178,6 +179,13 @@ public final class SearchHits implements Writeable, ToXContentFragment, Iterable + return Arrays.stream(getHits()).iterator(); + } + ++ public boolean assertNoSearchTarget(FetchSearchResult 
fetchSearchResult) { ++ for (SearchHit hit : getHits()) { ++ assert hit.getShard() == null : "expected null but got: " + hit.getShard(); ++ } ++ return true; ++ } ++ + public static final class Fields { + public static final String HITS = "hits"; + public static final String TOTAL = "total"; +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java +old mode 100644 +new mode 100755 +index 16f8aaf8f52..be7f34f2535 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java +@@ -28,7 +28,11 @@ import org.elasticsearch.common.lease.Releasable; + import org.elasticsearch.common.xcontent.DeprecationHandler; + import org.elasticsearch.common.xcontent.XContentParser; + import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; ++import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator; ++import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator; ++import org.elasticsearch.search.aggregations.support.AggregationPath; + import org.elasticsearch.search.internal.SearchContext; ++import org.elasticsearch.search.profile.aggregation.ProfilingAggregator; + + import java.io.IOException; + +@@ -39,6 +43,24 @@ import java.io.IOException; + // On the other hand, if you can remove methods from it, you are highly welcome! + public abstract class Aggregator extends BucketCollector implements Releasable { + ++ /** ++ * Resolves the aggregator pointed to by this path, using this aggregator as the point of reference. ++ * ++ * @param aggregationPath the path to resolve ++ * @return The aggregator pointed to by this path, starting from this aggregator as the point of reference ++ */ ++ public Aggregator resolveAggregator(AggregationPath aggregationPath) { ++ Aggregator aggregator = this; ++ for (int i = 0; i < aggregationPath.getPathElements().size(); i++) { ++ AggregationPath.PathElement token = aggregationPath.getPathElements().get(i); ++ aggregator = ProfilingAggregator.unwrap(aggregator.subAggregator(token.name)); ++ assert (aggregator instanceof SingleBucketAggregator && i <= aggregationPath.getPathElements().size() - 1) ++ || (aggregator instanceof NumericMetricsAggregator && i == aggregationPath.getPathElements().size() - 1) : ++ "this should be picked up before aggregation execution - on validate"; ++ } ++ return aggregator; ++ } ++ + /** + * Parses the aggregation request and creates the appropriate aggregator factory for it.
+ * +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollector.java +old mode 100644 +new mode 100755 +index 367e1cce060..c193ae72bd8 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollector.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollector.java +@@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations; + + import org.apache.lucene.search.LeafCollector; + import org.apache.lucene.search.Scorable; ++import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; + + import java.io.IOException; + import java.util.stream.Stream; +@@ -86,4 +87,15 @@ public abstract class LeafBucketCollector implements LeafCollector { + public void setScorer(Scorable scorer) throws IOException { + // no-op by default + } ++ ++ /** ++ * Utility method to collect the given doc in the given bucket (identified by the bucket ordinal) ++ * @param doc ++ * @param bucketOrd ++ * @param bucketsAggregator ++ */ ++ public final void collectBucket(int doc, long bucketOrd, BucketsAggregator bucketsAggregator) throws IOException { ++ bucketsAggregator.grow(bucketOrd + 1); ++ bucketsAggregator.collectExistingBucket(this, doc, bucketOrd); ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +old mode 100644 +new mode 100755 +index 71dacc698be..740871a2984 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +@@ -69,15 +69,7 @@ public abstract class BucketsAggregator extends AggregatorBase { + } + + /** +- * Utility method to collect the given doc in the given bucket (identified by the bucket ordinal) +- */ +- public final void collectBucket(LeafBucketCollector subCollector, int doc, long bucketOrd) throws IOException { +- grow(bucketOrd + 1); +- collectExistingBucket(subCollector, doc, bucketOrd); +- } +- +- /** +- * Same as {@link #collectBucket(LeafBucketCollector, int, long)}, but doesn't check if the docCounts needs to be re-sized. ++ * Same as {@link LeafBucketCollector#collectBucket(int, long, BucketsAggregator)}, but doesn't check if the docCounts needs to be re-sized. 
+ */ + public final void collectExistingBucket(LeafBucketCollector subCollector, int doc, long bucketOrd) throws IOException { + docCounts.increment(bucketOrd, 1); +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java +old mode 100644 +new mode 100755 +index c1bf1d648d6..d9f5a1f8ad3 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java +@@ -185,7 +185,7 @@ public class AdjacencyMatrixAggregator extends BucketsAggregator { + public void collect(int doc, long bucket) throws IOException { + for (int i = 0; i < bits.length; i++) { + if (bits[i].get(doc)) { +- collectBucket(sub, doc, bucketOrd(bucket, i)); ++ sub.collectBucket(doc, bucketOrd(bucket, i), this); + } + } + } +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java +old mode 100644 +new mode 100755 +index fc4ac58fb15..7aad1a74e6f +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java +@@ -63,7 +63,7 @@ public class FilterAggregator extends BucketsAggregator implements SingleBucketA + @Override + public void collect(int doc, long bucket) throws IOException { + if (bits.get(doc)) { +- collectBucket(sub, doc, bucket); ++ sub.collectBucket(doc, bucket, this); + } + } + }; +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java +old mode 100644 +new mode 100755 +index 80d5164a96c..36c99cf4a2f +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java +@@ -153,12 +153,12 @@ public class FiltersAggregator extends BucketsAggregator { + boolean matched = false; + for (int i = 0; i < bits.length; i++) { + if (bits[i].get(doc)) { +- collectBucket(sub, doc, bucketOrd(bucket, i)); ++ sub.collectBucket(doc, bucketOrd(bucket, i), this); + matched = true; + } + } + if (showOtherBucket && !matched) { +- collectBucket(sub, doc, bucketOrd(bucket, bits.length)); ++ sub.collectBucket(doc, bucketOrd(bucket, bits.length), this); + } + } + }; +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java +old mode 100644 +new mode 100755 +index 4935b6c6ba7..2729936525e +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java +@@ -86,7 +86,7 @@ public abstract class GeoGridAggregator extends Bucke + bucketOrdinal = -1 - bucketOrdinal; + collectExistingBucket(sub, doc, bucketOrdinal); + } else { +- collectBucket(sub, doc, bucketOrdinal); ++ sub.collectBucket(doc, bucketOrdinal, this); + } + previous = val; + } +diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +old mode 100644 +new mode 100755 +index 68e07c3657f..93aec369857 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +@@ -46,7 +46,7 @@ public class GlobalAggregator extends BucketsAggregator implements SingleBucketA + @Override + public void collect(int doc, long bucket) throws IOException { + assert bucket == 0 : "global aggregator can only be a top level aggregator"; +- collectBucket(sub, doc, bucket); ++ sub.collectBucket(doc, bucket, this); + } + }; + } +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +old mode 100644 +new mode 100755 +index 1b982ea9dec..b69cf98d65b +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +@@ -124,7 +124,7 @@ class AutoDateHistogramAggregator extends DeferableBucketAggregator { + bucketOrd = -1 - bucketOrd; + collectExistingBucket(sub, doc, bucketOrd); + } else { +- collectBucket(sub, doc, bucketOrd); ++ sub.collectBucket(doc, bucketOrd, this); + while (roundingIdx < roundingInfos.length - 1 + && bucketOrds.size() > (targetBuckets * roundingInfos[roundingIdx].getMaximumInnerInterval())) { + increaseRounding(); +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +old mode 100644 +new mode 100755 +index 0c7a91505ae..6e1dde8473c +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +@@ -123,7 +123,7 @@ class DateHistogramAggregator extends BucketsAggregator { + bucketOrd = -1 - bucketOrd; + collectExistingBucket(sub, doc, bucketOrd); + } else { +- collectBucket(sub, doc, bucketOrd); ++ sub.collectBucket(doc, bucketOrd, this); + } + previousRounded = rounded; + } +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java +old mode 100644 +new mode 100755 +index 1295cec2e4b..96826b9e1f9 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java +@@ -123,7 +123,7 @@ class HistogramAggregator extends BucketsAggregator { + bucketOrd = -1 - bucketOrd; + collectExistingBucket(sub, doc, bucketOrd); + } else { +- collectBucket(sub, doc, bucketOrd); ++ sub.collectBucket(doc, bucketOrd, this); + } + previousKey = key; + } +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java +old mode 100644 +new mode 100755 +index e6ff14de31d..6b0fb2796e8 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java +@@ -64,7 +64,7 @@ public class MissingAggregator extends BucketsAggregator implements SingleBucket + @Override + public void collect(int doc, long bucket) throws IOException { + if (docsWithValue.advanceExact(doc) == false) { +- collectBucket(sub, doc, bucket); ++ sub.collectBucket(doc, bucket, this); + } + } + }; +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +old mode 100644 +new mode 100755 +index 68e46b37bb0..9fbecf96261 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +@@ -99,7 +99,7 @@ public class NestedAggregator extends BucketsAggregator implements SingleBucketA + } + + for (; childDocId < parentDoc; childDocId = childDocs.nextDoc()) { +- collectBucket(sub, childDocId, bucket); ++ sub.collectBucket(childDocId, bucket, this); + } + } + }; +@@ -196,7 +196,7 @@ public class NestedAggregator extends BucketsAggregator implements SingleBucketA + final long[] buffer = bucketBuffer.buffer; + final int size = bucketBuffer.size(); + for (int i = 0; i < size; i++) { +- collectBucket(sub, childDocId, buffer[i]); ++ sub.collectBucket(childDocId, buffer[i], this); + } + } + bucketBuffer.clear(); +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java +old mode 100644 +new mode 100755 +index 2f29f8f2cdc..0594f1e39c7 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java +@@ -81,11 +81,11 @@ public class ReverseNestedAggregator extends BucketsAggregator implements Single + if (bucketOrdToLastCollectedParentDoc.indexExists(keySlot)) { + int lastCollectedParentDoc = bucketOrdToLastCollectedParentDoc.indexGet(keySlot); + if (parentDoc > lastCollectedParentDoc) { +- collectBucket(sub, parentDoc, bucket); ++ sub.collectBucket(parentDoc, bucket, this); + bucketOrdToLastCollectedParentDoc.indexReplace(keySlot, parentDoc); + } + } else { +- collectBucket(sub, parentDoc, bucket); ++ sub.collectBucket(parentDoc, bucket, this); + bucketOrdToLastCollectedParentDoc.indexInsert(keySlot, bucket, parentDoc); + } + } +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +old mode 100644 +new mode 100755 +index b8b0cf293a3..5cbc89f08b5 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +@@ -109,7 +109,7 @@ public final class BinaryRangeAggregator extends BucketsAggregator { + return new 
SortedSetRangeLeafCollector(values, ranges, sub) { + @Override + protected void doCollect(LeafBucketCollector sub, int doc, long bucket) throws IOException { +- collectBucket(sub, doc, bucket); ++ sub.collectBucket(doc, bucket, this); + } + }; + } else { +@@ -118,7 +118,7 @@ public final class BinaryRangeAggregator extends BucketsAggregator { + @Override + protected void doCollect(LeafBucketCollector sub, int doc, long bucket) + throws IOException { +- collectBucket(sub, doc, bucket); ++ sub.collectBucket(doc, bucket, this); + } + }; + } +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +old mode 100644 +new mode 100755 +index c4e2d1fc439..a15bbce2b04 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +@@ -312,7 +312,7 @@ public class RangeAggregator extends BucketsAggregator { + + for (int i = startLo; i <= endHi; ++i) { + if (ranges[i].matches(value)) { +- collectBucket(sub, doc, subBucketOrdinal(owningBucketOrdinal, i)); ++ sub.collectBucket(doc, subBucketOrdinal(owningBucketOrdinal, i), this); + } + } + +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregator.java +old mode 100644 +new mode 100755 +index 7f62813278b..9ea5b9b8bb1 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregator.java +@@ -137,7 +137,7 @@ public class SignificantTextAggregator extends BucketsAggregator { + bucketOrdinal = -1 - bucketOrdinal; + collectExistingBucket(sub, doc, bucketOrdinal); + } else { +- collectBucket(sub, doc, bucketOrdinal); ++ sub.collectBucket(doc, bucketOrdinal, this); + } + } + } +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +old mode 100644 +new mode 100755 +index 03eb00337e9..4e2d1011160 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +@@ -116,7 +116,7 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr + bucketOrd = -1 - bucketOrd; + collectExistingBucket(sub, doc, bucketOrd); + } else { +- collectBucket(sub, doc, bucketOrd); ++ sub.collectBucket(doc, bucketOrd, this); + } + } + } +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java +old mode 100644 +new mode 100755 +index 69539e8a11b..35f4b94c274 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java +@@ -95,7 +95,7 @@ public class LongTermsAggregator extends TermsAggregator { + 
bucketOrdinal = -1 - bucketOrdinal; + collectExistingBucket(sub, doc, bucketOrdinal); + } else { +- collectBucket(sub, doc, bucketOrdinal); ++ sub.collectBucket(doc, bucketOrdinal, this); + } + } + +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java +old mode 100644 +new mode 100755 +index 20162fd1bc7..8f0f50c4028 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java +@@ -101,7 +101,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator { + bucketOrdinal = -1 - bucketOrdinal; + collectExistingBucket(sub, doc, bucketOrdinal); + } else { +- collectBucket(sub, doc, bucketOrdinal); ++ sub.collectBucket(doc, bucketOrdinal, this); + } + previous.copyBytes(bytes); + } +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java +old mode 100644 +new mode 100755 +index 189c2ee796e..5b21bcf32d9 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java +@@ -249,7 +249,7 @@ public abstract class TermsAggregator extends DeferableBucketAggregator { + */ + public Comparator bucketComparator(AggregationPath path, boolean asc) { + +- final Aggregator aggregator = path.resolveAggregator(this); ++ final Aggregator aggregator = resolveAggregator(path); + final String key = path.lastPathElement().key; + + if (aggregator instanceof SingleBucketAggregator) { +diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java +old mode 100644 +new mode 100755 +index c7474fb800f..b1956b28217 +--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java ++++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java +@@ -247,24 +247,6 @@ public class AggregationPath { + return value; + } + +- /** +- * Resolves the aggregator pointed by this path using the given root as a point of reference. +- * +- * @param root The point of reference of this path +- * @return The aggregator pointed by this path starting from the given aggregator as a point of reference +- */ +- public Aggregator resolveAggregator(Aggregator root) { +- Aggregator aggregator = root; +- for (int i = 0; i < pathElements.size(); i++) { +- AggregationPath.PathElement token = pathElements.get(i); +- aggregator = ProfilingAggregator.unwrap(aggregator.subAggregator(token.name)); +- assert (aggregator instanceof SingleBucketAggregator && i <= pathElements.size() - 1) +- || (aggregator instanceof NumericMetricsAggregator && i == pathElements.size() - 1) : +- "this should be picked up before aggregation execution - on validate"; +- } +- return aggregator; +- } +- + /** + * Resolves the topmost aggregator pointed by this path using the given root as a point of reference. 
+ * +diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +old mode 100644 +new mode 100755 +index c23be0f4cb9..221f3ee7512 +--- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java ++++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +@@ -229,7 +229,7 @@ public class FetchPhase implements SearchPhase { + int subDocId, + Map> storedToRequestedFields, + LeafReaderContext subReaderContext) { +- loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId); ++ fieldsVisitor.loadStoredFields(context, subReaderContext, subDocId, this); + fieldsVisitor.postProcess(context.mapperService()); + + if (fieldsVisitor.fields().isEmpty()) { +@@ -266,7 +266,7 @@ public class FetchPhase implements SearchPhase { + final boolean needSource = context.sourceRequested() || context.highlight() != null; + if (needSource || (context instanceof InnerHitsContext.InnerHitSubContext == false)) { + FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource); +- loadStoredFields(context, subReaderContext, rootFieldsVisitor, rootSubDocId); ++ rootFieldsVisitor.loadStoredFields(context, subReaderContext, rootSubDocId, this); + rootFieldsVisitor.postProcess(context.mapperService()); + uid = rootFieldsVisitor.uid(); + source = rootFieldsVisitor.source(); +@@ -287,7 +287,7 @@ public class FetchPhase implements SearchPhase { + SourceLookup sourceLookup = context.lookup().source(); + sourceLookup.setSegmentAndDocument(subReaderContext, nestedSubDocId); + +- ObjectMapper nestedObjectMapper = documentMapper.findNestedObjectMapper(nestedSubDocId, context, subReaderContext); ++ ObjectMapper nestedObjectMapper = context.findNestedObjectMapper(nestedSubDocId, subReaderContext, documentMapper); + assert nestedObjectMapper != null; + SearchHit.NestedIdentity nestedIdentity = + getInternalNestedIdentity(context, nestedSubDocId, subReaderContext, context.mapperService(), nestedObjectMapper); +@@ -355,7 +355,7 @@ public class FetchPhase implements SearchPhase { + final IndexSettings indexSettings = context.getQueryShardContext().getIndexSettings(); + do { + Query parentFilter; +- nestedParentObjectMapper = current.getParentObjectMapper(mapperService); ++ nestedParentObjectMapper = mapperService.getParentObjectMapper(current); + if (nestedParentObjectMapper != null) { + if (nestedParentObjectMapper.nested().isNested() == false) { + current = nestedParentObjectMapper; +@@ -419,12 +419,4 @@ public class FetchPhase implements SearchPhase { + return nestedIdentity; + } + +- private void loadStoredFields(SearchContext searchContext, LeafReaderContext readerContext, FieldsVisitor fieldVisitor, int docId) { +- fieldVisitor.reset(); +- try { +- readerContext.reader().document(docId, fieldVisitor); +- } catch (IOException e) { +- throw new FetchPhaseExecutionException(searchContext, "Failed to fetch doc id [" + docId + "]", e); +- } +- } + } +diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java +old mode 100644 +new mode 100755 +index 6e183f11483..b201be7e421 +--- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java ++++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java +@@ -21,7 +21,6 @@ package org.elasticsearch.search.fetch; + + import org.elasticsearch.common.io.stream.StreamInput; + import 
org.elasticsearch.common.io.stream.StreamOutput; +-import org.elasticsearch.search.SearchHit; + import org.elasticsearch.search.SearchHits; + import org.elasticsearch.search.SearchPhaseResult; + import org.elasticsearch.search.SearchShardTarget; +@@ -60,17 +59,10 @@ public final class FetchSearchResult extends SearchPhaseResult { + } + + public void hits(SearchHits hits) { +- assert assertNoSearchTarget(hits); ++ assert hits.assertNoSearchTarget(this); + this.hits = hits; + } + +- private boolean assertNoSearchTarget(SearchHits hits) { +- for (SearchHit hit : hits.getHits()) { +- assert hit.getShard() == null : "expected null but got: " + hit.getShard(); +- } +- return true; +- } +- + public SearchHits hits() { + return hits; + } +diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java +old mode 100644 +new mode 100755 +index 5298e7eca05..0f459fb514c +--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java ++++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java +@@ -19,10 +19,7 @@ + + package org.elasticsearch.search.fetch.subphase; + +-import org.elasticsearch.common.Booleans; +-import org.elasticsearch.common.ParseField; +-import org.elasticsearch.common.ParsingException; +-import org.elasticsearch.common.Strings; ++import org.elasticsearch.common.*; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.io.stream.Writeable; +@@ -30,6 +27,8 @@ import org.elasticsearch.common.xcontent.ToXContentObject; + import org.elasticsearch.common.xcontent.XContentBuilder; + import org.elasticsearch.common.xcontent.XContentParser; + import org.elasticsearch.common.xcontent.support.XContentMapValues; ++import org.elasticsearch.index.get.ShardGetService; ++import org.elasticsearch.index.mapper.SourceFieldMapper; + import org.elasticsearch.rest.RestRequest; + + import java.io.IOException; +@@ -234,4 +233,24 @@ public class FetchSourceContext implements Writeable, ToXContentObject { + } + return filter; + } ++ ++ /** ++ * decides what needs to be done based on the request input and always returns a valid non-null FetchSourceContext ++ * @param gFields ++ * @param shardGetService ++ */ ++ public FetchSourceContext normalizeFetchSourceContent(@Nullable String[] gFields, ShardGetService shardGetService) { ++ if (this != null) { ++ return this; ++ } ++ if (gFields == null) { ++ return FETCH_SOURCE; ++ } ++ for (String field : gFields) { ++ if (SourceFieldMapper.NAME.equals(field)) { ++ return FETCH_SOURCE; ++ } ++ } ++ return DO_NOT_FETCH_SOURCE; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +old mode 100644 +new mode 100755 +index fba80d5f3c6..46fd0931fe9 +--- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java ++++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +@@ -34,6 +34,7 @@ import org.elasticsearch.common.util.concurrent.RefCounted; + import org.elasticsearch.common.util.iterable.Iterables; + import org.elasticsearch.index.cache.bitset.BitsetFilterCache; + import org.elasticsearch.index.fielddata.IndexFieldData; ++import org.elasticsearch.index.mapper.DocumentMapper; + import org.elasticsearch.index.mapper.MappedFieldType; + import 
org.elasticsearch.index.mapper.MapperService; + import org.elasticsearch.index.mapper.ObjectMapper; +@@ -403,6 +404,44 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas + /** Return a view of the additional query collectors that should be run for this context. */ + public abstract Map, Collector> queryCollectors(); + ++ /** ++ * Returns the best nested {@link ObjectMapper} instances that is in the scope of the specified nested docId. ++ * @param nestedDocId ++ * @param context ++ * @param documentMapper ++ */ ++ public ObjectMapper findNestedObjectMapper(int nestedDocId, LeafReaderContext context, DocumentMapper documentMapper) throws IOException { ++ ObjectMapper nestedObjectMapper = null; ++ for (ObjectMapper objectMapper : documentMapper.objectMappers().values()) { ++ if (!objectMapper.nested().isNested()) { ++ continue; ++ } ++ ++ Query filter = objectMapper.nestedTypeFilter(); ++ if (filter == null) { ++ continue; ++ } ++ // We can pass down 'null' as acceptedDocs, because nestedDocId is a doc to be fetched and ++ // therefor is guaranteed to be a live doc. ++ final Weight nestedWeight = filter.createWeight(searcher(), ScoreMode.COMPLETE_NO_SCORES, 1f); ++ Scorer scorer = nestedWeight.scorer(context); ++ if (scorer == null) { ++ continue; ++ } ++ ++ if (scorer.iterator().advance(nestedDocId) == nestedDocId) { ++ if (nestedObjectMapper == null) { ++ nestedObjectMapper = objectMapper; ++ } else { ++ if (nestedObjectMapper.fullPath().length() < objectMapper.fullPath().length()) { ++ nestedObjectMapper = objectMapper; ++ } ++ } ++ } ++ } ++ return nestedObjectMapper; ++ } ++ + /** + * The life time of an object that is used during search execution. + */ +diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortOrder.java b/server/src/main/java/org/elasticsearch/search/sort/SortOrder.java +old mode 100644 +new mode 100755 +index c6805cae58f..5710ff9d50b +--- a/server/src/main/java/org/elasticsearch/search/sort/SortOrder.java ++++ b/server/src/main/java/org/elasticsearch/search/sort/SortOrder.java +@@ -22,6 +22,7 @@ package org.elasticsearch.search.sort; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.io.stream.Writeable; ++import org.elasticsearch.index.reindex.ReindexRequest; + + import java.io.IOException; + import java.util.Locale; +@@ -63,4 +64,14 @@ public enum SortOrder implements Writeable { + public static SortOrder fromString(String op) { + return valueOf(op.toUpperCase(Locale.ROOT)); + } ++ ++ /** ++ * Add a sort against the given field name. 
++ * @param name The name of the field to sort by ++ * @param reindexRequest ++ */ ++ public ReindexRequest addSortField(String name, ReindexRequest reindexRequest) { ++ reindexRequest.getSearchRequest().source().sort(name, this); ++ return reindexRequest; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java b/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java +old mode 100644 +new mode 100755 +index 2847af386b2..831eb3c4388 +--- a/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java ++++ b/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java +@@ -19,6 +19,7 @@ + + package org.elasticsearch.snapshots; + ++import org.elasticsearch.cluster.SnapshotsInProgress; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.io.stream.Writeable; +@@ -99,4 +100,13 @@ public final class Snapshot implements Writeable { + snapshotId.writeTo(out); + } + ++ public SnapshotsInProgress.Entry snapshot(SnapshotsInProgress snapshotsInProgress) { ++ for (SnapshotsInProgress.Entry entry : snapshotsInProgress.entries()) { ++ final Snapshot curr = entry.snapshot(); ++ if (curr.equals(this)) { ++ return entry; ++ } ++ } ++ return null; ++ } + } +diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +old mode 100644 +new mode 100755 +index 65e1191211e..2166d00bd01 +--- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java ++++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +@@ -219,7 +219,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements + while (it.hasNext()) { + final Map.Entry> entry = it.next(); + final Snapshot snapshot = entry.getKey(); +- if (snapshotsInProgress == null || snapshotsInProgress.snapshot(snapshot) == null) { ++ if (snapshotsInProgress == null || snapshot.snapshot(snapshotsInProgress) == null) { + // abort any running snapshots of shards for the removed entry; + // this could happen if for some reason the cluster state update for aborting + // running shards is missed, then the snapshot is removed is a subsequent cluster +diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +old mode 100644 +new mode 100755 +index a6138b8f605..04201482997 +--- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java ++++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +@@ -498,7 +498,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus + if (hadAbortedInitializations) { + final SnapshotsInProgress snapshotsInProgress = newState.custom(SnapshotsInProgress.TYPE); + assert snapshotsInProgress != null; +- final SnapshotsInProgress.Entry entry = snapshotsInProgress.snapshot(snapshot.snapshot()); ++ final SnapshotsInProgress.Entry entry = snapshot.snapshot().snapshot(snapshotsInProgress); + assert entry != null; + endSnapshot(entry); + } +@@ -1156,7 +1156,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus + } + ClusterState.Builder clusterStateBuilder = ClusterState.builder(currentState); + SnapshotsInProgress snapshots = currentState.custom(SnapshotsInProgress.TYPE); +- SnapshotsInProgress.Entry snapshotEntry = snapshots != null ? 
snapshots.snapshot(snapshot) : null; ++ SnapshotsInProgress.Entry snapshotEntry = snapshots != null ? snapshot.snapshot(snapshots) : null; + if (snapshotEntry == null) { + // This snapshot is not running - delete + if (snapshots != null && !snapshots.entries().isEmpty()) { +diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java +old mode 100644 +new mode 100755 +index 1f89a7d88b9..275ee282181 +--- a/server/src/main/java/org/elasticsearch/tasks/Task.java ++++ b/server/src/main/java/org/elasticsearch/tasks/Task.java +@@ -20,10 +20,8 @@ + + package org.elasticsearch.tasks; + +-import org.elasticsearch.action.ActionResponse; + import org.elasticsearch.cluster.node.DiscoveryNode; + import org.elasticsearch.common.io.stream.NamedWriteable; +-import org.elasticsearch.common.xcontent.ToXContent; + import org.elasticsearch.common.xcontent.ToXContentObject; + + import java.io.IOException; +@@ -182,11 +180,4 @@ public class Task { + return new TaskResult(taskInfo(node.getId(), true), error); + } + +- public TaskResult result(DiscoveryNode node, ActionResponse response) throws IOException { +- if (response instanceof ToXContent) { +- return new TaskResult(taskInfo(node.getId(), true), (ToXContent) response); +- } else { +- throw new IllegalStateException("response has to implement ToXContent to be able to store the results"); +- } +- } + } +diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java +old mode 100644 +new mode 100755 +index 92c86a04cdb..689c034d3d6 +--- a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java ++++ b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java +@@ -227,7 +227,7 @@ public class TaskManager implements ClusterStateApplier { + } + final TaskResult taskResult; + try { +- taskResult = task.result(localNode, response); ++ taskResult = localNode.result(response, task); + } catch (IOException ex) { + logger.warn(() -> new ParameterizedMessage("couldn't store response {}", response), ex); + listener.onFailure(ex); +diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java +old mode 100644 +new mode 100755 +index 77a873316a5..3d8b7f8521a +--- a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java ++++ b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java +@@ -36,7 +36,6 @@ import org.elasticsearch.client.OriginSettingClient; + import org.elasticsearch.client.Requests; + import org.elasticsearch.cluster.ClusterState; + import org.elasticsearch.cluster.metadata.IndexMetaData; +-import org.elasticsearch.cluster.metadata.MappingMetaData; + import org.elasticsearch.cluster.service.ClusterService; + import org.elasticsearch.common.inject.Inject; + import org.elasticsearch.common.settings.Settings; +@@ -130,7 +129,7 @@ public class TaskResultsService { + }); + } else { + IndexMetaData metaData = state.getMetaData().index(TASK_INDEX); +- if (getTaskResultMappingVersion(metaData) < TASK_RESULT_MAPPING_VERSION) { ++ if (metaData.getTaskResultMappingVersion(this) < TASK_RESULT_MAPPING_VERSION) { + // The index already exists but doesn't have our mapping + client.admin().indices().preparePutMapping(TASK_INDEX).setType(TASK_TYPE) + .setSource(taskResultIndexMapping(), XContentType.JSON) +@@ -152,18 +151,6 @@ public class TaskResultsService { + } + } + +- private int 
getTaskResultMappingVersion(IndexMetaData metaData) { +- MappingMetaData mappingMetaData = metaData.getMappings().get(TASK_TYPE); +- if (mappingMetaData == null) { +- return 0; +- } +- @SuppressWarnings("unchecked") Map meta = (Map) mappingMetaData.sourceAsMap().get("_meta"); +- if (meta == null || meta.containsKey(TASK_RESULT_MAPPING_VERSION_META_FIELD) == false) { +- return 1; // The mapping was created before meta field was introduced +- } +- return (int) meta.get(TASK_RESULT_MAPPING_VERSION_META_FIELD); +- } +- + private void doStoreResult(TaskResult taskResult, ActionListener listener) { + IndexRequestBuilder index = client.prepareIndex(TASK_INDEX, TASK_TYPE, taskResult.getTask().getTaskId().toString()); + try (XContentBuilder builder = XContentFactory.contentBuilder(Requests.INDEX_CONTENT_TYPE)) { +diff --git a/server/src/main/java/org/elasticsearch/transport/ConnectionProfile.java b/server/src/main/java/org/elasticsearch/transport/ConnectionProfile.java +old mode 100644 +new mode 100755 +index 66db091557f..428950e58f9 +--- a/server/src/main/java/org/elasticsearch/transport/ConnectionProfile.java ++++ b/server/src/main/java/org/elasticsearch/transport/ConnectionProfile.java +@@ -145,6 +145,11 @@ public final class ConnectionProfile { + this.compressionEnabled = compressionEnabled; + } + ++ public boolean connectionProfileChanged(ConnectionProfile newProfile, RemoteClusterService remoteClusterService) { ++ return Objects.equals(getCompressionEnabled(), newProfile.getCompressionEnabled()) == false ++ || Objects.equals(getPingInterval(), newProfile.getPingInterval()) == false; ++ } ++ + /** + * A builder to build a new {@link ConnectionProfile} + */ +diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +old mode 100644 +new mode 100755 +index 4f690d12acf..0a43a1192be +--- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java ++++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +@@ -183,7 +183,7 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl + remote = new RemoteClusterConnection(settings, clusterAlias, seedList, transportService, numRemoteConnections, + getNodePredicate(settings), proxyAddress, connectionProfile); + remoteClusters.put(clusterAlias, remote); +- } else if (connectionProfileChanged(remote.getConnectionManager().getConnectionProfile(), connectionProfile)) { ++ } else if (remote.getConnectionManager().getConnectionProfile().connectionProfileChanged(connectionProfile, this)) { + // New ConnectionProfile. Must tear down existing connection + try { + IOUtils.close(remote); +@@ -416,11 +416,6 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl + return remoteClusters.values().stream().map(RemoteClusterConnection::getConnectionInfo); + } + +- private boolean connectionProfileChanged(ConnectionProfile oldProfile, ConnectionProfile newProfile) { +- return Objects.equals(oldProfile.getCompressionEnabled(), newProfile.getCompressionEnabled()) == false +- || Objects.equals(oldProfile.getPingInterval(), newProfile.getPingInterval()) == false; +- } +- + /** + * Collects all nodes of the given clusters and returns / passes a (clusterAlias, nodeId) to {@link DiscoveryNode} + * function on success. 
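Aside for readers skimming these hunks: nearly every change in this patch follows the same move-method shape, where an instance method that mostly inspects one of its parameters is relocated onto that parameter's class, and each call site flips from owner.method(arg) to arg.method(owner), often with the old owner passed along as a trailing (and sometimes unused) argument, as in ConnectionProfile.connectionProfileChanged just above. Below is a minimal, self-contained sketch of that shape; Profile, Service and MoveMethodSketch are hypothetical stand-ins rather than Elasticsearch classes, and the trailing receiver argument is omitted here because the moved check does not use it.

// MoveMethodSketch.java - illustrative only, not part of the patched sources
import java.util.Objects;

final class Profile {
    private final Boolean compress;
    private final Long pingIntervalMillis;

    Profile(Boolean compress, Long pingIntervalMillis) {
        this.compress = compress;
        this.pingIntervalMillis = pingIntervalMillis;
    }

    Boolean getCompress() { return compress; }
    Long getPingIntervalMillis() { return pingIntervalMillis; }

    // After the move: the comparison lives on the parameter's class,
    // mirroring ConnectionProfile.connectionProfileChanged(...) in the hunk above.
    boolean changedComparedTo(Profile newProfile) {
        return Objects.equals(getCompress(), newProfile.getCompress()) == false
            || Objects.equals(getPingIntervalMillis(), newProfile.getPingIntervalMillis()) == false;
    }
}

final class Service {
    // Before the move this check was a private helper of the service;
    // the call site now uses the former parameter as the receiver.
    boolean needsReconnect(Profile current, Profile incoming) {
        return current.changedComparedTo(incoming);
    }
}

public class MoveMethodSketch {
    public static void main(String[] args) {
        Profile current = new Profile(true, 5_000L);
        Profile incoming = new Profile(true, 10_000L);
        System.out.println(new Service().needsReconnect(current, incoming)); // prints true
    }
}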
+diff --git a/server/src/main/java/org/elasticsearch/transport/TransportRequest.java b/server/src/main/java/org/elasticsearch/transport/TransportRequest.java +old mode 100644 +new mode 100755 +index d6072fc9d0a..43f000de9e0 +--- a/server/src/main/java/org/elasticsearch/transport/TransportRequest.java ++++ b/server/src/main/java/org/elasticsearch/transport/TransportRequest.java +@@ -19,6 +19,7 @@ + + package org.elasticsearch.transport; + ++import org.elasticsearch.cluster.node.DiscoveryNode; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.tasks.TaskAwareRequest; +@@ -27,6 +28,20 @@ import org.elasticsearch.tasks.TaskId; + + import java.io.IOException; + + public abstract class TransportRequest extends TransportMessage implements TaskAwareRequest { ++ public <T extends TransportResponse> TransportFuture<T> submitRequest(DiscoveryNode node, String action, ++ TransportRequestOptions options, ++ TransportResponseHandler<T> handler, TransportService transportService) throws TransportException { ++ PlainTransportFuture<T> futureHandler = new PlainTransportFuture<>(handler); ++ try { ++ Transport.Connection connection = transportService.getConnection(node); ++ transportService.sendRequest(connection, action, this, options, futureHandler); ++ } catch (NodeNotConnectedException ex) { ++ // the caller might not handle this so we invoke the handler ++ futureHandler.handleException(ex); ++ } ++ return futureHandler; ++ } ++ + public static class Empty extends TransportRequest { + public static final Empty INSTANCE = new Empty(); + } +diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java +old mode 100644 +new mode 100755 +index a89784945db..46efa6017c2 +--- a/server/src/main/java/org/elasticsearch/transport/TransportService.java ++++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java +@@ -501,21 +501,7 @@ public class TransportService extends AbstractLifecycleComponent implements Tran + + public <T extends TransportResponse> TransportFuture<T> submitRequest(DiscoveryNode node, String action, TransportRequest request, + TransportResponseHandler<T> handler) throws TransportException { +- return submitRequest(node, action, request, TransportRequestOptions.EMPTY, handler); +- } +- +- public <T extends TransportResponse> TransportFuture<T> submitRequest(DiscoveryNode node, String action, TransportRequest request, +- TransportRequestOptions options, +- TransportResponseHandler<T> handler) throws TransportException { +- PlainTransportFuture<T> futureHandler = new PlainTransportFuture<>(handler); +- try { +- Transport.Connection connection = getConnection(node); +- sendRequest(connection, action, request, options, futureHandler); +- } catch (NodeNotConnectedException ex) { +- // the caller might not handle this so we invoke the handler +- futureHandler.handleException(ex); +- } +- return futureHandler; ++ return request.submitRequest(node, action, TransportRequestOptions.EMPTY, handler, this); + } + + public void sendRequest(final DiscoveryNode node, final String action, +diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequestTests.java +old mode 100644 +new mode 100755 +index 52a5e7d3ce3..5eeec765d30 +--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequestTests.java ++++ 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequestTests.java +@@ -70,17 +70,17 @@ public class AddVotingConfigExclusionsRequestTests extends ESTestCase { + final ClusterState clusterState = ClusterState.builder(new ClusterName("cluster")).nodes(new Builder() + .add(localNode).add(otherNode1).add(otherNode2).add(otherDataNode).localNodeId(localNode.getId())).build(); + +- assertThat(makeRequest().resolveVotingConfigExclusions(clusterState), ++ assertThat(clusterState.resolveVotingConfigExclusions(makeRequest()), + containsInAnyOrder(localNodeExclusion, otherNode1Exclusion, otherNode2Exclusion)); +- assertThat(makeRequest("_all").resolveVotingConfigExclusions(clusterState), ++ assertThat(clusterState.resolveVotingConfigExclusions(makeRequest("_all")), + containsInAnyOrder(localNodeExclusion, otherNode1Exclusion, otherNode2Exclusion)); +- assertThat(makeRequest("_local").resolveVotingConfigExclusions(clusterState), ++ assertThat(clusterState.resolveVotingConfigExclusions(makeRequest("_local")), + contains(localNodeExclusion)); +- assertThat(makeRequest("other*").resolveVotingConfigExclusions(clusterState), ++ assertThat(clusterState.resolveVotingConfigExclusions(makeRequest("other*")), + containsInAnyOrder(otherNode1Exclusion, otherNode2Exclusion)); + + assertThat(expectThrows(IllegalArgumentException.class, +- () -> makeRequest("not-a-node").resolveVotingConfigExclusions(clusterState)).getMessage(), ++ () -> clusterState.resolveVotingConfigExclusions(makeRequest("not-a-node"))).getMessage(), + equalTo("add voting config exclusions request for [not-a-node] matched no master-eligible nodes")); + } + +diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java +old mode 100644 +new mode 100755 +index 1999a18a92b..0c7f86b82f1 +--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java ++++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java +@@ -61,7 +61,7 @@ public class ClusterRerouteResponseTests extends ESTestCase { + ClusterState clusterState = ClusterState.builder(new ClusterName("test")).nodes(nodes).metaData(metaData).build(); + + RoutingExplanations routingExplanations = new RoutingExplanations(); +- routingExplanations.add(new RerouteExplanation(new AllocateReplicaAllocationCommand("index", 0, "node0"), Decision.YES)); ++ new RerouteExplanation(new AllocateReplicaAllocationCommand("index", 0, "node0"), Decision.YES).add(routingExplanations); + ClusterRerouteResponse clusterRerouteResponse = new ClusterRerouteResponse(true, clusterState, routingExplanations); + { + XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); +diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +old mode 100644 +new mode 100755 +index b0c2e34c306..d6457359bf0 +--- a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java ++++ b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +@@ -126,7 +126,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { + } + }; + registry = new AnalysisModule(environment, singletonList(plugin)).getAnalysisRegistry(); +- 
indexAnalyzers = registry.build(idxSettings); ++ indexAnalyzers = idxSettings.build(registry); + maxTokenCount = IndexSettings.MAX_TOKEN_COUNT_SETTING.getDefault(settings); + idxMaxTokenCount = idxSettings.getMaxTokenCount(); + } +diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java +old mode 100644 +new mode 100755 +index 1c279349274..310c1fc4362 +--- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java ++++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java +@@ -94,7 +94,7 @@ public class CreateIndexRequestTests extends ESTestCase { + alias.routing("1"); + alias.filter("{\"term\":{\"year\":2016}}"); + alias.writeIndex(true); +- request.alias(alias); ++ alias.alias(request); + + Settings.Builder settings = Settings.builder(); + settings.put(SETTING_NUMBER_OF_SHARDS, 10); +diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java +old mode 100644 +new mode 100755 +index b14bdd0ed98..544332ac561 +--- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java ++++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java +@@ -331,7 +331,7 @@ public class ShrinkIndexIT extends ESIntegTestCase { + assertHitCount(client().prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 2 * docs); + assertHitCount(client().prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")).get(), docs); + GetSettingsResponse target = client().admin().indices().prepareGetSettings("target").get(); +- assertEquals(version, target.getIndexToSettings().get("target").getAsVersion("index.version.created", null)); ++ assertEquals(version, null.getAsVersion("index.version.created", target.getIndexToSettings().get("target"))); + + // clean up + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put( +diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +old mode 100644 +new mode 100755 +index 0fecff449f9..7c72328b986 +--- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java ++++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +@@ -441,7 +441,7 @@ public class SplitIndexIT extends ESIntegTestCase { + 2 * docs); + assertHitCount(client().prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")).get(), docs); + GetSettingsResponse target = client().admin().indices().prepareGetSettings("target").get(); +- assertEquals(version, target.getIndexToSettings().get("target").getAsVersion("index.version.created", null)); ++ assertEquals(version, null.getAsVersion("index.version.created", target.getIndexToSettings().get("target"))); + } finally { + // clean up + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put( +diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java +old mode 100644 +new mode 
100755 +index ffbab5805c0..b6f3c278117 +--- a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java ++++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java +@@ -73,7 +73,7 @@ public class ResizeRequestTests extends ESTestCase { + alias.routing("1"); + alias.filter("{\"term\":{\"year\":2016}}"); + alias.writeIndex(true); +- target.alias(alias); ++ alias.alias(target); + Settings.Builder settings = Settings.builder(); + settings.put(SETTING_NUMBER_OF_SHARDS, 10); + target.settings(settings); +diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeActionTests.java +old mode 100644 +new mode 100755 +index 580ea764293..423dd540df3 +--- a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeActionTests.java ++++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeActionTests.java +@@ -204,7 +204,7 @@ public class TransportResizeActionTests extends ESTestCase { + DocsStats stats = new DocsStats(between(0, (IndexWriter.MAX_DOCS) / numSourceShards), between(1, 1000), between(1, 10000)); + ResizeRequest target = new ResizeRequest("target", indexName); + final ActiveShardCount activeShardCount = randomBoolean() ? ActiveShardCount.ALL : ActiveShardCount.ONE; +- target.setWaitForActiveShards(activeShardCount); ++ activeShardCount.setWaitForActiveShards(target); + CreateIndexClusterStateUpdateRequest request = TransportResizeAction.prepareCreateIndexRequest( + target, clusterState, (i) -> stats, indexName, "target"); + assertNotNull(request.recoverFrom()); +diff --git a/server/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/server/src/test/java/org/elasticsearch/action/bulk/RetryTests.java +old mode 100644 +new mode 100755 +index decee8ceab7..7fdd36c098a +--- a/server/src/test/java/org/elasticsearch/action/bulk/RetryTests.java ++++ b/server/src/test/java/org/elasticsearch/action/bulk/RetryTests.java +@@ -84,8 +84,7 @@ public class RetryTests extends ESTestCase { + BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL); + + BulkRequest bulkRequest = createBulkRequest(); +- BulkResponse response = new Retry(backoff, bulkClient.threadPool()) +- .withBackoff(bulkClient::bulk, bulkRequest) ++ BulkResponse response = bulkRequest.withBackoff(bulkClient::bulk, new Retry(backoff, bulkClient.threadPool())) + .actionGet(); + + assertFalse(response.hasFailures()); +@@ -96,8 +95,7 @@ public class RetryTests extends ESTestCase { + BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL - 1); + + BulkRequest bulkRequest = createBulkRequest(); +- BulkResponse response = new Retry(backoff, bulkClient.threadPool()) +- .withBackoff(bulkClient::bulk, bulkRequest) ++ BulkResponse response = bulkRequest.withBackoff(bulkClient::bulk, new Retry(backoff, bulkClient.threadPool())) + .actionGet(); + + assertTrue(response.hasFailures()); +diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +old mode 100644 +new mode 100755 +index 01f1109ef3b..3620c931b94 +--- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java ++++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +@@ -104,7 +104,7 @@ public class 
MultiSearchActionTookTests extends ESTestCase { + } + + private void runTestTook(boolean controlledClock) throws Exception { +- MultiSearchRequest multiSearchRequest = new MultiSearchRequest().add(new SearchRequest()); ++ MultiSearchRequest multiSearchRequest = new SearchRequest().add(new MultiSearchRequest()); + AtomicLong expected = new AtomicLong(); + + TransportMultiSearchAction action = createTransportMultiSearchAction(controlledClock, expected); +diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +old mode 100644 +new mode 100755 +index afe957e2bf3..f8c35e406ff +--- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java ++++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +@@ -244,7 +244,7 @@ public class MultiSearchRequestTests extends ESTestCase { + RestMultiSearchAction.parseMultiLineRequest(restRequest, SearchRequest.DEFAULT_INDICES_OPTIONS, true, + (searchRequest, parser) -> { + searchRequest.source(SearchSourceBuilder.fromXContent(parser, false)); +- request.add(searchRequest); ++ searchRequest.add(request); + }); + return request; + } +@@ -269,7 +269,7 @@ public class MultiSearchRequestTests extends ESTestCase { + if (searchSourceBuilder.equals(new SearchSourceBuilder()) == false) { + r.source(searchSourceBuilder); + } +- parsedRequest.add(r); ++ r.add(parsedRequest); + }; + MultiSearchRequest.readMultiLineFormat(new BytesArray(originalBytes), xContentType.xContent(), + consumer, null, null, null, null, null, null, xContentRegistry(), true); +@@ -287,7 +287,7 @@ public class MultiSearchRequestTests extends ESTestCase { + mutators.add(() -> mutation.indicesOptions(randomValueOtherThan(searchRequest.indicesOptions(), + () -> IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())))); + mutators.add(() -> mutation.maxConcurrentSearchRequests(randomIntBetween(1, 32))); +- mutators.add(() -> mutation.add(createSimpleSearchRequest())); ++ mutators.add(() -> createSimpleSearchRequest().add(mutation)); + randomFrom(mutators).run(); + return mutation; + } +@@ -299,7 +299,7 @@ public class MultiSearchRequestTests extends ESTestCase { + } + copy.indicesOptions(request.indicesOptions()); + for (SearchRequest searchRequest : request.requests()) { +- copy.add(searchRequest); ++ searchRequest.add(copy); + } + return copy; + } +@@ -326,7 +326,7 @@ public class MultiSearchRequestTests extends ESTestCase { + msearchDefault.forbidClosedIndices(), msearchDefault.ignoreAliases(), msearchDefault.ignoreThrottled() + )); + +- request.add(searchRequest); ++ searchRequest.add(request); + } + return request; + } +diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java +old mode 100644 +new mode 100755 +index 7ecc172924b..8a8b56cded0 +--- a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java ++++ b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java +@@ -137,7 +137,7 @@ public class TransportMultiSearchActionTests extends ESTestCase { + MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + multiSearchRequest.maxConcurrentSearchRequests(maxAllowedConcurrentSearches); + for (int i = 0; i < numSearchRequests; i++) { +- multiSearchRequest.add(new 
SearchRequest()); ++ new SearchRequest().add(multiSearchRequest); + } + + MultiSearchResponse response = ActionTestUtils.executeBlocking(action, multiSearchRequest); +diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java +old mode 100644 +new mode 100755 +index c959e3ed45d..36f3304ad84 +--- a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java ++++ b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java +@@ -131,7 +131,7 @@ public class ReplicationOperationTests extends ESTestCase { + assertThat(shardInfo.getFailures(), arrayWithSize(reportedFailures.size())); + assertThat(shardInfo.getSuccessful(), equalTo(1 + expectedReplicas.size() - simulatedFailures.size())); + final List unassignedShards = +- indexShardRoutingTable.shardsWithState(ShardRoutingState.UNASSIGNED); ++ ShardRoutingState.UNASSIGNED.shardsWithState(indexShardRoutingTable); + final int totalShards = 1 + expectedReplicas.size() + unassignedShards.size() + untrackedShards.size(); + assertThat(replicationGroup.toString(), shardInfo.getTotal(), equalTo(totalShards)); + +diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +old mode 100644 +new mode 100755 +index 4459aa55569..510189c9887 +--- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java ++++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +@@ -463,8 +463,7 @@ public class TransportReplicationActionTests extends ESTestCase { + assertTrue(request.isRetrySet.get()); + + // finish relocation +- ShardRouting relocationTarget = clusterService.state().getRoutingTable().shardRoutingTable(shardId) +- .shardsWithState(ShardRoutingState.INITIALIZING).get(0); ++ ShardRouting relocationTarget = ShardRoutingState.INITIALIZING.shardsWithState(clusterService.state().getRoutingTable().shardRoutingTable(shardId)).get(0); + AllocationService allocationService = ESAllocationTestCase.createAllocationService(); + ClusterState updatedState = allocationService.applyStartedShards(state, Collections.singletonList(relocationTarget)); + +diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java +old mode 100644 +new mode 100755 +index 5daa863402b..e6200ea9d13 +--- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java ++++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java +@@ -70,7 +70,6 @@ import org.elasticsearch.discovery.DiscoveryModule; + import org.elasticsearch.discovery.SeedHostsProvider.HostsResolver; + import org.elasticsearch.env.NodeEnvironment; + import org.elasticsearch.gateway.MetaStateService; +-import org.elasticsearch.gateway.MockGatewayMetaState; + import org.elasticsearch.indices.cluster.FakeThreadPoolMasterService; + import org.elasticsearch.test.ESTestCase; + import org.elasticsearch.test.MockLogAppender; +@@ -1719,8 +1718,7 @@ public class CoordinatorTests extends ESTestCase { + if (rarely()) { + nodeEnvironment = newNodeEnvironment(); + nodeEnvironments.add(nodeEnvironment); +- 
delegate = new MockGatewayMetaState(Settings.EMPTY, nodeEnvironment, xContentRegistry(), localNode) +- .getPersistedState(Settings.EMPTY, null); ++ delegate = null.getPersistedState(Settings.EMPTY); + } else { + nodeEnvironment = null; + delegate = new InMemoryPersistedState(0L, +@@ -1748,8 +1746,7 @@ public class CoordinatorTests extends ESTestCase { + new Manifest(updatedTerm, manifest.getClusterStateVersion(), manifest.getGlobalGeneration(), + manifest.getIndexGenerations())); + } +- delegate = new MockGatewayMetaState(Settings.EMPTY, nodeEnvironment, xContentRegistry(), newLocalNode) +- .getPersistedState(Settings.EMPTY, null); ++ delegate = null.getPersistedState(Settings.EMPTY); + } else { + nodeEnvironment = null; + BytesStreamOutput outStream = new BytesStreamOutput(); +diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java +old mode 100644 +new mode 100755 +index b96d5eacb15..9086ca0637a +--- a/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java ++++ b/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java +@@ -146,7 +146,7 @@ public class AutoExpandReplicasTests extends ESTestCase { + while (state.routingTable().index("index").shard(0).allShardsStarted() == false) { + logger.info(state); + state = cluster.applyStartedShards(state, +- state.routingTable().index("index").shard(0).shardsWithState(ShardRoutingState.INITIALIZING)); ++ ShardRoutingState.INITIALIZING.shardsWithState(state.routingTable().index("index").shard(0))); + state = cluster.reroute(state, new ClusterRerouteRequest()); + } + +diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java +old mode 100644 +new mode 100755 +index 851fe9c5502..6f77619a4c4 +--- a/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java ++++ b/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java +@@ -334,7 +334,7 @@ public class RoutingTableTests extends ESAllocationTestCase { + indexMetaData = updateActiveAllocations(indexRoutingTable, indexMetaData); + MetaData metaData = MetaData.builder().put(indexMetaData, true).build(); + // test no validation errors +- assertTrue(indexRoutingTable.validate(metaData)); ++ assertTrue(metaData.validate(indexRoutingTable)); + // test wrong number of shards causes validation errors + indexMetaData = IndexMetaData.builder(indexName) + .settings(settings(Version.CURRENT)) +@@ -342,7 +342,7 @@ public class RoutingTableTests extends ESAllocationTestCase { + .numberOfReplicas(numReplicas) + .build(); + final MetaData metaData2 = MetaData.builder().put(indexMetaData, true).build(); +- expectThrows(IllegalStateException.class, () -> indexRoutingTable.validate(metaData2)); ++ expectThrows(IllegalStateException.class, () -> metaData2.validate(indexRoutingTable)); + // test wrong number of replicas causes validation errors + indexMetaData = IndexMetaData.builder(indexName) + .settings(settings(Version.CURRENT)) +@@ -350,7 +350,7 @@ public class RoutingTableTests extends ESAllocationTestCase { + .numberOfReplicas(numReplicas + 1) + .build(); + final MetaData metaData3 = MetaData.builder().put(indexMetaData, true).build(); +- expectThrows(IllegalStateException.class, () -> indexRoutingTable.validate(metaData3)); ++ expectThrows(IllegalStateException.class, () -> 
metaData3.validate(indexRoutingTable)); + // test wrong number of shards and replicas causes validation errors + indexMetaData = IndexMetaData.builder(indexName) + .settings(settings(Version.CURRENT)) +@@ -358,7 +358,7 @@ public class RoutingTableTests extends ESAllocationTestCase { + .numberOfReplicas(numReplicas + 1) + .build(); + final MetaData metaData4 = MetaData.builder().put(indexMetaData, true).build(); +- expectThrows(IllegalStateException.class, () -> indexRoutingTable.validate(metaData4)); ++ expectThrows(IllegalStateException.class, () -> metaData4.validate(indexRoutingTable)); + } + + public void testDistinctNodes() { +diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java +old mode 100644 +new mode 100755 +index 2e56ae6297b..bd6eda97511 +--- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java ++++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java +@@ -184,7 +184,7 @@ public class FailedNodeRoutingTests extends ESAllocationTestCase { + Version newPrimaryVersion = getNodeVersion(newPrimary, compareState); + + logger.info("--> new primary is on version {}: {}", newPrimaryVersion, newPrimary); +- compareState.routingTable().shardRoutingTable(newPrimary.shardId()).shardsWithState(STARTED) ++ STARTED.shardsWithState(compareState.routingTable().shardRoutingTable(newPrimary.shardId())) + .stream() + .forEach(sr -> { + Version candidateVer = getNodeVersion(sr, compareState); +diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +old mode 100644 +new mode 100755 +index 24838b22d47..36bedba72f4 +--- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java ++++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +@@ -633,7 +633,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { + usages.put("node2", new DiskUsage("node2", "n2", "/dev/null", 100, 50)); // 50% used + usages.put("node3", new DiskUsage("node3", "n3", "/dev/null", 100, 0)); // 100% used + +- DiskUsage node1Usage = decider.averageUsage(rn, usages.build()); ++ DiskUsage node1Usage = rn.averageUsage(usages.build(), decider); + assertThat(node1Usage.getTotalBytes(), equalTo(100L)); + assertThat(node1Usage.getFreeBytes(), equalTo(25L)); + } +diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java +old mode 100644 +new mode 100755 +index 2ca7ba7d1a3..a5cc9434925 +--- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java ++++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java +@@ -97,7 +97,7 @@ public class FilterAllocationDeciderTests extends ESAllocationTestCase { + assertEquals(routingTable.index("idx").shard(0).primaryShard().state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).primaryShard().currentNodeId(), "node2"); + +- state = service.applyStartedShards(state, 
routingTable.index("idx").shard(0).shardsWithState(INITIALIZING)); ++ state = service.applyStartedShards(state, INITIALIZING.shardsWithState(routingTable.index("idx").shard(0))); + routingTable = state.routingTable(); + + // ok now we are started and can be allocated anywhere!! lets see... +diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java +old mode 100644 +new mode 100755 +index 7442d3c8d80..d662d5a7165 +--- a/server/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java ++++ b/server/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java +@@ -84,11 +84,11 @@ public class GeoDistanceTests extends ESTestCase { + } + + private static double arcDistance(GeoPoint p1, GeoPoint p2) { +- return GeoDistance.ARC.calculate(p1.lat(), p1.lon(), p2.lat(), p2.lon(), DistanceUnit.METERS); ++ return DistanceUnit.METERS.calculate(p1.lat(), p1.lon(), p2.lat(), p2.lon(), GeoDistance.ARC); + } + + private static double planeDistance(GeoPoint p1, GeoPoint p2) { +- return GeoDistance.PLANE.calculate(p1.lat(), p1.lon(), p2.lat(), p2.lon(), DistanceUnit.METERS); ++ return DistanceUnit.METERS.calculate(p1.lat(), p1.lon(), p2.lat(), p2.lon(), GeoDistance.PLANE); + } + + public void testArcDistanceVsPlane() { +diff --git a/server/src/test/java/org/elasticsearch/document/AliasedIndexDocumentActionsIT.java b/server/src/test/java/org/elasticsearch/document/AliasedIndexDocumentActionsIT.java +old mode 100644 +new mode 100755 +index 8d9cacd5f0e..8113995fbc1 +--- a/server/src/test/java/org/elasticsearch/document/AliasedIndexDocumentActionsIT.java ++++ b/server/src/test/java/org/elasticsearch/document/AliasedIndexDocumentActionsIT.java +@@ -34,9 +34,8 @@ public class AliasedIndexDocumentActionsIT extends DocumentActionsIT { + // ignore + } + logger.info("--> creating index test"); +- client().admin().indices().create(createIndexRequest("test1") +- .mapping("type1", "name", "type=keyword,store=true") +- .alias(new Alias("test"))).actionGet(); ++ client().admin().indices().create(new Alias("test").alias(createIndexRequest("test1") ++ .mapping("type1", "name", "type=keyword,store=true"))).actionGet(); + } + + @Override +diff --git a/server/src/test/java/org/elasticsearch/gateway/ClusterStateUpdatersTests.java b/server/src/test/java/org/elasticsearch/gateway/ClusterStateUpdatersTests.java +old mode 100644 +new mode 100755 +index cf4067694c2..d861734ba6e +--- a/server/src/test/java/org/elasticsearch/gateway/ClusterStateUpdatersTests.java ++++ b/server/src/test/java/org/elasticsearch/gateway/ClusterStateUpdatersTests.java +@@ -209,11 +209,11 @@ public class ClusterStateUpdatersTests extends ESTestCase { + .builder(ClusterState.EMPTY_STATE) + .metaData(MetaData.builder().put(metaData, false).build()) + .build(); +- assertFalse(initialState.routingTable().hasIndex(index)); ++ assertFalse(index.hasIndex(initialState.routingTable())); + + { + final ClusterState newState = updateRoutingTable(initialState); +- assertTrue(newState.routingTable().hasIndex(index)); ++ assertTrue(index.hasIndex(newState.routingTable())); + assertThat(newState.routingTable().version(), is(0L)); + assertThat(newState.routingTable().allShards(index.getName()).size(), is(numOfShards)); + } +@@ -224,7 +224,7 @@ public class ClusterStateUpdatersTests extends ESTestCase { + .state(IndexMetaData.State.CLOSE)) + .build()) + .build()); +- assertFalse(newState.routingTable().hasIndex(index)); ++ 
assertFalse(index.hasIndex(newState.routingTable())); + } + { + final ClusterState newState = updateRoutingTable(ClusterState.builder(initialState) +@@ -237,7 +237,7 @@ public class ClusterStateUpdatersTests extends ESTestCase { + .build()) + ).build()) + .build()); +- assertTrue(newState.routingTable().hasIndex(index)); ++ assertTrue(index.hasIndex(newState.routingTable())); + assertThat(newState.routingTable().version(), is(0L)); + assertThat(newState.routingTable().allShards(index.getName()).size(), is(numOfShards)); + } +diff --git a/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java b/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java +old mode 100644 +new mode 100755 +index f7437ad8ec5..456bed83fd8 +--- a/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java ++++ b/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java +@@ -88,7 +88,7 @@ public class AnalysisRegistryTests extends ESTestCase { + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); +- IndexAnalyzers indexAnalyzers = emptyRegistry.build(idxSettings); ++ IndexAnalyzers indexAnalyzers = idxSettings.build(emptyRegistry); + assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); + assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); + assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); +@@ -186,8 +186,8 @@ public class AnalysisRegistryTests extends ESTestCase { + return singletonMap("mock", MockFactory::new); + } + }; +- IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings), +- singletonList(plugin)).getAnalysisRegistry().build(idxSettings); ++ IndexAnalyzers indexAnalyzers = idxSettings.build(new AnalysisModule(TestEnvironment.newEnvironment(settings), ++ singletonList(plugin)).getAnalysisRegistry()); + + // This shouldn't contain English stopwords + try (NamedAnalyzer custom_analyser = indexAnalyzers.get("custom_analyzer_with_camel_case")) { +@@ -223,8 +223,8 @@ public class AnalysisRegistryTests extends ESTestCase { + Settings indexSettings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); +- IndexAnalyzers indexAnalyzers = emptyAnalysisRegistry(settings).build(idxSettings); +- IndexAnalyzers otherIndexAnalyzers = emptyAnalysisRegistry(settings).build(idxSettings); ++ IndexAnalyzers indexAnalyzers = idxSettings.build(emptyAnalysisRegistry(settings)); ++ IndexAnalyzers otherIndexAnalyzers = idxSettings.build(emptyAnalysisRegistry(settings)); + final int numIters = randomIntBetween(5, 20); + for (int i = 0; i < numIters; i++) { + PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(random(), PreBuiltAnalyzers.values()); +@@ -243,12 +243,12 @@ public class AnalysisRegistryTests extends ESTestCase { + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + +- IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> emptyAnalysisRegistry(settings).build(idxSettings)); ++ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> idxSettings.build(emptyAnalysisRegistry(settings))); + 
assertThat(e.getMessage(), equalTo("analyzer [test_analyzer] must specify either an analyzer type, or a tokenizer")); + } + + public void testCloseIndexAnalyzersMultipleTimes() throws IOException { +- IndexAnalyzers indexAnalyzers = emptyRegistry.build(indexSettingsOfCurrentVersion(Settings.builder())); ++ IndexAnalyzers indexAnalyzers = indexSettingsOfCurrentVersion(Settings.builder()).build(emptyRegistry); + indexAnalyzers.close(); + indexAnalyzers.close(); + } +diff --git a/server/src/test/java/org/elasticsearch/index/analysis/NamedAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/NamedAnalyzerTests.java +old mode 100644 +new mode 100755 +index e0f4a37c57f..429c866aaf7 +--- a/server/src/test/java/org/elasticsearch/index/analysis/NamedAnalyzerTests.java ++++ b/server/src/test/java/org/elasticsearch/index/analysis/NamedAnalyzerTests.java +@@ -29,29 +29,29 @@ public class NamedAnalyzerTests extends ESTestCase { + public void testCheckAllowedInMode() { + try (NamedAnalyzer testAnalyzer = new NamedAnalyzer("my_analyzer", AnalyzerScope.INDEX, + createAnalyzerWithMode("my_analyzer", AnalysisMode.INDEX_TIME), Integer.MIN_VALUE)) { +- testAnalyzer.checkAllowedInMode(AnalysisMode.INDEX_TIME); +- MapperException ex = expectThrows(MapperException.class, () -> testAnalyzer.checkAllowedInMode(AnalysisMode.SEARCH_TIME)); ++ AnalysisMode.INDEX_TIME.checkAllowedInMode(testAnalyzer); ++ MapperException ex = expectThrows(MapperException.class, () -> AnalysisMode.SEARCH_TIME.checkAllowedInMode(testAnalyzer)); + assertEquals("analyzer [my_analyzer] contains filters [my_analyzer] that are not allowed to run in search time mode.", + ex.getMessage()); +- ex = expectThrows(MapperException.class, () -> testAnalyzer.checkAllowedInMode(AnalysisMode.ALL)); ++ ex = expectThrows(MapperException.class, () -> AnalysisMode.ALL.checkAllowedInMode(testAnalyzer)); + assertEquals("analyzer [my_analyzer] contains filters [my_analyzer] that are not allowed to run in all mode.", ex.getMessage()); + } + + try (NamedAnalyzer testAnalyzer = new NamedAnalyzer("my_analyzer", AnalyzerScope.INDEX, + createAnalyzerWithMode("my_analyzer", AnalysisMode.SEARCH_TIME), Integer.MIN_VALUE)) { +- testAnalyzer.checkAllowedInMode(AnalysisMode.SEARCH_TIME); +- MapperException ex = expectThrows(MapperException.class, () -> testAnalyzer.checkAllowedInMode(AnalysisMode.INDEX_TIME)); ++ AnalysisMode.SEARCH_TIME.checkAllowedInMode(testAnalyzer); ++ MapperException ex = expectThrows(MapperException.class, () -> AnalysisMode.INDEX_TIME.checkAllowedInMode(testAnalyzer)); + assertEquals("analyzer [my_analyzer] contains filters [my_analyzer] that are not allowed to run in index time mode.", + ex.getMessage()); +- ex = expectThrows(MapperException.class, () -> testAnalyzer.checkAllowedInMode(AnalysisMode.ALL)); ++ ex = expectThrows(MapperException.class, () -> AnalysisMode.ALL.checkAllowedInMode(testAnalyzer)); + assertEquals("analyzer [my_analyzer] contains filters [my_analyzer] that are not allowed to run in all mode.", ex.getMessage()); + } + + try (NamedAnalyzer testAnalyzer = new NamedAnalyzer("my_analyzer", AnalyzerScope.INDEX, + createAnalyzerWithMode("my_analyzer", AnalysisMode.ALL), Integer.MIN_VALUE)) { +- testAnalyzer.checkAllowedInMode(AnalysisMode.ALL); +- testAnalyzer.checkAllowedInMode(AnalysisMode.INDEX_TIME); +- testAnalyzer.checkAllowedInMode(AnalysisMode.SEARCH_TIME); ++ AnalysisMode.ALL.checkAllowedInMode(testAnalyzer); ++ AnalysisMode.INDEX_TIME.checkAllowedInMode(testAnalyzer); ++ 
AnalysisMode.SEARCH_TIME.checkAllowedInMode(testAnalyzer); + } + } + +diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java +old mode 100644 +new mode 100755 +index f53610d23aa..17c84a5aa55 +--- a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java ++++ b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java +@@ -55,7 +55,7 @@ public class IdFieldTypeTests extends FieldTypeTestCase { + IndexMetaData indexMetaData = IndexMetaData.builder(IndexMetaData.INDEX_UUID_NA_VALUE).settings(indexSettings).build(); + IndexSettings mockSettings = new IndexSettings(indexMetaData, Settings.EMPTY); + Mockito.when(context.getIndexSettings()).thenReturn(mockSettings); +- Mockito.when(context.indexVersionCreated()).thenReturn(indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null)); ++ Mockito.when(context.indexVersionCreated()).thenReturn(null.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, indexSettings)); + + MapperService mapperService = Mockito.mock(MapperService.class); + Collection types = Collections.emptySet(); +diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +old mode 100644 +new mode 100755 +index 4a77160ce36..ad892995ead +--- a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java ++++ b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +@@ -489,9 +489,9 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { + + shardStats.add(successfulShardStats); + +- when(mockIndicesService.indexShardStats(mockIndicesService, shard, CommonStatsFlags.ALL)).thenReturn(successfulShardStats); ++ when(CommonStatsFlags.ALL.indexShardStats(mockIndicesService, shard, mockIndicesService)).thenReturn(successfulShardStats); + } else { +- when(mockIndicesService.indexShardStats(mockIndicesService, shard, CommonStatsFlags.ALL)).thenThrow(expectedException); ++ when(CommonStatsFlags.ALL.indexShardStats(mockIndicesService, shard, mockIndicesService)).thenThrow(expectedException); + } + } + +diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +old mode 100644 +new mode 100755 +index a56834a4caf..23320e181e8 +--- a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java ++++ b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +@@ -92,7 +92,7 @@ public class AnalysisModuleTests extends ESTestCase { + + public IndexAnalyzers getIndexAnalyzers(AnalysisRegistry registry, Settings settings) throws IOException { + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); +- return registry.build(idxSettings); ++ return idxSettings.build(registry); + } + + public AnalysisRegistry getNewRegistry(Settings settings) { +diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +old mode 100644 +new mode 100755 +index b00e89575cc..986f27507ba +--- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java ++++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +@@ -164,7 +164,7 @@ public class 
RecoverySourceHandlerTests extends ESTestCase { + int totalTranslogOps, ActionListener listener) { + try { + if (position == 0) { +- out = new IndexOutputOutputStream(targetStore.createVerifyingOutput(md.name(), md, IOContext.DEFAULT)) { ++ out = new IndexOutputOutputStream(md.createVerifyingOutput(md.name(), IOContext.DEFAULT, targetStore)) { + @Override + public void close() throws IOException { + super.close(); +@@ -351,7 +351,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { + int totalTranslogOps, ActionListener listener) { + try { + if (position == 0) { +- out = new IndexOutputOutputStream(targetStore.createVerifyingOutput(md.name(), md, IOContext.DEFAULT)) { ++ out = new IndexOutputOutputStream(md.createVerifyingOutput(md.name(), IOContext.DEFAULT, targetStore)) { + @Override + public void close() throws IOException { + super.close(); +@@ -718,7 +718,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { + digest.update(buffer, 0, buffer.length); + StoreFileMetaData md = new StoreFileMetaData("test-" + i, buffer.length + 8, + Store.digestToString(digest.getValue()), org.apache.lucene.util.Version.LATEST); +- try (OutputStream out = new IndexOutputOutputStream(store.createVerifyingOutput(md.name(), md, IOContext.DEFAULT))) { ++ try (OutputStream out = new IndexOutputOutputStream(md.createVerifyingOutput(md.name(), IOContext.DEFAULT, store))) { + out.write(buffer); + out.write(Numbers.longToBytes(digest.getValue())); + } +diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +old mode 100644 +new mode 100755 +index c04dc7be026..fb051110f76 +--- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java ++++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +@@ -171,7 +171,7 @@ public class RestControllerTests extends ESTestCase { + // don't want to test everything -- just that it actually wraps the handler + doCallRealMethod().when(controller).registerAsDeprecatedHandler(method, path, handler, deprecationMessage, logger); + +- controller.registerAsDeprecatedHandler(method, path, handler, deprecationMessage, logger); ++ logger.registerAsDeprecatedHandler(method, path, handler, deprecationMessage, controller); + + verify(controller).registerHandler(eq(method), eq(path), any(DeprecationRestHandler.class)); + } +diff --git a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java +old mode 100644 +new mode 100755 +index 114e9de5700..e98ed820df1 +--- a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java ++++ b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java +@@ -100,9 +100,9 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { + .get(); + assertOrderedSearchHits(searchResponse, "d1", "d2"); + assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(2, 2, 3, 2, DistanceUnit.METERS), 10d)); ++ closeTo(DistanceUnit.METERS.calculate(2, 2, 3, 2, GeoDistance.ARC), 10d)); + assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(2, 1, 5, 1, DistanceUnit.METERS), 10d)); ++ closeTo(DistanceUnit.METERS.calculate(2, 1, 5, 1, GeoDistance.ARC), 10d)); + + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) +@@ -110,9 +110,9 @@ public class 
GeoDistanceSortBuilderIT extends ESIntegTestCase { + .get(); + assertOrderedSearchHits(searchResponse, "d2", "d1"); + assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(2, 1, 5, 1, DistanceUnit.METERS), 10d)); ++ closeTo(DistanceUnit.METERS.calculate(2, 1, 5, 1, GeoDistance.ARC), 10d)); + assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(2, 2, 3, 2, DistanceUnit.METERS), 10d)); ++ closeTo(DistanceUnit.METERS.calculate(2, 2, 3, 2, GeoDistance.ARC), 10d)); + + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) +@@ -120,9 +120,9 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { + .get(); + assertOrderedSearchHits(searchResponse, "d1", "d2"); + assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(2, 2, 4, 1, DistanceUnit.METERS), 10d)); ++ closeTo(DistanceUnit.METERS.calculate(2, 2, 4, 1, GeoDistance.ARC), 10d)); + assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(2, 1, 6, 2, DistanceUnit.METERS), 10d)); ++ closeTo(DistanceUnit.METERS.calculate(2, 1, 6, 2, GeoDistance.ARC), 10d)); + + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) +@@ -130,9 +130,9 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { + .get(); + assertOrderedSearchHits(searchResponse, "d2", "d1"); + assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(2, 1, 6, 2, DistanceUnit.METERS), 10d)); ++ closeTo(DistanceUnit.METERS.calculate(2, 1, 6, 2, GeoDistance.ARC), 10d)); + assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(2, 2, 4, 1, DistanceUnit.METERS), 10d)); ++ closeTo(DistanceUnit.METERS.calculate(2, 2, 4, 1, GeoDistance.ARC), 10d)); + } + + public void testSingeToManyAvgMedian() throws ExecutionException, InterruptedException, IOException { +@@ -167,9 +167,9 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { + .get(); + assertOrderedSearchHits(searchResponse, "d2", "d1"); + assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(0, 0, 0, 4, DistanceUnit.METERS), 10d)); ++ closeTo(DistanceUnit.METERS.calculate(0, 0, 0, 4, GeoDistance.ARC), 10d)); + assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(0, 0, 0, 5, DistanceUnit.METERS), 10d)); ++ closeTo(DistanceUnit.METERS.calculate(0, 0, 0, 5, GeoDistance.ARC), 10d)); + + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) +@@ -177,9 +177,9 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { + .get(); + assertOrderedSearchHits(searchResponse, "d1", "d2"); + assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(0, 0, 0, 4, DistanceUnit.METERS), 10d)); ++ closeTo(DistanceUnit.METERS.calculate(0, 0, 0, 4, GeoDistance.ARC), 10d)); + assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(0, 0, 0, 5, DistanceUnit.METERS), 10d)); ++ closeTo(DistanceUnit.METERS.calculate(0, 0, 0, 5, GeoDistance.ARC), 10d)); + } + + protected void createShuffeldJSONArray(XContentBuilder builder, GeoPoint[] pointsArray) throws IOException { +@@ -241,9 +241,9 @@ public class GeoDistanceSortBuilderIT extends 
ESIntegTestCase { + .get(); + assertOrderedSearchHits(searchResponse, "d1", "d2"); + assertThat((Double) searchResponse.getHits().getAt(0).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(2.5, 1, 2, 1, DistanceUnit.METERS), 1.e-1)); ++ closeTo(DistanceUnit.METERS.calculate(2.5, 1, 2, 1, GeoDistance.ARC), 1.e-1)); + assertThat((Double) searchResponse.getHits().getAt(1).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(4.5, 1, 2, 1, DistanceUnit.METERS), 1.e-1)); ++ closeTo(DistanceUnit.METERS.calculate(4.5, 1, 2, 1, GeoDistance.ARC), 1.e-1)); + + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) +@@ -251,9 +251,9 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { + .get(); + assertOrderedSearchHits(searchResponse, "d1", "d2"); + assertThat((Double) searchResponse.getHits().getAt(0).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(3.25, 4, 2, 1, DistanceUnit.METERS), 1.e-1)); ++ closeTo(DistanceUnit.METERS.calculate(3.25, 4, 2, 1, GeoDistance.ARC), 1.e-1)); + assertThat((Double) searchResponse.getHits().getAt(1).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(5.25, 4, 2, 1, DistanceUnit.METERS), 1.e-1)); ++ closeTo(DistanceUnit.METERS.calculate(5.25, 4, 2, 1, GeoDistance.ARC), 1.e-1)); + + } + +@@ -324,9 +324,9 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { + private static void checkCorrectSortOrderForGeoSort(SearchResponse searchResponse) { + assertOrderedSearchHits(searchResponse, "d2", "d1"); + assertThat((Double) searchResponse.getHits().getAt(0).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(2, 2, 1, 2, DistanceUnit.METERS), 1.e-1)); ++ closeTo(DistanceUnit.METERS.calculate(2, 2, 1, 2, GeoDistance.ARC), 1.e-1)); + assertThat((Double) searchResponse.getHits().getAt(1).getSortValues()[0], +- closeTo(GeoDistance.ARC.calculate(2, 2, 1, 1, DistanceUnit.METERS), 1.e-1)); ++ closeTo(DistanceUnit.METERS.calculate(2, 2, 1, 1, GeoDistance.ARC), 1.e-1)); + } + + public void testCrossIndexIgnoreUnmapped() throws Exception { +diff --git a/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java b/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java +old mode 100644 +new mode 100755 +index a5b64597181..ed58ff2bd08 +--- a/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java ++++ b/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java +@@ -290,7 +290,7 @@ public class ElasticsearchGeoAssertions { + } + + private static double distance(double lat1, double lon1, double lat2, double lon2) { +- return GeoDistance.ARC.calculate(lat1, lon1, lat2, lon2, DistanceUnit.DEFAULT); ++ return DistanceUnit.DEFAULT.calculate(lat1, lon1, lat2, lon2, GeoDistance.ARC); + } + + public static void assertValidException(XContentParser parser, Class expectedException) { +diff --git a/test/framework/src/main/java/org/elasticsearch/index/RandomCreateIndexGenerator.java b/test/framework/src/main/java/org/elasticsearch/index/RandomCreateIndexGenerator.java +old mode 100644 +new mode 100755 +index 9732504cac6..f751a25e169 +--- a/test/framework/src/main/java/org/elasticsearch/index/RandomCreateIndexGenerator.java ++++ b/test/framework/src/main/java/org/elasticsearch/index/RandomCreateIndexGenerator.java +@@ -122,7 +122,7 @@ public final class RandomCreateIndexGenerator { + public static void randomAliases(CreateIndexRequest request) { + int aliasesNo = randomIntBetween(0, 2); + for (int i = 0; i < aliasesNo; i++) 
{ +- request.alias(randomAlias()); ++ randomAlias().alias(request); + } + } + +diff --git a/test/framework/src/main/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java b/test/framework/src/main/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java +old mode 100644 +new mode 100755 +index a7153f904b1..018e43c2ef6 +--- a/test/framework/src/main/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java ++++ b/test/framework/src/main/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java +@@ -64,7 +64,7 @@ public class AnalysisTestsHelper { + final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", actualSettings); + final AnalysisRegistry analysisRegistry = + new AnalysisModule(new Environment(actualSettings, configPath), Arrays.asList(plugins)).getAnalysisRegistry(); +- return new ESTestCase.TestAnalysis(analysisRegistry.build(indexSettings), ++ return new ESTestCase.TestAnalysis(indexSettings.build(analysisRegistry), + analysisRegistry.buildTokenFilterFactories(indexSettings), + analysisRegistry.buildTokenizerFactories(indexSettings), + analysisRegistry.buildCharFilterFactories(indexSettings)); +diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +old mode 100644 +new mode 100755 +index 61298ce0e84..e1604239fab +--- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java ++++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +@@ -362,7 +362,7 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { + IndexScopedSettings indexScopedSettings = settingsModule.getIndexScopedSettings(); + idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings, indexScopedSettings); + AnalysisModule analysisModule = new AnalysisModule(TestEnvironment.newEnvironment(nodeSettings), emptyList()); +- IndexAnalyzers indexAnalyzers = analysisModule.getAnalysisRegistry().build(idxSettings); ++ IndexAnalyzers indexAnalyzers = idxSettings.build(analysisModule.getAnalysisRegistry()); + scriptService = scriptModule.getScriptService(); + similarityService = new SimilarityService(idxSettings, null, Collections.emptyMap()); + MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); +diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +old mode 100644 +new mode 100755 +index 6b36f985c21..32cd9ba4dcd +--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java ++++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +@@ -159,7 +159,6 @@ import java.util.stream.Stream; + + import static java.util.Collections.emptyMap; + import static java.util.Collections.singletonList; +-import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; + import static org.hamcrest.Matchers.empty; + import static org.hamcrest.Matchers.equalTo; + import static org.hamcrest.Matchers.hasItem; +@@ -1368,7 +1367,7 @@ public abstract class ESTestCase extends LuceneTestCase { + Environment env = TestEnvironment.newEnvironment(nodeSettings); + AnalysisModule analysisModule = new AnalysisModule(env, Arrays.asList(analysisPlugins)); + AnalysisRegistry analysisRegistry = analysisModule.getAnalysisRegistry(); +- return new TestAnalysis(analysisRegistry.build(indexSettings), ++ return new TestAnalysis(indexSettings.build(analysisRegistry), + 
analysisRegistry.buildTokenFilterFactories(indexSettings), + analysisRegistry.buildTokenizerFactories(indexSettings), + analysisRegistry.buildCharFilterFactories(indexSettings)); +diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +old mode 100644 +new mode 100755 +index 441044328be..b0ce91b002e +--- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java ++++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +@@ -265,7 +265,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { + assertThat(e.getMessage(), false, equalTo(true)); + } + +- res = serviceB.submitRequest(nodeA, "internal:sayHello", new StringMessageRequest("moshe"), ++ res = new StringMessageRequest("moshe").submitRequest(nodeA, "internal:sayHello", + TransportRequestOptions.EMPTY, new TransportResponseHandler() { + @Override + public StringMessageResponse read(StreamInput in) throws IOException { +@@ -543,8 +543,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { + serviceC.connectToNode(serviceA.getLocalDiscoNode(), connectionProfile); + + +- TransportFuture res = serviceC.submitRequest(nodeA, "internal:sayHello", +- TransportRequest.Empty.INSTANCE, TransportRequestOptions.EMPTY, ++ TransportFuture res = TransportRequest.Empty.INSTANCE.submitRequest(nodeA, "internal:sayHello", ++ TransportRequestOptions.EMPTY, + new TransportResponseHandler() { + @Override + public TransportResponse.Empty read(StreamInput in) { +@@ -596,8 +596,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { + ConnectionProfile connectionProfile = ConnectionProfile.buildDefaultConnectionProfile(settingsWithCompress); + serviceC.connectToNode(serviceA.getLocalDiscoNode(), connectionProfile); + +- TransportFuture res = serviceC.submitRequest(nodeA, "internal:sayHello", +- new StringMessageRequest("moshe"), TransportRequestOptions.EMPTY, ++ TransportFuture res = new StringMessageRequest("moshe").submitRequest(nodeA, "internal:sayHello", ++ TransportRequestOptions.EMPTY, + new TransportResponseHandler() { + @Override + public StringMessageResponse read(StreamInput in) throws IOException { +@@ -826,8 +826,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { + latch3.countDown(); + } + }); +- TransportFuture foobar = serviceB.submitRequest(nodeA, "internal:foobar", +- new StringMessageRequest(""), TransportRequestOptions.EMPTY, EmptyTransportResponseHandler.INSTANCE_SAME); ++ TransportFuture foobar = new StringMessageRequest("").submitRequest(nodeA, "internal:foobar", ++ TransportRequestOptions.EMPTY, EmptyTransportResponseHandler.INSTANCE_SAME); + latch2.countDown(); + try { + foobar.txGet(); +@@ -852,8 +852,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { + } + }); + +- TransportFuture res = serviceB.submitRequest(nodeA, "internal:sayHelloTimeoutNoResponse", +- new StringMessageRequest("moshe"), TransportRequestOptions.builder().withTimeout(100).build(), ++ TransportFuture res = new StringMessageRequest("moshe").submitRequest(nodeA, "internal:sayHelloTimeoutNoResponse", ++ TransportRequestOptions.builder().withTimeout(100).build(), + new TransportResponseHandler() { + @Override + public StringMessageResponse read(StreamInput in) throws IOException { +@@ -916,8 +916,8 @@ public abstract 
class AbstractSimpleTransportTestCase extends ESTestCase { + } + }); + final CountDownLatch latch = new CountDownLatch(1); +- TransportFuture res = serviceB.submitRequest(nodeA, "internal:sayHelloTimeoutDelayedResponse", +- new StringMessageRequest("forever"), TransportRequestOptions.builder().withTimeout(100).build(), ++ TransportFuture res = new StringMessageRequest("forever").submitRequest(nodeA, "internal:sayHelloTimeoutDelayedResponse", ++ TransportRequestOptions.builder().withTimeout(100).build(), + new TransportResponseHandler() { + @Override + public StringMessageResponse read(StreamInput in) throws IOException { +@@ -954,8 +954,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { + for (int i = 0; i < 10; i++) { + final int counter = i; + // now, try and send another request, this times, with a short timeout +- TransportFuture result = serviceB.submitRequest(nodeA, "internal:sayHelloTimeoutDelayedResponse", +- new StringMessageRequest(counter + "ms"), TransportRequestOptions.builder().withTimeout(3000).build(), ++ TransportFuture result = new StringMessageRequest(counter + "ms").submitRequest(nodeA, "internal:sayHelloTimeoutDelayedResponse", ++ TransportRequestOptions.builder().withTimeout(3000).build(), + new TransportResponseHandler() { + @Override + public StringMessageResponse read(StreamInput in) throws IOException { +@@ -1505,8 +1505,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { + + serviceB.addUnresponsiveRule(serviceA); + +- TransportFuture res = serviceB.submitRequest(nodeA, "internal:sayHello", +- new StringMessageRequest("moshe"), TransportRequestOptions.builder().withTimeout(100).build(), ++ TransportFuture res = new StringMessageRequest("moshe").submitRequest(nodeA, "internal:sayHello", ++ TransportRequestOptions.builder().withTimeout(100).build(), + new TransportResponseHandler() { + @Override + public StringMessageResponse read(StreamInput in) throws IOException { +diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java +old mode 100644 +new mode 100755 +index a0a81b1a516..600ad9e567f +--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java ++++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java +@@ -33,7 +33,6 @@ import org.elasticsearch.index.engine.CommitStats; + import org.elasticsearch.index.engine.Engine; + import org.elasticsearch.index.shard.ShardId; + import org.elasticsearch.license.RemoteClusterLicenseChecker; +-import org.elasticsearch.license.XPackLicenseState; + import org.elasticsearch.rest.RestStatus; + import org.elasticsearch.xpack.ccr.action.ShardChangesAction; + import org.elasticsearch.xpack.ccr.action.ShardFollowTask; +@@ -116,12 +115,11 @@ public final class CcrLicenseChecker { + request.clear(); + request.metaData(true); + request.indices(leaderIndex); +- checkRemoteClusterLicenseAndFetchClusterState( ++ request.checkRemoteClusterLicenseAndFetchClusterState( + client, + clusterAlias, + client.getRemoteClusterClient(clusterAlias), +- request, +- onFailure, ++ onFailure, + remoteClusterStateResponse -> { + ClusterState remoteClusterState = remoteClusterStateResponse.getState(); + IndexMetaData leaderIndexMetaData = remoteClusterState.getMetaData().index(leaderIndex); +@@ -141,7 +139,7 @@ public final class CcrLicenseChecker { + }); + }, + licenseCheck -> 
indexMetadataNonCompliantRemoteLicense(leaderIndex, licenseCheck), +- e -> indexMetadataUnknownRemoteLicense(leaderIndex, clusterAlias, e)); ++ e -> indexMetadataUnknownRemoteLicense(leaderIndex, clusterAlias, e), this); + } + + /** +@@ -164,15 +162,14 @@ public final class CcrLicenseChecker { + final Consumer leaderClusterStateConsumer) { + try { + Client remoteClient = systemClient(client.getRemoteClusterClient(clusterAlias)); +- checkRemoteClusterLicenseAndFetchClusterState( ++ request.checkRemoteClusterLicenseAndFetchClusterState( + client, + clusterAlias, + remoteClient, +- request, + onFailure, + leaderClusterStateConsumer, + CcrLicenseChecker::clusterStateNonCompliantRemoteLicense, +- e -> clusterStateUnknownRemoteLicense(clusterAlias, e)); ++ e -> clusterStateUnknownRemoteLicense(clusterAlias, e), this); + } catch (Exception e) { + // client.getRemoteClusterClient(...) can fail with a IllegalArgumentException if remote + // connection is unknown +@@ -180,55 +177,6 @@ public final class CcrLicenseChecker { + } + } + +- /** +- * Fetches the leader cluster state from the remote cluster by the specified cluster state request. Before fetching the cluster state, +- * the remote cluster is checked for license compliance with CCR. If the remote cluster is not licensed for CCR, +- * the {@code onFailure} consumer is invoked. Otherwise, the specified consumer is invoked with the leader cluster state fetched from +- * the remote cluster. +- * +- * @param client the client +- * @param clusterAlias the remote cluster alias +- * @param remoteClient the remote client to use to execute cluster state API +- * @param request the cluster state request +- * @param onFailure the failure consumer +- * @param leaderClusterStateConsumer the leader cluster state consumer +- * @param nonCompliantLicense the supplier for when the license state of the remote cluster is non-compliant +- * @param unknownLicense the supplier for when the license state of the remote cluster is unknown due to failure +- */ +- private void checkRemoteClusterLicenseAndFetchClusterState( +- final Client client, +- final String clusterAlias, +- final Client remoteClient, +- final ClusterStateRequest request, +- final Consumer onFailure, +- final Consumer leaderClusterStateConsumer, +- final Function nonCompliantLicense, +- final Function unknownLicense) { +- // we have to check the license on the remote cluster +- new RemoteClusterLicenseChecker(client, XPackLicenseState::isCcrAllowedForOperationMode).checkRemoteClusterLicenses( +- Collections.singletonList(clusterAlias), +- new ActionListener() { +- +- @Override +- public void onResponse(final RemoteClusterLicenseChecker.LicenseCheck licenseCheck) { +- if (licenseCheck.isSuccess()) { +- final ActionListener clusterStateListener = +- ActionListener.wrap(leaderClusterStateConsumer::accept, onFailure); +- // following an index in remote cluster, so use remote client to fetch leader index metadata +- remoteClient.admin().cluster().state(request, clusterStateListener); +- } else { +- onFailure.accept(nonCompliantLicense.apply(licenseCheck)); +- } +- } +- +- @Override +- public void onFailure(final Exception e) { +- onFailure.accept(unknownLicense.apply(e)); +- } +- +- }); +- } +- + /** + * Fetches the history UUIDs for leader index on per shard basis using the specified remoteClient. 
+ *
+diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java
+old mode 100644
+new mode 100755
+index ee5d3a279a5..9edb2b5169e
+--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java
++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java
+@@ -5,14 +5,17 @@
+ */
+ package org.elasticsearch.protocol.xpack.graph;
+
++import org.elasticsearch.ElasticsearchParseException;
+ import org.elasticsearch.action.ActionRequestValidationException;
+ import org.elasticsearch.action.ValidateActions;
+ import org.elasticsearch.common.io.stream.StreamInput;
+ import org.elasticsearch.common.io.stream.StreamOutput;
+ import org.elasticsearch.common.xcontent.ToXContentFragment;
+ import org.elasticsearch.common.xcontent.XContentBuilder;
++import org.elasticsearch.common.xcontent.XContentParser;
+ import org.elasticsearch.index.query.QueryBuilder;
+ import org.elasticsearch.index.query.QueryBuilders;
++import org.elasticsearch.xpack.graph.rest.action.RestGraphAction;
+
+ import java.io.IOException;
+ import java.util.ArrayList;
+@@ -157,4 +160,137 @@ public class Hop implements ToXContentFragment{
+ }
+ return builder;
+ }
++
++ public void parseVertices(XContentParser parser, RestGraphAction restGraphAction)
++ throws IOException {
++ XContentParser.Token token;
++
++ String fieldName = null;
++
++ while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
++ if (token == XContentParser.Token.START_OBJECT) {
++ String field = null;
++ Map<String, GraphExploreRequest.TermBoost> includes = null;
++ HashSet<String> excludes = null;
++ int size = 10;
++ int minDocCount = 3;
++ int shardMinDocCount = 2;
++ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
++ if (token == XContentParser.Token.FIELD_NAME) {
++ fieldName = parser.currentName();
++ token = parser.nextToken();
++ }
++ if (token == XContentParser.Token.START_ARRAY) {
++ if (RestGraphAction.INCLUDE_FIELD.match(fieldName, parser.getDeprecationHandler())) {
++ if (excludes != null) {
++ throw new ElasticsearchParseException(
++ "Graph vertices definition cannot contain both "+ RestGraphAction.INCLUDE_FIELD.getPreferredName()+" and "
++ + RestGraphAction.EXCLUDE_FIELD.getPreferredName()+" clauses", token.name());
++ }
++ includes = new HashMap<>();
++ while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
++ if (token == XContentParser.Token.START_OBJECT) {
++ String includeTerm = null;
++ float boost = 1f;
++ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
++ if (token == XContentParser.Token.FIELD_NAME) {
++ fieldName = parser.currentName();
++ } else {
++ if (token == XContentParser.Token.VALUE_STRING) {
++ if (RestGraphAction.TERM_FIELD.match(fieldName, parser.getDeprecationHandler())) {
++ includeTerm = parser.text();
++ } else {
++ throw new ElasticsearchParseException(
++ "Graph vertices definition " + RestGraphAction.INCLUDE_FIELD.getPreferredName() +
++ " clause has invalid property:" + fieldName);
++ }
++ } else if (token == XContentParser.Token.VALUE_NUMBER) {
++ if (RestGraphAction.BOOST_FIELD.match(fieldName, parser.getDeprecationHandler())) {
++ boost = parser.floatValue();
++ } else {
++ throw new ElasticsearchParseException(
++ "Graph vertices definition " + RestGraphAction.INCLUDE_FIELD.getPreferredName() +
++ " clause has invalid property:" + fieldName);
++ }
++ } else {
++ throw new ElasticsearchParseException(
++ "Graph vertices definition " + RestGraphAction.INCLUDE_FIELD.getPreferredName() +
++ " clause has invalid property type:"+ token.name());
++
++ }
++ }
++ }
++ if (includeTerm == null) {
++ throw new ElasticsearchParseException(
++ "Graph vertices definition " + RestGraphAction.INCLUDE_FIELD.getPreferredName() +
++ " clause has missing object property for term");
++ }
++ includes.put(includeTerm, new GraphExploreRequest.TermBoost(includeTerm, boost));
++ } else if (token == XContentParser.Token.VALUE_STRING) {
++ String term = parser.text();
++ includes.put(term, new GraphExploreRequest.TermBoost(term, 1f));
++ } else {
++ throw new ElasticsearchParseException(
++ "Graph vertices definition " + RestGraphAction.INCLUDE_FIELD.getPreferredName() +
++ " clauses must be string terms or Objects with terms and boosts, not"
++ + token.name());
++ }
++ }
++ } else if (RestGraphAction.EXCLUDE_FIELD.match(fieldName, parser.getDeprecationHandler())) {
++ if (includes != null) {
++ throw new ElasticsearchParseException(
++ "Graph vertices definition cannot contain both "+ RestGraphAction.INCLUDE_FIELD.getPreferredName()+
++ " and "+ RestGraphAction.EXCLUDE_FIELD.getPreferredName()+" clauses", token.name());
++ }
++ excludes = new HashSet<>();
++ while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
++ excludes.add(parser.text());
++ }
++ } else {
++ throw new ElasticsearchParseException("Illegal property in graph vertices definition " + fieldName,
++ token.name());
++ }
++ }
++ if (token == XContentParser.Token.VALUE_STRING) {
++ if (RestGraphAction.FIELD_NAME_FIELD.match(fieldName, parser.getDeprecationHandler())) {
++ field = parser.text();
++ } else {
++ throw new ElasticsearchParseException("Unknown string property: [" + fieldName + "]");
++ }
++ }
++ if (token == XContentParser.Token.VALUE_NUMBER) {
++ if (RestGraphAction.SIZE_FIELD.match(fieldName, parser.getDeprecationHandler())) {
++ size = parser.intValue();
++ } else if (RestGraphAction.MIN_DOC_COUNT_FIELD.match(fieldName, parser.getDeprecationHandler())) {
++ minDocCount = parser.intValue();
++ } else if (RestGraphAction.SHARD_MIN_DOC_COUNT_FIELD.match(fieldName, parser.getDeprecationHandler())) {
++ shardMinDocCount = parser.intValue();
++ } else {
++ throw new ElasticsearchParseException("Unknown numeric property: [" + fieldName + "]");
++ }
++ }
++ }
++ if (field == null) {
++ throw new ElasticsearchParseException("Missing field name in graph vertices definition", token.name());
++ }
++ VertexRequest vr = addVertexRequest(field);
++ if (includes != null) {
++ for (GraphExploreRequest.TermBoost tb : includes.values()) {
++ vr.addInclude(tb.getTerm(), tb.getBoost());
++ }
++ }
++ if (excludes != null) {
++ for (String term : excludes) {
++ vr.addExclude(term);
++ }
++ }
++ vr.size(size);
++ vr.minDocCount(minDocCount);
++ vr.shardMinDocCount(shardMinDocCount);
++
++ }
++
++ }
++
++ }
+ }
+diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/SourceConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/SourceConfig.java
+old mode 100644
+new mode 100755
+index 19671fd552c..5fff7c2cb3f
+--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/SourceConfig.java
++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/SourceConfig.java
+@@ -6,6 +6,7 @@
+
+ package org.elasticsearch.xpack.core.dataframe.transforms;
+
++import org.elasticsearch.action.search.SearchRequest;
+ import org.elasticsearch.common.ParseField;
org.elasticsearch.common.ParseField; + import org.elasticsearch.common.Strings; + import org.elasticsearch.common.io.stream.StreamInput; +@@ -15,7 +16,10 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; + import org.elasticsearch.common.xcontent.ToXContentObject; + import org.elasticsearch.common.xcontent.XContentBuilder; + import org.elasticsearch.common.xcontent.XContentParser; ++import org.elasticsearch.index.query.QueryBuilder; ++import org.elasticsearch.search.builder.SearchSourceBuilder; + import org.elasticsearch.xpack.core.dataframe.utils.ExceptionsHelper; ++import org.elasticsearch.xpack.dataframe.transforms.pivot.Pivot; + + import java.io.IOException; + import java.util.Arrays; +@@ -136,4 +140,17 @@ public class SourceConfig implements Writeable, ToXContentObject { + public static SourceConfig fromXContent(final XContentParser parser, boolean lenient) throws IOException { + return lenient ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); + } ++ ++ public SearchRequest buildSearchRequest(Map position, int pageSize, Pivot pivot) { ++ QueryBuilder queryBuilder = getQueryConfig().getQuery(); ++ ++ SearchRequest searchRequest = new SearchRequest(getIndex()); ++ SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); ++ sourceBuilder.aggregation(pivot.buildAggregation(position, pageSize)); ++ sourceBuilder.size(0); ++ sourceBuilder.query(queryBuilder); ++ searchRequest.source(sourceBuilder); ++ return searchRequest; ++ ++ } + } +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +old mode 100644 +new mode 100755 +index 1cb44f9625c..f36227fee4c +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +@@ -605,6 +605,87 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO + return compatibleTypes; + } + ++ /** ++ * Updates {@code source} with the new values in this object returning a new {@link Job}. ++ * ++ * @param maxModelMemoryLimit The maximum model memory allowed ++ * @param jobUpdate ++ * @return A new job equivalent to {@code source} updated. 
++ */ ++ public Job mergeWithJob(ByteSizeValue maxModelMemoryLimit, JobUpdate jobUpdate) { ++ Builder builder = new Builder(this); ++ AnalysisConfig currentAnalysisConfig = getAnalysisConfig(); ++ AnalysisConfig.Builder newAnalysisConfig = new AnalysisConfig.Builder(currentAnalysisConfig); ++ ++ if (jobUpdate.getGroups() != null) { ++ builder.setGroups(jobUpdate.getGroups()); ++ } ++ if (jobUpdate.getDescription() != null) { ++ builder.setDescription(jobUpdate.getDescription()); ++ } ++ if (jobUpdate.getDetectorUpdates() != null && jobUpdate.getDetectorUpdates().isEmpty() == false) { ++ int numDetectors = currentAnalysisConfig.getDetectors().size(); ++ for (JobUpdate.DetectorUpdate dd : jobUpdate.getDetectorUpdates()) { ++ if (dd.getDetectorIndex() >= numDetectors) { ++ throw ExceptionsHelper.badRequestException("Supplied detector_index [{}] is >= the number of detectors [{}]", ++ dd.getDetectorIndex(), numDetectors); ++ } ++ ++ Detector.Builder detectorBuilder = new Detector.Builder(currentAnalysisConfig.getDetectors().get(dd.getDetectorIndex())); ++ if (dd.getDescription() != null) { ++ detectorBuilder.setDetectorDescription(dd.getDescription()); ++ } ++ if (dd.getRules() != null) { ++ detectorBuilder.setRules(dd.getRules()); ++ } ++ ++ newAnalysisConfig.setDetector(dd.getDetectorIndex(), detectorBuilder.build()); ++ } ++ } ++ if (jobUpdate.getModelPlotConfig() != null) { ++ builder.setModelPlotConfig(jobUpdate.getModelPlotConfig()); ++ } ++ if (jobUpdate.getAnalysisLimits() != null) { ++ AnalysisLimits validatedLimits = AnalysisLimits.validateAndSetDefaults(jobUpdate.getAnalysisLimits(), maxModelMemoryLimit, ++ AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB); ++ builder.setAnalysisLimits(validatedLimits); ++ } ++ if (jobUpdate.getRenormalizationWindowDays() != null) { ++ builder.setRenormalizationWindowDays(jobUpdate.getRenormalizationWindowDays()); ++ } ++ if (jobUpdate.getBackgroundPersistInterval() != null) { ++ builder.setBackgroundPersistInterval(jobUpdate.getBackgroundPersistInterval()); ++ } ++ if (jobUpdate.getModelSnapshotRetentionDays() != null) { ++ builder.setModelSnapshotRetentionDays(jobUpdate.getModelSnapshotRetentionDays()); ++ } ++ if (jobUpdate.getResultsRetentionDays() != null) { ++ builder.setResultsRetentionDays(jobUpdate.getResultsRetentionDays()); ++ } ++ if (jobUpdate.getCategorizationFilters() != null) { ++ newAnalysisConfig.setCategorizationFilters(jobUpdate.getCategorizationFilters()); ++ } ++ if (jobUpdate.getCustomSettings() != null) { ++ builder.setCustomSettings(jobUpdate.getCustomSettings()); ++ } ++ if (jobUpdate.getModelSnapshotId() != null) { ++ builder.setModelSnapshotId(jobUpdate.getModelSnapshotId()); ++ } ++ if (jobUpdate.getModelSnapshotMinVersion() != null) { ++ builder.setModelSnapshotMinVersion(jobUpdate.getModelSnapshotMinVersion()); ++ } ++ if (jobUpdate.getJobVersion() != null) { ++ builder.setJobVersion(jobUpdate.getJobVersion()); ++ } ++ ++ if (jobUpdate.getClearJobFinishTime() != null && jobUpdate.getClearJobFinishTime()) { ++ builder.setFinishedTime(null); ++ } ++ ++ builder.setAnalysisConfig(newAnalysisConfig); ++ return builder.build(); ++ } ++ + public static class Builder implements Writeable, ToXContentObject { + + private String id; +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java +old mode 100644 +new mode 100755 +index b50b7d2fa51..5649a6d360e +--- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java +@@ -11,13 +11,11 @@ import org.elasticsearch.common.ParseField; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.io.stream.Writeable; +-import org.elasticsearch.common.unit.ByteSizeValue; + import org.elasticsearch.common.unit.TimeValue; + import org.elasticsearch.common.xcontent.ConstructingObjectParser; + import org.elasticsearch.common.xcontent.ObjectParser; + import org.elasticsearch.common.xcontent.ToXContentObject; + import org.elasticsearch.common.xcontent.XContentBuilder; +-import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + + import java.io.IOException; + import java.util.Arrays; +@@ -353,87 +351,6 @@ public class JobUpdate implements Writeable, ToXContentObject { + return updateFields; + } + +- /** +- * Updates {@code source} with the new values in this object returning a new {@link Job}. +- * +- * @param source Source job to be updated +- * @param maxModelMemoryLimit The maximum model memory allowed +- * @return A new job equivalent to {@code source} updated. +- */ +- public Job mergeWithJob(Job source, ByteSizeValue maxModelMemoryLimit) { +- Job.Builder builder = new Job.Builder(source); +- AnalysisConfig currentAnalysisConfig = source.getAnalysisConfig(); +- AnalysisConfig.Builder newAnalysisConfig = new AnalysisConfig.Builder(currentAnalysisConfig); +- +- if (groups != null) { +- builder.setGroups(groups); +- } +- if (description != null) { +- builder.setDescription(description); +- } +- if (detectorUpdates != null && detectorUpdates.isEmpty() == false) { +- int numDetectors = currentAnalysisConfig.getDetectors().size(); +- for (DetectorUpdate dd : detectorUpdates) { +- if (dd.getDetectorIndex() >= numDetectors) { +- throw ExceptionsHelper.badRequestException("Supplied detector_index [{}] is >= the number of detectors [{}]", +- dd.getDetectorIndex(), numDetectors); +- } +- +- Detector.Builder detectorBuilder = new Detector.Builder(currentAnalysisConfig.getDetectors().get(dd.getDetectorIndex())); +- if (dd.getDescription() != null) { +- detectorBuilder.setDetectorDescription(dd.getDescription()); +- } +- if (dd.getRules() != null) { +- detectorBuilder.setRules(dd.getRules()); +- } +- +- newAnalysisConfig.setDetector(dd.getDetectorIndex(), detectorBuilder.build()); +- } +- } +- if (modelPlotConfig != null) { +- builder.setModelPlotConfig(modelPlotConfig); +- } +- if (analysisLimits != null) { +- AnalysisLimits validatedLimits = AnalysisLimits.validateAndSetDefaults(analysisLimits, maxModelMemoryLimit, +- AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB); +- builder.setAnalysisLimits(validatedLimits); +- } +- if (renormalizationWindowDays != null) { +- builder.setRenormalizationWindowDays(renormalizationWindowDays); +- } +- if (backgroundPersistInterval != null) { +- builder.setBackgroundPersistInterval(backgroundPersistInterval); +- } +- if (modelSnapshotRetentionDays != null) { +- builder.setModelSnapshotRetentionDays(modelSnapshotRetentionDays); +- } +- if (resultsRetentionDays != null) { +- builder.setResultsRetentionDays(resultsRetentionDays); +- } +- if (categorizationFilters != null) { +- newAnalysisConfig.setCategorizationFilters(categorizationFilters); +- } +- if (customSettings != null) { +- builder.setCustomSettings(customSettings); +- } +- if (modelSnapshotId != null) { 
+- builder.setModelSnapshotId(modelSnapshotId); +- } +- if (modelSnapshotMinVersion != null) { +- builder.setModelSnapshotMinVersion(modelSnapshotMinVersion); +- } +- if (jobVersion != null) { +- builder.setJobVersion(jobVersion); +- } +- +- if (clearJobFinishTime != null && clearJobFinishTime) { +- builder.setFinishedTime(null); +- } +- +- builder.setAnalysisConfig(newAnalysisConfig); +- return builder.build(); +- } +- + boolean isNoop(Job job) { + return (groups == null || Objects.equals(groups, job.getGroups())) + && (description == null || Objects.equals(description, job.getDescription())) +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java +old mode 100644 +new mode 100755 +index 8d542ce25af..975976665ba +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java +@@ -11,15 +11,19 @@ import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.io.stream.Writeable; + import org.elasticsearch.common.xcontent.ConstructingObjectParser; + import org.elasticsearch.common.xcontent.ObjectParser.ValueType; ++import org.elasticsearch.common.xcontent.ToXContent; + import org.elasticsearch.common.xcontent.ToXContentObject; + import org.elasticsearch.common.xcontent.XContentBuilder; + import org.elasticsearch.xpack.core.ml.job.config.Job; + import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; ++import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; + + import java.io.IOException; + import java.util.Date; + import java.util.Objects; + ++import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; ++ + /** + * Job processed record counts. + *
+@@ -604,4 +608,9 @@ public class DataCounts implements ToXContentObject, Writeable { + outOfOrderTimeStampCount, lastDataTimeStamp, emptyBucketCount, sparseBucketCount, bucketCount, + latestRecordTimeStamp, earliestRecordTimeStamp, latestEmptyBucketTimeStamp, latestSparseBucketTimeStamp); + } ++ ++ public XContentBuilder serialiseCounts(JobDataCountsPersister jobDataCountsPersister) throws IOException { ++ XContentBuilder builder = jsonBuilder(); ++ return toXContent(builder, EMPTY_PARAMS); ++ } + } +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyCause.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyCause.java +old mode 100644 +new mode 100755 +index 50efe24ab0f..06dd8d5583f +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyCause.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyCause.java +@@ -354,4 +354,10 @@ public class AnomalyCause implements ToXContentObject, Writeable { + } + + ++ public void addCause(AnomalyRecord anomalyRecord) { ++ if (anomalyRecord.getCauses() == null) { ++ anomalyRecord.setCauses(new ArrayList<>()); ++ } ++ anomalyRecord.getCauses().add(this); ++ } + } +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java +old mode 100644 +new mode 100755 +index 3c099e30924..9fd1fc154dd +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java +@@ -514,13 +514,6 @@ public class AnomalyRecord implements ToXContentObject, Writeable { + this.causes = causes; + } + +- public void addCause(AnomalyCause cause) { +- if (causes == null) { +- causes = new ArrayList<>(); +- } +- causes.add(cause); +- } +- + public List getInfluencers() { + return influences; + } +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/MonitoredSystem.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/MonitoredSystem.java +old mode 100644 +new mode 100755 +index 7567c275156..9bb040d8f3f +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/MonitoredSystem.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/MonitoredSystem.java +@@ -5,6 +5,13 @@ + */ + package org.elasticsearch.xpack.core.monitoring; + ++import org.elasticsearch.action.bulk.BulkRequestParser; ++import org.elasticsearch.common.bytes.BytesReference; ++import org.elasticsearch.common.xcontent.XContentType; ++import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkDoc; ++import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkRequest; ++import org.elasticsearch.xpack.core.monitoring.action.MonitoringIndex; ++ + import java.util.Arrays; + import java.util.Locale; + import java.util.stream.Stream; +@@ -54,4 +61,41 @@ public enum MonitoredSystem { + public static Stream allSystems() { + return Arrays.stream(MonitoredSystem.values()).filter(s -> s != MonitoredSystem.UNKNOWN); + } ++ ++ /** ++ * Parses a monitoring bulk request and builds the list of documents to be indexed. 
++ * @param content ++ * @param xContentType ++ * @param timestamp ++ * @param intervalMillis ++ * @param monitoringBulkRequest ++ */ ++ public MonitoringBulkRequest add(final BytesReference content, ++ final XContentType xContentType, ++ final long timestamp, ++ final long intervalMillis, MonitoringBulkRequest monitoringBulkRequest) throws IOException { ++ ++ // MonitoringBulkRequest accepts a body request that has the same format as the BulkRequest ++ new BulkRequestParser(false).parse(content, null, null, null, null, true, xContentType, ++ indexRequest -> { ++ // we no longer accept non-timestamped indexes from Kibana, LS, or Beats because we do not use the data ++ // and it was duplicated anyway; by simply dropping it, we allow BWC for older clients that still send it ++ if (MonitoringIndex.from(indexRequest.index()) != MonitoringIndex.TIMESTAMPED) { ++ return; ++ } ++ final BytesReference source = indexRequest.source(); ++ if (source.length() == 0) { ++ throw new IllegalArgumentException("source is missing for monitoring document [" ++ + indexRequest.index() + "][" + indexRequest.type() + "][" + indexRequest.id() + "]"); ++ } ++ ++ // builds a new monitoring document based on the index request ++ monitoringBulkRequest.add(new MonitoringBulkDoc(this, indexRequest.type(), indexRequest.id(), timestamp, intervalMillis, source, ++ xContentType)); ++ }, ++ updateRequest -> { throw new IllegalArgumentException("monitoring bulk requests should only contain index requests"); }, ++ deleteRequest -> { throw new IllegalArgumentException("monitoring bulk requests should only contain index requests"); }); ++ ++ return monitoringBulkRequest; ++ } + } +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequest.java +old mode 100644 +new mode 100755 +index 12c4e0b7ee3..13605a76e4a +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequest.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequest.java +@@ -7,11 +7,8 @@ package org.elasticsearch.xpack.core.monitoring.action; + + import org.elasticsearch.action.ActionRequest; + import org.elasticsearch.action.ActionRequestValidationException; +-import org.elasticsearch.action.bulk.BulkRequestParser; +-import org.elasticsearch.common.bytes.BytesReference; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; +-import org.elasticsearch.common.xcontent.XContentType; + import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; + + import java.io.IOException; +@@ -62,39 +59,6 @@ public class MonitoringBulkRequest extends ActionRequest { + return this; + } + +- /** +- * Parses a monitoring bulk request and builds the list of documents to be indexed. 
+- */ +- public MonitoringBulkRequest add(final MonitoredSystem system, +- final BytesReference content, +- final XContentType xContentType, +- final long timestamp, +- final long intervalMillis) throws IOException { +- +- // MonitoringBulkRequest accepts a body request that has the same format as the BulkRequest +- new BulkRequestParser(false).parse(content, null, null, null, null, true, xContentType, +- indexRequest -> { +- // we no longer accept non-timestamped indexes from Kibana, LS, or Beats because we do not use the data +- // and it was duplicated anyway; by simply dropping it, we allow BWC for older clients that still send it +- if (MonitoringIndex.from(indexRequest.index()) != MonitoringIndex.TIMESTAMPED) { +- return; +- } +- final BytesReference source = indexRequest.source(); +- if (source.length() == 0) { +- throw new IllegalArgumentException("source is missing for monitoring document [" +- + indexRequest.index() + "][" + indexRequest.type() + "][" + indexRequest.id() + "]"); +- } +- +- // builds a new monitoring document based on the index request +- add(new MonitoringBulkDoc(system, indexRequest.type(), indexRequest.id(), timestamp, intervalMillis, source, +- xContentType)); +- }, +- updateRequest -> { throw new IllegalArgumentException("monitoring bulk requests should only contain index requests"); }, +- deleteRequest -> { throw new IllegalArgumentException("monitoring bulk requests should only contain index requests"); }); +- +- return this; +- } +- + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequestBuilder.java +old mode 100644 +new mode 100755 +index 904aac453fd..1d72390a929 +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequestBuilder.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkRequestBuilder.java +@@ -30,7 +30,7 @@ public class MonitoringBulkRequestBuilder + final XContentType xContentType, + final long timestamp, + final long intervalMillis) throws IOException { +- request.add(system, content, xContentType, timestamp, intervalMillis); ++ system.add(content, xContentType, timestamp, intervalMillis, request); + return this; + } + +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/CreateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/CreateApiKeyRequest.java +old mode 100644 +new mode 100755 +index 28a872c2222..823b8b9fd77 +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/CreateApiKeyRequest.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/CreateApiKeyRequest.java +@@ -15,6 +15,7 @@ import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.unit.TimeValue; + import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; ++import org.elasticsearch.xpack.security.authc.ApiKeyService; + + import java.io.IOException; + import java.util.Collections; +@@ -129,4 +130,12 @@ public final class CreateApiKeyRequest extends ActionRequest { + public void readFrom(StreamInput in) { + throw new UnsupportedOperationException("usage of Streamable is 
to be replaced by Writeable"); + } ++ ++ public Instant getApiKeyExpiration(Instant now, ApiKeyService apiKeyService) { ++ if (getExpiration() != null) { ++ return now.plusSeconds(getExpiration().getSeconds()); ++ } else { ++ return null; ++ } ++ } + } +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequest.java +old mode 100644 +new mode 100755 +index 93ac7ff45dd..d171c8da575 +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequest.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequest.java +@@ -9,8 +9,11 @@ import org.elasticsearch.action.ActionRequest; + import org.elasticsearch.action.ActionRequestValidationException; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; ++import org.elasticsearch.example.CustomAuthorizationEngine; ++import org.elasticsearch.xpack.core.security.authc.Authentication; + import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; + import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; ++import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivileges; + import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; + + import java.io.IOException; +@@ -123,4 +126,50 @@ public class HasPrivilegesRequest extends ActionRequest implements UserRequest { + out.writeArray(ApplicationResourcePrivileges::write, applicationPrivileges); + } + ++ public HasPrivilegesResponse getHasPrivilegesResponse(Authentication authentication, ++ boolean authorized, CustomAuthorizationEngine customAuthorizationEngine) { ++ Map clusterPrivMap = new HashMap<>(); ++ for (String clusterPriv : clusterPrivileges()) { ++ clusterPrivMap.put(clusterPriv, authorized); ++ } ++ final Map indices = new LinkedHashMap<>(); ++ for (RoleDescriptor.IndicesPrivileges check : indexPrivileges()) { ++ for (String index : check.getIndices()) { ++ final Map privileges = new HashMap<>(); ++ final ResourcePrivileges existing = indices.get(index); ++ if (existing != null) { ++ privileges.putAll(existing.getPrivileges()); ++ } ++ for (String privilege : check.getPrivileges()) { ++ privileges.put(privilege, authorized); ++ } ++ indices.put(index, ResourcePrivileges.builder(index).addPrivileges(privileges).build()); ++ } ++ } ++ final Map> privilegesByApplication = new HashMap<>(); ++ Set applicationNames = Arrays.stream(applicationPrivileges()) ++ .map(ApplicationResourcePrivileges::getApplication) ++ .collect(Collectors.toSet()); ++ for (String applicationName : applicationNames) { ++ final Map appPrivilegesByResource = new LinkedHashMap<>(); ++ for (ApplicationResourcePrivileges p : applicationPrivileges()) { ++ if (applicationName.equals(p.getApplication())) { ++ for (String resource : p.getResources()) { ++ final Map privileges = new HashMap<>(); ++ final ResourcePrivileges existing = appPrivilegesByResource.get(resource); ++ if (existing != null) { ++ privileges.putAll(existing.getPrivileges()); ++ } ++ for (String privilege : p.getPrivileges()) { ++ privileges.put(privilege, authorized); ++ } ++ appPrivilegesByResource.put(resource, ResourcePrivileges.builder(resource).addPrivileges(privileges).build()); ++ } ++ } ++ } ++ privilegesByApplication.put(applicationName, 
appPrivilegesByResource.values()); ++ } ++ return new HasPrivilegesResponse(authentication.getUser().principal(), authorized, clusterPrivMap, indices.values(), ++ privilegesByApplication); ++ } + } +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java +old mode 100644 +new mode 100755 +index 17c3e05a772..5eb0c86f6aa +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java +@@ -128,21 +128,12 @@ public class Authentication implements ToXContentObject { + * {@link IllegalStateException} will be thrown + */ + public void writeToContext(ThreadContext ctx) throws IOException, IllegalArgumentException { +- ensureContextDoesNotContainAuthentication(ctx); ++ ctx.ensureContextDoesNotContainAuthentication(this); + String header = encode(); + ctx.putTransient(AuthenticationField.AUTHENTICATION_KEY, this); + ctx.putHeader(AuthenticationField.AUTHENTICATION_KEY, header); + } + +- void ensureContextDoesNotContainAuthentication(ThreadContext ctx) { +- if (ctx.getTransient(AuthenticationField.AUTHENTICATION_KEY) != null) { +- if (ctx.getHeader(AuthenticationField.AUTHENTICATION_KEY) == null) { +- throw new IllegalStateException("authentication present as a transient but not a header"); +- } +- throw new IllegalStateException("authentication is already present in the context"); +- } +- } +- + public String encode() throws IOException { + BytesStreamOutput output = new BytesStreamOutput(); + output.setVersion(version); +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java +old mode 100644 +new mode 100755 +index adc79279a70..91d79f0e0fb +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java +@@ -13,6 +13,8 @@ import org.elasticsearch.license.XPackLicenseState; + import org.elasticsearch.xpack.core.security.authc.support.DelegatedAuthorizationSettings; + import org.elasticsearch.xpack.core.XPackField; + import org.elasticsearch.xpack.core.security.user.User; ++import org.elasticsearch.xpack.security.action.realm.TransportClearRealmCacheAction; ++import org.elasticsearch.xpack.security.authc.support.CachingRealm; + + import java.util.Collections; + import java.util.HashMap; +@@ -150,6 +152,21 @@ public abstract class Realm implements Comparable { + public void initialize(Iterable realms, XPackLicenseState licenseState) { + } + ++ public void clearCache(String[] usernames, TransportClearRealmCacheAction transportClearRealmCacheAction) { ++ if (!(this instanceof CachingRealm)) { ++ return; ++ } ++ CachingRealm cachingRealm = (CachingRealm) this; ++ ++ if (usernames != null && usernames.length != 0) { ++ for (String username : usernames) { ++ cachingRealm.expire(username); ++ } ++ } else { ++ cachingRealm.expireAll(); ++ } ++ } ++ + /** + * A factory interface to construct a security realm. 
+ */ +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/TemplateRoleName.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/TemplateRoleName.java +old mode 100644 +new mode 100755 +index 59f9eafec1c..e84e4e71cca +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/TemplateRoleName.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/TemplateRoleName.java +@@ -19,14 +19,12 @@ import org.elasticsearch.common.xcontent.ObjectParser; + import org.elasticsearch.common.xcontent.ToXContentObject; + import org.elasticsearch.common.xcontent.XContentBuilder; + import org.elasticsearch.common.xcontent.XContentFactory; +-import org.elasticsearch.common.xcontent.XContentHelper; + import org.elasticsearch.common.xcontent.XContentParseException; + import org.elasticsearch.common.xcontent.XContentParser; + import org.elasticsearch.common.xcontent.XContentType; + import org.elasticsearch.common.xcontent.json.JsonXContent; + import org.elasticsearch.script.ScriptService; + import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionModel; +-import org.elasticsearch.xpack.core.security.support.MustacheTemplateEvaluator; + + import java.io.IOException; + import java.io.UncheckedIOException; +@@ -83,7 +81,7 @@ public class TemplateRoleName implements ToXContentObject, Writeable { + + public List getRoleNames(ScriptService scriptService, ExpressionModel model) { + try { +- final String evaluation = parseTemplate(scriptService, model.asMap()); ++ final String evaluation = scriptService.parseTemplate(model.asMap(), this); + switch (format) { + case STRING: + return Collections.singletonList(evaluation); +@@ -123,12 +121,6 @@ public class TemplateRoleName implements ToXContentObject, Writeable { + } + } + +- private String parseTemplate(ScriptService scriptService, Map parameters) throws IOException { +- final XContentParser parser = XContentHelper.createParser( +- NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, template, XContentType.JSON); +- return MustacheTemplateEvaluator.evaluate(scriptService, parser, parameters); +- } +- + private static BytesReference extractTemplate(XContentParser parser, Void ignore) throws IOException { + if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { + return new BytesArray(parser.text()); +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/ExpressionParser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/ExpressionParser.java +old mode 100644 +new mode 100755 +index d2392b3d172..f6c52ce3334 +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/ExpressionParser.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/ExpressionParser.java +@@ -56,7 +56,7 @@ public final class ExpressionParser { + */ + public RoleMapperExpression parse(String name, XContentSource content) throws IOException { + try (InputStream stream = content.getBytes().streamInput()) { +- return parse(name, content.parser(NamedXContentRegistry.EMPTY, stream)); ++ return parse(name, NamedXContentRegistry.EMPTY.parser(stream, content)); + } + } + +diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/User.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/User.java +old mode 100644 +new mode 100755 +index e9bca56dee1..c9ae174131f +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/User.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/User.java +@@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; + import org.elasticsearch.common.xcontent.ToXContentObject; + import org.elasticsearch.common.xcontent.XContentBuilder; ++import org.elasticsearch.xpack.security.authz.AuthorizationService; + + import java.io.IOException; + import java.util.Arrays; +@@ -220,6 +221,10 @@ public class User implements ToXContentObject { + output.writeBoolean(user.enabled); + } + ++ public boolean isInternalUser(AuthorizationService authorizationService) { ++ return SystemUser.is(this) || XPackUser.is(this) || XPackSecurityUser.is(this); ++ } ++ + public interface Fields { + ParseField USERNAME = new ParseField("username"); + ParseField PASSWORD = new ParseField("password"); +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java +old mode 100644 +new mode 100755 +index 52d0f648f99..9028bfa2dfc +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java +@@ -14,7 +14,10 @@ import org.elasticsearch.xpack.core.watcher.actions.ActionWrapperResult; + import org.elasticsearch.xpack.core.watcher.condition.Condition; + import org.elasticsearch.xpack.core.watcher.history.WatchRecord; + import org.elasticsearch.xpack.core.watcher.input.Input; ++import org.elasticsearch.xpack.core.watcher.transform.ExecutableTransform; + import org.elasticsearch.xpack.core.watcher.transform.Transform; ++import org.elasticsearch.xpack.core.watcher.transform.chain.ChainTransform; ++import org.elasticsearch.xpack.core.watcher.transform.chain.ExecutableChainTransform; + import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; + import org.elasticsearch.xpack.core.watcher.watch.Payload; + import org.elasticsearch.xpack.core.watcher.watch.Watch; +@@ -268,4 +271,17 @@ public abstract class WatchExecutionContext { + } + return null; + } ++ ++ public ChainTransform.Result doExecute(Payload payload, List results, ExecutableChainTransform executableChainTransform) throws IOException { ++ for (ExecutableTransform transform : executableChainTransform.executableTransforms()) { ++ Transform.Result result = transform.execute(this, payload); ++ results.add(result); ++ if (result.status() == Transform.Result.Status.FAILURE) { ++ return new ChainTransform.Result(format("failed to execute [{}] transform for [{}]. 
failed to execute sub-transform [{}]", ++ ChainTransform.TYPE, id(), transform.type()), results); ++ } ++ payload = result.payload(); ++ } ++ return new ChainTransform.Result(payload, results); ++ } + } +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java +old mode 100644 +new mode 100755 +index d8836d9c338..da3290458e3 +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java +@@ -23,6 +23,7 @@ import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherParams; + import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; + import org.elasticsearch.xpack.core.watcher.watch.Watch; + import org.elasticsearch.xpack.core.watcher.watch.WatchField; ++import org.elasticsearch.xpack.watcher.execution.ExecutionService; + + import java.io.IOException; + import java.util.Collection; +@@ -211,6 +212,19 @@ public abstract class WatchRecord implements ToXContentObject { + return id.toString(); + } + ++ public WatchRecord createWatchRecord(WatchExecutionContext ctx, Exception e, ExecutionService executionService) { ++ // it is possible that the watch store update failed, the execution phase is finished ++ if (ctx.executionPhase().sealed()) { ++ if (this == null) { ++ return new ExceptionWatchRecord(ctx, e); ++ } else { ++ return new ExceptionWatchRecord(this, e); ++ } ++ } else { ++ return ctx.abortFailedExecution(e); ++ } ++ } ++ + public static class MessageWatchRecord extends WatchRecord { + @Nullable private final String[] messages; + +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/XContentSource.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/XContentSource.java +old mode 100644 +new mode 100755 +index 55362390216..2d83a413f52 +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/XContentSource.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/XContentSource.java +@@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchParseException; + import org.elasticsearch.common.bytes.BytesReference; + import org.elasticsearch.common.io.stream.StreamInput; + import org.elasticsearch.common.io.stream.StreamOutput; +-import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; + import org.elasticsearch.common.xcontent.NamedXContentRegistry; + import org.elasticsearch.common.xcontent.ObjectPath; + import org.elasticsearch.common.xcontent.ToXContent; +@@ -108,17 +107,13 @@ public class XContentSource implements ToXContent { + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + // EMPTY is safe here because we never use namedObject + try (InputStream stream = bytes.streamInput(); +- XContentParser parser = parser(NamedXContentRegistry.EMPTY, stream)) { ++ XContentParser parser = NamedXContentRegistry.EMPTY.parser(stream, this)) { + parser.nextToken(); + builder.generator().copyCurrentStructure(parser); + return builder; + } + } + +- public XContentParser parser(NamedXContentRegistry xContentRegistry, InputStream stream) throws IOException { +- return contentType.xContent().createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, stream); +- } +- + public static 
XContentSource readFrom(StreamInput in) throws IOException { + return new XContentSource(in.readBytesReference(), in.readEnum(XContentType.class)); + } +@@ -132,7 +127,7 @@ public class XContentSource implements ToXContent { + if (data == null) { + // EMPTY is safe here because we never use namedObject + try (InputStream stream = bytes.streamInput(); +- XContentParser parser = parser(NamedXContentRegistry.EMPTY, stream)) { ++ XContentParser parser = NamedXContentRegistry.EMPTY.parser(stream, this)) { + data = XContentUtils.readValue(parser, parser.nextToken()); + } catch (IOException ex) { + throw new ElasticsearchException("failed to read value", ex); +diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ExecutableChainTransform.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ExecutableChainTransform.java +old mode 100644 +new mode 100755 +index 5887a2bdd6d..8dd3d842973 +--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ExecutableChainTransform.java ++++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ExecutableChainTransform.java +@@ -43,7 +43,7 @@ public class ExecutableChainTransform extends ExecutableTransform results = new ArrayList<>(); + try { +- return doExecute(ctx, payload, results); ++ return ctx.doExecute(payload, results, this); + } catch (Exception e) { + logger.error((Supplier) () -> new ParameterizedMessage("failed to execute [{}] transform for [{}]", TYPE, ctx.id()), e); + return new ChainTransform.Result(e, results); +@@ -51,17 +51,4 @@ public class ExecutableChainTransform extends ExecutableTransform results) throws IOException { +- for (ExecutableTransform transform : transforms) { +- Transform.Result result = transform.execute(ctx, payload); +- results.add(result); +- if (result.status() == Transform.Result.Status.FAILURE) { +- return new ChainTransform.Result(format("failed to execute [{}] transform for [{}]. 
failed to execute sub-transform [{}]", +- ChainTransform.TYPE, ctx.id(), transform.type()), results); +- } +- payload = result.payload(); +- } +- return new ChainTransform.Result(payload, results); +- } +- + } +diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java +old mode 100644 +new mode 100755 +index eb4f2c0bbc2..792a8d1c355 +--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java ++++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java +@@ -230,7 +230,7 @@ public class JobUpdateTests extends AbstractSerializingTestCase { + jobBuilder.setCreateTime(new Date()); + Job job = jobBuilder.build(); + +- Job updatedJob = update.mergeWithJob(job, new ByteSizeValue(0L)); ++ Job updatedJob = job.mergeWithJob(new ByteSizeValue(0L), update); + + assertEquals(update.getGroups(), updatedJob.getGroups()); + assertEquals(update.getDescription(), updatedJob.getDescription()); +@@ -262,7 +262,7 @@ public class JobUpdateTests extends AbstractSerializingTestCase { + update = createRandom(job.getId(), job); + } + +- Job updatedJob = update.mergeWithJob(job, new ByteSizeValue(0L)); ++ Job updatedJob = job.mergeWithJob(new ByteSizeValue(0L), update); + + assertThat(job, not(equalTo(updatedJob))); + } +@@ -295,7 +295,7 @@ public class JobUpdateTests extends AbstractSerializingTestCase { + JobUpdate update = new JobUpdate.Builder("foo").setAnalysisLimits(new AnalysisLimits(1024L, null)).build(); + + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, +- () -> update.mergeWithJob(jobBuilder.build(), new ByteSizeValue(512L, ByteSizeUnit.MB))); ++ () -> jobBuilder.build().mergeWithJob(new ByteSizeValue(512L, ByteSizeUnit.MB), update)); + assertEquals("model_memory_limit [1gb] must be less than the value of the xpack.ml.max_model_memory_limit setting [512mb]", + e.getMessage()); + } +@@ -310,17 +310,17 @@ public class JobUpdateTests extends AbstractSerializingTestCase { + jobBuilder.validateAnalysisLimitsAndSetDefaults(null); + + JobUpdate update = new JobUpdate.Builder("foo").setAnalysisLimits(new AnalysisLimits(2048L, 5L)).build(); +- Job updated = update.mergeWithJob(jobBuilder.build(), new ByteSizeValue(0L)); ++ Job updated = jobBuilder.build().mergeWithJob(new ByteSizeValue(0L), update); + assertThat(updated.getAnalysisLimits().getModelMemoryLimit(), equalTo(2048L)); + assertThat(updated.getAnalysisLimits().getCategorizationExamplesLimit(), equalTo(5L)); + + JobUpdate updateAboveMaxLimit = new JobUpdate.Builder("foo").setAnalysisLimits(new AnalysisLimits(8000L, null)).build(); + + Exception e = expectThrows(ElasticsearchStatusException.class, +- () -> updateAboveMaxLimit.mergeWithJob(jobBuilder.build(), new ByteSizeValue(5000L, ByteSizeUnit.MB))); ++ () -> jobBuilder.build().mergeWithJob(new ByteSizeValue(5000L, ByteSizeUnit.MB), updateAboveMaxLimit)); + assertEquals("model_memory_limit [7.8gb] must be less than the value of the xpack.ml.max_model_memory_limit setting [4.8gb]", + e.getMessage()); + +- updateAboveMaxLimit.mergeWithJob(jobBuilder.build(), new ByteSizeValue(10000L, ByteSizeUnit.MB)); ++ jobBuilder.build().mergeWithJob(new ByteSizeValue(10000L, ByteSizeUnit.MB), updateAboveMaxLimit); + } + } +diff --git 
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPreviewDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPreviewDataFrameTransformAction.java +old mode 100644 +new mode 100755 +index f4b93cc6ac4..6f9953688f3 +--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPreviewDataFrameTransformAction.java ++++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPreviewDataFrameTransformAction.java +@@ -99,7 +99,7 @@ public class TransportPreviewDataFrameTransformAction extends + ClientHelper.DATA_FRAME_ORIGIN, + client, + SearchAction.INSTANCE, +- pivot.buildSearchRequest(source, null, NUMBER_OF_PREVIEW_BUCKETS), ++ source.buildSearchRequest(null, NUMBER_OF_PREVIEW_BUCKETS, pivot), + ActionListener.wrap( + r -> { + final CompositeAggregation agg = r.getAggregations().get(COMPOSITE_AGGREGATION_NAME); +diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java +old mode 100644 +new mode 100755 +index f2fc71da7f0..a3309fcdf75 +--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java ++++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java +@@ -185,7 +185,7 @@ public abstract class DataFrameIndexer extends AsyncTwoPhaseIndexer position, int pageSize) { +- QueryBuilder queryBuilder = sourceConfig.getQueryConfig().getQuery(); +- +- SearchRequest searchRequest = new SearchRequest(sourceConfig.getIndex()); +- SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); +- sourceBuilder.aggregation(buildAggregation(position, pageSize)); +- sourceBuilder.size(0); +- sourceBuilder.query(queryBuilder); +- searchRequest.source(sourceBuilder); +- return searchRequest; +- +- } +- + public AggregationBuilder buildAggregation(Map position, int pageSize) { + cachedCompositeAggregation.aggregateAfter(position); + cachedCompositeAggregation.size(pageSize); +@@ -124,7 +109,7 @@ public class Pivot { + } + + private void runTestQuery(Client client, SourceConfig sourceConfig, final ActionListener listener) { +- SearchRequest searchRequest = buildSearchRequest(sourceConfig, null, TEST_QUERY_PAGE_SIZE); ++ SearchRequest searchRequest = sourceConfig.buildSearchRequest(null, TEST_QUERY_PAGE_SIZE, this); + + client.execute(SearchAction.INSTANCE, searchRequest, ActionListener.wrap(response -> { + if (response == null) { +diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/RestDeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/RestDeprecationInfoAction.java +old mode 100644 +new mode 100755 +index f1670694387..db90f979d0f +--- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/RestDeprecationInfoAction.java ++++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/RestDeprecationInfoAction.java +@@ -8,15 +8,11 @@ package org.elasticsearch.xpack.deprecation; + import org.apache.logging.log4j.LogManager; + import org.apache.logging.log4j.Logger; + import org.elasticsearch.client.node.NodeClient; +-import org.elasticsearch.common.Strings; + import org.elasticsearch.common.logging.DeprecationLogger; + import 
org.elasticsearch.common.settings.Settings; + import org.elasticsearch.rest.BaseRestHandler; + import org.elasticsearch.rest.RestController; + import org.elasticsearch.rest.RestRequest; +-import org.elasticsearch.rest.action.RestToXContentListener; +-import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction; +-import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction.Request; + + import java.io.IOException; + +@@ -42,14 +38,10 @@ public class RestDeprecationInfoAction extends BaseRestHandler { + @Override + public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + if (request.method().equals(RestRequest.Method.GET)) { +- return handleGet(request, client); ++ return client.handleGet(request, this); + } else { + throw new IllegalArgumentException("illegal method [" + request.method() + "] for request [" + request.path() + "]"); + } + } + +- private RestChannelConsumer handleGet(final RestRequest request, NodeClient client) { +- Request infoRequest = new Request(Strings.splitStringByCommaToArray(request.param("index"))); +- return channel -> client.execute(DeprecationInfoAction.INSTANCE, infoRequest, new RestToXContentListener<>(channel)); +- } + } +diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java +old mode 100644 +new mode 100755 +index 130d6deed56..0a04ffb43ac +--- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java ++++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java +@@ -16,9 +16,7 @@ import org.elasticsearch.common.settings.Settings; + import org.elasticsearch.common.unit.TimeValue; + import org.elasticsearch.common.xcontent.XContentParser; + import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; +-import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest.TermBoost; + import org.elasticsearch.protocol.xpack.graph.Hop; +-import org.elasticsearch.protocol.xpack.graph.VertexRequest; + import org.elasticsearch.rest.RestController; + import org.elasticsearch.rest.RestRequest; + import org.elasticsearch.rest.action.RestToXContentListener; +@@ -132,7 +130,7 @@ public class RestGraphAction extends XPackRestHandler { + + if (token == XContentParser.Token.START_ARRAY) { + if (VERTICES_FIELD.match(fieldName, parser.getDeprecationHandler())) { +- parseVertices(parser, currentHop); ++ currentHop.parseVertices(parser, this); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (QUERY_FIELD.match(fieldName, parser.getDeprecationHandler())) { +@@ -155,139 +153,6 @@ public class RestGraphAction extends XPackRestHandler { + } + } + +- private void parseVertices(XContentParser parser, Hop currentHop) +- throws IOException { +- XContentParser.Token token; +- +- String fieldName = null; +- +- while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { +- if (token == XContentParser.Token.START_OBJECT) { +- String field = null; +- Map includes = null; +- HashSet excludes = null; +- int size = 10; +- int minDocCount = 3; +- int shardMinDocCount = 2; +- while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { +- if (token == XContentParser.Token.FIELD_NAME) { +- fieldName = parser.currentName(); +- token = parser.nextToken(); +- } +- if (token == XContentParser.Token.START_ARRAY) { +- if (INCLUDE_FIELD.match(fieldName, 
parser.getDeprecationHandler())) { +- if (excludes != null) { +- throw new ElasticsearchParseException( +- "Graph vertices definition cannot contain both "+INCLUDE_FIELD.getPreferredName()+" and " +- +EXCLUDE_FIELD.getPreferredName()+" clauses", token.name()); +- } +- includes = new HashMap<>(); +- while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { +- if (token == XContentParser.Token.START_OBJECT) { +- String includeTerm = null; +- float boost = 1f; +- while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { +- if (token == XContentParser.Token.FIELD_NAME) { +- fieldName = parser.currentName(); +- } else { +- if (token == XContentParser.Token.VALUE_STRING) { +- if (TERM_FIELD.match(fieldName, parser.getDeprecationHandler())) { +- includeTerm = parser.text(); +- } else { +- throw new ElasticsearchParseException( +- "Graph vertices definition " + INCLUDE_FIELD.getPreferredName() + +- " clause has invalid property:" + fieldName); +- } +- } else if (token == XContentParser.Token.VALUE_NUMBER) { +- if (BOOST_FIELD.match(fieldName, parser.getDeprecationHandler())) { +- boost = parser.floatValue(); +- } else { +- throw new ElasticsearchParseException( +- "Graph vertices definition " + INCLUDE_FIELD.getPreferredName() + +- " clause has invalid property:" + fieldName); +- } +- } else { +- throw new ElasticsearchParseException( +- "Graph vertices definition " + INCLUDE_FIELD.getPreferredName() + +- " clause has invalid property type:"+ token.name()); +- +- } +- } +- } +- if (includeTerm == null) { +- throw new ElasticsearchParseException( +- "Graph vertices definition " + INCLUDE_FIELD.getPreferredName() + +- " clause has missing object property for term"); +- } +- includes.put(includeTerm, new TermBoost(includeTerm, boost)); +- } else if (token == XContentParser.Token.VALUE_STRING) { +- String term = parser.text(); +- includes.put(term, new TermBoost(term, 1f)); +- } else { +- throw new ElasticsearchParseException( +- "Graph vertices definition " + INCLUDE_FIELD.getPreferredName() + +- " clauses must be string terms or Objects with terms and boosts, not" +- + token.name()); +- } +- } +- } else if (EXCLUDE_FIELD.match(fieldName, parser.getDeprecationHandler())) { +- if (includes != null) { +- throw new ElasticsearchParseException( +- "Graph vertices definition cannot contain both "+ INCLUDE_FIELD.getPreferredName()+ +- " and "+EXCLUDE_FIELD.getPreferredName()+" clauses", token.name()); +- } +- excludes = new HashSet<>(); +- while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { +- excludes.add(parser.text()); +- } +- } else { +- throw new ElasticsearchParseException("Illegal property in graph vertices definition " + fieldName, +- token.name()); +- } +- } +- if (token == XContentParser.Token.VALUE_STRING) { +- if (FIELD_NAME_FIELD.match(fieldName, parser.getDeprecationHandler())) { +- field = parser.text(); +- } else { +- throw new ElasticsearchParseException("Unknown string property: [" + fieldName + "]"); +- } +- } +- if (token == XContentParser.Token.VALUE_NUMBER) { +- if (SIZE_FIELD.match(fieldName, parser.getDeprecationHandler())) { +- size = parser.intValue(); +- } else if (MIN_DOC_COUNT_FIELD.match(fieldName, parser.getDeprecationHandler())) { +- minDocCount = parser.intValue(); +- } else if (SHARD_MIN_DOC_COUNT_FIELD.match(fieldName, parser.getDeprecationHandler())) { +- shardMinDocCount = parser.intValue(); +- } else { +- throw new ElasticsearchParseException("Unknown numeric property: [" + fieldName + "]"); +- } +- } +- } +- if 
(field == null) { +- throw new ElasticsearchParseException("Missing field name in graph vertices definition", token.name()); +- } +- VertexRequest vr = currentHop.addVertexRequest(field); +- if (includes != null) { +- for (TermBoost tb : includes.values()) { +- vr.addInclude(tb.getTerm(), tb.getBoost()); +- } +- } +- if (excludes != null) { +- for (String term : excludes) { +- vr.addExclude(term); +- } +- } +- vr.size(size); +- vr.minDocCount(minDocCount); +- vr.shardMinDocCount(shardMinDocCount); +- +- } +- +- } +- +- } +- + + private void parseControls(XContentParser parser, GraphExploreRequest graphRequest) throws IOException { + XContentParser.Token token; +diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +old mode 100644 +new mode 100755 +index 3f8321fa4b1..1c14f8331eb +--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java ++++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +@@ -61,7 +61,6 @@ import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; + import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; + import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState; + import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; +-import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; + import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + import org.elasticsearch.xpack.ml.MlConfigMigrationEligibilityCheck; + import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider; +@@ -304,7 +303,7 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction deleteStateHandler = ActionListener.wrap( +- bulkResponse -> deleteQuantiles(parentTaskClient, jobId, deleteQuantilesHandler), ++ bulkResponse -> parentTaskClient.deleteQuantiles(jobId, deleteQuantilesHandler, this), + failureHandler); + + // Step 1. 
Delete the model state + deleteModelState(parentTaskClient, jobId, deleteStateHandler); + } + +- private void deleteQuantiles(ParentTaskAssigningClient parentTaskClient, String jobId, ActionListener finishedHandler) { +- // The quantiles type and doc ID changed in v5.5 so delete both the old and new format +- DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexPattern()); +- // Just use ID here, not type, as trying to delete different types spams the logs with an exception stack trace +- IdsQueryBuilder query = new IdsQueryBuilder().addIds(Quantiles.documentId(jobId)); +- request.setQuery(query); +- request.setIndicesOptions(MlIndicesUtils.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen())); +- request.setAbortOnVersionConflict(false); +- request.setRefresh(true); +- +- executeAsyncWithOrigin(parentTaskClient, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, ActionListener.wrap( +- response -> finishedHandler.onResponse(true), +- e -> { +- // It's not a problem for us if the index wasn't found - it's equivalent to document not found +- if (e instanceof IndexNotFoundException) { +- finishedHandler.onResponse(true); +- } else { +- finishedHandler.onFailure(e); +- } +- })); +- } +- + private void deleteModelState(ParentTaskAssigningClient parentTaskClient, String jobId, ActionListener listener) { + GetModelSnapshotsAction.Request request = new GetModelSnapshotsAction.Request(jobId, null); + request.setPageParams(new PageParams(0, MAX_SNAPSHOTS_TO_DELETE)); +@@ -447,7 +424,7 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction fieldStats, TimeoutChecker timeoutChecker) { + Collection values = new ArrayList<>(); + for (String snippet : snippets) { +- Map captures = timeoutChecker.grokCaptures(grok, snippet, "full message Grok pattern field extraction"); ++ Map captures = grok.grokCaptures(snippet, "full message Grok pattern field extraction", timeoutChecker); + // If the pattern doesn't match then captures will be null + if (captures == null) { + throw new IllegalStateException("[%{" + grokPatternName + "}] does not match snippet [" + snippet + "]"); +@@ -687,8 +687,8 @@ public final class GrokPatternCreator { + Map> valuesPerField = new HashMap<>(); + + for (String sampleMessage : sampleMessages) { +- Map captures = timeoutChecker.grokCaptures(grok, sampleMessage, +- "full message Grok pattern field extraction"); ++ Map captures = grok.grokCaptures(sampleMessage, ++ "full message Grok pattern field extraction", timeoutChecker); + // If the pattern doesn't match then captures will be null + if (captures == null) { + throw new IllegalStateException("[" + grokPattern + "] does not match snippet [" + sampleMessage + "]"); +diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutChecker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutChecker.java +old mode 100644 +new mode 100755 +index f8a9368b842..e01bff62979 +--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutChecker.java ++++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutChecker.java +@@ -84,26 +84,6 @@ public class TimeoutChecker implements Closeable { + } + } + +- /** +- * Wrapper around {@link Grok#captures} that translates any timeout exception +- * to the style thrown by this class's {@link #check} method. +- * @param grok The grok pattern from which captures are to be extracted. 
+- * @param text The text to match and extract values from. +- * @param where Which stage of the operation is currently in progress? +- * @return A map containing field names and their respective coerced values that matched. +- * @throws ElasticsearchTimeoutException If the operation is found to have taken longer than the permitted time. +- */ +- public Map grokCaptures(Grok grok, String text, String where) { +- +- try { +- return grok.captures(text); +- } finally { +- // If a timeout has occurred then this check will overwrite any timeout exception thrown by Grok.captures() and this +- // is intentional - the exception from this class makes more sense in the context of the find file structure API +- check(where); +- } +- } +- + private void setTimeoutExceeded() { + timeoutExceeded = true; + timeoutCheckerWatchdog.interruptLongRunningThreadIfRegistered(checkedThread); +diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java +old mode 100644 +new mode 100755 +index 0283437d648..e29d2e15c3f +--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java ++++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java +@@ -386,8 +386,8 @@ public final class TimestampFormatFinder { + private static TimestampMatch checkCandidate(CandidateTimestampFormat candidate, String text, @Nullable BitSet numberPosBitSet, + boolean requireFullMatch, TimeoutChecker timeoutChecker) { + if (requireFullMatch) { +- Map captures = timeoutChecker.grokCaptures(candidate.strictFullMatchGrok, text, +- "timestamp format determination"); ++ Map captures = candidate.strictFullMatchGrok.grokCaptures(text, ++ "timestamp format determination", timeoutChecker); + if (captures != null) { + return new TimestampMatch(candidate, "", text, ""); + } +@@ -399,8 +399,8 @@ public final class TimestampFormatFinder { + if (boundsForCandidate.v1() >= 0) { + assert boundsForCandidate.v2() > boundsForCandidate.v1(); + String matchIn = text.substring(boundsForCandidate.v1(), Math.min(boundsForCandidate.v2(), text.length())); +- Map captures = timeoutChecker.grokCaptures(candidate.strictSearchGrok, matchIn, +- "timestamp format determination"); ++ Map captures = candidate.strictSearchGrok.grokCaptures(matchIn, ++ "timestamp format determination", timeoutChecker); + if (captures != null) { + StringBuilder prefaceBuilder = new StringBuilder(); + if (boundsForCandidate.v1() > 0) { +diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java +old mode 100644 +new mode 100755 +index 585b4d1f87d..2cd634b4982 +--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java ++++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java +@@ -248,7 +248,7 @@ public class JobConfigProvider { + Job updatedJob; + try { + // Applying the update may result in a validation error +- updatedJob = update.mergeWithJob(jobBuilder.build(), maxModelMemoryLimit); ++ updatedJob = jobBuilder.build().mergeWithJob(maxModelMemoryLimit, update); + } catch (Exception e) { + updatedJobListener.onFailure(e); + return; +@@ -314,7 +314,7 @@ public class JobConfigProvider { + Job 
updatedJob; + try { + // Applying the update may result in a validation error +- updatedJob = update.mergeWithJob(originalJob, maxModelMemoryLimit); ++ updatedJob = originalJob.mergeWithJob(maxModelMemoryLimit, update); + } catch (Exception e) { + updatedJobListener.onFailure(e); + return; +diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataCountsPersister.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataCountsPersister.java +old mode 100644 +new mode 100755 +index a0017af4b8c..a831232b2b0 +--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataCountsPersister.java ++++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataCountsPersister.java +@@ -14,7 +14,6 @@ import org.elasticsearch.action.index.IndexAction; + import org.elasticsearch.action.index.IndexRequest; + import org.elasticsearch.action.index.IndexResponse; + import org.elasticsearch.client.Client; +-import org.elasticsearch.common.xcontent.ToXContent; + import org.elasticsearch.common.xcontent.XContentBuilder; + import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; + import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; +@@ -39,11 +38,6 @@ public class JobDataCountsPersister { + this.client = client; + } + +- private XContentBuilder serialiseCounts(DataCounts counts) throws IOException { +- XContentBuilder builder = jsonBuilder(); +- return counts.toXContent(builder, ToXContent.EMPTY_PARAMS); +- } +- + /** + * Update the job's data counts stats and figures. + * +@@ -52,7 +46,7 @@ public class JobDataCountsPersister { + * @param listener Action response listener + */ + public void persistDataCounts(String jobId, DataCounts counts, ActionListener listener) { +- try (XContentBuilder content = serialiseCounts(counts)) { ++ try (XContentBuilder content = counts.serialiseCounts(this)) { + final IndexRequest request = new IndexRequest(AnomalyDetectorsIndex.resultsWriteAlias(jobId)) + .id(DataCounts.documentId(jobId)) + .source(content); +diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +old mode 100644 +new mode 100755 +index 9380be0c15b..3176bc4fb48 +--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java ++++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +@@ -72,9 +72,8 @@ public class JobDataDeleter { + indices.add(AnomalyDetectorsIndex.jobResultsAliasedName(modelSnapshot.getJobId())); + } + +- DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(indices.toArray(new String[0])) +- .setRefresh(true) +- .setIndicesOptions(IndicesOptions.lenientExpandOpen()) ++ DeleteByQueryRequest deleteByQueryRequest = IndicesOptions.lenientExpandOpen().setIndicesOptions(new DeleteByQueryRequest(indices.toArray(new String[0])) ++ .setRefresh(true)) + .setQuery(new IdsQueryBuilder().addIds(idsToDelete.toArray(new String[0]))); + + try { +@@ -97,7 +96,7 @@ public class JobDataDeleter { + QueryBuilder query = QueryBuilders.boolQuery() + .filter(QueryBuilders.existsQuery(Result.RESULT_TYPE.getPreferredName())) + .filter(QueryBuilders.rangeQuery(Result.TIMESTAMP.getPreferredName()).gte(cutoffEpochMs)); +- deleteByQueryHolder.dbqRequest.setIndicesOptions(IndicesOptions.lenientExpandOpen()); ++ 
IndicesOptions.lenientExpandOpen().setIndicesOptions(deleteByQueryHolder.dbqRequest); + deleteByQueryHolder.dbqRequest.setQuery(query); + executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, deleteByQueryHolder.dbqRequest, + ActionListener.wrap(r -> listener.onResponse(true), listener::onFailure)); +@@ -110,7 +109,7 @@ public class JobDataDeleter { + DeleteByQueryHolder deleteByQueryHolder = new DeleteByQueryHolder(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)); + deleteByQueryHolder.dbqRequest.setRefresh(false); + +- deleteByQueryHolder.dbqRequest.setIndicesOptions(IndicesOptions.lenientExpandOpen()); ++ IndicesOptions.lenientExpandOpen().setIndicesOptions(deleteByQueryHolder.dbqRequest); + QueryBuilder qb = QueryBuilders.termQuery(Result.IS_INTERIM.getPreferredName(), true); + deleteByQueryHolder.dbqRequest.setQuery(new ConstantScoreQueryBuilder(qb)); + +diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +old mode 100644 +new mode 100755 +index e5d343bb304..161e0be2c70 +--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java ++++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +@@ -503,35 +503,22 @@ public class JobResultsProvider { + Consumer errorHandler) { + String hitId = hit.getId(); + if (DataCounts.documentId(jobId).equals(hitId)) { +- paramsBuilder.setDataCounts(parseSearchHit(hit, DataCounts.PARSER, errorHandler)); ++ paramsBuilder.setDataCounts(hit.parseSearchHit(DataCounts.PARSER, errorHandler, this)); + } else if (hitId.startsWith(ModelSizeStats.documentIdPrefix(jobId))) { +- ModelSizeStats.Builder modelSizeStats = parseSearchHit(hit, ModelSizeStats.LENIENT_PARSER, errorHandler); ++ ModelSizeStats.Builder modelSizeStats = hit.parseSearchHit(ModelSizeStats.LENIENT_PARSER, errorHandler, this); + paramsBuilder.setModelSizeStats(modelSizeStats == null ? null : modelSizeStats.build()); + } else if (hitId.startsWith(ModelSnapshot.documentIdPrefix(jobId))) { +- ModelSnapshot.Builder modelSnapshot = parseSearchHit(hit, ModelSnapshot.LENIENT_PARSER, errorHandler); ++ ModelSnapshot.Builder modelSnapshot = hit.parseSearchHit(ModelSnapshot.LENIENT_PARSER, errorHandler, this); + paramsBuilder.setModelSnapshot(modelSnapshot == null ? 
null : modelSnapshot.build()); + } else if (Quantiles.documentId(jobId).equals(hit.getId())) { +- paramsBuilder.setQuantiles(parseSearchHit(hit, Quantiles.LENIENT_PARSER, errorHandler)); ++ paramsBuilder.setQuantiles(hit.parseSearchHit(Quantiles.LENIENT_PARSER, errorHandler, this)); + } else if (hitId.startsWith(MlFilter.DOCUMENT_ID_PREFIX)) { +- paramsBuilder.addFilter(parseSearchHit(hit, MlFilter.LENIENT_PARSER, errorHandler).build()); ++ paramsBuilder.addFilter(hit.parseSearchHit(MlFilter.LENIENT_PARSER, errorHandler, this).build()); + } else { + errorHandler.accept(new IllegalStateException("Unexpected Id [" + hitId + "]")); + } + } + +- private T parseSearchHit(SearchHit hit, BiFunction objectParser, +- Consumer errorHandler) { +- BytesReference source = hit.getSourceRef(); +- try (InputStream stream = source.streamInput(); +- XContentParser parser = XContentFactory.xContent(XContentType.JSON) +- .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { +- return objectParser.apply(parser, null); +- } catch (IOException e) { +- errorHandler.accept(new ElasticsearchParseException("failed to parse " + hit.getId(), e)); +- return null; +- } +- } +- + /** + * Search for buckets with the parameters in the {@link BucketsQueryBuilder} + * Uses the internal client, so runs as the _xpack user +@@ -976,7 +963,7 @@ public class JobResultsProvider { + LOGGER.trace("No {} for job with id {}", resultDescription, jobId); + handler.accept(new Result<>(null, notFoundSupplier.get())); + } else if (hits.length == 1) { +- handler.accept(new Result<>(hits[0].getIndex(), parseSearchHit(hits[0], objectParser, errorHandler))); ++ handler.accept(new Result<>(hits[0].getIndex(), hits[0].parseSearchHit(objectParser, errorHandler, this))); + } else { + errorHandler.accept(new IllegalStateException("Search for unique [" + resultDescription + "] returned [" + + hits.length + "] hits even though size was 1")); +@@ -1115,7 +1102,7 @@ public class JobResultsProvider { + List events = new ArrayList<>(); + SearchHit[] hits = response.getHits().getHits(); + for (SearchHit hit : hits) { +- ScheduledEvent.Builder event = parseSearchHit(hit, ScheduledEvent.LENIENT_PARSER, handler::onFailure); ++ ScheduledEvent.Builder event = hit.parseSearchHit(ScheduledEvent.LENIENT_PARSER, handler::onFailure, this); + event.eventId(hit.getId()); + events.add(event.build()); + } +@@ -1243,7 +1230,7 @@ public class JobResultsProvider { + List calendars = new ArrayList<>(); + SearchHit[] hits = response.getHits().getHits(); + for (SearchHit hit : hits) { +- calendars.add(parseSearchHit(hit, Calendar.LENIENT_PARSER, listener::onFailure).build()); ++ calendars.add(hit.parseSearchHit(Calendar.LENIENT_PARSER, listener::onFailure, this).build()); + } + + listener.onResponse(new QueryPage<>(calendars, response.getHits().getTotalHits().value, +diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java +old mode 100644 +new mode 100755 +index 4d2c9b76438..72fa504d625 +--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java ++++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java +@@ -103,8 +103,7 @@ public class UnusedStateRemover implements MlDataRemover { + private void executeDeleteUnusedStateDocs(List unusedDocIds, ActionListener listener) { + LOGGER.info("Found [{}] unused 
state documents; attempting to delete", + unusedDocIds.size()); +- DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexPattern()) +- .setIndicesOptions(IndicesOptions.lenientExpandOpen()) ++ DeleteByQueryRequest deleteByQueryRequest = IndicesOptions.lenientExpandOpen().setIndicesOptions(new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexPattern())) + .setQuery(QueryBuilders.idsQuery().addIds(unusedDocIds.toArray(new String[0]))); + client.execute(DeleteByQueryAction.INSTANCE, deleteByQueryRequest, ActionListener.wrap( + response -> { +diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java +old mode 100644 +new mode 100755 +index 2770656279c..1281dc3f825 +--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java ++++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java +@@ -88,7 +88,7 @@ public class TimeoutCheckerTests extends FileStructureTestCase { + + assertBusy(() -> { + ElasticsearchTimeoutException e = expectThrows(ElasticsearchTimeoutException.class, +- () -> timeoutChecker.grokCaptures(grok, randomAlphaOfLength(1000000), "should timeout")); ++ () -> grok.grokCaptures(randomAlphaOfLength(1000000), "should timeout", timeoutChecker)); + assertEquals("Aborting grok captures test during [should timeout] as it has taken longer than the timeout of [" + + timeout + "]", e.getMessage()); + }); +diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java +old mode 100644 +new mode 100755 +index b1879eb07f1..044ac5486b8 +--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java ++++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java +@@ -546,7 +546,7 @@ public class JobConfigProviderIT extends MlSingleNodeTestCase { + private static Job.Builder addCustomRule(Job.Builder job, DetectionRule rule) { + JobUpdate.Builder update1 = new JobUpdate.Builder(job.getId()); + update1.setDetectorUpdates(Collections.singletonList(new JobUpdate.DetectorUpdate(0, null, Collections.singletonList(rule)))); +- Job updatedJob = update1.build().mergeWithJob(job.build(new Date()), null); ++ Job updatedJob = job.build(new Date()).mergeWithJob(null, update1.build()); + return new Job.Builder(updatedJob); + } + +diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ExportBulk.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ExportBulk.java +old mode 100644 +new mode 100755 +index 938f0f57c26..81444f74a8e +--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ExportBulk.java ++++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ExportBulk.java +@@ -68,6 +68,30 @@ public abstract class ExportBulk { + + protected abstract void doFlush(ActionListener listener); + ++ /** ++ * Add {@code docs} and send the {@code bulk}, then respond to the {@code listener}. ++ * @param docs The monitoring documents to send. ++ * @param listener Returns {@code null} when complete, or failure where relevant. 
++ * @param exporters ++ */ ++ public void doExport(final Collection docs, final ActionListener listener, Exporters exporters) { ++ final AtomicReference exceptionRef = new AtomicReference<>(); ++ ++ try { ++ add(docs); ++ } catch (ExportException e) { ++ exceptionRef.set(e); ++ } finally { ++ flush(ActionListener.wrap(r -> { ++ if (exceptionRef.get() == null) { ++ listener.onResponse(null); ++ } else { ++ listener.onFailure(exceptionRef.get()); ++ } ++ }, listener::onFailure)); ++ } ++ } ++ + /** + * This class holds multiple export bulks exposed as a single compound bulk. + */ +diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporters.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporters.java +old mode 100644 +new mode 100755 +index 1b8f5dab9e3..aad36255f4f +--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporters.java ++++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporters.java +@@ -208,7 +208,7 @@ public class Exporters extends AbstractLifecycleComponent { + } else if (docs != null && docs.size() > 0) { + wrapExportBulk(ActionListener.wrap(bulk -> { + if (bulk != null) { +- doExport(bulk, docs, listener); ++ bulk.doExport(docs, listener, this); + } else { + listener.onResponse(null); + } +@@ -218,31 +218,6 @@ public class Exporters extends AbstractLifecycleComponent { + } + } + +- /** +- * Add {@code docs} and send the {@code bulk}, then respond to the {@code listener}. +- * +- * @param bulk The bulk object to send {@code docs} through. +- * @param docs The monitoring documents to send. +- * @param listener Returns {@code null} when complete, or failure where relevant. 
+- */ +- private void doExport(final ExportBulk bulk, final Collection docs, final ActionListener listener) { +- final AtomicReference exceptionRef = new AtomicReference<>(); +- +- try { +- bulk.add(docs); +- } catch (ExportException e) { +- exceptionRef.set(e); +- } finally { +- bulk.flush(ActionListener.wrap(r -> { +- if (exceptionRef.get() == null) { +- listener.onResponse(null); +- } else { +- listener.onFailure(exceptionRef.get()); +- } +- }, listener::onFailure)); +- } +- } +- + /** + * Return all the settings of all the exporters, no matter if HTTP or Local + */ +diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java +old mode 100644 +new mode 100755 +index 4b73a716045..961127d84bb +--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java ++++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java +@@ -20,7 +20,6 @@ import org.elasticsearch.common.Nullable; + import org.elasticsearch.common.unit.TimeValue; + import org.elasticsearch.common.util.set.Sets; + import org.elasticsearch.common.xcontent.XContent; +-import org.elasticsearch.common.xcontent.XContentHelper; + import org.elasticsearch.rest.RestStatus; + + import java.io.IOException; +@@ -181,7 +180,7 @@ public abstract class PublishableHttpResource extends HttpResource { + final XContent xContent, + final int minimumVersion) { + final CheckedFunction responseChecker = +- (response) -> shouldReplaceResource(response, xContent, resourceName, minimumVersion); ++ (response) -> response.shouldReplaceResource(xContent, resourceName, minimumVersion, this); + + checkForResource(client, listener, logger, + resourceBasePath, resourceName, resourceType, resourceOwnerName, resourceOwnerType, +@@ -416,47 +415,6 @@ public abstract class PublishableHttpResource extends HttpResource { + }); + } + +- /** +- * Determine if the current resource should replaced the checked one based on its version (or lack thereof). +- *
<p>
+- * This expects a response like (where {@code resourceName} is replaced with its value): +- * <pre><code>
+-     * {
+-     *   "resourceName": {
+-     *     "version": 6000002
+-     *   }
+-     * }
+-     * </code></pre>
+- * +- * @param response The filtered response from the _template/{name} or _ingest/pipeline/{name} resource APIs +- * @param xContent The XContent parser to use +- * @param resourceName The name of the looked up resource, which is expected to be the top-level key +- * @param minimumVersion The minimum version allowed without being replaced (expected to be the last updated version). +- * @return {@code true} represents that it should be replaced. {@code false} that it should be left alone. +- * @throws IOException if any issue occurs while parsing the {@code xContent} {@code response}. +- * @throws RuntimeException if the response format is changed. +- */ +- protected boolean shouldReplaceResource(final Response response, final XContent xContent, +- final String resourceName, final int minimumVersion) +- throws IOException { +- // no named content used; so EMPTY is fine +- final Map resources = XContentHelper.convertToMap(xContent, response.getEntity().getContent(), false); +- +- // if it's empty, then there's no version in the response thanks to filter_path +- if (resources.isEmpty() == false) { +- @SuppressWarnings("unchecked") +- final Map resource = (Map) resources.get(resourceName); +- final Object version = resource != null ? resource.get("version") : null; +- +- // the version in the template is expected to include the alpha/beta/rc codes as well +- if (version instanceof Number) { +- return ((Number) version).intValue() < minimumVersion; +- } +- } +- +- return true; +- } +- + /** + * A useful placeholder for {@link CheckedFunction}s that want to always return {@code true}. + * +diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestTests.java +old mode 100644 +new mode 100755 +index 5113371f2b3..01041fa1ba5 +--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestTests.java ++++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestTests.java +@@ -123,7 +123,7 @@ public class MonitoringBulkRequestTests extends ESTestCase { + final long interval = randomNonNegativeLong(); + + final MonitoringBulkRequest bulkRequest = new MonitoringBulkRequest(); +- bulkRequest.add(system, content.bytes(), xContentType, timestamp, interval); ++ system.add(content.bytes(), xContentType, timestamp, interval, bulkRequest); + + final Collection bulkDocs = bulkRequest.getDocs(); + assertNotNull(bulkDocs); +@@ -183,7 +183,7 @@ public class MonitoringBulkRequestTests extends ESTestCase { + + final MonitoringBulkRequest bulkRequest = new MonitoringBulkRequest(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> +- bulkRequest.add(randomFrom(MonitoredSystem.values()), content.bytes(), xContentType, 0L, 0L) ++ randomFrom(MonitoredSystem.values()).add(content.bytes(), xContentType, 0L, 0L, bulkRequest) + ); + + assertThat(e.getMessage(), containsString("source is missing for monitoring document [][doc][" + nbDocs + "]")); +@@ -220,7 +220,7 @@ public class MonitoringBulkRequestTests extends ESTestCase { + + final MonitoringBulkRequest bulkRequest = new MonitoringBulkRequest(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> +- bulkRequest.add(randomFrom(MonitoredSystem.values()), content.bytes(), xContentType, 0L, 0L) ++ 
randomFrom(MonitoredSystem.values()).add(content.bytes(), xContentType, 0L, 0L, bulkRequest) + ); + + assertThat(e.getMessage(), containsString("unrecognized index name [" + indexName + "]")); +diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java +old mode 100644 +new mode 100755 +index dc4e2410f29..4c42339fa34 +--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java ++++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java +@@ -260,7 +260,7 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc + when(response.getEntity()).thenReturn(entity); + when(entity.getContent()).thenThrow(new IOException("TEST - expected")); + +- expectThrows(IOException.class, () -> resource.shouldReplaceResource(response, xContent, resourceName, randomInt())); ++ expectThrows(IOException.class, () -> response.shouldReplaceResource(xContent, resourceName, randomInt(), resource)); + } + + public void testShouldReplaceResourceThrowsExceptionForMalformedResponse() { +@@ -270,7 +270,7 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc + + when(response.getEntity()).thenReturn(entity); + +- expectThrows(RuntimeException.class, () -> resource.shouldReplaceResource(response, xContent, resourceName, randomInt())); ++ expectThrows(RuntimeException.class, () -> response.shouldReplaceResource(xContent, resourceName, randomInt(), resource)); + } + + public void testShouldReplaceResourceReturnsTrueVersionIsNotExpected() throws IOException { +@@ -281,7 +281,7 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc + + when(response.getEntity()).thenReturn(entity); + +- assertThat(resource.shouldReplaceResource(response, xContent, resourceName, minimumVersion), is(true)); ++ assertThat(response.shouldReplaceResource(xContent, resourceName, minimumVersion, resource), is(true)); + } + + public void testShouldReplaceResourceChecksVersion() throws IOException { +@@ -297,7 +297,7 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc + + when(response.getEntity()).thenReturn(entity); + +- assertThat(resource.shouldReplaceResource(response, xContent, resourceName, minimumVersion), is(shouldReplace)); ++ assertThat(response.shouldReplaceResource(xContent, resourceName, minimumVersion, resource), is(shouldReplace)); + } + + @SuppressLoggerChecks(reason = "mock logger used") +diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +old mode 100644 +new mode 100755 +index 2a1308353d6..bc45e6ae887 +--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java ++++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +@@ -135,7 +135,7 @@ public class TransportRollupSearchAction extends TransportAction listener) { +- try { +- final OpenIdConnectPrepareAuthenticationResponse authenticationResponse = +- realm.buildAuthenticationRequestUri(state, nonce, loginHint); +- listener.onResponse(authenticationResponse); 
+- } catch (ElasticsearchException e) { +- listener.onFailure(e); +- } +- } + } +diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java +old mode 100644 +new mode 100755 +index b4ee8b677c1..0db7da6d5ad +--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java ++++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java +@@ -19,7 +19,6 @@ import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheRespons + import org.elasticsearch.xpack.core.security.authc.Realm; + import org.elasticsearch.xpack.security.authc.AuthenticationService; + import org.elasticsearch.xpack.security.authc.Realms; +-import org.elasticsearch.xpack.security.authc.support.CachingRealm; + + import java.util.List; + +@@ -59,7 +58,7 @@ public class TransportClearRealmCacheAction extends TransportNodesAction listener.onResponse( +- new SamlInvalidateSessionResponse(realm.name(), count, buildLogoutResponseUrl(realm, result)) ++ new SamlInvalidateSessionResponse(realm.name(), count, realm.buildLogoutResponseUrl(result, this)) + ), listener::onFailure)); + } catch (ElasticsearchSecurityException e) { + logger.info("Failed to invalidate SAML session", e); +@@ -77,11 +76,6 @@ public final class TransportSamlInvalidateSessionAction + } + } + +- private String buildLogoutResponseUrl(SamlRealm realm, SamlLogoutRequestHandler.Result result) { +- final LogoutResponse response = realm.buildLogoutResponse(result.getRequestId()); +- return new SamlRedirect(response, realm.getSigningConfiguration()).getRedirectUrl(result.getRelayState()); +- } +- + private void findAndInvalidateTokens(SamlRealm realm, SamlLogoutRequestHandler.Result result, ActionListener listener) { + final Map tokenMetadata = realm.createTokenMetadata(result.getNameId(), result.getSession()); + if (Strings.isNullOrEmpty((String) tokenMetadata.get(SamlRealm.TOKEN_METADATA_NAMEID_VALUE))) { +diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +old mode 100644 +new mode 100755 +index 74d4cce8d02..a0fad256d3a +--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java ++++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +@@ -233,7 +233,7 @@ public class ApiKeyService { + private void createApiKeyAndIndexIt(Authentication authentication, CreateApiKeyRequest request, Set roleDescriptorSet, + ActionListener listener) { + final Instant created = clock.instant(); +- final Instant expiration = getApiKeyExpiration(created, request); ++ final Instant expiration = request.getApiKeyExpiration(created, this); + final SecureString apiKey = UUIDs.randomBase64UUIDSecureString(); + final Version version = clusterService.state().nodes().getMinNodeVersion(); + +@@ -571,14 +571,6 @@ public class ApiKeyService { + } + } + +- private Instant getApiKeyExpiration(Instant now, CreateApiKeyRequest request) { +- if (request.getExpiration() != null) { +- return now.plusSeconds(request.getExpiration().getSeconds()); +- } else { +- return null; +- } +- } +- + private boolean isEnabled() { + return enabled && 
licenseState.isApiKeyServiceAllowed(); + } +diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java +old mode 100644 +new mode 100755 +index ac933dcfef8..62ea14eff7d +--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java ++++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java +@@ -19,6 +19,7 @@ import com.nimbusds.openid.connect.sdk.AuthenticationRequest; + import com.nimbusds.openid.connect.sdk.LogoutRequest; + import com.nimbusds.openid.connect.sdk.Nonce; + import org.apache.logging.log4j.Logger; ++import org.elasticsearch.ElasticsearchException; + import org.elasticsearch.ElasticsearchSecurityException; + + import org.elasticsearch.action.ActionListener; +@@ -42,6 +43,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmSettings; + import org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings; + import org.elasticsearch.xpack.core.security.user.User; + import org.elasticsearch.xpack.core.ssl.SSLService; ++import org.elasticsearch.xpack.security.action.oidc.TransportOpenIdConnectPrepareAuthenticationAction; + import org.elasticsearch.xpack.security.authc.TokenService; + import org.elasticsearch.xpack.security.authc.support.DelegatedAuthorizationSupport; + import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; +@@ -375,6 +377,17 @@ public class OpenIdConnectRealm extends Realm implements Releasable { + openIdConnectAuthenticator.close(); + } + ++ public void prepareAuthenticationResponse(String state, String nonce, String loginHint, ++ ActionListener listener, TransportOpenIdConnectPrepareAuthenticationAction transportOpenIdConnectPrepareAuthenticationAction) { ++ try { ++ final OpenIdConnectPrepareAuthenticationResponse authenticationResponse = ++ buildAuthenticationRequestUri(state, nonce, loginHint); ++ listener.onResponse(authenticationResponse); ++ } catch (ElasticsearchException e) { ++ listener.onFailure(e); ++ } ++ } ++ + static final class ClaimParser { + private final String name; + private final Function> parser; +diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java +old mode 100644 +new mode 100755 +index be45ff76ec4..e71de10c497 +--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java ++++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRealm.java +@@ -49,6 +49,7 @@ import org.elasticsearch.xpack.core.ssl.SSLConfiguration; + import org.elasticsearch.xpack.core.ssl.CertParsingUtils; + import org.elasticsearch.xpack.core.ssl.SSLService; + import org.elasticsearch.xpack.core.ssl.X509KeyPairSettings; ++import org.elasticsearch.xpack.security.action.saml.TransportSamlInvalidateSessionAction; + import org.elasticsearch.xpack.security.authc.Realms; + import org.elasticsearch.xpack.security.authc.TokenService; + import org.elasticsearch.xpack.security.authc.support.DelegatedAuthorizationSupport; +@@ -552,6 +553,11 @@ public final class SamlRealm extends Realm implements Releasable { + }); + } + ++ public String buildLogoutResponseUrl(SamlLogoutRequestHandler.Result result, TransportSamlInvalidateSessionAction 
transportSamlInvalidateSessionAction) { ++ final LogoutResponse response = buildLogoutResponse(result.getRequestId()); ++ return new SamlRedirect(response, getSigningConfiguration()).getRedirectUrl(result.getRelayState()); ++ } ++ + private static final class PrivilegedHTTPMetadataResolver extends HTTPMetadataResolver { + + PrivilegedHTTPMetadataResolver(final HttpClient client, final String metadataURL) throws ResolverException { +diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +old mode 100644 +new mode 100755 +index afbe7a1231b..23f83520355 +--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java ++++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +@@ -59,8 +59,6 @@ import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; + import org.elasticsearch.xpack.core.security.user.AnonymousUser; + import org.elasticsearch.xpack.core.security.user.SystemUser; + import org.elasticsearch.xpack.core.security.user.User; +-import org.elasticsearch.xpack.core.security.user.XPackSecurityUser; +-import org.elasticsearch.xpack.core.security.user.XPackUser; + import org.elasticsearch.xpack.security.audit.AuditLevel; + import org.elasticsearch.xpack.security.audit.AuditTrailService; + import org.elasticsearch.xpack.security.audit.AuditUtil; +@@ -164,7 +162,7 @@ public class AuthorizationService { + if (auditId == null) { + // We would like to assert that there is an existing request-id, but if this is a system action, then that might not be + // true because the request-id is generated during authentication +- if (isInternalUser(authentication.getUser()) != false) { ++ if (authentication.getUser().isInternalUser(this) != false) { + auditId = AuditUtil.getOrGenerateRequestId(threadContext); + } else { + auditTrail.tamperedRequest(null, authentication.getUser(), action, originalRequest); +@@ -356,7 +354,7 @@ public class AuthorizationService { + + private AuthorizationEngine getAuthorizationEngineForUser(final User user) { + if (rbacEngine != authorizationEngine && licenseState.isAuthorizationEngineAllowed()) { +- if (ClientReservedRealm.isReserved(user.principal(), settings) || isInternalUser(user)) { ++ if (ClientReservedRealm.isReserved(user.principal(), settings) || user.isInternalUser(this)) { + return rbacEngine; + } else { + return authorizationEngine; +@@ -405,10 +403,6 @@ public class AuthorizationService { + return request; + } + +- private boolean isInternalUser(User user) { +- return SystemUser.is(user) || XPackUser.is(user) || XPackSecurityUser.is(user); +- } +- + private void authorizeRunAs(final RequestInfo requestInfo, final AuthorizationInfo authzInfo, + final ActionListener listener) { + final Authentication authentication = requestInfo.getAuthentication(); +diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +old mode 100644 +new mode 100755 +index b62cb44ac02..1554e794266 +--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java ++++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +@@ -345,8 +345,7 @@ public class 
SecurityIndexManager implements ClusterStateListener { + logger.info("security index does not exist. Creating [{}] with alias [{}]", indexState.concreteIndexName, this.aliasName); + final byte[] mappingSource = mappingSourceSupplier.get(); + final Tuple mappingAndSettings = parseMappingAndSettingsFromTemplateBytes(mappingSource); +- CreateIndexRequest request = new CreateIndexRequest(indexState.concreteIndexName) +- .alias(new Alias(this.aliasName)) ++ CreateIndexRequest request = new Alias(this.aliasName).alias(new CreateIndexRequest(indexState.concreteIndexName)) + .mapping(MapperService.SINGLE_MAPPING_NAME, mappingAndSettings.v1(), XContentType.JSON) + .waitForActiveShards(ActiveShardCount.ALL) + .settings(mappingAndSettings.v2()); +diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java +old mode 100644 +new mode 100755 +index fa0766d08ff..48eae1a4a9d +--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java ++++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java +@@ -170,7 +170,7 @@ public class SSLDriver implements AutoCloseable { + + private SSLEngineResult unwrap(InboundChannelBuffer networkBuffer, InboundChannelBuffer applicationBuffer) throws SSLException { + while (true) { +- ensureApplicationBufferSize(applicationBuffer); ++ applicationBuffer.ensureApplicationBufferSize(this); + ByteBuffer networkReadBuffer = networkReadPage.byteBuffer(); + networkReadBuffer.clear(); + ByteBufferUtils.copyBytes(networkBuffer.sliceBuffersTo(Math.min(networkBuffer.getIndex(), packetSize)), networkReadBuffer); +@@ -195,7 +195,7 @@ public class SSLDriver implements AutoCloseable { + case BUFFER_OVERFLOW: + // There is not enough space in the application buffer for the decrypted message. Expand + // the application buffer to ensure that it has enough space. +- ensureApplicationBufferSize(applicationBuffer); ++ applicationBuffer.ensureApplicationBufferSize(this); + break; + case CLOSED: + assert engine.isInboundDone() : "We received close_notify so read should be done"; +@@ -249,13 +249,6 @@ public class SSLDriver implements AutoCloseable { + } + } + +- private void ensureApplicationBufferSize(InboundChannelBuffer applicationBuffer) { +- int applicationBufferSize = engine.getSession().getApplicationBufferSize(); +- if (applicationBuffer.getRemaining() < applicationBufferSize) { +- applicationBuffer.ensureCapacity(applicationBuffer.getIndex() + engine.getSession().getApplicationBufferSize()); +- } +- } +- + // There are two potential modes for the driver to be in - REGULAR or CLOSE. REGULAR is the initial mode. + // During this mode the initial data that is read and written will be related to the TLS handshake + // process. Application related data cannot be encrypted until the handshake is complete. 
Once the +diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +old mode 100644 +new mode 100755 +index b370c8e2b6b..664c0eea1c3 +--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java ++++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +@@ -620,7 +620,7 @@ public class AuthorizationServiceTests extends ESTestCase { + + public void testCreateIndexWithAliasWithoutPermissions() throws IOException { + CreateIndexRequest request = new CreateIndexRequest("a"); +- request.alias(new Alias("a2")); ++ new Alias("a2").alias(request); + ClusterState state = mockEmptyMetaData(); + RoleDescriptor role = new RoleDescriptor("a_all", null, + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); +@@ -642,7 +642,7 @@ public class AuthorizationServiceTests extends ESTestCase { + + public void testCreateIndexWithAlias() throws IOException { + CreateIndexRequest request = new CreateIndexRequest("a"); +- request.alias(new Alias("a2")); ++ new Alias("a2").alias(request); + ClusterState state = mockEmptyMetaData(); + RoleDescriptor role = new RoleDescriptor("a_all", null, + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a", "a2").privileges("all").build()}, null); +@@ -1242,7 +1242,7 @@ public class AuthorizationServiceTests extends ESTestCase { + case 0: + return Tuple.tuple(MultiGetAction.NAME, new MultiGetRequest().add("index", "type", "id")); + case 1: +- return Tuple.tuple(MultiSearchAction.NAME, new MultiSearchRequest().add(new SearchRequest())); ++ return Tuple.tuple(MultiSearchAction.NAME, new SearchRequest().add(new MultiSearchRequest())); + case 2: + return Tuple.tuple(MultiTermVectorsAction.NAME, new MultiTermVectorsRequest().add("index", "type", "id")); + case 3: +diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java +old mode 100644 +new mode 100755 +index 9a1d26e6357..70e463d5c2b +--- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java ++++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java +@@ -21,10 +21,8 @@ import org.elasticsearch.xpack.sql.cli.command.FetchSizeCliCommand; + import org.elasticsearch.xpack.sql.cli.command.PrintLogoCommand; + import org.elasticsearch.xpack.sql.cli.command.ServerInfoCliCommand; + import org.elasticsearch.xpack.sql.cli.command.ServerQueryCliCommand; +-import org.elasticsearch.xpack.sql.client.ClientException; + import org.elasticsearch.xpack.sql.client.ConnectionConfiguration; + import org.elasticsearch.xpack.sql.client.HttpClient; +-import org.elasticsearch.xpack.sql.client.Version; + import org.jline.terminal.TerminalBuilder; + + import java.io.IOException; +@@ -125,7 +123,7 @@ public class Cli extends LoggingAwareCommand { + CliSession cliSession = new CliSession(new HttpClient(con)); + cliSession.setDebug(debug); + if (checkConnection) { +- checkConnection(cliSession, cliTerminal, con); ++ con.checkConnection(cliSession, cliTerminal, this); + } + new CliRepl(cliTerminal, cliSession, cliCommand).execute(); + } finally { +@@ -133,30 +131,4 @@ public class Cli extends LoggingAwareCommand { + } + } + +- private void 
checkConnection(CliSession cliSession, CliTerminal cliTerminal, ConnectionConfiguration con) throws UserException { +- try { +- cliSession.checkConnection(); +- } catch (ClientException ex) { +- if (cliSession.isDebug()) { +- cliTerminal.error("Client Exception", ex.getMessage()); +- cliTerminal.println(); +- cliTerminal.printStackTrace(ex); +- cliTerminal.flush(); +- } +- if (ex.getCause() != null && ex.getCause() instanceof ConnectException) { +- // Most likely Elasticsearch is not running +- throw new UserException(ExitCodes.IO_ERROR, +- "Cannot connect to the server " + con.connectionString() + " - " + ex.getCause().getMessage()); +- } else if (ex.getCause() != null && ex.getCause() instanceof SQLInvalidAuthorizationSpecException) { +- throw new UserException(ExitCodes.NOPERM, +- "Cannot establish a secure connection to the server " + +- con.connectionString() + " - " + ex.getCause().getMessage()); +- } else { +- // Most likely we connected to something other than Elasticsearch +- throw new UserException(ExitCodes.DATA_ERROR, +- "Cannot communicate with the server " + con.connectionString() + +- ". This version of CLI only works with Elasticsearch version " + Version.CURRENT.toString()); +- } +- } +- } + } +diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java +old mode 100644 +new mode 100755 +index 346592df5d8..6adcb15ac87 +--- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java ++++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java +@@ -17,24 +17,12 @@ public abstract class AbstractServerCliCommand implements CliCommand { + try { + return doHandle(terminal, cliSession, line); + } catch (RuntimeException e) { +- handleExceptionWhileCommunicatingWithServer(terminal, cliSession, e); ++ cliSession.handleExceptionWhileCommunicatingWithServer(terminal, e, this); + } + return true; + } + + protected abstract boolean doHandle(CliTerminal cliTerminal, CliSession cliSession, String line); + +- /** +- * Handle an exception while communication with the server. Extracted +- * into a method so that tests can bubble the failure. +- */ +- protected void handleExceptionWhileCommunicatingWithServer(CliTerminal terminal, CliSession cliSession, RuntimeException e) { +- terminal.line().error("Communication error [").param(e.getMessage() == null ? 
e.getClass().getName() : e.getMessage()).error("]") +- .ln(); +- if (cliSession.isDebug()) { +- terminal.printStackTrace(e); +- } +- } +- + + } +diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java +old mode 100644 +new mode 100755 +index f5b91704aea..e476a06a016 +--- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java ++++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java +@@ -5,6 +5,7 @@ + */ + package org.elasticsearch.xpack.sql.cli.command; + ++import org.elasticsearch.xpack.sql.cli.CliTerminal; + import org.elasticsearch.xpack.sql.client.HttpClient; + import org.elasticsearch.xpack.sql.client.ClientException; + import org.elasticsearch.xpack.sql.client.Version; +@@ -71,4 +72,19 @@ public class CliSession { + Version.CURRENT.toString()); + } + } ++ ++ /** ++ * Handle an exception while communication with the server. Extracted ++ * into a method so that tests can bubble the failure. ++ * @param terminal ++ * @param e ++ * @param abstractServerCliCommand ++ */ ++ public void handleExceptionWhileCommunicatingWithServer(CliTerminal terminal, RuntimeException e, AbstractServerCliCommand abstractServerCliCommand) { ++ terminal.line().error("Communication error [").param(e.getMessage() == null ? e.getClass().getName() : e.getMessage()).error("]") ++ .ln(); ++ if (isDebug()) { ++ terminal.printStackTrace(e); ++ } ++ } + } +diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ConnectionConfiguration.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ConnectionConfiguration.java +old mode 100644 +new mode 100755 +index 591762b18a9..8baa3910691 +--- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ConnectionConfiguration.java ++++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ConnectionConfiguration.java +@@ -5,6 +5,12 @@ + */ + package org.elasticsearch.xpack.sql.client; + ++import org.elasticsearch.cli.ExitCodes; ++import org.elasticsearch.cli.UserException; ++import org.elasticsearch.xpack.sql.cli.Cli; ++import org.elasticsearch.xpack.sql.cli.CliTerminal; ++import org.elasticsearch.xpack.sql.cli.command.CliSession; ++ + import java.net.URI; + import java.net.URISyntaxException; + import java.util.Arrays; +@@ -240,4 +246,30 @@ public class ConnectionConfiguration { + return connectionString; + } + ++ public void checkConnection(CliSession cliSession, CliTerminal cliTerminal, Cli cli) throws UserException { ++ try { ++ cliSession.checkConnection(); ++ } catch (ClientException ex) { ++ if (cliSession.isDebug()) { ++ cliTerminal.error("Client Exception", ex.getMessage()); ++ cliTerminal.println(); ++ cliTerminal.printStackTrace(ex); ++ cliTerminal.flush(); ++ } ++ if (ex.getCause() != null && ex.getCause() instanceof ConnectException) { ++ // Most likely Elasticsearch is not running ++ throw new UserException(ExitCodes.IO_ERROR, ++ "Cannot connect to the server " + connectionString() + " - " + ex.getCause().getMessage()); ++ } else if (ex.getCause() != null && ex.getCause() instanceof SQLInvalidAuthorizationSpecException) { ++ throw new UserException(ExitCodes.NOPERM, ++ "Cannot establish a secure connection to the server " + ++ connectionString() + " - " + ex.getCause().getMessage()); ++ } else { ++ // Most likely we 
connected to something other than Elasticsearch ++ throw new UserException(ExitCodes.DATA_ERROR, ++ "Cannot communicate with the server " + connectionString() + ++ ". This version of CLI only works with Elasticsearch version " + Version.CURRENT.toString()); ++ } ++ } ++ } + } +diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzer.java +old mode 100644 +new mode 100755 +index 16e35cd8638..26c7780bd66 +--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzer.java ++++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzer.java +@@ -6,7 +6,6 @@ + package org.elasticsearch.xpack.sql.analysis.analyzer; + + import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; +-import org.elasticsearch.xpack.sql.plan.logical.UnresolvedRelation; + + import java.util.ArrayList; + import java.util.List; +@@ -34,17 +33,7 @@ public class PreAnalyzer { + return PreAnalysis.EMPTY; + } + +- return doPreAnalyze(plan); ++ return plan.doPreAnalyze(this); + } + +- private PreAnalysis doPreAnalyze(LogicalPlan plan) { +- List indices = new ArrayList<>(); +- +- plan.forEachUp(p -> indices.add(new TableInfo(p.table(), p.frozen())), UnresolvedRelation.class); +- +- // mark plan as preAnalyzed (if it were marked, there would be no analysis) +- plan.forEachUp(LogicalPlan::setPreAnalyzed); +- +- return new PreAnalysis(indices); +- } +-} +\ No newline at end of file ++} +diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java +old mode 100644 +new mode 100755 +index 2fb5028e987..3e56e5a265b +--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java ++++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java +@@ -13,17 +13,14 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; + import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; + import org.elasticsearch.action.admin.indices.get.GetIndexRequest; + import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; +-import org.elasticsearch.action.admin.indices.get.GetIndexResponse; + import org.elasticsearch.action.fieldcaps.FieldCapabilities; + import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; + import org.elasticsearch.action.support.IndicesOptions; + import org.elasticsearch.action.support.IndicesOptions.Option; + import org.elasticsearch.action.support.IndicesOptions.WildcardStates; + import org.elasticsearch.client.Client; +-import org.elasticsearch.cluster.metadata.AliasMetaData; + import org.elasticsearch.common.Strings; + import org.elasticsearch.index.IndexNotFoundException; +-import org.elasticsearch.index.IndexSettings; + import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; + import org.elasticsearch.xpack.sql.type.DataType; + import org.elasticsearch.xpack.sql.type.DateEsField; +@@ -219,52 +216,14 @@ public class IndexResolver { + } + + client.admin().indices().getIndex(indexRequest, +- wrap(response -> filterResults(javaRegex, aliases, response, retrieveIndices, retrieveFrozenIndices, listener), ++ wrap(response -> aliases.filterResults(javaRegex, response, retrieveIndices, retrieveFrozenIndices, listener, this), + 
listener::onFailure)); + + } else { +- filterResults(javaRegex, aliases, null, false, false, listener); ++ aliases.filterResults(javaRegex, null, false, false, listener, this); + } + } + +- private void filterResults(String javaRegex, GetAliasesResponse aliases, GetIndexResponse indices, +- // these are needed to filter out the different results from the same index response +- boolean retrieveIndices, +- boolean retrieveFrozenIndices, +- ActionListener> listener) { +- +- // since the index name does not support ?, filter the results manually +- Pattern pattern = javaRegex != null ? Pattern.compile(javaRegex) : null; +- +- Set result = new TreeSet<>(Comparator.comparing(IndexInfo::name)); +- // filter aliases (if present) +- if (aliases != null) { +- for (ObjectCursor> cursor : aliases.getAliases().values()) { +- for (AliasMetaData amd : cursor.value) { +- String alias = amd.alias(); +- if (alias != null && (pattern == null || pattern.matcher(alias).matches())) { +- result.add(new IndexInfo(alias, IndexType.ALIAS)); +- } +- } +- } +- } +- +- // filter indices (if present) +- String[] indicesNames = indices != null ? indices.indices() : null; +- if (indicesNames != null) { +- for (String indexName : indicesNames) { +- boolean isFrozen = retrieveFrozenIndices +- && IndexSettings.INDEX_SEARCH_THROTTLED.get(indices.getSettings().get(indexName)) == Boolean.TRUE; +- +- if (pattern == null || pattern.matcher(indexName).matches()) { +- result.add(new IndexInfo(indexName, isFrozen ? IndexType.FROZEN_INDEX : IndexType.STANDARD_INDEX)); +- } +- } +- } +- +- listener.onResponse(result); +- } +- + /** + * Resolves a pattern to one (potentially compound meaning that spawns multiple indices) mapping. + */ +@@ -537,4 +496,4 @@ public class IndexResolver { + foundIndices.sort(Comparator.comparing(EsIndex::name)); + return foundIndices; + } +-} +\ No newline at end of file ++} +diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LogicalPlan.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LogicalPlan.java +old mode 100644 +new mode 100755 +index 79614242012..879eb9132f2 +--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LogicalPlan.java ++++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LogicalPlan.java +@@ -5,6 +5,8 @@ + */ + package org.elasticsearch.xpack.sql.plan.logical; + ++import org.elasticsearch.xpack.sql.analysis.analyzer.PreAnalyzer; ++import org.elasticsearch.xpack.sql.analysis.analyzer.TableInfo; + import org.elasticsearch.xpack.sql.capabilities.Resolvable; + import org.elasticsearch.xpack.sql.capabilities.Resolvables; + import org.elasticsearch.xpack.sql.plan.QueryPlan; +@@ -18,6 +20,17 @@ import java.util.List; + */ + public abstract class LogicalPlan extends QueryPlan implements Resolvable { + ++ public PreAnalyzer.PreAnalysis doPreAnalyze(PreAnalyzer preAnalyzer) { ++ List indices = new ArrayList<>(); ++ ++ forEachUp(p -> indices.add(new TableInfo(p.table(), p.frozen())), UnresolvedRelation.class); ++ ++ // mark plan as preAnalyzed (if it were marked, there would be no analysis) ++ forEachUp(LogicalPlan::setPreAnalyzed); ++ ++ return new PreAnalyzer.PreAnalysis(indices); ++ } ++ + /** + * Order is important in the enum; any values should be added at the end. 
+ */ +diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +old mode 100644 +new mode 100755 +index 802d6d37b7c..852d3d8226a +--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java ++++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +@@ -488,19 +488,19 @@ class QueryFolder extends RuleExecutor { + if (sfa.orderBy() instanceof NamedExpression) { + Attribute at = ((NamedExpression) sfa.orderBy()).toAttribute(); + at = qContainer.aliases().getOrDefault(at, at); +- qContainer = qContainer.addSort(new AttributeSort(at, direction, missing)); ++ qContainer = new AttributeSort(at, direction, missing).addSort(qContainer); + } else if (!sfa.orderBy().foldable()) { + // ignore constant + throw new PlanningException("does not know how to order by expression {}", sfa.orderBy()); + } + } else { + // nope, use scripted sorting +- qContainer = qContainer.addSort(new ScriptSort(sfa.script(), direction, missing)); ++ qContainer = new ScriptSort(sfa.script(), direction, missing).addSort(qContainer); + } + } else if (attr instanceof ScoreAttribute) { +- qContainer = qContainer.addSort(new ScoreSort(direction, missing)); ++ qContainer = new ScoreSort(direction, missing).addSort(qContainer); + } else { +- qContainer = qContainer.addSort(new AttributeSort(attr, direction, missing)); ++ qContainer = new AttributeSort(attr, direction, missing).addSort(qContainer); + } + } + } +diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/Aggs.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/Aggs.java +old mode 100644 +new mode 100755 +index 94f854c29f0..940fa70bbd4 +--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/Aggs.java ++++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/Aggs.java +@@ -138,18 +138,6 @@ public class Aggs { + return null; + } + +- public Aggs updateGroup(GroupByKey group) { +- List groups = new ArrayList<>(this.groups); +- for (int i = 0; i < groups.size(); i++) { +- GroupByKey g = groups.get(i); +- if (group.id().equals(g.id())) { +- groups.set(i, group); +- return with(groups); +- } +- } +- throw new SqlIllegalArgumentException("Could not find group named {}", group.id()); +- } +- + public Aggs with(List groups) { + return new Aggs(groups, simpleAggs, pipelineAggs); + } +diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java +old mode 100644 +new mode 100755 +index df207269eec..6128f16948c +--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java ++++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java +@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.querydsl.agg; + + import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; + import org.elasticsearch.search.aggregations.support.ValueType; ++import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; + import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; + import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction; + import org.elasticsearch.xpack.sql.type.DataType; +@@ -82,4 +83,16 @@ public abstract class GroupByKey extends Agg { + && 
Objects.equals(script, ((GroupByKey) obj).script) + && Objects.equals(direction, ((GroupByKey) obj).direction); + } ++ ++ public Aggs updateGroup(Aggs aggs) { ++ List groups = new ArrayList<>(aggs.groups()); ++ for (int i = 0; i < groups.size(); i++) { ++ GroupByKey g = groups.get(i); ++ if (id().equals(g.id())) { ++ groups.set(i, this); ++ return aggs.with(groups); ++ } ++ } ++ throw new SqlIllegalArgumentException("Could not find group named {}", id()); ++ } + } +diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java +old mode 100644 +new mode 100755 +index 827eade2e59..1aba50e9ed4 +--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java ++++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java +@@ -283,12 +283,6 @@ public class QueryContainer { + return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, procs, sort, limit, trackHits, includeFrozen); + } + +- public QueryContainer addSort(Sort sortable) { +- Set sort = new LinkedHashSet<>(this.sort); +- sort.add(sortable); +- return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen); +- } +- + private String aliasName(Attribute attr) { + return aliases.getOrDefault(attr, attr).name(); + } +@@ -436,7 +430,7 @@ public class QueryContainer { + } + + public QueryContainer updateGroup(GroupByKey group) { +- return with(aggs.updateGroup(group)); ++ return with(group.updateGroup(aggs)); + } + + // +@@ -479,4 +473,4 @@ public class QueryContainer { + throw new RuntimeException("error rendering", e); + } + } +-} +\ No newline at end of file ++} +diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/Sort.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/Sort.java +old mode 100644 +new mode 100755 +index 33a9865b64f..8029ea0c7e1 +--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/Sort.java ++++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/Sort.java +@@ -11,6 +11,12 @@ import org.elasticsearch.xpack.sql.expression.Order.OrderDirection; + + public abstract class Sort { + ++ public QueryContainer addSort(QueryContainer queryContainer) { ++ Set sort = new LinkedHashSet<>(queryContainer.sort()); ++ sort.add(this); ++ return new QueryContainer(queryContainer.query(), queryContainer.aggs(), queryContainer.fields(), queryContainer.aliases(), queryContainer.pseudoFunctions(), queryContainer.scalarFunctions(), sort, queryContainer.limit(), queryContainer.shouldTrackHits(), queryContainer.shouldIncludeFrozen()); ++ } ++ + public enum Direction { + ASC, DESC; + +diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java +old mode 100644 +new mode 100755 +index fce24758a3b..4a2a4d7f71b +--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java ++++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java +@@ -85,8 +85,7 @@ public class SourceGeneratorTests extends ESTestCase { + } + + public void testSortScoreSpecified() { +- 
QueryContainer container = new QueryContainer() +- .addSort(new ScoreSort(Direction.DESC, null)); ++ QueryContainer container = new ScoreSort(Direction.DESC, null).addSort(new QueryContainer()); + SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); + assertEquals(singletonList(scoreSort()), sourceBuilder.sorts()); + } +@@ -94,15 +93,13 @@ public class SourceGeneratorTests extends ESTestCase { + public void testSortFieldSpecified() { + FieldSortBuilder sortField = fieldSort("test").unmappedType("keyword"); + +- QueryContainer container = new QueryContainer() +- .addSort(new AttributeSort(new FieldAttribute(Source.EMPTY, "test", new KeywordEsField("test")), Direction.ASC, +- Missing.LAST)); ++ QueryContainer container = new AttributeSort(new FieldAttribute(Source.EMPTY, "test", new KeywordEsField("test")), Direction.ASC, ++ Missing.LAST).addSort(new QueryContainer()); + SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); + assertEquals(singletonList(sortField.order(SortOrder.ASC).missing("_last")), sourceBuilder.sorts()); + +- container = new QueryContainer() +- .addSort(new AttributeSort(new FieldAttribute(Source.EMPTY, "test", new KeywordEsField("test")), Direction.DESC, +- Missing.FIRST)); ++ container = new AttributeSort(new FieldAttribute(Source.EMPTY, "test", new KeywordEsField("test")), Direction.DESC, ++ Missing.FIRST).addSort(new QueryContainer()); + sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); + assertEquals(singletonList(sortField.order(SortOrder.DESC).missing("_first")), sourceBuilder.sorts()); + } +@@ -126,4 +123,4 @@ public class SourceGeneratorTests extends ESTestCase { + SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); + assertNull(sourceBuilder.sorts()); + } +-} +\ No newline at end of file ++} +diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/webhook/ExecutableWebhookAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/webhook/ExecutableWebhookAction.java +old mode 100644 +new mode 100755 +index ec1f5774b13..6e9526d1fba +--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/webhook/ExecutableWebhookAction.java ++++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/webhook/ExecutableWebhookAction.java +@@ -33,7 +33,7 @@ public class ExecutableWebhookAction extends ExecutableAction { + public Action.Result execute(String actionId, WatchExecutionContext ctx, Payload payload) throws Exception { + Map model = Variables.createCtxParamsMap(ctx, payload); + +- HttpRequest request = action.requestTemplate.render(templateEngine, model); ++ HttpRequest request = templateEngine.renderOther(model, action.requestTemplate); + + if (ctx.simulateAction(actionId)) { + return new WebhookAction.Result.Simulated(request); +diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequestTemplate.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequestTemplate.java +old mode 100644 +new mode 100755 +index 55e5d07a71c..8efdc316f82 +--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequestTemplate.java ++++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequestTemplate.java +@@ -9,7 +9,6 @@ 
import io.netty.handler.codec.http.HttpHeaders; + import org.elasticsearch.ElasticsearchParseException; + import org.elasticsearch.common.Nullable; + import org.elasticsearch.common.Strings; +-import org.elasticsearch.common.collect.MapBuilder; + import org.elasticsearch.common.unit.TimeValue; + import org.elasticsearch.common.xcontent.ToXContent; + import org.elasticsearch.common.xcontent.ToXContentObject; +@@ -19,7 +18,6 @@ import org.elasticsearch.rest.RestUtils; + import org.elasticsearch.script.ScriptType; + import org.elasticsearch.xpack.core.watcher.support.WatcherDateTimeUtils; + import org.elasticsearch.xpack.watcher.common.text.TextTemplate; +-import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; + + import java.io.IOException; + import java.net.URI; +@@ -111,51 +109,6 @@ public class HttpRequestTemplate implements ToXContentObject { + return proxy; + } + +- public HttpRequest render(TextTemplateEngine engine, Map model) { +- HttpRequest.Builder request = HttpRequest.builder(host, port); +- request.method(method); +- request.scheme(scheme); +- if (path != null) { +- request.path(engine.render(path, model)); +- } +- if (params != null && !params.isEmpty()) { +- MapBuilder mapBuilder = MapBuilder.newMapBuilder(); +- for (Map.Entry entry : params.entrySet()) { +- mapBuilder.put(entry.getKey(), engine.render(entry.getValue(), model)); +- } +- request.setParams(mapBuilder.map()); +- } +- if ((headers == null || headers.isEmpty()) && body != null && body.getContentType() != null) { +- request.setHeaders(singletonMap(HttpHeaders.Names.CONTENT_TYPE, body.getContentType().mediaType())); +- } else if (headers != null && !headers.isEmpty()) { +- MapBuilder mapBuilder = MapBuilder.newMapBuilder(); +- if (body != null && body.getContentType() != null) { +- // putting the content type first, so it can be overridden by custom headers +- mapBuilder.put(HttpHeaders.Names.CONTENT_TYPE, body.getContentType().mediaType()); +- } +- for (Map.Entry entry : headers.entrySet()) { +- mapBuilder.put(entry.getKey(), engine.render(entry.getValue(), model)); +- } +- request.setHeaders(mapBuilder.map()); +- } +- if (auth != null) { +- request.auth(auth); +- } +- if (body != null) { +- request.body(engine.render(body, model)); +- } +- if (connectionTimeout != null) { +- request.connectionTimeout(connectionTimeout); +- } +- if (readTimeout != null) { +- request.readTimeout(readTimeout); +- } +- if (proxy != null) { +- request.proxy(proxy); +- } +- return request.build(); +- } +- + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); +diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java +old mode 100644 +new mode 100755 +index 2f2d3d7b9f3..fbf8ff3d8cf +--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java ++++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java +@@ -5,12 +5,15 @@ + */ + package org.elasticsearch.xpack.watcher.common.text; + ++import org.elasticsearch.common.collect.MapBuilder; + import org.elasticsearch.common.xcontent.XContentType; + import org.elasticsearch.script.Script; + import org.elasticsearch.script.ScriptService; + import org.elasticsearch.script.ScriptType; + import org.elasticsearch.script.TemplateScript; 
+ import org.elasticsearch.xpack.watcher.Watcher; ++import org.elasticsearch.xpack.watcher.common.http.HttpRequest; ++import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; + + import java.util.HashMap; + import java.util.Map; +@@ -95,4 +98,49 @@ public class TextTemplateEngine { + return "text/plain"; + } + } ++ ++ public HttpRequest renderOther(Map model, HttpRequestTemplate httpRequestTemplate) { ++ HttpRequest.Builder request = HttpRequest.builder(httpRequestTemplate.host(), httpRequestTemplate.port()); ++ request.method(httpRequestTemplate.method()); ++ request.scheme(httpRequestTemplate.scheme()); ++ if (httpRequestTemplate.path() != null) { ++ request.path(render(httpRequestTemplate.path(), model)); ++ } ++ if (httpRequestTemplate.params() != null && !httpRequestTemplate.params().isEmpty()) { ++ MapBuilder mapBuilder = MapBuilder.newMapBuilder(); ++ for (Map.Entry entry : httpRequestTemplate.params().entrySet()) { ++ mapBuilder.put(entry.getKey(), render(entry.getValue(), model)); ++ } ++ request.setParams(mapBuilder.map()); ++ } ++ if ((httpRequestTemplate.headers() == null || httpRequestTemplate.headers().isEmpty()) && httpRequestTemplate.body() != null && httpRequestTemplate.body().getContentType() != null) { ++ request.setHeaders(singletonMap(HttpHeaders.Names.CONTENT_TYPE, httpRequestTemplate.body().getContentType().mediaType())); ++ } else if (httpRequestTemplate.headers() != null && !httpRequestTemplate.headers().isEmpty()) { ++ MapBuilder mapBuilder = MapBuilder.newMapBuilder(); ++ if (httpRequestTemplate.body() != null && httpRequestTemplate.body().getContentType() != null) { ++ // putting the content type first, so it can be overridden by custom headers ++ mapBuilder.put(HttpHeaders.Names.CONTENT_TYPE, httpRequestTemplate.body().getContentType().mediaType()); ++ } ++ for (Map.Entry entry : httpRequestTemplate.headers().entrySet()) { ++ mapBuilder.put(entry.getKey(), render(entry.getValue(), model)); ++ } ++ request.setHeaders(mapBuilder.map()); ++ } ++ if (httpRequestTemplate.auth() != null) { ++ request.auth(httpRequestTemplate.auth()); ++ } ++ if (httpRequestTemplate.body() != null) { ++ request.body(render(httpRequestTemplate.body(), model)); ++ } ++ if (httpRequestTemplate.connectionTimeout() != null) { ++ request.connectionTimeout(httpRequestTemplate.connectionTimeout()); ++ } ++ if (httpRequestTemplate.readTimeout() != null) { ++ request.readTimeout(httpRequestTemplate.readTimeout()); ++ } ++ if (httpRequestTemplate.proxy() != null) { ++ request.proxy(httpRequestTemplate.proxy()); ++ } ++ return request.build(); ++ } + } +diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java +old mode 100644 +new mode 100755 +index b1ba8c1522a..ba31b0d5af2 +--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java ++++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java +@@ -315,7 +315,7 @@ public class ExecutionService { + } + } + } catch (Exception e) { +- record = createWatchRecord(record, ctx, e); ++ record = record.createWatchRecord(ctx, e, this); + logWatchRecord(ctx, e); + } finally { + if (ctx.knownWatch()) { +@@ -372,19 +372,6 @@ public class ExecutionService { + } + } + +- private WatchRecord createWatchRecord(WatchRecord existingRecord, WatchExecutionContext ctx, Exception e) { +- // it is possible 
that the watch store update failed, the execution phase is finished +- if (ctx.executionPhase().sealed()) { +- if (existingRecord == null) { +- return new WatchRecord.ExceptionWatchRecord(ctx, e); +- } else { +- return new WatchRecord.ExceptionWatchRecord(existingRecord, e); +- } +- } else { +- return ctx.abortFailedExecution(e); +- } +- } +- + private void logWatchRecord(WatchExecutionContext ctx, Exception e) { + // failed watches stack traces are only logged in debug, otherwise they should be checked out in the history + if (logger.isDebugEnabled()) { +diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/ExecutableHttpInput.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/ExecutableHttpInput.java +old mode 100644 +new mode 100755 +index 79d3918f7a2..4a88eb1dcfb +--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/ExecutableHttpInput.java ++++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/http/ExecutableHttpInput.java +@@ -47,7 +47,7 @@ public class ExecutableHttpInput extends ExecutableInput model = Variables.createCtxParamsMap(ctx, payload); +- request = input.getRequest().render(templateEngine, model); ++ request = templateEngine.renderOther(model, input.getRequest()); + return doExecute(ctx, request); + } catch (Exception e) { + logger.error("failed to execute [{}] input for watch [{}], reason [{}]", TYPE, ctx.watch().id(), e.getMessage()); +diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java +old mode 100644 +new mode 100755 +index de7161dcdd1..08161f9be0f +--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java ++++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java +@@ -142,18 +142,7 @@ public class EmailService extends NotificationService { + throw new IllegalArgumentException("failed to send email with subject [" + email.subject() + "] via account [" + accountName + + "]. 
account does not exist"); + } +- return send(email, auth, profile, account); +- } +- +- private EmailSent send(Email email, Authentication auth, Profile profile, Account account) throws MessagingException { +- assert account != null; +- try { +- email = account.send(email, auth, profile); +- } catch (MessagingException me) { +- throw new MessagingException("failed to send email with subject [" + email.subject() + "] via account [" + account.name() + +- "]", me); +- } +- return new EmailSent(account.name(), email); ++ return profile.send(email, auth, account, this); + } + + public static class EmailSent { +diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Profile.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Profile.java +old mode 100644 +new mode 100755 +index 9d4a70dc034..c0f739916a4 +--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Profile.java ++++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Profile.java +@@ -209,4 +209,15 @@ public enum Profile { + } + return part; + } ++ ++ public EmailService.EmailSent send(Email email, Authentication auth, Account account, EmailService emailService) throws MessagingException { ++ assert account != null; ++ try { ++ email = account.send(email, auth, this); ++ } catch (MessagingException me) { ++ throw new MessagingException("failed to send email with subject [" + email.subject() + "] via account [" + account.name() + ++ "]", me); ++ } ++ return new EmailService.EmailSent(account.name(), email); ++ } + } +diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/HttpEmailAttachementParser.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/HttpEmailAttachementParser.java +old mode 100644 +new mode 100755 +index 076c57c832f..025904b3c52 +--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/HttpEmailAttachementParser.java ++++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/HttpEmailAttachementParser.java +@@ -80,7 +80,7 @@ public class HttpEmailAttachementParser implements EmailAttachmentParser model = Variables.createCtxParamsMap(context, payload); +- HttpRequest httpRequest = attachment.getRequestTemplate().render(templateEngine, model); ++ HttpRequest httpRequest = templateEngine.renderOther(model, attachment.getRequestTemplate()); + + HttpResponse response = httpClient.execute(httpRequest); + // check for status 200, only then append attachment +diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachment.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachment.java +old mode 100644 +new mode 100755 +index f5d330f5323..84ffb2f830b +--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachment.java ++++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachment.java +@@ -5,10 +5,12 @@ + */ + package org.elasticsearch.xpack.watcher.notification.email.attachment; + ++import org.elasticsearch.ElasticsearchException; + import org.elasticsearch.common.Nullable; + import org.elasticsearch.common.ParseField; + 
import org.elasticsearch.common.unit.TimeValue; + import org.elasticsearch.common.xcontent.XContentBuilder; ++import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext; + import org.elasticsearch.xpack.watcher.common.http.HttpProxy; + import org.elasticsearch.xpack.watcher.common.http.BasicAuth; + +@@ -127,4 +129,14 @@ public class ReportingAttachment implements EmailAttachmentParser.EmailAttachmen + public int hashCode() { + return Objects.hash(id, url, interval, inline, retries, auth, proxy); + } ++ ++ public void sleep(long sleepMillis, WatchExecutionContext context, ReportingAttachmentParser reportingAttachmentParser) { ++ try { ++ Thread.sleep(sleepMillis); ++ } catch (InterruptedException e) { ++ Thread.currentThread().interrupt(); ++ throw new ElasticsearchException("Watch[{}] reporting[{}] thread was interrupted, while waiting for polling. Aborting.", ++ context.watch().id(), id()); ++ } ++ } + } +diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java +old mode 100644 +new mode 100755 +index d0d9ecc7810..e7444d2fdcd +--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java ++++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java +@@ -103,7 +103,7 @@ public class ReportingAttachmentParser implements EmailAttachmentParser { + ++ public ComponentSpecification nestInNamespaceOther(ComponentSpecification componentSpecification) { ++ ComponentId newNameSpace = ++ (componentSpecification.getNamespace() == null) ? ++ this : ++ componentSpecification.getNamespace().nestInNamespace(this); ++ return new ComponentSpecification(componentSpecification.getName(), componentSpecification.getVersionSpecification(), newNameSpace); ++ } ++ + private final class VersionHandler implements Spec.VersionHandler { + + @Override +diff --git a/component/src/main/java/com/yahoo/component/ComponentSpecification.java b/component/src/main/java/com/yahoo/component/ComponentSpecification.java +old mode 100644 +new mode 100755 +index a27f66dea2..e392377429 +--- a/component/src/main/java/com/yahoo/component/ComponentSpecification.java ++++ b/component/src/main/java/com/yahoo/component/ComponentSpecification.java +@@ -65,14 +65,6 @@ public final class ComponentSpecification { + this(splitter.name, VersionSpecification.fromString(splitter.version), splitter.namespace); + } + +- public ComponentSpecification nestInNamespace(ComponentId namespace) { +- ComponentId newNameSpace = +- (getNamespace() == null) ? 
+- namespace : +- getNamespace().nestInNamespace(namespace); +- return new ComponentSpecification(getName(), getVersionSpecification(), newNameSpace); +- } +- + /** The namespace is null if this is to match a top level component id **/ + public ComponentId getNamespace() { return spec.namespace; } + +diff --git a/config-model/src/main/java/com/yahoo/vespa/model/container/http/JettyHttpServer.java b/config-model/src/main/java/com/yahoo/vespa/model/container/http/JettyHttpServer.java +old mode 100644 +new mode 100755 +index ab49136751..c1135bfa8c +--- a/config-model/src/main/java/com/yahoo/vespa/model/container/http/JettyHttpServer.java ++++ b/config-model/src/main/java/com/yahoo/vespa/model/container/http/JettyHttpServer.java +@@ -58,7 +58,7 @@ public class JettyHttpServer extends SimpleComponent implements ServerConfig.Pro + final ComponentSpecification classNameSpec = new ComponentSpecification( + className); + return new ComponentModel(new BundleInstantiationSpecification( +- classNameSpec.nestInNamespace(parentId), ++ parentId.nestInNamespaceOther(classNameSpec), + classNameSpec, + null)); + } +diff --git a/configgen/src/main/java/com/yahoo/config/codegen/DefaultValue.java b/configgen/src/main/java/com/yahoo/config/codegen/DefaultValue.java +old mode 100644 +new mode 100755 +index 07632788d8..0f02ab8c79 +--- a/configgen/src/main/java/com/yahoo/config/codegen/DefaultValue.java ++++ b/configgen/src/main/java/com/yahoo/config/codegen/DefaultValue.java +@@ -64,4 +64,13 @@ public class DefaultValue { + } + } + ++ public final void setValue(LeafCNode leafCNode) throws IllegalArgumentException { ++ try { ++ leafCNode.checkDefaultValue(this); ++ leafCNode.setDefaultValue(this); ++ } catch (IllegalArgumentException e) { ++ throw new IllegalArgumentException ++ ("Invalid default value", e); ++ } ++ } + } +diff --git a/configgen/src/main/java/com/yahoo/config/codegen/LeafCNode.java b/configgen/src/main/java/com/yahoo/config/codegen/LeafCNode.java +old mode 100644 +new mode 100755 +index e8dd4221f0..80b8353f86 +--- a/configgen/src/main/java/com/yahoo/config/codegen/LeafCNode.java ++++ b/configgen/src/main/java/com/yahoo/config/codegen/LeafCNode.java +@@ -85,7 +85,7 @@ public abstract class LeafCNode extends CNode { + if (!type.name.equalsIgnoreCase(getType())) { + throw new IllegalArgumentException("Type " + type.name + " does not match " + getType()); + } +- setValue(defLine.getDefault()); ++ defLine.getDefault().setValue(this); + setComment(comment); + restart |= defLine.getRestart(); + } +@@ -95,16 +95,6 @@ public abstract class LeafCNode extends CNode { + return restart; + } + +- public final void setValue(DefaultValue defaultValue) throws IllegalArgumentException { +- try { +- checkDefaultValue(defaultValue); +- setDefaultValue(defaultValue); +- } catch (IllegalArgumentException e) { +- throw new IllegalArgumentException +- ("Invalid default value", e); +- } +- } +- + /** + * Superclass for leaf nodes that should not generate class. 
+ */ +diff --git a/container-search/src/main/java/com/yahoo/search/grouping/vespa/RequestBuilder.java b/container-search/src/main/java/com/yahoo/search/grouping/vespa/RequestBuilder.java +old mode 100644 +new mode 100755 +index 46b1fecd7d..88bd94b8c9 +--- a/container-search/src/main/java/com/yahoo/search/grouping/vespa/RequestBuilder.java ++++ b/container-search/src/main/java/com/yahoo/search/grouping/vespa/RequestBuilder.java +@@ -271,7 +271,7 @@ class RequestBuilder { + } + Group group = getLeafGroup(frame); + for (GroupingExpression exp : lst) { +- group.addAggregationResult(toAggregationResult(exp, group, frame)); ++ toAggregationResult(exp, group, frame).addAggregationResult(group); + } + } + +diff --git a/container-search/src/test/java/com/yahoo/search/grouping/vespa/GroupingExecutorTestCase.java b/container-search/src/test/java/com/yahoo/search/grouping/vespa/GroupingExecutorTestCase.java +old mode 100644 +new mode 100755 +index 11415b46b8..c602999d83 +--- a/container-search/src/test/java/com/yahoo/search/grouping/vespa/GroupingExecutorTestCase.java ++++ b/container-search/src/test/java/com/yahoo/search/grouping/vespa/GroupingExecutorTestCase.java +@@ -178,13 +178,13 @@ public class GroupingExecutorTestCase { + + Grouping grpA = new Grouping(0); + grpA.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("uniqueA")).addAggregationResult(new MaxAggregationResult().setMax(new IntegerResultNode(6)).setTag(4))) +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("common")).addAggregationResult(new MaxAggregationResult().setMax(new IntegerResultNode(9)).setTag(4))) ++ .addChild(new MaxAggregationResult().setMax(new IntegerResultNode(6)).setTag(4).addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("uniqueA")))) ++ .addChild(new MaxAggregationResult().setMax(new IntegerResultNode(9)).setTag(4).addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("common")))) + ); + Grouping grpB = new Grouping(0); + grpB.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("uniqueB")).addAggregationResult(new MaxAggregationResult().setMax(new IntegerResultNode(9)).setTag(4))) +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("common")).addAggregationResult(new MinAggregationResult().setMin(new IntegerResultNode(6)).setTag(3))) ++ .addChild(new MaxAggregationResult().setMax(new IntegerResultNode(9)).setTag(4).addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("uniqueB")))) ++ .addChild(new MinAggregationResult().setMin(new IntegerResultNode(6)).setTag(3).addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("common")))) + ); + Execution exec = newExecution(new GroupingExecutor(), + new ResultProvider(Arrays.asList( +@@ -213,11 +213,11 @@ public class GroupingExecutorTestCase { + + Grouping grpExpected = new Grouping(0); + grpExpected.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("expected")).addAggregationResult(new MaxAggregationResult().setMax(new IntegerResultNode(69)).setTag(3))) ++ .addChild(new MaxAggregationResult().setMax(new IntegerResultNode(69)).setTag(3).addAggregationResult(new 
com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("expected")))) + ); + Grouping grpUnexpected = new Grouping(1); + grpUnexpected.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("unexpected")).addAggregationResult(new MaxAggregationResult().setMax(new IntegerResultNode(96)).setTag(3))) ++ .addChild(new MaxAggregationResult().setMax(new IntegerResultNode(96)).setTag(3).addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("unexpected")))) + ); + Execution exec = newExecution(new GroupingExecutor(), + new ResultProvider(Arrays.asList( +@@ -242,13 +242,11 @@ public class GroupingExecutorTestCase { + + Grouping grp0 = new Grouping(0); + grp0.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo")) +- .addAggregationResult(new HitsAggregationResult(1, "bar")) ++ .addChild(new HitsAggregationResult(1, "bar").addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo"))) + )); + Grouping grp1 = new Grouping(0); + grp1.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo")) +- .addAggregationResult(new HitsAggregationResult(1, "bar").addHit(new com.yahoo.searchlib.aggregation.FS4Hit())) ++ .addChild(new com.yahoo.searchlib.aggregation.FS4Hit().addHit(new HitsAggregationResult(1, "bar")).addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo"))) + )); + Execution exec = newExecution(new GroupingExecutor(), + new ResultProvider(Arrays.asList( +@@ -282,14 +280,11 @@ public class GroupingExecutorTestCase { + + Grouping grp0 = new Grouping(0); + grp0.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo")) +- .addAggregationResult(new HitsAggregationResult(1, "bar")))); ++ .addChild(new HitsAggregationResult(1, "bar").addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo"))))); + Grouping grp1 = new Grouping(0); + grp1.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo")) +- .addAggregationResult( +- new HitsAggregationResult(1, "bar") +- .addHit(new com.yahoo.searchlib.aggregation.FS4Hit())))); ++ .addChild(new com.yahoo.searchlib.aggregation.FS4Hit().addHit(new HitsAggregationResult(1, "bar")).addAggregationResult( ++ new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo"))))); + Execution exec = newExecution(new GroupingExecutor(), + new ResultProvider(Arrays.asList( + new GroupingListHit(Arrays.asList(grp0), null), +@@ -308,13 +303,11 @@ public class GroupingExecutorTestCase { + + Grouping grp = new Grouping(0); + grp.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group() +- .setId(new StringResultNode("foo")) +- .addAggregationResult(new CountAggregationResult(1)) ++ .addChild(new CountAggregationResult(1).addAggregationResult(new com.yahoo.searchlib.aggregation.Group() ++ .setId(new StringResultNode("foo"))) + .addOrderBy(new AggregationRefNode(0), true)) +- .addChild(new com.yahoo.searchlib.aggregation.Group() +- .setId(new StringResultNode("bar")) +- .addAggregationResult(new CountAggregationResult(2)) ++ 
.addChild(new CountAggregationResult(2).addAggregationResult(new com.yahoo.searchlib.aggregation.Group() ++ .setId(new StringResultNode("bar"))) + .addOrderBy(new AggregationRefNode(0), true))); + Result res = newExecution(new GroupingExecutor(), + new ResultProvider(Arrays.asList( +@@ -334,13 +327,11 @@ public class GroupingExecutorTestCase { + + Grouping grp0 = new Grouping(0); + grp0.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo")) +- .addAggregationResult(new HitsAggregationResult(1, "bar")) ++ .addChild(new HitsAggregationResult(1, "bar").addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo"))) + )); + Grouping grp1 = new Grouping(0); + grp1.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo")) +- .addAggregationResult(new HitsAggregationResult(1, "bar").addHit(new com.yahoo.searchlib.aggregation.FS4Hit())) ++ .addChild(new com.yahoo.searchlib.aggregation.FS4Hit().addHit(new HitsAggregationResult(1, "bar")).addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo"))) + )); + + ErrorProvider err = new ErrorProvider(1); +@@ -374,25 +365,21 @@ public class GroupingExecutorTestCase { + " each(output(summary(baz))) as(baz)))")); + Grouping pass0A = new Grouping(0); + pass0A.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo")) +- .addAggregationResult(new HitsAggregationResult(1, "bar")) ++ .addChild(new HitsAggregationResult(1, "bar").addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo"))) + )); + Grouping pass0B = new Grouping(1); + pass0B.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo")) +- .addAggregationResult(new HitsAggregationResult(1, "baz")) ++ .addChild(new HitsAggregationResult(1, "baz").addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo"))) + )); + GlobalId gid1 = new GlobalId((new DocumentId("doc:test:1")).getGlobalId()); + GlobalId gid2 = new GlobalId((new DocumentId("doc:test:2")).getGlobalId()); + Grouping pass1A = new Grouping(0); + pass1A.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo")) +- .addAggregationResult(new HitsAggregationResult(1, "bar").addHit(new com.yahoo.searchlib.aggregation.FS4Hit(1, gid1, 3))) ++ .addChild(new com.yahoo.searchlib.aggregation.FS4Hit(1, gid1, 3).addHit(new HitsAggregationResult(1, "bar")).addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo"))) + )); + Grouping pass1B = new Grouping(1); + pass1B.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo")) +- .addAggregationResult(new HitsAggregationResult(1, "baz").addHit(new com.yahoo.searchlib.aggregation.FS4Hit(4, gid2, 6))) ++ .addChild(new com.yahoo.searchlib.aggregation.FS4Hit(4, gid2, 6).addHit(new HitsAggregationResult(1, "baz")).addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo"))) + )); + SummaryMapper sm = new SummaryMapper(); + Execution exec = newExecution(new GroupingExecutor(), 
+@@ -428,17 +415,14 @@ public class GroupingExecutorTestCase { + + Grouping pass0 = new Grouping(0); + pass0.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group() +- .setId(new StringResultNode("foo")) +- .addAggregationResult( +- new HitsAggregationResult(1, ExpressionConverter.DEFAULT_SUMMARY_NAME)))); ++ .addChild(new HitsAggregationResult(1, ExpressionConverter.DEFAULT_SUMMARY_NAME).addAggregationResult( ++ new com.yahoo.searchlib.aggregation.Group() ++ .setId(new StringResultNode("foo"))))); + Grouping pass1 = new Grouping(0); + pass1.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group() +- .setId(new StringResultNode("foo")) +- .addAggregationResult( +- new HitsAggregationResult(1, ExpressionConverter.DEFAULT_SUMMARY_NAME) +- .addHit(new com.yahoo.searchlib.aggregation.FS4Hit())))); ++ .addChild(new com.yahoo.searchlib.aggregation.FS4Hit().addHit(new HitsAggregationResult(1, ExpressionConverter.DEFAULT_SUMMARY_NAME)).addAggregationResult( ++ new com.yahoo.searchlib.aggregation.Group() ++ .setId(new StringResultNode("foo"))))); + Execution exec = newExecution(new GroupingExecutor(), + new ResultProvider(Arrays.asList( + new GroupingListHit(Arrays.asList(pass0), null), +@@ -459,16 +443,14 @@ public class GroupingExecutorTestCase { + + Grouping grp = new Grouping(0); + grp.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo")) +- .addAggregationResult(new HitsAggregationResult(1, "bar")) ++ .addChild(new HitsAggregationResult(1, "bar").addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo"))) + )); + GroupingListHit pass0 = new GroupingListHit(Arrays.asList(grp), null); + + GlobalId gid = new GlobalId((new DocumentId("doc:test:1")).getGlobalId()); + grp = new Grouping(0); + grp.setRoot(new com.yahoo.searchlib.aggregation.Group() +- .addChild(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo")) +- .addAggregationResult(new HitsAggregationResult(1, "bar").addHit(new com.yahoo.searchlib.aggregation.FS4Hit(4, gid, 6))) ++ .addChild(new com.yahoo.searchlib.aggregation.FS4Hit(4, gid, 6).addHit(new HitsAggregationResult(1, "bar")).addAggregationResult(new com.yahoo.searchlib.aggregation.Group().setId(new StringResultNode("foo"))) + )); + GroupingListHit pass1 = new GroupingListHit(Arrays.asList(grp), null); + Query queryB = newQuery(); /** required by {@link GroupingListHit#getSearchQuery()} */ +diff --git a/container-search/src/test/java/com/yahoo/search/grouping/vespa/ResultBuilderTestCase.java b/container-search/src/test/java/com/yahoo/search/grouping/vespa/ResultBuilderTestCase.java +old mode 100644 +new mode 100755 +index e27003984d..6f80447e68 +--- a/container-search/src/test/java/com/yahoo/search/grouping/vespa/ResultBuilderTestCase.java ++++ b/container-search/src/test/java/com/yahoo/search/grouping/vespa/ResultBuilderTestCase.java +@@ -86,36 +86,30 @@ public class ResultBuilderTestCase { + Grouping grouping = new Grouping() + .setRoot(new Group() + .setTag(1) +- .addChild(new Group() ++ .addChild(new CountAggregationResult(10).setTag(3).addAggregationResult(new Group() + .setTag(2) +- .setId(new StringResultNode("foo")) +- .addAggregationResult(new CountAggregationResult(10).setTag(3)) +- .addChild(new Group() +- .setTag(4) +- .setId(new StringResultNode("foo_a")) +- .addAggregationResult(new CountAggregationResult(15) +- 
.setTag(5))) +- .addChild(new Group() +- .setTag(4) +- .setId(new StringResultNode("foo_b")) +- .addAggregationResult(new CountAggregationResult(16) +- .setTag(5)))) +- .addChild(new Group() ++ .setId(new StringResultNode("foo"))) ++ .addChild(new CountAggregationResult(15) ++ .setTag(5).addAggregationResult(new Group() ++ .setTag(4) ++ .setId(new StringResultNode("foo_a")))) ++ .addChild(new CountAggregationResult(16) ++ .setTag(5).addAggregationResult(new Group() ++ .setTag(4) ++ .setId(new StringResultNode("foo_b"))))) ++ .addChild(new CountAggregationResult(20).setTag(3).addAggregationResult(new Group() + .setTag(2) +- .setId(new StringResultNode("bar")) +- .addAggregationResult(new CountAggregationResult(20).setTag(3)) +- .addChild(new Group() +- .setTag(4) +- .setId(new StringResultNode("bar_a")) +- .addAggregationResult( +- new CountAggregationResult(25) +- .setTag(5))) +- .addChild(new Group() +- .setTag(4) +- .setId(new StringResultNode("bar_b")) +- .addAggregationResult( +- new CountAggregationResult(26) +- .setTag(5))))); ++ .setId(new StringResultNode("bar"))) ++ .addChild(new CountAggregationResult(25) ++ .setTag(5).addAggregationResult( ++ new Group() ++ .setTag(4) ++ .setId(new StringResultNode("bar_a")))) ++ .addChild(new CountAggregationResult(26) ++ .setTag(5).addAggregationResult( ++ new Group() ++ .setTag(4) ++ .setId(new StringResultNode("bar_b")))))); + assertLayout("all(group(artist) max(5) each(output(count() as(baz)) all(group(album) " + + "max(5) each(output(count() as(cox))) as(group_album))) as(group_artist))", + grouping, +@@ -232,10 +226,9 @@ public class ResultBuilderTestCase { + Group root2 = newGroup(1, new ExpressionCountAggregationResult(new SparseSketch(), sketch -> 42).setTag(2)); + Grouping grouping2 = new Grouping().setRoot(root2); + for (int i = 0; i < 3; ++i) { +- root2.addChild(new Group() ++ new CountAggregationResult(i).setTag(3).addAggregationResult(root2.addChild(new Group() + .setTag(2) +- .setId(new StringResultNode("foo" + i))) +- .addAggregationResult(new CountAggregationResult(i).setTag(3)); ++ .setId(new StringResultNode("foo" + i)))); + } + + // Should return the number of groups when max is not present. 
+@@ -782,7 +775,7 @@ public class ResultBuilderTestCase { + group.setId(new IntegerResultNode(id)); + } + for (AggregationResult result : results) { +- group.addAggregationResult(result); ++ result.addAggregationResult(group); + } + return group; + } +@@ -805,7 +798,7 @@ public class ResultBuilderTestCase { + res.setTag(hitsTag); + res.setSummaryClass("default"); + for (int i = 0; i < numHits; ++i) { +- res.addHit(new FS4Hit(i + 1, new GlobalId(IdString.createIdString("doc:scheme:")), 1)); ++ new FS4Hit(i + 1, new GlobalId(IdString.createIdString("doc:scheme:")), 1).addHit(res); + } + return res; + } +diff --git a/docproc/src/main/java/com/yahoo/docproc/Call.java b/docproc/src/main/java/com/yahoo/docproc/Call.java +old mode 100644 +new mode 100755 +index edde89cd01..bd1ea5392a +--- a/docproc/src/main/java/com/yahoo/docproc/Call.java ++++ b/docproc/src/main/java/com/yahoo/docproc/Call.java +@@ -4,7 +4,6 @@ package com.yahoo.docproc; + import com.yahoo.component.ComponentId; + import com.yahoo.docproc.jdisc.metric.NullMetric; + import com.yahoo.docproc.proxy.ProxyDocument; +-import com.yahoo.docproc.proxy.ProxyDocumentUpdate; + import com.yahoo.document.Document; + import com.yahoo.document.DocumentOperation; + import com.yahoo.document.DocumentPut; +@@ -106,16 +105,6 @@ public class Call implements Cloneable { + return documentPut; + } + +- /** +- * The DocumentUpdate object a processor should work on. The one in args, or schema mapped. +- * +- * @return a DocumentUpdate +- */ +- private DocumentUpdate configDocUpd(DocumentProcessor proc, DocumentUpdate docU) { +- if (proc.getFieldMap().isEmpty()) return docU; +- return new ProxyDocumentUpdate(docU, proc.getDocMap(docU.getDocumentType().getName())); +- } +- + private void schemaMapProcessing(Processing processing) { + final List documentOperations = processing.getDocumentOperations(); + for (int i = 0; i < documentOperations.size(); i++) { +@@ -123,7 +112,7 @@ public class Call implements Cloneable { + if (op instanceof DocumentPut) { + documentOperations.set(i, configDoc(processor, (DocumentPut) op)); + } else if (op instanceof DocumentUpdate) { +- documentOperations.set(i, configDocUpd(processor, (DocumentUpdate) op)); ++ documentOperations.set(i, processor.configDocUpd((DocumentUpdate) op, this)); + } + } + } +diff --git a/docproc/src/main/java/com/yahoo/docproc/DocprocExecutor.java b/docproc/src/main/java/com/yahoo/docproc/DocprocExecutor.java +old mode 100644 +new mode 100755 +index a1b640d5eb..ecb02364bc +--- a/docproc/src/main/java/com/yahoo/docproc/DocprocExecutor.java ++++ b/docproc/src/main/java/com/yahoo/docproc/DocprocExecutor.java +@@ -153,33 +153,4 @@ public class DocprocExecutor { + log.log(LogLevel.SPAM, message.toString()); + } + +- /** +- * Processes a given Processing through the CallStack of this executor. Note that if a DocumentProcessor +- * returns a LaterProgress for this processing, it will be re-processed (after waiting the specified delay given +- * by the LaterProgress), until done or failed. +- * +- * @param processing the Processing to process. The CallStack of the Processing will be set to a clone of the CallStack of this executor, iff. it is currently null. +- * @return a Progress; this is never a LaterProgress. +- * @throws RuntimeException if a document processor throws an exception during processing, or this thread is interrupted while waiting. 
+- * @see com.yahoo.docproc.Processing +- * @see com.yahoo.docproc.DocumentProcessor.Progress +- * @see com.yahoo.docproc.DocumentProcessor.LaterProgress +- */ +- public DocumentProcessor.Progress processUntilDone(Processing processing) { +- DocumentProcessor.Progress progress; +- while (true) { +- progress = process(processing); +- if (!(progress instanceof DocumentProcessor.LaterProgress)) { +- break; +- } +- DocumentProcessor.LaterProgress later = (DocumentProcessor.LaterProgress) progress; +- try { +- Thread.sleep(later.getDelay()); +- } catch (InterruptedException e) { +- Thread.currentThread().interrupt(); +- throw new RuntimeException(e); +- } +- } +- return progress; +- } + } +diff --git a/docproc/src/main/java/com/yahoo/docproc/DocumentProcessor.java b/docproc/src/main/java/com/yahoo/docproc/DocumentProcessor.java +old mode 100644 +new mode 100755 +index d6b456056b..8afc3fc96b +--- a/docproc/src/main/java/com/yahoo/docproc/DocumentProcessor.java ++++ b/docproc/src/main/java/com/yahoo/docproc/DocumentProcessor.java +@@ -3,6 +3,8 @@ package com.yahoo.docproc; + + import com.yahoo.collections.Pair; + import com.yahoo.component.chain.ChainedComponent; ++import com.yahoo.docproc.proxy.ProxyDocumentUpdate; ++import com.yahoo.document.DocumentUpdate; + + import java.util.HashMap; + import java.util.Map; +@@ -74,6 +76,18 @@ public abstract class DocumentProcessor extends ChainedComponent { + return "processor " + getId().stringValue(); + } + ++ /** ++ * The DocumentUpdate object a processor should work on. The one in args, or schema mapped. ++ * ++ * @return a DocumentUpdate ++ * @param docU ++ * @param call ++ */ ++ public DocumentUpdate configDocUpd(DocumentUpdate docU, Call call) { ++ if (getFieldMap().isEmpty()) return docU; ++ return new ProxyDocumentUpdate(docU, getDocMap(docU.getDocumentType().getName())); ++ } ++ + /** An enumeration of possible results of calling a process method */ + public static class Progress { + +diff --git a/docproc/src/main/java/com/yahoo/docproc/Processing.java b/docproc/src/main/java/com/yahoo/docproc/Processing.java +old mode 100644 +new mode 100755 +index e157ad0b09..00f42c6b79 +--- a/docproc/src/main/java/com/yahoo/docproc/Processing.java ++++ b/docproc/src/main/java/com/yahoo/docproc/Processing.java +@@ -271,4 +271,34 @@ public class Processing { + getNumDocsCalled = true; + return getDocumentOperations().size(); + } ++ ++ /** ++ * Processes a given Processing through the CallStack of this executor. Note that if a DocumentProcessor ++ * returns a LaterProgress for this processing, it will be re-processed (after waiting the specified delay given ++ * by the LaterProgress), until done or failed. ++ * ++ * ++ * @param docprocExecutor@return a Progress; this is never a LaterProgress. ++ * @throws RuntimeException if a document processor throws an exception during processing, or this thread is interrupted while waiting. 
++ * @see Processing ++ * @see DocumentProcessor.Progress ++ * @see DocumentProcessor.LaterProgress ++ */ ++ public DocumentProcessor.Progress processUntilDone(DocprocExecutor docprocExecutor) { ++ DocumentProcessor.Progress progress; ++ while (true) { ++ progress = docprocExecutor.process(this); ++ if (!(progress instanceof DocumentProcessor.LaterProgress)) { ++ break; ++ } ++ DocumentProcessor.LaterProgress later = (DocumentProcessor.LaterProgress) progress; ++ try { ++ Thread.sleep(later.getDelay()); ++ } catch (InterruptedException e) { ++ Thread.currentThread().interrupt(); ++ throw new RuntimeException(e); ++ } ++ } ++ return progress; ++ } + } +diff --git a/document/src/main/java/com/yahoo/document/DataType.java b/document/src/main/java/com/yahoo/document/DataType.java +old mode 100644 +new mode 100755 +index fa5dffd042..52a37134af +--- a/document/src/main/java/com/yahoo/document/DataType.java ++++ b/document/src/main/java/com/yahoo/document/DataType.java +@@ -15,6 +15,9 @@ import com.yahoo.document.datatypes.Raw; + import com.yahoo.document.datatypes.Float16FieldValue; + import com.yahoo.document.datatypes.StringFieldValue; + import com.yahoo.document.datatypes.UriFieldValue; ++import com.yahoo.document.serialization.DeserializationException; ++import com.yahoo.document.serialization.VespaDocumentDeserializer42; ++import com.yahoo.document.update.*; + import com.yahoo.tensor.TensorType; + import com.yahoo.vespa.objects.Identifiable; + import com.yahoo.vespa.objects.Ids; +@@ -316,4 +319,62 @@ public abstract class DataType extends Identifiable implements Serializable, Com + /** Returns whether this is a multivalue type, i.e either a CollectionDataType or a MapDataType */ + public boolean isMultivalue() { return false; } + ++ public ValueUpdate getValueUpdate(DataType subType, VespaDocumentDeserializer42 vespaDocumentDeserializer42) { ++ int vuTypeId = vespaDocumentDeserializer42.getInt(null); ++ ++ ValueUpdate.ValueUpdateClassID op = ValueUpdate.ValueUpdateClassID.getID(vuTypeId); ++ if (op == null) { ++ throw new IllegalArgumentException("Read type "+vuTypeId+" of bytebuffer, but this is not a legal value update type."); ++ } ++ ++ switch (op) { ++ case ADD: ++ { ++ FieldValue fval = subType.createFieldValue(); ++ fval.deserialize(vespaDocumentDeserializer42); ++ int weight = vespaDocumentDeserializer42.getInt(null); ++ return new AddValueUpdate(fval, weight); ++ } ++ case ARITHMETIC: ++ int opId = vespaDocumentDeserializer42.getInt(null); ++ ArithmeticValueUpdate.Operator operator = ArithmeticValueUpdate.Operator.getID(opId); ++ double operand = vespaDocumentDeserializer42.getDouble(null); ++ return new ArithmeticValueUpdate(operator, operand); ++ case ASSIGN: ++ { ++ byte contents = vespaDocumentDeserializer42.getByte(null); ++ FieldValue fval = null; ++ if (contents == (byte) 1) { ++ fval = createFieldValue(); ++ fval.deserialize(vespaDocumentDeserializer42); ++ } ++ return new AssignValueUpdate(fval); ++ } ++ case CLEAR: ++ return new ClearValueUpdate(); ++ case MAP: ++ if (this instanceof ArrayDataType) { ++ CollectionDataType type = (CollectionDataType) this; ++ IntegerFieldValue index = new IntegerFieldValue(); ++ index.deserialize(vespaDocumentDeserializer42); ++ ValueUpdate update = type.getNestedType().getValueUpdate(null, vespaDocumentDeserializer42); ++ return new MapValueUpdate(index, update); ++ } else if (this instanceof WeightedSetDataType) { ++ CollectionDataType type = (CollectionDataType) this; ++ FieldValue fval = type.getNestedType().createFieldValue(); ++ 
fval.deserialize(vespaDocumentDeserializer42); ++ ValueUpdate update = INT.getValueUpdate(null, vespaDocumentDeserializer42); ++ return new MapValueUpdate(fval, update); ++ } else { ++ throw new DeserializationException("MapValueUpdate only works for arrays and weighted sets"); ++ } ++ case REMOVE: ++ FieldValue fval = ((CollectionDataType) this).getNestedType().createFieldValue(); ++ fval.deserialize(vespaDocumentDeserializer42); ++ return new RemoveValueUpdate(fval); ++ default: ++ throw new DeserializationException( ++ "Could not deserialize ValueUpdate, unknown valueUpdateClassID type " + vuTypeId); ++ } ++ } + } +diff --git a/document/src/main/java/com/yahoo/document/Document.java b/document/src/main/java/com/yahoo/document/Document.java +old mode 100644 +new mode 100755 +index cf0951fb03..82eb0b32a9 +--- a/document/src/main/java/com/yahoo/document/Document.java ++++ b/document/src/main/java/com/yahoo/document/Document.java +@@ -402,4 +402,10 @@ public class Document extends StructuredFieldValue { + return comp; + } + ++ public void verifyType(DocumentUpdate documentUpdate) { ++ if (!documentUpdate.getType().equals(getDataType())) { ++ throw new IllegalArgumentException( ++ "Document " + getId() + " with type " + getDataType() + " must have same type as update, which is type " + documentUpdate.getType()); ++ } ++ } + } +diff --git a/document/src/main/java/com/yahoo/document/DocumentOperation.java b/document/src/main/java/com/yahoo/document/DocumentOperation.java +old mode 100644 +new mode 100755 +index 8209322c47..92db3b1ea2 +--- a/document/src/main/java/com/yahoo/document/DocumentOperation.java ++++ b/document/src/main/java/com/yahoo/document/DocumentOperation.java +@@ -1,6 +1,8 @@ + // Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. + package com.yahoo.document; + ++import com.yahoo.document.select.rule.DocumentNode; ++ + /** + * Base class for "document operations". + * These include "put" (DocumentPut), "update" (DocumentUpdate), "remove" (DocumentRemove) +@@ -35,4 +37,21 @@ public abstract class DocumentOperation { + this.condition = other.condition; + } + ++ public Object evaluate(DocumentNode documentNode) { ++ DocumentType doct; ++ if (this instanceof DocumentPut) { ++ doct = ((DocumentPut) this).getDocument().getDataType(); ++ } else if (this instanceof DocumentUpdate) { ++ doct = ((DocumentUpdate) this).getDocumentType(); ++ } else if (this instanceof DocumentRemove) { ++ DocumentRemove removeOp = (DocumentRemove) this; ++ return (removeOp.getId().getDocType().equals(documentNode.toString()) ? this : Boolean.FALSE); ++ } else if (this instanceof DocumentGet) { ++ DocumentGet getOp = (DocumentGet) this; ++ return (getOp.getId().getDocType().equals(documentNode.toString()) ? this : Boolean.FALSE); ++ } else { ++ throw new IllegalStateException("Document class '" + getClass().getName() + "' is not supported."); ++ } ++ return doct.isA(documentNode.toString()) ? 
this : Boolean.FALSE; ++ } + } +diff --git a/document/src/main/java/com/yahoo/document/DocumentType.java b/document/src/main/java/com/yahoo/document/DocumentType.java +index 1aa1fed765..df5f495a34 100755 +--- a/document/src/main/java/com/yahoo/document/DocumentType.java ++++ b/document/src/main/java/com/yahoo/document/DocumentType.java +@@ -499,4 +499,23 @@ public class DocumentType extends StructuredDataType { + visitor.visit("bodytype", bodyType); + visitor.visit("inherits", inherits); + } ++ ++ public void validateId(int newId, int version, Field field) { ++ if (newId >= 100 && newId <= 127) { ++ throw new IllegalArgumentException("Attempt to set the id of " + field + " to " + newId + ++ " failed, values from 100 to 127 are reserved for internal use"); ++ } ++ ++ if ((newId & 0x80000000) != 0) // Highest bit must not be set ++ { ++ throw new IllegalArgumentException("Attempt to set the id of " + field + " to " + newId + ++ " failed, negative id values are illegal"); ++ } ++ ++ Field existing = getField(newId, version); ++ if (existing != null && !existing.getName().equals(field.getName())) { ++ throw new IllegalArgumentException("Couldn't set id of " + field + " to " + newId + ", " + existing + ++ " already has this id in " + this); ++ } ++ } + } +diff --git a/document/src/main/java/com/yahoo/document/DocumentTypeManager.java b/document/src/main/java/com/yahoo/document/DocumentTypeManager.java +old mode 100644 +new mode 100755 +index a3ba27b640..a63b2ea978 +--- a/document/src/main/java/com/yahoo/document/DocumentTypeManager.java ++++ b/document/src/main/java/com/yahoo/document/DocumentTypeManager.java +@@ -8,6 +8,9 @@ import com.yahoo.document.annotation.AnnotationType; + import com.yahoo.document.annotation.AnnotationTypeRegistry; + import com.yahoo.document.annotation.AnnotationTypes; + import com.yahoo.document.config.DocumentmanagerConfig; ++import com.yahoo.document.fieldset.FieldCollection; ++import com.yahoo.document.fieldset.FieldSet; ++import com.yahoo.document.fieldset.FieldSetRepo; + import com.yahoo.document.serialization.DocumentDeserializer; + import com.yahoo.document.serialization.DocumentDeserializerFactory; + import com.yahoo.io.GrowableByteBuffer; +@@ -396,4 +399,25 @@ public class DocumentTypeManager { + public void shutdown() { + if (subscriber!=null) subscriber.close(); + } ++ ++ public FieldSet parseFieldCollection(String docType, String fieldNames, FieldSetRepo fieldSetRepo) { ++ DocumentType type = getDocumentType(docType); ++ if (type == null) { ++ throw new IllegalArgumentException("Unknown document type " + docType); ++ } ++ ++ StringTokenizer tokenizer = new StringTokenizer(fieldNames, ","); ++ FieldCollection collection = new FieldCollection(type); ++ ++ for (; tokenizer.hasMoreTokens(); ) { ++ String token = tokenizer.nextToken(); ++ Field f = type.getField(token); ++ if (f == null) { ++ throw new IllegalArgumentException("No such field " + token); ++ } ++ collection.add(f); ++ } ++ ++ return collection; ++ } + } +diff --git a/document/src/main/java/com/yahoo/document/DocumentUpdate.java b/document/src/main/java/com/yahoo/document/DocumentUpdate.java +old mode 100644 +new mode 100755 +index ef075662ee..0b70fc5ef8 +--- a/document/src/main/java/com/yahoo/document/DocumentUpdate.java ++++ b/document/src/main/java/com/yahoo/document/DocumentUpdate.java +@@ -98,12 +98,6 @@ public class DocumentUpdate extends DocumentOperation implements Iterable> iter = id2FieldUpdates.entrySet().iterator(); iter.hasNext();)
{ + Map.Entry entry = iter.next(); +diff --git a/document/src/main/java/com/yahoo/document/Field.java b/document/src/main/java/com/yahoo/document/Field.java +old mode 100644 +new mode 100755 +index d0a19a5007..1213609f76 +--- a/document/src/main/java/com/yahoo/document/Field.java ++++ b/document/src/main/java/com/yahoo/document/Field.java +@@ -40,7 +40,11 @@ public class Field extends FieldBase implements FieldSet, Comparable, Serializab + this.dataType = dataType; + this.isHeader = isHeader; + this.forcedId = true; +- validateId(id, null, Document.SERIALIZED_VERSION); ++ // No DocumentType owner is available here, so only the owner-independent id checks apply. ++ if ((id >= 100 && id <= 127) || (id & 0x80000000) != 0) { ++ throw new IllegalArgumentException("Attempt to set the id of " + this + " to " + id + ++ " failed: ids 100 to 127 are reserved and negative ids are illegal"); ++ } + } + + public Field(String name) { +@@ -106,7 +106,7 @@ public class Field extends FieldBase implements FieldSet, Comparable, Serializab + private int calculateIdV6(DocumentType owner) { + int newId = BobHash.hash(getName()); // Using a portfriendly hash + if (newId < 0) newId = -newId; // Highest bit is reserved to tell 7-bit id's from 31-bit ones +- validateId(newId, owner, 6); ++ owner.validateId(newId, 6, this); + return newId; + } + +@@ -127,7 +127,7 @@ public class Field extends FieldBase implements FieldSet, Comparable, Serializab + + int newId = BobHash.hash(combined); // Using a portfriendly hash + if (newId < 0) newId = -newId; // Highest bit is reserved to tell 7-bit id's from 31-bit ones +- validateId(newId, owner, Document.SERIALIZED_VERSION); ++ owner.validateId(newId, Document.SERIALIZED_VERSION, this); + return newId; + } + +@@ -144,7 +144,7 @@ public class Field extends FieldBase implements FieldSet, Comparable, Serializab + throw new NullPointerException("Can not assign an id of " + this + " without knowing the owner"); + } + +- validateId(newId, owner, Document.SERIALIZED_VERSION); ++ owner.validateId(newId, Document.SERIALIZED_VERSION, this); + + owner.removeField(getName()); + this.fieldId = newId; +@@ -153,29 +153,6 @@ public class Field extends FieldBase implements FieldSet, Comparable, Serializab + owner.addField(this); + } + +- private void validateId(int newId, DocumentType owner, int version) { +- if (newId >= 100 && newId <= 127) { +- throw new IllegalArgumentException("Attempt to set the id of " + this + " to " + newId + +- " failed, values from 100 to 127 " + "are reserved for internal use"); +- } +- +- if ((newId & 0x80000000) != 0) // Highest bit must not be set +- { +- throw new IllegalArgumentException("Attempt to set the id of " + this + " to " + newId + +- " failed, negative id values " + " are illegal"); +- } +- +- if (owner == null) return; +- { +- Field existing = owner.getField(newId, version); +- if (existing != null && !existing.getName().equals(getName())) { +- throw new IllegalArgumentException("Couldn't set id of " + this + " to " + newId + ", " + existing + +- " already has this id in " + owner); +- } +- } +- } +- + /** Returns the datatype of the field */ + public final DataType getDataType() { + return dataType; +diff --git a/document/src/main/java/com/yahoo/document/annotation/AlternateSpanList.java b/document/src/main/java/com/yahoo/document/annotation/AlternateSpanList.java +old mode 100644 +new mode 100755 +index bfac5c2c62..31cc29d69f +--- a/document/src/main/java/com/yahoo/document/annotation/AlternateSpanList.java ++++ b/document/src/main/java/com/yahoo/document/annotation/AlternateSpanList.java +@@ -566,7 +566,7 @@ public class AlternateSpanList extends SpanList { + * @param node span node + */ + public AlternateSpanList add(int i, SpanNode node) { +- checkValidity(node, children(i)); ++
node.checkValidity(children(i), AlternateSpanList.this); + node.setParent(this); + children(i).add(node); + return this; +diff --git a/document/src/main/java/com/yahoo/document/annotation/Annotation.java b/document/src/main/java/com/yahoo/document/annotation/Annotation.java +old mode 100644 +new mode 100755 +index 4b9452f340..4e8f90856e +--- a/document/src/main/java/com/yahoo/document/annotation/Annotation.java ++++ b/document/src/main/java/com/yahoo/document/annotation/Annotation.java +@@ -256,5 +256,20 @@ public class Annotation implements Comparable { + + return comp; + } ++ ++ /** ++ * Adds an Annotation to the internal list of annotations for this SpanTree. Use this when ++ * adding an Annotation that shall annotate a SpanNode. Upon return, Annotation.getSpanNode() ++ * returns the given node. ++ * ++ * @param node the node to annotate ++ * @param spanTree ++ * @return this, for chaining ++ * @see Annotation ++ */ ++ public SpanTree annotate(SpanNode node, SpanTree spanTree) { ++ setSpanNode(node); ++ return spanTree.annotate(this); ++ } + } + +diff --git a/document/src/main/java/com/yahoo/document/annotation/SpanList.java b/document/src/main/java/com/yahoo/document/annotation/SpanList.java +old mode 100644 +new mode 100755 +index 5afe4678c5..65e9733b25 +--- a/document/src/main/java/com/yahoo/document/annotation/SpanList.java ++++ b/document/src/main/java/com/yahoo/document/annotation/SpanList.java +@@ -56,19 +56,6 @@ public class SpanList extends SpanNode { + } + } + +- void checkValidity(SpanNode node, List childrenToCheck) { +- if (!node.isValid()) { +- throw new IllegalStateException("Cannot reuse SpanNode instance " + node + ", is INVALID."); +- } +- if (node.getParent() != null) { +- if (node.getParent() != this) { +- throw new IllegalStateException(node + " is already a child of " + node.getParent() + ", cannot be added to " + this); +- } else if (node.getParent() == this && childrenToCheck.contains(node)) { +- throw new IllegalStateException(node + " is already a child of " + this + ", cannot be added twice to the same parent node."); +- } +- } +- } +- + /** + * Adds a child node to this SpanList. + * +@@ -77,7 +64,7 @@ public class SpanList extends SpanNode { + * @throws IllegalStateException if SpanNode.isValid() returns false. 
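The same call-shape inversion is applied at every annotate call site in the tests further down; as a rough sketch (type, value, span and tree assumed to be in scope):

    // Before the move: tree.annotate(span, new Annotation(type, value));
    // After the move, the Annotation attaches itself to the node and registers with the tree,
    // which is returned so calls can still be chained:
    SpanTree result = new Annotation(type, value).annotate(span, tree);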
+ */ + public SpanList add(SpanNode node) { +- checkValidity(node, children()); ++ node.checkValidity(children(), this); + node.setParent(this); + resetCachedFromAndTo(); + children().add(node); +diff --git a/document/src/main/java/com/yahoo/document/annotation/SpanNode.java b/document/src/main/java/com/yahoo/document/annotation/SpanNode.java +old mode 100644 +new mode 100755 +index e1a7b11d2e..24a714262f +--- a/document/src/main/java/com/yahoo/document/annotation/SpanNode.java ++++ b/document/src/main/java/com/yahoo/document/annotation/SpanNode.java +@@ -84,14 +84,14 @@ public abstract class SpanNode implements Comparable, SpanNodeParent { + + /** + * Convenience method for adding an annotation to this span, same as +- * getSpanTree().{@link SpanTree#annotate(SpanNode,Annotation) spanTree.annotate(this,annotation)} ++ * getSpanTree().{@link Annotation#annotate(SpanNode, SpanTree) spanTree.annotate(this,annotation)} + * + * @param annotation the annotation to add + * @return this for chaining + * @throws NullPointerException if this span is not attached to a tree + */ + public SpanNode annotate(Annotation annotation) { +- getNonNullSpanTree().annotate(this, annotation); ++ annotation.annotate(this, getNonNullSpanTree()); + return this; + } + +@@ -317,4 +317,17 @@ public abstract class SpanNode implements Comparable, SpanNodeParent { + //both from and to are equal + return 0; + } ++ ++ public void checkValidity(List childrenToCheck, SpanList spanList) { ++ if (!isValid()) { ++ throw new IllegalStateException("Cannot reuse SpanNode instance " + this + ", is INVALID."); ++ } ++ if (getParent() != null) { ++ if (getParent() != spanList) { ++ throw new IllegalStateException(this + " is already a child of " + getParent() + ", cannot be added to " + spanList); ++ } else if (getParent() == spanList && childrenToCheck.contains(this)) { ++ throw new IllegalStateException(this + " is already a child of " + spanList + ", cannot be added twice to the same parent node."); ++ } ++ } ++ } + } +diff --git a/document/src/main/java/com/yahoo/document/annotation/SpanTree.java b/document/src/main/java/com/yahoo/document/annotation/SpanTree.java +old mode 100644 +new mode 100755 +index 2206d95aa3..3b8ad316d1 +--- a/document/src/main/java/com/yahoo/document/annotation/SpanTree.java ++++ b/document/src/main/java/com/yahoo/document/annotation/SpanTree.java +@@ -437,21 +437,6 @@ public class SpanTree implements Iterable, SpanNodeParent, Comparabl + return this; + } + +- /** +- * Adds an Annotation to the internal list of annotations for this SpanTree. Use this when +- * adding an Annotation that shall annotate a SpanNode. Upon return, Annotation.getSpanNode() +- * returns the given node. +- * +- * @param node the node to annotate +- * @param annotation the Annotation to add +- * @return this, for chaining +- * @see com.yahoo.document.annotation.Annotation +- */ +- public SpanTree annotate(SpanNode node, Annotation annotation) { +- annotation.setSpanNode(node); +- return annotate(annotation); +- } +- + /** + * Adds an Annotation to the internal list of annotations for this SpanTree. Use this when + * adding an Annotation that shall annotate a SpanNode. 
Upon return, Annotation.getSpanNode() +@@ -479,7 +464,7 @@ public class SpanTree implements Iterable, SpanNodeParent, Comparabl + * @see com.yahoo.document.annotation.Annotation + */ + public SpanTree annotate(SpanNode node, AnnotationType type,FieldValue value) { +- return annotate(node, new Annotation(type, value)); ++ return new Annotation(type, value).annotate(node, this); + } + + /** +@@ -497,7 +482,7 @@ public class SpanTree implements Iterable, SpanNodeParent, Comparabl + */ + public SpanTree annotate(SpanNode node, AnnotationType type) { + Annotation a = new Annotation(type); +- return annotate(node, a); ++ return a.annotate(node, this); + } + + /** +diff --git a/document/src/main/java/com/yahoo/document/datatypes/Struct.java b/document/src/main/java/com/yahoo/document/datatypes/Struct.java +old mode 100644 +new mode 100755 +index fc75870bb9..3fa3051ca6 +--- a/document/src/main/java/com/yahoo/document/datatypes/Struct.java ++++ b/document/src/main/java/com/yahoo/document/datatypes/Struct.java +@@ -194,18 +194,6 @@ public class Struct extends StructuredFieldValue { + } + } + +- /** +- * Clears this and assigns from the given {@link StructuredFieldValue} +- */ +- public void assignFrom(StructuredFieldValue sfv) { +- clear(); +- Iterator> otherValues = sfv.iterator(); +- while (otherValues.hasNext()) { +- Map.Entry otherEntry = otherValues.next(); +- setFieldValue(otherEntry.getKey(), otherEntry.getValue()); +- } +- } +- + @Override + public boolean equals(Object o) { + if (this == o) return true; +diff --git a/document/src/main/java/com/yahoo/document/datatypes/StructuredFieldValue.java b/document/src/main/java/com/yahoo/document/datatypes/StructuredFieldValue.java +old mode 100644 +new mode 100755 +index b3ea93d846..8afd71cb99 +--- a/document/src/main/java/com/yahoo/document/datatypes/StructuredFieldValue.java ++++ b/document/src/main/java/com/yahoo/document/datatypes/StructuredFieldValue.java +@@ -232,4 +232,16 @@ public abstract class StructuredFieldValue extends CompositeFieldValue { + return false; + } + ++ /** ++ * Clears this and assigns from the given {@link Struct} ++ * @param struct the struct to assign from ++ */ ++ public void assignFrom(Struct struct) { ++ clear(); ++ Iterator> otherValues = struct.iterator(); ++ while (otherValues.hasNext()) { ++ Map.Entry otherEntry = otherValues.next(); ++ setFieldValue(otherEntry.getKey(), otherEntry.getValue()); ++ } ++ } + } +diff --git a/document/src/main/java/com/yahoo/document/fieldpathupdate/FieldPathUpdate.java b/document/src/main/java/com/yahoo/document/fieldpathupdate/FieldPathUpdate.java +old mode 100644 +new mode 100755 +index d4144116a0..724eff4d90 +--- a/document/src/main/java/com/yahoo/document/fieldpathupdate/FieldPathUpdate.java ++++ b/document/src/main/java/com/yahoo/document/fieldpathupdate/FieldPathUpdate.java +@@ -19,6 +19,12 @@ import java.util.ListIterator; + */ + public abstract class FieldPathUpdate { + ++ public void write(VespaDocumentSerializer6 vespaDocumentSerializer6) { ++ vespaDocumentSerializer6.putByte(null, (byte) getUpdateType().getCode()); ++ vespaDocumentSerializer6.put(null, getOriginalFieldPath()); ++ vespaDocumentSerializer6.put(null, getOriginalWhereClause()); ++ } ++ + public enum Type { + ASSIGN(0), + REMOVE(1), +@@ -131,7 +137,7 @@ public abstract class FieldPathUpdate { + } + + public void serialize(VespaDocumentSerializer6 data) { +- data.write(this); ++ write(data); + } + + public static FieldPathUpdate create(Type type, DocumentType docType, DocumentUpdateReader reader) { +diff --git
a/document/src/main/java/com/yahoo/document/fieldset/FieldSetRepo.java b/document/src/main/java/com/yahoo/document/fieldset/FieldSetRepo.java +old mode 100644 +new mode 100755 +index a703543990..078e2a4d29 +--- a/document/src/main/java/com/yahoo/document/fieldset/FieldSetRepo.java ++++ b/document/src/main/java/com/yahoo/document/fieldset/FieldSetRepo.java +@@ -2,7 +2,6 @@ + package com.yahoo.document.fieldset; + + import com.yahoo.document.Document; +-import com.yahoo.document.DocumentType; + import com.yahoo.document.DocumentTypeManager; + import com.yahoo.document.Field; + import com.yahoo.document.datatypes.FieldValue; +@@ -31,27 +30,6 @@ public class FieldSetRepo { + } + } + +- FieldSet parseFieldCollection(DocumentTypeManager docMan, String docType, String fieldNames) { +- DocumentType type = docMan.getDocumentType(docType); +- if (type == null) { +- throw new IllegalArgumentException("Unknown document type " + docType); +- } +- +- StringTokenizer tokenizer = new StringTokenizer(fieldNames, ","); +- FieldCollection collection = new FieldCollection(type); +- +- for (; tokenizer.hasMoreTokens(); ) { +- String token = tokenizer.nextToken(); +- Field f = type.getField(token); +- if (f == null) { +- throw new IllegalArgumentException("No such field " + token); +- } +- collection.add(f); +- } +- +- return collection; +- } +- + public FieldSet parse(DocumentTypeManager docMan, String fieldSet) { + if (fieldSet.length() == 0) { + throw new IllegalArgumentException("Illegal field set value \"\""); +@@ -71,7 +49,7 @@ public class FieldSetRepo { + String type = tokenizer.nextToken(); + String fields = tokenizer.nextToken(); + +- return parseFieldCollection(docMan, type, fields); ++ return docMan.parseFieldCollection(type, fields, this); + } + + @SuppressWarnings("deprecation") +diff --git a/document/src/main/java/com/yahoo/document/json/TokenBuffer.java b/document/src/main/java/com/yahoo/document/json/TokenBuffer.java +old mode 100644 +new mode 100755 +index e20845bfa5..f8a77654bf +--- a/document/src/main/java/com/yahoo/document/json/TokenBuffer.java ++++ b/document/src/main/java/com/yahoo/document/json/TokenBuffer.java +@@ -9,6 +9,10 @@ import java.util.Iterator; + import com.fasterxml.jackson.core.JsonParser; + import com.fasterxml.jackson.core.JsonToken; + import com.google.common.base.Preconditions; ++import com.yahoo.document.DocumentPut; ++import com.yahoo.document.json.readers.VespaJsonDocumentReader; ++ ++import static com.yahoo.document.json.readers.CompositeReader.populateComposite; + + /** + * Helper class to enable lookahead in the token stream. +@@ -17,6 +21,17 @@ import com.google.common.base.Preconditions; + */ + public class TokenBuffer { + ++ // Exposed for unit testing... 
++ public void readPut(DocumentPut put, VespaJsonDocumentReader vespaJsonDocumentReader) { ++ try { ++ if (isEmpty()) // no "fields" map ++ throw new IllegalArgumentException(put + " is missing a 'fields' map"); ++ populateComposite(this, put.getDocument()); ++ } catch (JsonReaderException e) { ++ throw JsonReaderException.addDocId(e, put.getId()); ++ } ++ } ++ + public static final class Token { + public final JsonToken token; + public final String name; +diff --git a/document/src/main/java/com/yahoo/document/json/readers/VespaJsonDocumentReader.java b/document/src/main/java/com/yahoo/document/json/readers/VespaJsonDocumentReader.java +old mode 100644 +new mode 100755 +index e252e71407..afa8c916c9 +--- a/document/src/main/java/com/yahoo/document/json/readers/VespaJsonDocumentReader.java ++++ b/document/src/main/java/com/yahoo/document/json/readers/VespaJsonDocumentReader.java +@@ -22,7 +22,6 @@ import com.yahoo.document.update.FieldUpdate; + + import static com.yahoo.document.json.readers.AddRemoveCreator.createAdds; + import static com.yahoo.document.json.readers.AddRemoveCreator.createRemoves; +-import static com.yahoo.document.json.readers.CompositeReader.populateComposite; + import static com.yahoo.document.json.readers.JsonParserHelpers.expectObjectEnd; + import static com.yahoo.document.json.readers.JsonParserHelpers.expectObjectStart; + import static com.yahoo.document.json.readers.JsonParserHelpers.expectScalarValue; +@@ -50,7 +49,7 @@ public class VespaJsonDocumentReader { + switch (documentParseInfo.operationType) { + case PUT: + documentOperation = new DocumentPut(new Document(documentType, documentParseInfo.documentId)); +- readPut(documentParseInfo.fieldsBuffer, (DocumentPut) documentOperation); ++ documentParseInfo.fieldsBuffer.readPut((DocumentPut) documentOperation, this); + verifyEndState(documentParseInfo.fieldsBuffer, JsonToken.END_OBJECT); + break; + case REMOVE: +@@ -77,17 +76,6 @@ public class VespaJsonDocumentReader { + return documentOperation; + } + +- // Exposed for unit testing... +- public void readPut(TokenBuffer buffer, DocumentPut put) { +- try { +- if (buffer.isEmpty()) // no "fields" map +- throw new IllegalArgumentException(put + " is missing a 'fields' map"); +- populateComposite(buffer, put.getDocument()); +- } catch (JsonReaderException e) { +- throw JsonReaderException.addDocId(e, put.getId()); +- } +- } +- + // Exposed for unit testing... + public void readUpdate(TokenBuffer buffer, DocumentUpdate update) { + if (buffer.isEmpty()) +diff --git a/document/src/main/java/com/yahoo/document/select/OrderingSpecification.java b/document/src/main/java/com/yahoo/document/select/OrderingSpecification.java +old mode 100644 +new mode 100755 +index 97908d3190..65d2f9964d +--- a/document/src/main/java/com/yahoo/document/select/OrderingSpecification.java ++++ b/document/src/main/java/com/yahoo/document/select/OrderingSpecification.java +@@ -1,6 +1,8 @@ + // Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. 
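The updated JsonReaderTestCase further down shows the new call direction for reading a put; roughly, with parseInfo and put assumed to be in scope:

    // Before: new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put);
    // After: the TokenBuffer itself populates the DocumentPut's 'fields' map.
    parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader());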
+ package com.yahoo.document.select; + ++import com.yahoo.document.select.rule.LogicNode; ++ + public class OrderingSpecification { + public static int ASCENDING = 0; + public static int DESCENDING = 1; +@@ -46,4 +48,16 @@ public class OrderingSpecification { + public String toString() { + return "O: " + order + " S:" + orderingStart + " W:" + widthBits + " D:" + divisionBits; + } ++ ++ public OrderingSpecification pickOrdering(OrderingSpecification b, boolean isAnd, LogicNode logicNode) { ++ if (getWidthBits() == b.getWidthBits() && getDivisionBits() == b.getDivisionBits() && getOrder() == b.getOrder()) { ++ if ((getOrder() == ASCENDING && isAnd) || ++ (getOrder() == DESCENDING && !isAnd)) { ++ return new OrderingSpecification(getOrder(), Math.max(getOrderingStart(), b.getOrderingStart()), b.getWidthBits(), getDivisionBits()); ++ } else { ++ return new OrderingSpecification(getOrder(), Math.min(getOrderingStart(), b.getOrderingStart()), b.getWidthBits(), getDivisionBits()); ++ } ++ } ++ return null; ++ } + } +diff --git a/document/src/main/java/com/yahoo/document/select/rule/ComparisonNode.java b/document/src/main/java/com/yahoo/document/select/rule/ComparisonNode.java +old mode 100644 +new mode 100755 +index 13a990566e..0d04d0245e +--- a/document/src/main/java/com/yahoo/document/select/rule/ComparisonNode.java ++++ b/document/src/main/java/com/yahoo/document/select/rule/ComparisonNode.java +@@ -96,41 +96,11 @@ public class ComparisonNode implements ExpressionNode { + return this; + } + +- public OrderingSpecification getOrdering(IdNode lhs, LiteralNode rhs, String operator, int order) { +- if (lhs.getWidthBits() == -1 || lhs.getDivisionBits() == -1 || !(rhs.getValue() instanceof Long)) { +- return null; +- } +- +- if (operator.equals("==") || operator.equals("=")) { +- return new OrderingSpecification(order, (Long)rhs.getValue(), lhs.getWidthBits(), lhs.getDivisionBits()); +- } +- +- if (order == OrderingSpecification.ASCENDING) { +- if ((operator.equals("<") || operator.equals("<="))) { +- return new OrderingSpecification(order, 0, lhs.getWidthBits(), lhs.getDivisionBits()); +- } +- if (operator.equals(">")) { +- return new OrderingSpecification(order, (Long)rhs.getValue() + 1, lhs.getWidthBits(), lhs.getDivisionBits()); +- } +- if (operator.equals(">=")) { +- return new OrderingSpecification(order, (Long)rhs.getValue(), lhs.getWidthBits(), lhs.getDivisionBits()); +- } +- } else { +- if (operator.equals("<")) { +- return new OrderingSpecification(order, (Long)rhs.getValue() - 1, lhs.getWidthBits(), lhs.getDivisionBits()); +- } +- if (operator.equals("<=")) { +- return new OrderingSpecification(order, (Long)rhs.getValue(), lhs.getWidthBits(), lhs.getDivisionBits()); +- } +- } +- return null; +- } +- + public OrderingSpecification getOrdering(int order) { + if (lhs instanceof IdNode && rhs instanceof LiteralNode) { +- return getOrdering((IdNode)lhs, (LiteralNode)rhs, operator, order); ++ return ((IdNode)lhs).getOrderingOther((LiteralNode)rhs, operator, order, this); + } else if (rhs instanceof IdNode && lhs instanceof LiteralNode) { +- return getOrdering((IdNode)rhs, (LiteralNode)rhs, operator, order); ++ return ((IdNode)rhs).getOrderingOther((LiteralNode)rhs, operator, order, this); + } + + return null; +diff --git a/document/src/main/java/com/yahoo/document/select/rule/DocumentNode.java b/document/src/main/java/com/yahoo/document/select/rule/DocumentNode.java +old mode 100644 +new mode 100755 +index c0907693da..bf3b530067 +--- 
a/document/src/main/java/com/yahoo/document/select/rule/DocumentNode.java ++++ b/document/src/main/java/com/yahoo/document/select/rule/DocumentNode.java +@@ -34,25 +34,7 @@ public class DocumentNode implements ExpressionNode { + + @Override + public Object evaluate(Context context) { +- return evaluate(context.getDocumentOperation()); +- } +- +- public Object evaluate(DocumentOperation op) { +- DocumentType doct; +- if (op instanceof DocumentPut) { +- doct = ((DocumentPut)op).getDocument().getDataType(); +- } else if (op instanceof DocumentUpdate) { +- doct = ((DocumentUpdate)op).getDocumentType(); +- } else if (op instanceof DocumentRemove) { +- DocumentRemove removeOp = (DocumentRemove)op; +- return (removeOp.getId().getDocType().equals(type) ? op : Boolean.FALSE); +- } else if (op instanceof DocumentGet) { +- DocumentGet getOp = (DocumentGet)op; +- return (getOp.getId().getDocType().equals(type) ? op : Boolean.FALSE); +- } else { +- throw new IllegalStateException("Document class '" + op.getClass().getName() + "' is not supported."); +- } +- return doct.isA(this.type) ? op : Boolean.FALSE; ++ return context.getDocumentOperation().evaluate(this); + } + + public void accept(Visitor visitor) { +diff --git a/document/src/main/java/com/yahoo/document/select/rule/IdNode.java b/document/src/main/java/com/yahoo/document/select/rule/IdNode.java +old mode 100644 +new mode 100755 +index 3c15a2866c..1ae3100f9f +--- a/document/src/main/java/com/yahoo/document/select/rule/IdNode.java ++++ b/document/src/main/java/com/yahoo/document/select/rule/IdNode.java +@@ -105,4 +105,34 @@ public class IdNode implements ExpressionNode { + public String toString() { + return "id" + (field != null ? "." + field : "") + (widthBits != -1 ? "(" + widthBits + "," + divisionBits + ")" : ""); + } ++ ++ public OrderingSpecification getOrderingOther(LiteralNode rhs, String operator, int order, ComparisonNode comparisonNode) { ++ if (getWidthBits() == -1 || getDivisionBits() == -1 || !(rhs.getValue() instanceof Long)) { ++ return null; ++ } ++ ++ if (operator.equals("==") || operator.equals("=")) { ++ return new OrderingSpecification(order, (Long)rhs.getValue(), getWidthBits(), getDivisionBits()); ++ } ++ ++ if (order == OrderingSpecification.ASCENDING) { ++ if ((operator.equals("<") || operator.equals("<="))) { ++ return new OrderingSpecification(order, 0, getWidthBits(), getDivisionBits()); ++ } ++ if (operator.equals(">")) { ++ return new OrderingSpecification(order, (Long)rhs.getValue() + 1, getWidthBits(), getDivisionBits()); ++ } ++ if (operator.equals(">=")) { ++ return new OrderingSpecification(order, (Long)rhs.getValue(), getWidthBits(), getDivisionBits()); ++ } ++ } else { ++ if (operator.equals("<")) { ++ return new OrderingSpecification(order, (Long)rhs.getValue() - 1, getWidthBits(), getDivisionBits()); ++ } ++ if (operator.equals("<=")) { ++ return new OrderingSpecification(order, (Long)rhs.getValue(), getWidthBits(), getDivisionBits()); ++ } ++ } ++ return null; ++ } + } +diff --git a/document/src/main/java/com/yahoo/document/select/rule/LogicNode.java b/document/src/main/java/com/yahoo/document/select/rule/LogicNode.java +old mode 100644 +new mode 100755 +index a7b112fac7..bd00fd5a2d +--- a/document/src/main/java/com/yahoo/document/select/rule/LogicNode.java ++++ b/document/src/main/java/com/yahoo/document/select/rule/LogicNode.java +@@ -88,18 +88,6 @@ public class LogicNode implements ExpressionNode { + return buf.pop().ordering; + } + +- private OrderingSpecification pickOrdering(OrderingSpecification a, 
OrderingSpecification b, boolean isAnd) { +- if (a.getWidthBits() == b.getWidthBits() && a.getDivisionBits() == b.getDivisionBits() && a.getOrder() == b.getOrder()) { +- if ((a.getOrder() == OrderingSpecification.ASCENDING && isAnd) || +- (a.getOrder() == OrderingSpecification.DESCENDING && !isAnd)) { +- return new OrderingSpecification(a.getOrder(), Math.max(a.getOrderingStart(), b.getOrderingStart()), b.getWidthBits(), a.getDivisionBits()); +- } else { +- return new OrderingSpecification(a.getOrder(), Math.min(a.getOrderingStart(), b.getOrderingStart()), b.getWidthBits(), a.getDivisionBits()); +- } +- } +- return null; +- } +- + private void pickOrdering(Stack buf) { + OrderingItem rhs = buf.pop(); + OrderingItem lhs = buf.pop(); +@@ -110,12 +98,12 @@ public class LogicNode implements ExpressionNode { + } else if (rhs.ordering == null) { + // empty + } else { +- lhs.ordering = pickOrdering(lhs.ordering, rhs.ordering, true); ++ lhs.ordering = lhs.ordering.pickOrdering(rhs.ordering, true, this); + } + break; + case OR: + if (lhs.ordering != null && rhs.ordering != null) { +- lhs.ordering = pickOrdering(lhs.ordering, rhs.ordering, false); ++ lhs.ordering = lhs.ordering.pickOrdering(rhs.ordering, false, this); + } else { + lhs.ordering = null; + } +diff --git a/document/src/main/java/com/yahoo/document/serialization/VespaDocumentDeserializer42.java b/document/src/main/java/com/yahoo/document/serialization/VespaDocumentDeserializer42.java +old mode 100644 +new mode 100755 +index 7ec4433a24..43edebb968 +--- a/document/src/main/java/com/yahoo/document/serialization/VespaDocumentDeserializer42.java ++++ b/document/src/main/java/com/yahoo/document/serialization/VespaDocumentDeserializer42.java +@@ -50,14 +50,7 @@ import com.yahoo.document.fieldpathupdate.FieldPathUpdate; + import com.yahoo.document.fieldpathupdate.RemoveFieldPathUpdate; + import com.yahoo.document.predicate.BinaryFormat; + import com.yahoo.document.select.parser.ParseException; +-import com.yahoo.document.update.AddValueUpdate; +-import com.yahoo.document.update.ArithmeticValueUpdate; +-import com.yahoo.document.update.AssignValueUpdate; +-import com.yahoo.document.update.ClearValueUpdate; + import com.yahoo.document.update.FieldUpdate; +-import com.yahoo.document.update.MapValueUpdate; +-import com.yahoo.document.update.RemoveValueUpdate; +-import com.yahoo.document.update.ValueUpdate; + import com.yahoo.io.GrowableByteBuffer; + import com.yahoo.tensor.serialization.TypedBinaryFormat; + import com.yahoo.text.Utf8; +@@ -616,65 +609,6 @@ public class VespaDocumentDeserializer42 extends BufferSerializer implements Doc + update.setNewValues((Array)fv); + } + +- public ValueUpdate getValueUpdate(DataType superType, DataType subType) { +- int vuTypeId = getInt(null); +- +- ValueUpdate.ValueUpdateClassID op = ValueUpdate.ValueUpdateClassID.getID(vuTypeId); +- if (op == null) { +- throw new IllegalArgumentException("Read type "+vuTypeId+" of bytebuffer, but this is not a legal value update type."); +- } +- +- switch (op) { +- case ADD: +- { +- FieldValue fval = subType.createFieldValue(); +- fval.deserialize(this); +- int weight = getInt(null); +- return new AddValueUpdate(fval, weight); +- } +- case ARITHMETIC: +- int opId = getInt(null); +- ArithmeticValueUpdate.Operator operator = ArithmeticValueUpdate.Operator.getID(opId); +- double operand = getDouble(null); +- return new ArithmeticValueUpdate(operator, operand); +- case ASSIGN: +- { +- byte contents = getByte(null); +- FieldValue fval = null; +- if (contents == (byte) 1) { +- fval 
= superType.createFieldValue(); +- fval.deserialize(this); +- } +- return new AssignValueUpdate(fval); +- } +- case CLEAR: +- return new ClearValueUpdate(); +- case MAP: +- if (superType instanceof ArrayDataType) { +- CollectionDataType type = (CollectionDataType) superType; +- IntegerFieldValue index = new IntegerFieldValue(); +- index.deserialize(this); +- ValueUpdate update = getValueUpdate(type.getNestedType(), null); +- return new MapValueUpdate(index, update); +- } else if (superType instanceof WeightedSetDataType) { +- CollectionDataType type = (CollectionDataType) superType; +- FieldValue fval = type.getNestedType().createFieldValue(); +- fval.deserialize(this); +- ValueUpdate update = getValueUpdate(DataType.INT, null); +- return new MapValueUpdate(fval, update); +- } else { +- throw new DeserializationException("MapValueUpdate only works for arrays and weighted sets"); +- } +- case REMOVE: +- FieldValue fval = ((CollectionDataType) superType).getNestedType().createFieldValue(); +- fval.deserialize(this); +- return new RemoveValueUpdate(fval); +- default: +- throw new DeserializationException( +- "Could not deserialize ValueUpdate, unknown valueUpdateClassID type " + vuTypeId); +- } +- } +- + public void read(FieldUpdate fieldUpdate) { + int fieldId = getInt(null); + Field field = fieldUpdate.getDocumentType().getField(fieldId, fieldUpdate.getSerializationVersion()); +@@ -689,9 +623,9 @@ public class VespaDocumentDeserializer42 extends BufferSerializer implements Doc + for (int i = 0; i < size; i++) { + if (field.getDataType() instanceof CollectionDataType) { + CollectionDataType collType = (CollectionDataType) field.getDataType(); +- fieldUpdate.addValueUpdate(getValueUpdate(collType, collType.getNestedType())); ++ fieldUpdate.addValueUpdate(collType.getValueUpdate(collType.getNestedType(), this)); + } else { +- fieldUpdate.addValueUpdate(getValueUpdate(field.getDataType(), null)); ++ fieldUpdate.addValueUpdate(field.getDataType().getValueUpdate(null, this)); + } + } + } +diff --git a/document/src/main/java/com/yahoo/document/serialization/VespaDocumentSerializer6.java b/document/src/main/java/com/yahoo/document/serialization/VespaDocumentSerializer6.java +old mode 100644 +new mode 100755 +index 8f033c8463..99424fda8d +--- a/document/src/main/java/com/yahoo/document/serialization/VespaDocumentSerializer6.java ++++ b/document/src/main/java/com/yahoo/document/serialization/VespaDocumentSerializer6.java +@@ -595,14 +595,8 @@ public class VespaDocumentSerializer6 extends BufferSerializer implements Docume + } + } + +- public void write(FieldPathUpdate update) { +- putByte(null, (byte)update.getUpdateType().getCode()); +- put(null, update.getOriginalFieldPath()); +- put(null, update.getOriginalWhereClause()); +- } +- + public void write(AssignFieldPathUpdate update) { +- write((FieldPathUpdate)update); ++ ((FieldPathUpdate)update).write(this); + byte flags = 0; + if (update.getRemoveIfZero()) { + flags |= AssignFieldPathUpdate.REMOVE_IF_ZERO; +@@ -621,7 +615,7 @@ public class VespaDocumentSerializer6 extends BufferSerializer implements Docume + } + + public void write(AddFieldPathUpdate update) { +- write((FieldPathUpdate)update); ++ ((FieldPathUpdate)update).write(this); + update.getNewValues().serialize(this); + } + +diff --git a/document/src/test/java/com/yahoo/document/DocumentTypeManagerTestCase.java b/document/src/test/java/com/yahoo/document/DocumentTypeManagerTestCase.java +old mode 100644 +new mode 100755 +index 65c217e09e..c9410458e0 +--- 
a/document/src/test/java/com/yahoo/document/DocumentTypeManagerTestCase.java ++++ b/document/src/test/java/com/yahoo/document/DocumentTypeManagerTestCase.java +@@ -449,7 +449,7 @@ search annotationsimplicitstruct { + value.setFieldValue("x", 10); + SpanNode span = new Span(0, text.length()); + SpanTree tree = new SpanTree("span", span); +- tree.annotate(span, new Annotation(type, value)); ++ new Annotation(type, value).annotate(span, tree); + sfv.setSpanTree(tree); + } + +@@ -461,7 +461,7 @@ search annotationsimplicitstruct { + value.setFieldValue("x", 10); + SpanNode span = new Span(0, text.length()); + SpanTree tree = new SpanTree("span", span); +- tree.annotate(span, new Annotation(type, value)); ++ new Annotation(type, value).annotate(span, tree); + sfv.setSpanTree(tree); + } + } +diff --git a/document/src/test/java/com/yahoo/document/annotation/AbstractTypesTest.java b/document/src/test/java/com/yahoo/document/annotation/AbstractTypesTest.java +index 5899256437..14e73b966c 100755 +--- a/document/src/test/java/com/yahoo/document/annotation/AbstractTypesTest.java ++++ b/document/src/test/java/com/yahoo/document/annotation/AbstractTypesTest.java +@@ -90,23 +90,23 @@ public abstract class AbstractTypesTest { + alternateSpanList.add(s4); + + tree.annotate(s2, dummy); +- tree.annotate(s2, new Annotation(number, new IntegerFieldValue(1234))); ++ new Annotation(number, new IntegerFieldValue(1234)).annotate(s2, tree); + + Struct mother = new Struct(person); + mother.setFieldValue("firstname", "jenny"); + mother.setFieldValue("lastname", "olsen"); + mother.setFieldValue("birthyear", 1909); + Annotation motherA = new Annotation(personA, mother); +- tree.annotate(s2, motherA); ++ motherA.annotate(s2, tree); + + Struct daughter = new Struct(relative); + daughter.setFieldValue("title", "daughter"); + daughter.setFieldValue("related", new AnnotationReference(personReference, motherA)); +- tree.annotate(s6, new Annotation(relativeA, daughter)); ++ new Annotation(relativeA, daughter).annotate(s6, tree); + + tree.annotate(s1, dummy); + tree.annotate(s3, dummy); +- tree.annotate(s3, new Annotation(number, new IntegerFieldValue(2344))); ++ new Annotation(number, new IntegerFieldValue(2344)).annotate(s3, tree); + tree.annotate(s5, dummy); + + List alternateChildren = new ArrayList<>(); +diff --git a/document/src/test/java/com/yahoo/document/annotation/AlternateSpanListAdvTestCase.java b/document/src/test/java/com/yahoo/document/annotation/AlternateSpanListAdvTestCase.java +old mode 100644 +new mode 100755 +index 146f458b6e..0066d76c77 +--- a/document/src/test/java/com/yahoo/document/annotation/AlternateSpanListAdvTestCase.java ++++ b/document/src/test/java/com/yahoo/document/annotation/AlternateSpanListAdvTestCase.java +@@ -82,13 +82,13 @@ public class AlternateSpanListAdvTestCase { + alternate2.add(span22); + alternate2.add(span33); + +- tree.annotate(span1, an1); +- tree.annotate(span2, an2); +- tree.annotate(span3, an3); ++ an1.annotate(span1, tree); ++ an2.annotate(span2, tree); ++ an3.annotate(span3, tree); + +- tree.annotate(span11, an11); +- tree.annotate(span22, an22); +- tree.annotate(span33, an33); ++ an11.annotate(span11, tree); ++ an22.annotate(span22, tree); ++ an33.annotate(span33, tree); + + subtreeList1 = new ArrayList(); + subtreeList1.add(alternate1); +@@ -110,7 +110,7 @@ public class AlternateSpanListAdvTestCase { + @Test (expected = IllegalStateException.class) + public void assertSharingAnnotationInstance() { + SpanNode testNode = new Span(0, 2); +- tree.annotate(testNode, an1); ++ 
an1.annotate(testNode, tree); + } + + @Test (expected = IllegalStateException.class) +diff --git a/document/src/test/java/com/yahoo/document/annotation/AnnotationTestCase.java b/document/src/test/java/com/yahoo/document/annotation/AnnotationTestCase.java +old mode 100644 +new mode 100755 +index cb04ca1de6..c95d7f117f +--- a/document/src/test/java/com/yahoo/document/annotation/AnnotationTestCase.java ++++ b/document/src/test/java/com/yahoo/document/annotation/AnnotationTestCase.java +@@ -86,8 +86,8 @@ public class AnnotationTestCase extends AbstractTypesTest { + SpanList root = new SpanList(); + tree = new SpanTree("SpanTree1", root); + SpanNode node = new Span(0, 3); +- tree.annotate(node, new Annotation(type1, new StringFieldValue("text"))); +- tree.annotate(node, new Annotation(type2, new IntegerFieldValue(1))); ++ new Annotation(type1, new StringFieldValue("text")).annotate(node, tree); ++ new Annotation(type2, new IntegerFieldValue(1)).annotate(node, tree); + root.add(node); + output.setSpanTree(tree); + } +diff --git a/document/src/test/java/com/yahoo/document/annotation/Bug4155865TestCase.java b/document/src/test/java/com/yahoo/document/annotation/Bug4155865TestCase.java +old mode 100644 +new mode 100755 +index 8c885a33d8..f34d2207f0 +--- a/document/src/test/java/com/yahoo/document/annotation/Bug4155865TestCase.java ++++ b/document/src/test/java/com/yahoo/document/annotation/Bug4155865TestCase.java +@@ -89,8 +89,8 @@ public class Bug4155865TestCase { + + SpanNode spn2 = new Span(1, 4); + SpanNode spn3 = new Span(6, 10); +- tree.annotate(spn2, an111); +- tree.annotate(spn3, an222); ++ an111.annotate(spn2, tree); ++ an222.annotate(spn3, tree); + + List stList = new ArrayList(); + stList.add(spn2); +@@ -117,13 +117,13 @@ public class Bug4155865TestCase { + alternate1.add(span22); + alternate1.add(span33); + +- tree.annotate(span1, an1); +- tree.annotate(span2, an2); +- tree.annotate(span3, an3); ++ an1.annotate(span1, tree); ++ an2.annotate(span2, tree); ++ an3.annotate(span3, tree); + +- tree.annotate(span11, an11); +- tree.annotate(span22, an22); +- tree.annotate(span33, an33); ++ an11.annotate(span11, tree); ++ an22.annotate(span22, tree); ++ an33.annotate(span33, tree); + + List subtreeList1 = new ArrayList(); + subtreeList1.add(alternate1); +diff --git a/document/src/test/java/com/yahoo/document/annotation/Bug4259784TestCase.java b/document/src/test/java/com/yahoo/document/annotation/Bug4259784TestCase.java +old mode 100644 +new mode 100755 +index 0a7739fd8c..2784124546 +--- a/document/src/test/java/com/yahoo/document/annotation/Bug4259784TestCase.java ++++ b/document/src/test/java/com/yahoo/document/annotation/Bug4259784TestCase.java +@@ -72,35 +72,35 @@ public class Bug4259784TestCase { + Struct personValue = (Struct) person.getDataType().createFieldValue(); + personValue.setFieldValue("name", "Richard Bair"); + Annotation personAn = new Annotation(person, personValue); +- tree.annotate(span1, personAn); ++ personAn.annotate(span1, tree); + + Struct companyValue = (Struct) company.getDataType().createFieldValue(); + companyValue.setFieldValue("name", "Sun"); + Annotation compAn = new Annotation(company, companyValue); +- tree.annotate(span2, compAn); ++ compAn.annotate(span2, tree); + + Struct locationVal = new Struct(manager.getDataType("annotation.location")); + locationVal.setFieldValue("lat", 37.774929); + locationVal.setFieldValue("lon", -122.419415); + Annotation locAnnotation = new Annotation(location, locationVal); +- tree.annotate(span3, locAnnotation); ++ 
locAnnotation.annotate(span3, tree); + + + Struct dirValue1 = new Struct(manager.getDataType("annotation.person")); + dirValue1.setFieldValue("name", "Jonathan Schwartz"); + Annotation dirAnnotation1 = new Annotation(person, dirValue1); +- tree.annotate(span5, dirAnnotation1); ++ dirAnnotation1.annotate(span5, tree); + + Struct dirValue2 = new Struct(manager.getDataType("annotation.person")); + dirValue2.setFieldValue("name", "Scott Mcnealy"); + Annotation dirAnnotation2 = new Annotation(person, dirValue2); +- tree.annotate(span6, dirAnnotation2); ++ dirAnnotation2.annotate(span6, tree); + + + Struct indValue = new Struct(manager.getDataType("annotation.industry")); + indValue.setFieldValue("vertical", "Manufacturing"); + Annotation indAn = new Annotation(industry, indValue); +- tree.annotate(span4, indAn); ++ indAn.annotate(span4, tree); + + + Field compLocField = ((StructDataType) company.getDataType()).getField("place"); +diff --git a/document/src/test/java/com/yahoo/document/annotation/Bug4261985TestCase.java b/document/src/test/java/com/yahoo/document/annotation/Bug4261985TestCase.java +old mode 100644 +new mode 100755 +index a3b33d4b91..ce686c2980 +--- a/document/src/test/java/com/yahoo/document/annotation/Bug4261985TestCase.java ++++ b/document/src/test/java/com/yahoo/document/annotation/Bug4261985TestCase.java +@@ -83,7 +83,7 @@ public class Bug4261985TestCase { + Struct personValue = (Struct) person.getDataType().createFieldValue(); + personValue.setFieldValue("name", "Richard Bair"); + Annotation personAn = new Annotation(person, personValue); +- tree.annotate(span1, personAn); ++ personAn.annotate(span1, tree); + + Struct companyValue = (Struct) company.getDataType().createFieldValue(); + companyValue.setFieldValue("name", "Sun"); +@@ -117,7 +117,7 @@ public class Bug4261985TestCase { + dirFieldVal.add(new AnnotationReference(annRefType, dirAnnotation2)); + companyValue.setFieldValue(dirField, dirFieldVal); + Annotation compAn = new Annotation(company, companyValue); +- tree.annotate(span2, compAn); ++ compAn.annotate(span2, tree); + + Struct bigshotsValue = (Struct) bigshots.getDataType().createFieldValue(); + Field ceosField = ((StructDataType) bigshots.getDataType()).getField("ceos"); +@@ -127,23 +127,23 @@ public class Bug4261985TestCase { + bigshotsValue.setFieldValue(ceosField, ceosFieldVal); + + Annotation bigshotsAn = new Annotation(bigshots, bigshotsValue); +- tree.annotate(span8, bigshotsAn); ++ bigshotsAn.annotate(span8, tree); + + Field selfField = ((StructDataType) bigshots.getDataType()).getField("self"); + AnnotationReferenceDataType annType2 = (AnnotationReferenceDataType) selfField.getDataType(); + FieldValue selfFieldVal = new AnnotationReference(annType2, bigshotsAn); + bigshotsValue.setFieldValue(selfField, selfFieldVal); + bigshotsAn = new Annotation(bigshots, bigshotsValue); +- tree.annotate(span8, bigshotsAn); ++ bigshotsAn.annotate(span8, tree); + +- tree.annotate(span3, locAnnotation); +- tree.annotate(span5, dirAnnotation1); +- tree.annotate(span6, dirAnnotation2); ++ locAnnotation.annotate(span3, tree); ++ dirAnnotation1.annotate(span5, tree); ++ dirAnnotation2.annotate(span6, tree); + + Struct indValue = new Struct(manager.getDataType("annotation.industry")); + indValue.setFieldValue("vertical", "Manufacturing"); + Annotation indAn = new Annotation(industry, indValue); +- tree.annotate(span4, indAn); ++ indAn.annotate(span4, tree); + + StringFieldValue body = (StringFieldValue) document.getFieldValue(document.getDataType().getField("body")); + 
body.setSpanTree(tree); +diff --git a/document/src/test/java/com/yahoo/document/annotation/Bug4475379TestCase.java b/document/src/test/java/com/yahoo/document/annotation/Bug4475379TestCase.java +index 0291e23c88..a95b20fb2c 100755 +--- a/document/src/test/java/com/yahoo/document/annotation/Bug4475379TestCase.java ++++ b/document/src/test/java/com/yahoo/document/annotation/Bug4475379TestCase.java +@@ -125,15 +125,15 @@ public class Bug4475379TestCase { + alternate2.add(span22); + alternate2.add(span33); + +- tree.annotate(span1, compAn1); +- tree.annotate(span2, personAn1); +- tree.annotate(span3, locAn1); +- tree.annotate(span1, indAn1); +- +- tree.annotate(span11, compAn2); +- tree.annotate(span22, personAn2); +- tree.annotate(span33, locAn2); +- tree.annotate(span11, indAn2); ++ compAn1.annotate(span1, tree); ++ personAn1.annotate(span2, tree); ++ locAn1.annotate(span3, tree); ++ indAn1.annotate(span1, tree); ++ ++ compAn2.annotate(span11, tree); ++ personAn2.annotate(span22, tree); ++ locAn2.annotate(span33, tree); ++ indAn2.annotate(span11, tree); + + List subtreeList1 = new ArrayList<>(); + subtreeList1.add(alternate1); +diff --git a/document/src/test/java/com/yahoo/document/annotation/Bug6425939TestCase.java b/document/src/test/java/com/yahoo/document/annotation/Bug6425939TestCase.java +old mode 100644 +new mode 100755 +index 82b730ae4c..8cfec03369 +--- a/document/src/test/java/com/yahoo/document/annotation/Bug6425939TestCase.java ++++ b/document/src/test/java/com/yahoo/document/annotation/Bug6425939TestCase.java +@@ -59,7 +59,7 @@ public class Bug6425939TestCase { + Struct ps = new Struct(person); + ps.setFieldValue("foo", "epic badger"); + ps.setFieldValue("bar", 54321); +- tree.annotate(node, new Annotation(personA, ps)); ++ new Annotation(personA, ps).annotate(node, tree); + root.add(node); + return tree; + } +diff --git a/document/src/test/java/com/yahoo/document/annotation/DocTestCase.java b/document/src/test/java/com/yahoo/document/annotation/DocTestCase.java +old mode 100644 +new mode 100755 +index e117a51069..c985a1f36b +--- a/document/src/test/java/com/yahoo/document/annotation/DocTestCase.java ++++ b/document/src/test/java/com/yahoo/document/annotation/DocTestCase.java +@@ -201,7 +201,7 @@ public class DocTestCase { + Span span2 = new Span(55, 13); + textNode.add(span1) + .add(span2); +- tree.annotate(span2, city) ++ city.annotate(span2, tree) + .annotate(textNode, textType); + } + +@@ -280,9 +280,8 @@ public class DocTestCase { + .add(span2) + .add(span3) + .add(span4); +- tree.annotate(span1, beginTag) +- .annotate(span2, textType) +- .annotate(span3, sanAnnotation) ++ sanAnnotation.annotate(span3, tree.annotate(span1, beginTag) ++ .annotate(span2, textType)) + .annotate(span4, endTag) + .annotate(paragraph, paragraphType); + } +@@ -296,8 +295,7 @@ public class DocTestCase { + .add(span2) + .add(span3); + +- tree.annotate(span1, beginTag) +- .annotate(span2, franciscoAnnotation) ++ franciscoAnnotation.annotate(span2, tree.annotate(span1, beginTag)) + .annotate(span3, endTag) + .annotate(root, bodyType) + .annotate(city); +diff --git a/document/src/test/java/com/yahoo/document/annotation/SpanNodeAdvTestCase.java b/document/src/test/java/com/yahoo/document/annotation/SpanNodeAdvTestCase.java +old mode 100644 +new mode 100755 +index d181549a75..5a147bb8d3 +--- a/document/src/test/java/com/yahoo/document/annotation/SpanNodeAdvTestCase.java ++++ b/document/src/test/java/com/yahoo/document/annotation/SpanNodeAdvTestCase.java +@@ -65,7 +65,7 @@ public class SpanNodeAdvTestCase 
{ + + tree.annotate(span1, at1).annotate(span2, at2).annotate(span3, at3); + tree.annotate(span11, at1).annotate(span22, at2).annotate(span33, at3); +- tree.annotate(span111, an111).annotate(span222, at2).annotate(span333, at3).annotate(span333, at1); ++ an111.annotate(span111, tree).annotate(span222, at2).annotate(span333, at3).annotate(span333, at1); + tree.annotate(span222, at2); + + root.add(span3); +diff --git a/document/src/test/java/com/yahoo/document/annotation/SpanNodeTestCase.java b/document/src/test/java/com/yahoo/document/annotation/SpanNodeTestCase.java +old mode 100644 +new mode 100755 +index 8e9978c845..03bed3bd88 +--- a/document/src/test/java/com/yahoo/document/annotation/SpanNodeTestCase.java ++++ b/document/src/test/java/com/yahoo/document/annotation/SpanNodeTestCase.java +@@ -290,11 +290,11 @@ public class SpanNodeTestCase { + Annotation detr = new Annotation(detType); + Annotation noun = new Annotation(nounType); + +- tree.annotate(span, word); +- tree.annotate(span, bgtg); +- tree.annotate(span, cpwd); +- tree.annotate(span, detr); +- tree.annotate(span, noun); ++ word.annotate(span, tree); ++ bgtg.annotate(span, tree); ++ cpwd.annotate(span, tree); ++ detr.annotate(span, tree); ++ noun.annotate(span, tree); + + + { +diff --git a/document/src/test/java/com/yahoo/document/annotation/SpanTreeAdvTest.java b/document/src/test/java/com/yahoo/document/annotation/SpanTreeAdvTest.java +old mode 100644 +new mode 100755 +index 852835ab8c..131c5be3e9 +--- a/document/src/test/java/com/yahoo/document/annotation/SpanTreeAdvTest.java ++++ b/document/src/test/java/com/yahoo/document/annotation/SpanTreeAdvTest.java +@@ -65,7 +65,7 @@ public class SpanTreeAdvTest { + + tree.annotate(span1, at1).annotate(span2, at2).annotate(span3, at3); + tree.annotate(span11, at1).annotate(span22, at2).annotate(span33, at3); +- tree.annotate(span111, an111).annotate(span222, at2).annotate(span333, at3).annotate(span333, at1); ++ an111.annotate(span111, tree).annotate(span222, at2).annotate(span333, at3).annotate(span333, at1); + tree.annotate(span222, at2); + + root.add(span3); +@@ -102,7 +102,7 @@ public class SpanTreeAdvTest { + @Test (expected = IllegalStateException.class) + public void assertSharingAnnotationInstance() { + populateSpanTree(); +- tree.annotate(span333, an111); ++ an111.annotate(span333, tree); + } + + +@@ -199,7 +199,7 @@ public class SpanTreeAdvTest { + public void assertReuseRemovedNode() { + populateSpanTree(); + root.remove(span3); +- tree.annotate(span3, new Annotation(at1)); ++ new Annotation(at1).annotate(span3, tree); + } + + @Test (expected = IllegalStateException.class) +diff --git a/document/src/test/java/com/yahoo/document/annotation/SpanTreeTestCase.java b/document/src/test/java/com/yahoo/document/annotation/SpanTreeTestCase.java +index b200e72514..e31a6c9c71 100755 +--- a/document/src/test/java/com/yahoo/document/annotation/SpanTreeTestCase.java ++++ b/document/src/test/java/com/yahoo/document/annotation/SpanTreeTestCase.java +@@ -71,7 +71,7 @@ public class SpanTreeTestCase extends AbstractTypesTest { + dummyD = new Annotation(dummy); + dummyE = new Annotation(dummy); + +- tree.annotate(a, dummyA) ++ dummyA.annotate(a, tree) + .annotate(b, numberB) + .annotate(c, bananaC) + .annotate(c, appleC) +@@ -378,7 +378,7 @@ public class SpanTreeTestCase extends AbstractTypesTest { + + + Annotation dummyAnnotationForB = new Annotation(dummy); +- tree.annotate(b, dummyAnnotationForB); ++ dummyAnnotationForB.annotate(b, tree); + + AnnotationReference referenceToDummyB = new 
AnnotationReference(refTypeToDummy, dummyAnnotationForB); + Annotation annotationWithRefToDummyB = new Annotation(annotationTypeWithRefToDummy, referenceToDummyB); +@@ -530,9 +530,9 @@ public class SpanTreeTestCase extends AbstractTypesTest { + string.setSpanTree(tree); + + Annotation a1 = new Annotation(grape); +- tree.annotate(span1, a1); ++ a1.annotate(span1, tree); + Annotation a2 = new Annotation(apple); +- tree.annotate(span2, a2); ++ a2.annotate(span2, tree); + + StringFieldValue stringCopy = string.clone(); + +@@ -593,16 +593,16 @@ public class SpanTreeTestCase extends AbstractTypesTest { + string.setSpanTree(tree); + + Annotation a1 = new Annotation(grape); +- tree.annotate(span1, a1); ++ a1.annotate(span1, tree); + Annotation a2 = new Annotation(apple); +- tree.annotate(span2, a2); ++ a2.annotate(span2, tree); + + Struct donald = new Struct(person); + donald.setFieldValue("firstname", "donald"); + donald.setFieldValue("lastname", "duck"); + donald.setFieldValue("birthyear", 1929); + Annotation donaldAnn = new Annotation(personA, donald); +- tree.annotate(list, donaldAnn); ++ donaldAnn.annotate(list, tree); + + + StringFieldValue stringCopy = string.clone(); +@@ -848,8 +848,8 @@ public class SpanTreeTestCase extends AbstractTypesTest { + root.add(endTagSpan); + + //annotate spans: +- tree.annotate(beginTagSpan, beginTag); +- tree.annotate(endTagSpan, endTag); ++ beginTag.annotate(beginTagSpan, tree); ++ endTag.annotate(endTagSpan, tree); + + + //none of the below statements should lead to a StackOverflowError: +diff --git a/document/src/test/java/com/yahoo/document/annotation/SystemTestCase.java b/document/src/test/java/com/yahoo/document/annotation/SystemTestCase.java +index 9163b773bc..15e44fb91c 100755 +--- a/document/src/test/java/com/yahoo/document/annotation/SystemTestCase.java ++++ b/document/src/test/java/com/yahoo/document/annotation/SystemTestCase.java +@@ -45,24 +45,24 @@ public class SystemTestCase { + Struct personValue = new Struct(manager.getDataType("annotation.person")); + personValue.setFieldValue("name", "george washington"); + Annotation person = new Annotation(personType, personValue); +- tree.annotate(personSpan, person); ++ person.annotate(personSpan, tree); + + Struct artistValue = new Struct(manager.getDataType("annotation.artist")); + artistValue.setFieldValue("name", "elvis presley"); + artistValue.setFieldValue("instrument", 20); + Annotation artist = new Annotation(artistType, artistValue); +- tree.annotate(artistSpan, artist); ++ artist.annotate(artistSpan, tree); + + Struct dateValue = new Struct(manager.getDataType("annotation.date")); + dateValue.setFieldValue("exacttime", 123456789L); + Annotation date = new Annotation(dateType, dateValue); +- tree.annotate(dateSpan, date); ++ date.annotate(dateSpan, tree); + + Struct placeValue = new Struct(manager.getDataType("annotation.place")); + placeValue.setFieldValue("lat", 1467L); + placeValue.setFieldValue("lon", 789L); + Annotation place = new Annotation(placeType, placeValue); +- tree.annotate(placeSpan, place); ++ place.annotate(placeSpan, tree); + + Struct eventValue = new Struct(manager.getDataType("annotation.event")); + eventValue.setFieldValue("description", "Big concert"); +@@ -70,7 +70,7 @@ public class SystemTestCase { + eventValue.setFieldValue("date", new AnnotationReference((AnnotationReferenceDataType) manager.getDataType("annotationreference"), date)); + eventValue.setFieldValue("place", new AnnotationReference((AnnotationReferenceDataType) manager.getDataType("annotationreference"), 
place)); + Annotation event = new Annotation(eventType, eventValue); +- tree.annotate(root, event); ++ event.annotate(root, tree); + + StringFieldValue content = new StringFieldValue("This is the story of a big concert by Elvis and a special guest appearance by George Washington"); + content.setSpanTree(tree); +diff --git a/document/src/test/java/com/yahoo/document/datatypes/StringTestCase.java b/document/src/test/java/com/yahoo/document/datatypes/StringTestCase.java +old mode 100644 +new mode 100755 +index 296ab1ac3f..a539b314e2 +--- a/document/src/test/java/com/yahoo/document/datatypes/StringTestCase.java ++++ b/document/src/test/java/com/yahoo/document/datatypes/StringTestCase.java +@@ -172,7 +172,7 @@ public class StringTestCase extends AbstractTypesTest { + + StringFieldValue innerString = new StringFieldValue("innerBalloooo"); + +- outerTree.annotate(outerSpan, new Annotation(type, innerString)); ++ new Annotation(type, innerString).annotate(outerSpan, outerTree); + + SpanTree innerTree = new SpanTree("inner"); + innerString.setSpanTree(innerTree); +@@ -180,7 +180,7 @@ public class StringTestCase extends AbstractTypesTest { + SpanList innerRoot = (SpanList)innerTree.getRoot(); + Span innerSpan = new Span(0, 1); + innerRoot.add(innerSpan); +- innerTree.annotate(innerSpan, new Annotation(type)); ++ new Annotation(type).annotate(innerSpan, innerTree); + + GrowableByteBuffer buffer = new GrowableByteBuffer(1024); + DocumentSerializer serializer = DocumentSerializerFactory.create6(buffer); +@@ -254,22 +254,22 @@ public class StringTestCase extends AbstractTypesTest { + companyValue.setFieldValue("lon", new DoubleFieldValue(-122.44)); + companyValue.setFieldValue("vertical", new StringFieldValue("software")); + Annotation compAn = new Annotation(company, companyValue); +- tree.annotate(companySpan, compAn); ++ compAn.annotate(companySpan, tree); + + Struct personValue = new Struct(manager.getDataType("annotation.person")); + personValue.setFieldValue("name", new StringFieldValue("Richard Bair")); + Annotation personAn = new Annotation(person, personValue); +- tree.annotate(personSpan, personAn); ++ personAn.annotate(personSpan, tree); + + Struct locValue = new Struct(manager.getDataType("annotation.location")); + locValue.setFieldValue("name", new StringFieldValue("Prinsens Gate")); + Annotation loc = new Annotation(location, locValue); +- tree.annotate(locationSpan, loc); ++ loc.annotate(locationSpan, tree); + + Struct locValue2 = new Struct(manager.getDataType("annotation.location")); + locValue2.setFieldValue("name", new StringFieldValue("Kongens Gate")); + Annotation locAn = new Annotation(location, locValue2); +- tree.annotate(locationSpan, locAn); ++ locAn.annotate(locationSpan, tree); + + SpanList branch = new SpanList(); + +@@ -284,17 +284,17 @@ public class StringTestCase extends AbstractTypesTest { + Struct industryValue = new Struct(manager.getDataType("annotation.industry")); + industryValue.setFieldValue("vertical", new StringFieldValue("Manufacturing")); + Annotation ind = new Annotation(industry, industryValue); +- tree.annotate(span1, ind); ++ ind.annotate(span1, tree); + + Struct pValue = new Struct(manager.getDataType("annotation.person")); + pValue.setFieldValue("name", new StringFieldValue("Praveen Mohan")); + Annotation pAn = new Annotation(person, pValue); +- tree.annotate(span2, pAn); ++ pAn.annotate(span2, tree); + + Struct lValue = new Struct(manager.getDataType("annotation.location")); + lValue.setFieldValue("name", new StringFieldValue("Embassy Golf Links")); + 
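A minimal sketch of the call-site inversion these test hunks apply, where SpanTree.annotate(SpanNode, Annotation) becomes a call on the Annotation with the tree passed as the last argument; it assumes the Annotation.annotate(SpanNode, SpanTree) overload these '+' lines rely on (added elsewhere in this patch), and otherwise uses only the document-annotation classes referenced in the hunks:

import com.yahoo.document.annotation.Annotation;
import com.yahoo.document.annotation.AnnotationTypes;
import com.yahoo.document.annotation.Span;
import com.yahoo.document.annotation.SpanTree;
import com.yahoo.document.datatypes.StringFieldValue;

class AnnotateCallSiteSketch {
    public static void main(String[] args) {
        SpanTree tree = new SpanTree("linguistics");
        Span span = new Span(0, 3);
        Annotation term = new Annotation(AnnotationTypes.TERM, new StringFieldValue("foo"));
        // Upstream form, as removed by the '-' lines: tree.annotate(span, term);
        // Patched form, as added by the '+' lines (assumes the moved overload above):
        term.annotate(span, tree);
        System.out.println(tree.getRoot());
    }
}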
Annotation locn = new Annotation(location, lValue); +- tree.annotate(span3, locn); ++ locn.annotate(span3, tree); + + Struct cValue = (Struct)company.getDataType().createFieldValue(); + cValue.setFieldValue("name", new StringFieldValue("Yahoo")); +@@ -303,12 +303,12 @@ public class StringTestCase extends AbstractTypesTest { + cValue.setFieldValue("lon", new DoubleFieldValue(-42.44)); + cValue.setFieldValue("vertical", new StringFieldValue("search")); + Annotation cAn = new Annotation(company, cValue); +- tree.annotate(branch, cAn); ++ cAn.annotate(branch, tree); + + Struct pVal = new Struct(manager.getDataType("annotation.person")); + pVal.setFieldValue("name", new StringFieldValue("Kim Omar")); + Annotation an = new Annotation(person, pVal); +- tree.annotate(root, an); ++ an.annotate(root, tree); + root.add(branch); + + StringFieldValue body = (StringFieldValue)document.getFieldValue(document.getDataType().getField("body")); +diff --git a/document/src/test/java/com/yahoo/document/json/JsonReaderTestCase.java b/document/src/test/java/com/yahoo/document/json/JsonReaderTestCase.java +old mode 100644 +new mode 100755 +index 9df7d1f91c..835198e295 +--- a/document/src/test/java/com/yahoo/document/json/JsonReaderTestCase.java ++++ b/document/src/test/java/com/yahoo/document/json/JsonReaderTestCase.java +@@ -247,7 +247,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + smokeTestDoc(put.getDocument()); + } + +@@ -266,7 +266,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + smokeTestDoc(put.getDocument()); + } + +@@ -277,7 +277,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + assertEquals("id:unittest:smoke::whee", parseInfo.documentId.toString()); + } + +@@ -291,7 +291,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + Document doc = put.getDocument(); + FieldValue f = doc.getFieldValue(doc.getField("skuggsjaa")); + assertSame(Struct.class, f.getClass()); +@@ -499,7 +499,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new 
VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + Document doc = put.getDocument(); + FieldValue f = doc.getFieldValue(doc.getField("actualset")); + assertSame(WeightedSet.class, f.getClass()); +@@ -519,7 +519,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + Document doc = put.getDocument(); + FieldValue f = doc.getFieldValue(doc.getField("actualarray")); + assertSame(Array.class, f.getClass()); +@@ -539,7 +539,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + Document doc = put.getDocument(); + FieldValue f = doc.getFieldValue(doc.getField("actualmap")); + assertSame(MapFieldValue.class, f.getClass()); +@@ -559,7 +559,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + Document doc = put.getDocument(); + FieldValue f = doc.getFieldValue(doc.getField("actualmap")); + assertSame(MapFieldValue.class, f.getClass()); +@@ -577,7 +577,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + Document doc = put.getDocument(); + FieldValue f = doc.getFieldValue(doc.getField("singlepos")); + assertSame(Struct.class, f.getClass()); +@@ -593,7 +593,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + Document doc = put.getDocument(); + FieldValue f = doc.getFieldValue(doc.getField("singlepos")); + assertSame(Struct.class, f.getClass()); +@@ -610,7 +610,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + Document doc = put.getDocument(); + FieldValue f = doc.getFieldValue(doc.getField("actualraw")); + assertSame(Raw.class, 
f.getClass()); +@@ -628,7 +628,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + Document doc = put.getDocument(); + FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); + assertSame(MapFieldValue.class, f.getClass()); +@@ -649,7 +649,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + Document doc = put.getDocument(); + FieldValue f = doc.getFieldValue("actualMapStringToArrayOfInt"); + assertSame(MapFieldValue.class, f.getClass()); +@@ -925,7 +925,7 @@ public class JsonReaderTestCase { + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); + exception.expect(IllegalArgumentException.class); + exception.expectMessage("No field 'smething' in the structure of type 'smoke'"); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + } + + @Test +@@ -950,7 +950,7 @@ public class JsonReaderTestCase { + DocumentParseInfo parseInfo = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(parseInfo.documentId); + DocumentPut put = new DocumentPut(new Document(docType, parseInfo.documentId)); +- new VespaJsonDocumentReader().readPut(parseInfo.fieldsBuffer, put); ++ parseInfo.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + smokeTestDoc(put.getDocument()); + } + +diff --git a/document/src/test/java/com/yahoo/document/json/JsonWriterTestCase.java b/document/src/test/java/com/yahoo/document/json/JsonWriterTestCase.java +old mode 100644 +new mode 100755 +index 0ab00b4e2b..3d143fba31 +--- a/document/src/test/java/com/yahoo/document/json/JsonWriterTestCase.java ++++ b/document/src/test/java/com/yahoo/document/json/JsonWriterTestCase.java +@@ -334,7 +334,7 @@ public class JsonWriterTestCase { + DocumentParseInfo raw = r.parseDocument().get(); + DocumentType docType = r.readDocumentType(raw.documentId); + DocumentPut put = new DocumentPut(new Document(docType, raw.documentId)); +- new VespaJsonDocumentReader().readPut(raw.fieldsBuffer, put); ++ raw.fieldsBuffer.readPut(put, new VespaJsonDocumentReader()); + return put.getDocument(); + } + +diff --git a/document/src/test/java/com/yahoo/document/serialization/SerializeAnnotationsTestCase.java b/document/src/test/java/com/yahoo/document/serialization/SerializeAnnotationsTestCase.java +old mode 100644 +new mode 100755 +index 653f121b2d..33d129dbfa +--- a/document/src/test/java/com/yahoo/document/serialization/SerializeAnnotationsTestCase.java ++++ b/document/src/test/java/com/yahoo/document/serialization/SerializeAnnotationsTestCase.java +@@ -143,9 +143,8 @@ public class SerializeAnnotationsTestCase { + paragraph.add(1, alt_span1) + .add(1, alt_span2); + +- tree.annotate(span1, beginTag) +- .annotate(span2, textType) +- .annotate(span3, sanAnnotation) ++ sanAnnotation.annotate(span3, tree.annotate(span1, beginTag) ++ .annotate(span2, textType)) + 
.annotate(span4, endTag) + .annotate(alt_span1, textType) + .annotate(alt_span2, bodyType) +@@ -161,8 +160,7 @@ public class SerializeAnnotationsTestCase { + .add(span2) + .add(span3); + +- tree.annotate(span1, beginTag) +- .annotate(span2, franciscoAnnotation) ++ franciscoAnnotation.annotate(span2, tree.annotate(span1, beginTag)) + .annotate(span3, endTag) + .annotate(root, bodyType) + .annotate(city); +diff --git a/indexinglanguage/src/main/java/com/yahoo/vespa/indexinglanguage/expressions/ExactExpression.java b/indexinglanguage/src/main/java/com/yahoo/vespa/indexinglanguage/expressions/ExactExpression.java +old mode 100644 +new mode 100755 +index 6056a9b0ca..ff5cedb144 +--- a/indexinglanguage/src/main/java/com/yahoo/vespa/indexinglanguage/expressions/ExactExpression.java ++++ b/indexinglanguage/src/main/java/com/yahoo/vespa/indexinglanguage/expressions/ExactExpression.java +@@ -38,10 +38,10 @@ public final class ExactExpression extends Expression { + SpanList root = new SpanList(); + SpanTree tree = new SpanTree(SpanTrees.LINGUISTICS, root); + SpanNode node = new Span(0, prev.length()); +- tree.annotate(node, new Annotation(AnnotationTypes.TERM, +- next.equals(prev) ? null : new StringFieldValue(next))); +- tree.annotate(node, new Annotation(AnnotationTypes.TOKEN_TYPE, +- new IntegerFieldValue(TokenType.ALPHABETIC.getValue()))); ++ new Annotation(AnnotationTypes.TERM, ++ next.equals(prev) ? null : new StringFieldValue(next)).annotate(node, tree); ++ new Annotation(AnnotationTypes.TOKEN_TYPE, ++ new IntegerFieldValue(TokenType.ALPHABETIC.getValue())).annotate(node, tree); + root.add(node); + output.setSpanTree(tree); + } +diff --git a/indexinglanguage/src/test/java/com/yahoo/vespa/indexinglanguage/expressions/FlattenTestCase.java b/indexinglanguage/src/test/java/com/yahoo/vespa/indexinglanguage/expressions/FlattenTestCase.java +old mode 100644 +new mode 100755 +index 8867d64d19..311b3d4d16 +--- a/indexinglanguage/src/test/java/com/yahoo/vespa/indexinglanguage/expressions/FlattenTestCase.java ++++ b/indexinglanguage/src/test/java/com/yahoo/vespa/indexinglanguage/expressions/FlattenTestCase.java +@@ -27,9 +27,9 @@ public class FlattenTestCase { + @Test + public void requireThatAnnotationsAreFlattened() { + SpanTree tree = new SpanTree(SpanTrees.LINGUISTICS); +- tree.annotate(new Span(0, 3), new Annotation(AnnotationTypes.TERM, new StringFieldValue("oof"))); +- tree.annotate(new Span(4, 3), new Annotation(AnnotationTypes.TERM, new StringFieldValue("rab"))); +- tree.annotate(new Span(8, 3), new Annotation(AnnotationTypes.TERM, new StringFieldValue("zab"))); ++ new Annotation(AnnotationTypes.TERM, new StringFieldValue("oof")).annotate(new Span(0, 3), tree); ++ new Annotation(AnnotationTypes.TERM, new StringFieldValue("rab")).annotate(new Span(4, 3), tree); ++ new Annotation(AnnotationTypes.TERM, new StringFieldValue("zab")).annotate(new Span(8, 3), tree); + + StringFieldValue val = new StringFieldValue("foo bar baz"); + val.setSpanTree(tree); +@@ -40,7 +40,7 @@ public class FlattenTestCase { + @Test + public void requireThatNonTermAnnotationsAreIgnored() { + SpanTree tree = new SpanTree(SpanTrees.LINGUISTICS); +- tree.annotate(new Span(0, 3), new Annotation(AnnotationTypes.STEM, new StringFieldValue("oof"))); ++ new Annotation(AnnotationTypes.STEM, new StringFieldValue("oof")).annotate(new Span(0, 3), tree); + + StringFieldValue val = new StringFieldValue("foo"); + val.setSpanTree(tree); +@@ -62,9 +62,9 @@ public class FlattenTestCase { + @Test + public void requireThatAnnotationsAreSorted() 
{ + SpanTree tree = new SpanTree(SpanTrees.LINGUISTICS); +- tree.annotate(new Span(0, 3), new Annotation(AnnotationTypes.TERM, new StringFieldValue("cox"))); +- tree.annotate(new Span(0, 3), new Annotation(AnnotationTypes.TERM, new StringFieldValue("baz"))); +- tree.annotate(new Span(0, 3), new Annotation(AnnotationTypes.TERM, new StringFieldValue("bar"))); ++ new Annotation(AnnotationTypes.TERM, new StringFieldValue("cox")).annotate(new Span(0, 3), tree); ++ new Annotation(AnnotationTypes.TERM, new StringFieldValue("baz")).annotate(new Span(0, 3), tree); ++ new Annotation(AnnotationTypes.TERM, new StringFieldValue("bar")).annotate(new Span(0, 3), tree); + + StringFieldValue val = new StringFieldValue("foo"); + val.setSpanTree(tree); +@@ -75,7 +75,7 @@ public class FlattenTestCase { + @Test + public void requireThatAnnotationsWithoutFieldValueUseOriginalSpan() { + SpanTree tree = new SpanTree(SpanTrees.LINGUISTICS); +- tree.annotate(new Span(0, 3), new Annotation(AnnotationTypes.TERM)); ++ new Annotation(AnnotationTypes.TERM).annotate(new Span(0, 3), tree); + + StringFieldValue val = new StringFieldValue("foo"); + val.setSpanTree(tree); +diff --git a/jrt/src/com/yahoo/jrt/Request.java b/jrt/src/com/yahoo/jrt/Request.java +old mode 100644 +new mode 100755 +index 66e6f50871..be748ba177 +--- a/jrt/src/com/yahoo/jrt/Request.java ++++ b/jrt/src/com/yahoo/jrt/Request.java +@@ -2,6 +2,8 @@ + package com.yahoo.jrt; + + ++import com.yahoo.jrt.tool.RpcInvoker; ++ + /** + * A Request bundles information about a single RPC invocation. A + * Request contains the name of the method, the method parameters, the +@@ -278,4 +280,29 @@ public class Request + return "request " + methodName + "(" + parameters + ")" + ( returnValues.size()>0 ? ": " + returnValues : ""); + } + ++ public Value getArgument(String parameter, RpcInvoker rpcInvoker) { ++ if (parameter.length()<=1 || parameter.charAt(1)!=':') ++ return new StringValue(parameter); ++ ++ String value=parameter.substring(2); ++ switch (parameter.charAt(0)) { ++ case 'b': ++ return new Int8Value(Byte.parseByte(value)); ++ case 'h': ++ return new Int16Value(Short.parseShort(value)); ++ case 'i': ++ return new Int32Value(Integer.parseInt(value)); ++ case 'l': ++ return new Int64Value(Long.parseLong(value)); ++ case 'f': ++ return new FloatValue(Float.parseFloat(value)); ++ case 'd': ++ return new DoubleValue(Double.parseDouble(value)); ++ case 's': ++ return new StringValue(value); ++ } ++ ++ throw new IllegalArgumentException("The first letter in '" + parameter + "' must be a type argument. " + ++ "There is no jrt type identified by '" + parameter.charAt(0) + "'"); ++ } + } +diff --git a/jrt/src/com/yahoo/jrt/Spec.java b/jrt/src/com/yahoo/jrt/Spec.java +old mode 100644 +new mode 100755 +index 7e4f6d987f..7bfffac633 +--- a/jrt/src/com/yahoo/jrt/Spec.java ++++ b/jrt/src/com/yahoo/jrt/Spec.java +@@ -143,4 +143,19 @@ public class Spec { + return asString; + } + ++ /** ++ * Convenience method for connecting to a peer, invoking a method ++ * and disconnecting. 
++ * @param req the invocation request ++ * @param timeout request timeout in seconds ++ * @param supervisor ++ **/ ++ public void invokeBatch(Request req, double timeout, Supervisor supervisor) { ++ Target target = supervisor.connect(this); ++ try { ++ target.invokeSync(req, timeout); ++ } finally { ++ target.close(); ++ } ++ } + } +diff --git a/jrt/src/com/yahoo/jrt/Supervisor.java b/jrt/src/com/yahoo/jrt/Supervisor.java +old mode 100644 +new mode 100755 +index 09360c2da7..87b9b2802a +--- a/jrt/src/com/yahoo/jrt/Supervisor.java ++++ b/jrt/src/com/yahoo/jrt/Supervisor.java +@@ -168,23 +168,6 @@ public class Supervisor { + return transport.listen(this, spec); + } + +- /** +- * Convenience method for connecting to a peer, invoking a method +- * and disconnecting. +- * +- * @param spec the address to connect to +- * @param req the invocation request +- * @param timeout request timeout in seconds +- **/ +- public void invokeBatch(Spec spec, Request req, double timeout) { +- Target target = connect(spec); +- try { +- target.invokeSync(req, timeout); +- } finally { +- target.close(); +- } +- } +- + /** + * This method is invoked when a new target is created + * +diff --git a/jrt/src/com/yahoo/jrt/tool/RpcInvoker.java b/jrt/src/com/yahoo/jrt/tool/RpcInvoker.java +old mode 100644 +new mode 100755 +index 7803767e4d..bd15d2edc7 +--- a/jrt/src/com/yahoo/jrt/tool/RpcInvoker.java ++++ b/jrt/src/com/yahoo/jrt/tool/RpcInvoker.java +@@ -1,19 +1,11 @@ + // Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. + package com.yahoo.jrt.tool; + +-import com.yahoo.jrt.DoubleValue; +-import com.yahoo.jrt.FloatValue; +-import com.yahoo.jrt.Int16Value; +-import com.yahoo.jrt.Int32Value; +-import com.yahoo.jrt.Int64Value; +-import com.yahoo.jrt.Int8Value; + import com.yahoo.jrt.Request; + import com.yahoo.jrt.Spec; +-import com.yahoo.jrt.StringValue; + import com.yahoo.jrt.Supervisor; + import com.yahoo.jrt.Target; + import com.yahoo.jrt.Transport; +-import com.yahoo.jrt.Value; + import com.yahoo.jrt.Values; + + import java.util.Arrays; +@@ -27,37 +19,11 @@ import java.util.ArrayList; + */ + public class RpcInvoker { + +- private Value getArgument(Request request, String parameter) { +- if (parameter.length()<=1 || parameter.charAt(1)!=':') +- return new StringValue(parameter); +- +- String value=parameter.substring(2); +- switch (parameter.charAt(0)) { +- case 'b': +- return new Int8Value(Byte.parseByte(value)); +- case 'h': +- return new Int16Value(Short.parseShort(value)); +- case 'i': +- return new Int32Value(Integer.parseInt(value)); +- case 'l': +- return new Int64Value(Long.parseLong(value)); +- case 'f': +- return new FloatValue(Float.parseFloat(value)); +- case 'd': +- return new DoubleValue(Double.parseDouble(value)); +- case 's': +- return new StringValue(value); +- } +- +- throw new IllegalArgumentException("The first letter in '" + parameter + "' must be a type argument. 
" + +- "There is no jrt type identified by '" + parameter.charAt(0) + "'"); +- } +- + protected Request createRequest(String method,List arguments) { + Request request=new Request(method); + if (arguments!=null) { + for (String argument : arguments) +- request.parameters().add(getArgument(request,argument)); ++ request.parameters().add(request.getArgument(argument, this)); + } + return request; + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/AggregationResult.java b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/AggregationResult.java +old mode 100644 +new mode 100755 +index 264a9d4d4e..212565f7f2 +--- a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/AggregationResult.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/AggregationResult.java +@@ -158,4 +158,15 @@ public abstract class AggregationResult extends ExpressionNode { + visitor.visit("expression", expression); + visitor.visit("tag", tag); + } ++ ++ /** ++ *
Adds an aggregation result to this group.
++ * ++ * ++ * @param group@return This, to allow chaining. ++ */ ++ public Group addAggregationResult(Group group) { ++ group.getAggregationResults().add(this); ++ return group; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/Group.java b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/Group.java +old mode 100644 +new mode 100755 +index 73171f4dd0..a1c615481f +--- a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/Group.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/Group.java +@@ -252,17 +252,6 @@ public class Group extends Identifiable { + return aggregationResults; + } + +- /** +- *
Adds an aggregation result to this group.
+- * +- * @param result The result to add. +- * @return This, to allow chaining. +- */ +- public Group addAggregationResult(AggregationResult result) { +- aggregationResults.add(result); +- return this; +- } +- + /** + *
Adds an order-by expression to this group. If the expression is an AggregationResult, it will be added to the + * list of this group's AggregationResults, and a reference to that expression is added instead. If the +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/Grouping.java b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/Grouping.java +old mode 100644 +new mode 100755 +index c13bde4b63..10132613ea +--- a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/Grouping.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/Grouping.java +@@ -204,19 +204,6 @@ public class Grouping extends Identifiable { + return groupingLevels; + } + +- /** +- *
Appends the given grouping level specification to the list of levels.
+- * +- * @param level The level to add. +- * @return This, to allow chaining. +- * @throws NullPointerException If level argument is null. +- */ +- public Grouping addLevel(GroupingLevel level) { +- level.getClass(); // throws NullPointerException +- groupingLevels.add(level); +- return this; +- } +- + /** + *
Returns the root group.
+ * +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/GroupingLevel.java b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/GroupingLevel.java +old mode 100644 +new mode 100755 +index 239b709406..af83c445e3 +--- a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/GroupingLevel.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/GroupingLevel.java +@@ -181,4 +181,17 @@ public class GroupingLevel extends Identifiable { + visitor.visit("classify", classify); + visitor.visit("collect", collect); + } ++ ++ /** ++ *
Appends the given grouping level specification to the list of levels.
++ * ++ * ++ * @param grouping@return This, to allow chaining. ++ * @throws NullPointerException If level argument is null. ++ */ ++ public Grouping addLevel(Grouping grouping) { ++ getClass(); // throws NullPointerException ++ grouping.getLevels().add(this); ++ return grouping; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/Hit.java b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/Hit.java +old mode 100644 +new mode 100755 +index 6b2ce5c3b7..b6bc2293b9 +--- a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/Hit.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/Hit.java +@@ -101,4 +101,15 @@ public abstract class Hit extends Identifiable { + visitor.visit("rank", rank); + visitor.visit("context", context); + } ++ ++ /** ++ * Add a hit to this aggregation result ++ * ++ * ++ * @param hitsAggregationResult@return this object ++ */ ++ public HitsAggregationResult addHit(HitsAggregationResult hitsAggregationResult) { ++ hitsAggregationResult.getHits().add(this); ++ return hitsAggregationResult; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/HitsAggregationResult.java b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/HitsAggregationResult.java +old mode 100644 +new mode 100755 +index 275f38f735..fec18db305 +--- a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/HitsAggregationResult.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/HitsAggregationResult.java +@@ -101,17 +101,6 @@ public class HitsAggregationResult extends AggregationResult { + return hits; + } + +- /** +- * Add a hit to this aggregation result +- * +- * @param h the hit +- * @return this object +- */ +- public HitsAggregationResult addHit(Hit h) { +- hits.add(h); +- return this; +- } +- + @Override + public ResultNode getRank() { + if (hits.isEmpty()) { +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/hll/SketchMerger.java b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/hll/SketchMerger.java +old mode 100644 +new mode 100755 +index 11a51e8aa6..c34b898e77 +--- a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/hll/SketchMerger.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/hll/SketchMerger.java +@@ -21,23 +21,13 @@ public class SketchMerger { + } else if (left instanceof SparseSketch && right instanceof NormalSketch) { + return mergeNormalWithSparse(asNormal(right), asSparse(left)); + } else if (left instanceof SparseSketch && right instanceof SparseSketch) { +- return mergeSparseWithSparse(asSparse(left), asSparse(right)); ++ return asSparse(left).mergeSparseWithSparse(asSparse(right), this); + } else { + throw new IllegalArgumentException( + String.format("Invalid sketch types: left=%s, right=%s", right.getClass(), left.getClass())); + } + } + +- private Sketch mergeSparseWithSparse(SparseSketch dest, SparseSketch other) { +- dest.merge(other); +- if (dest.size() > HyperLogLog.SPARSE_SKETCH_CONVERSION_THRESHOLD) { +- NormalSketch newSketch = new NormalSketch(); +- newSketch.aggregate(dest.data()); +- return newSketch; +- } +- return dest; +- } +- + private NormalSketch mergeNormalWithNormal(NormalSketch dest, NormalSketch other) { + dest.merge(other); + return dest; +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/hll/SparseSketch.java b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/hll/SparseSketch.java +old mode 100644 +new mode 100755 +index 29f49060a5..d11487514d +--- 
a/searchlib/src/main/java/com/yahoo/searchlib/aggregation/hll/SparseSketch.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/aggregation/hll/SparseSketch.java +@@ -102,4 +102,14 @@ public class SparseSketch extends Sketch { + "values=" + values + + '}'; + } ++ ++ public Sketch mergeSparseWithSparse(SparseSketch other, SketchMerger sketchMerger) { ++ merge(other); ++ if (size() > HyperLogLog.SPARSE_SKETCH_CONVERSION_THRESHOLD) { ++ NormalSketch newSketch = new NormalSketch(); ++ newSketch.aggregate(data()); ++ return newSketch; ++ } ++ return this; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/BoolResultNode.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/BoolResultNode.java +old mode 100644 +new mode 100755 +index c850c6f2c3..66b21e528d +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/BoolResultNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/BoolResultNode.java +@@ -92,4 +92,9 @@ public class BoolResultNode extends ResultNode { + public void set(ResultNode rhs) { + value = rhs.getInteger() > 0; + } ++ ++ public BoolResultNodeVector add(BoolResultNodeVector boolResultNodeVector) { ++ boolResultNodeVector.getVector().add(this); ++ return boolResultNodeVector; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/BoolResultNodeVector.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/BoolResultNodeVector.java +old mode 100644 +new mode 100755 +index b8d31be834..39e034852c +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/BoolResultNodeVector.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/BoolResultNodeVector.java +@@ -11,17 +11,13 @@ public class BoolResultNodeVector extends ResultNodeVector { + private ArrayList vector = new ArrayList<>(); + + public BoolResultNodeVector() {} +- public BoolResultNodeVector add(BoolResultNode v) { +- vector.add(v); +- return this; +- } + + public ArrayList getVector() { + return vector; + } + @Override + public ResultNodeVector add(ResultNode r) { +- return add((BoolResultNode)r); ++ return ((BoolResultNode)r).add(this); + } + + @Override +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatBucketResultNode.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatBucketResultNode.java +old mode 100644 +new mode 100755 +index 455a8a4250..c877086b7a +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatBucketResultNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatBucketResultNode.java +@@ -115,4 +115,9 @@ public class FloatBucketResultNode extends BucketResultNode { + visitor.visit("from", from); + visitor.visit("to", to); + } ++ ++ public FloatBucketResultNodeVector add(FloatBucketResultNodeVector floatBucketResultNodeVector) { ++ floatBucketResultNodeVector.getVector().add(this); ++ return floatBucketResultNodeVector; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatBucketResultNodeVector.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatBucketResultNodeVector.java +old mode 100644 +new mode 100755 +index 443358c762..fd65825476 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatBucketResultNodeVector.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatBucketResultNodeVector.java +@@ -26,13 +26,8 @@ public class FloatBucketResultNodeVector extends ResultNodeVector { + public FloatBucketResultNodeVector() { + } + +- public FloatBucketResultNodeVector 
add(FloatBucketResultNode v) { +- vector.add(v); +- return this; +- } +- + public ResultNodeVector add(ResultNode r) { +- return add((FloatBucketResultNode)r); ++ return ((FloatBucketResultNode)r).add(this); + } + + public ArrayList getVector() { +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatResultNode.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatResultNode.java +old mode 100644 +new mode 100755 +index bcc4f06171..fab3da952e +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatResultNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatResultNode.java +@@ -179,4 +179,9 @@ public class FloatResultNode extends NumericResultNode { + public static FloatResultNode getPositiveInfinity() { + return positiveInfinity; + } ++ ++ public FloatResultNodeVector addOther(FloatResultNodeVector floatResultNodeVector) { ++ floatResultNodeVector.getVector().add(this); ++ return floatResultNodeVector; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatResultNodeVector.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatResultNodeVector.java +old mode 100644 +new mode 100755 +index 1cb978303d..ce7ed508ac +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatResultNodeVector.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/FloatResultNodeVector.java +@@ -26,13 +26,8 @@ public class FloatResultNodeVector extends ResultNodeVector { + public FloatResultNodeVector() { + } + +- public FloatResultNodeVector add(FloatResultNode v) { +- vector.add(v); +- return this; +- } +- + public ResultNodeVector add(ResultNode r) { +- return add((FloatResultNode)r); ++ return ((FloatResultNode)r).addOther(this); + } + + public ArrayList getVector() { +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/Int16ResultNode.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/Int16ResultNode.java +old mode 100644 +new mode 100755 +index 3e15c35b25..41394f0e83 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/Int16ResultNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/Int16ResultNode.java +@@ -146,4 +146,9 @@ public class Int16ResultNode extends NumericResultNode { + public void set(ResultNode rhs) { + value = (short)rhs.getInteger(); + } ++ ++ public Int16ResultNodeVector addOther(Int16ResultNodeVector int16ResultNodeVector) { ++ int16ResultNodeVector.getVector().add(this); ++ return int16ResultNodeVector; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/Int16ResultNodeVector.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/Int16ResultNodeVector.java +old mode 100644 +new mode 100755 +index 2842efe710..89c0e32010 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/Int16ResultNodeVector.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/Int16ResultNodeVector.java +@@ -20,18 +20,13 @@ public class Int16ResultNodeVector extends ResultNodeVector { + public Int16ResultNodeVector() { + } + +- public Int16ResultNodeVector add(Int16ResultNode v) { +- vector.add(v); +- return this; +- } +- + public ArrayList getVector() { + return vector; + } + + @Override + public ResultNodeVector add(ResultNode r) { +- return add((Int16ResultNode)r); ++ return ((Int16ResultNode)r).addOther(this); + } + + @Override +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/Int32ResultNode.java 
b/searchlib/src/main/java/com/yahoo/searchlib/expression/Int32ResultNode.java +old mode 100644 +new mode 100755 +index 111d3f5c5f..97a7edb9df +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/Int32ResultNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/Int32ResultNode.java +@@ -146,4 +146,9 @@ public class Int32ResultNode extends NumericResultNode { + public void set(ResultNode rhs) { + value = (int)rhs.getInteger(); + } ++ ++ public Int32ResultNodeVector addOther(Int32ResultNodeVector int32ResultNodeVector) { ++ int32ResultNodeVector.getVector().add(this); ++ return int32ResultNodeVector; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/Int32ResultNodeVector.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/Int32ResultNodeVector.java +old mode 100644 +new mode 100755 +index 2dd9e577cf..229bad2bb9 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/Int32ResultNodeVector.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/Int32ResultNodeVector.java +@@ -21,18 +21,13 @@ public class Int32ResultNodeVector extends ResultNodeVector { + + } + +- public Int32ResultNodeVector add(Int32ResultNode v) { +- vector.add(v); +- return this; +- } +- + public ArrayList getVector() { + return vector; + } + + @Override + public ResultNodeVector add(ResultNode r) { +- return add((Int32ResultNode)r); ++ return ((Int32ResultNode)r).addOther(this); + } + + @Override +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/Int8ResultNode.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/Int8ResultNode.java +old mode 100644 +new mode 100755 +index f240a2d5ef..020b7cff4c +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/Int8ResultNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/Int8ResultNode.java +@@ -144,4 +144,9 @@ public class Int8ResultNode extends NumericResultNode { + public void set(ResultNode rhs) { + value = (byte)rhs.getInteger(); + } ++ ++ public Int8ResultNodeVector addOther(Int8ResultNodeVector int8ResultNodeVector) { ++ int8ResultNodeVector.getVector().add(this); ++ return int8ResultNodeVector; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/Int8ResultNodeVector.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/Int8ResultNodeVector.java +old mode 100644 +new mode 100755 +index edae250def..63f0999d72 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/Int8ResultNodeVector.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/Int8ResultNodeVector.java +@@ -21,18 +21,13 @@ public class Int8ResultNodeVector extends ResultNodeVector { + + } + +- public Int8ResultNodeVector add(Int8ResultNode v) { +- vector.add(v); +- return this; +- } +- + public ArrayList getVector() { + return vector; + } + + @Override + public ResultNodeVector add(ResultNode r) { +- return add((Int8ResultNode)r); ++ return ((Int8ResultNode)r).addOther(this); + } + + @Override +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerBucketResultNode.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerBucketResultNode.java +old mode 100644 +new mode 100755 +index b8f41d8b06..2f4506ddd1 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerBucketResultNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerBucketResultNode.java +@@ -99,4 +99,9 @@ public class IntegerBucketResultNode extends BucketResultNode { + visitor.visit("from", from); + 
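The expression result-node hunks repeat the same receiver inversion: each typed ResultNodeVector.add(XResultNode) is removed, the node class gains an add(...)/addOther(...) that appends itself to the given vector, and the remaining ResultNodeVector.add(ResultNode) override delegates to it. A minimal sketch against the patched classes (the public no-arg Int32ResultNode constructor is assumed from the usual searchlib serialization pattern, not shown in these hunks):

import com.yahoo.searchlib.expression.Int32ResultNode;
import com.yahoo.searchlib.expression.Int32ResultNodeVector;
import com.yahoo.searchlib.expression.ResultNode;

class ResultNodeVectorAddSketch {
    public static void main(String[] args) {
        Int32ResultNodeVector vector = new Int32ResultNodeVector();

        // Upstream form, removed by this patch: vector.add(new Int32ResultNode());
        // Patched form: the node appends itself to the vector via the method this patch adds.
        new Int32ResultNode().addOther(vector);

        // The generic add(ResultNode) override survives and now delegates to addOther.
        ResultNode extra = new Int32ResultNode();
        vector.add(extra);

        System.out.println(vector.getVector().size()); // expected: 2
    }
}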
visitor.visit("to", to); + } ++ ++ public IntegerBucketResultNodeVector add(IntegerBucketResultNodeVector integerBucketResultNodeVector) { ++ integerBucketResultNodeVector.getVector().add(this); ++ return integerBucketResultNodeVector; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerBucketResultNodeVector.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerBucketResultNodeVector.java +old mode 100644 +new mode 100755 +index c999fdfc6e..6a9502e8ef +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerBucketResultNodeVector.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerBucketResultNodeVector.java +@@ -21,11 +21,6 @@ public class IntegerBucketResultNodeVector extends ResultNodeVector { + + } + +- public IntegerBucketResultNodeVector add(IntegerBucketResultNode v) { +- vector.add(v); +- return this; +- } +- + public ArrayList getVector() { + return vector; + } +@@ -37,7 +32,7 @@ public class IntegerBucketResultNodeVector extends ResultNodeVector { + + @Override + public ResultNodeVector add(ResultNode r) { +- return add((IntegerBucketResultNode)r); ++ return ((IntegerBucketResultNode)r).add(this); + } + + @Override +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerResultNode.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerResultNode.java +old mode 100644 +new mode 100755 +index 62534377d3..7bf27f74d7 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerResultNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerResultNode.java +@@ -180,4 +180,9 @@ public class IntegerResultNode extends NumericResultNode { + public static IntegerResultNode getPositiveInfinity() { + return positiveInfinity; + } ++ ++ public IntegerResultNodeVector addOther(IntegerResultNodeVector integerResultNodeVector) { ++ integerResultNodeVector.getVector().add(this); ++ return integerResultNodeVector; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerResultNodeVector.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerResultNodeVector.java +old mode 100644 +new mode 100755 +index 3323460da0..cfaa600fbf +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerResultNodeVector.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/IntegerResultNodeVector.java +@@ -21,18 +21,13 @@ public class IntegerResultNodeVector extends ResultNodeVector { + + } + +- public IntegerResultNodeVector add(IntegerResultNode v) { +- vector.add(v); +- return this; +- } +- + public ArrayList getVector() { + return vector; + } + + @Override + public ResultNodeVector add(ResultNode r) { +- return add((IntegerResultNode)r); ++ return ((IntegerResultNode)r).addOther(this); + } + + @Override +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/RawBucketResultNode.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/RawBucketResultNode.java +old mode 100644 +new mode 100755 +index 50b63e2d5d..9dcba50925 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/RawBucketResultNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/RawBucketResultNode.java +@@ -98,4 +98,9 @@ public class RawBucketResultNode extends BucketResultNode { + visitor.visit("from", from); + visitor.visit("to", to); + } ++ ++ public RawBucketResultNodeVector add(RawBucketResultNodeVector rawBucketResultNodeVector) { ++ rawBucketResultNodeVector.getVector().add(this); ++ return 
rawBucketResultNodeVector; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/RawBucketResultNodeVector.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/RawBucketResultNodeVector.java +old mode 100644 +new mode 100755 +index 9013a925e8..f34c54b683 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/RawBucketResultNodeVector.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/RawBucketResultNodeVector.java +@@ -22,13 +22,8 @@ public class RawBucketResultNodeVector extends ResultNodeVector { + public RawBucketResultNodeVector() { + } + +- public RawBucketResultNodeVector add(RawBucketResultNode v) { +- vector.add(v); +- return this; +- } +- + public ResultNodeVector add(ResultNode r) { +- return add((RawBucketResultNode)r); ++ return ((RawBucketResultNode)r).add(this); + } + + public ArrayList getVector() { +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/RawResultNode.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/RawResultNode.java +old mode 100644 +new mode 100755 +index 2ff573218e..f2f1eea605 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/RawResultNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/RawResultNode.java +@@ -181,4 +181,8 @@ public class RawResultNode extends SingleResultNode { + return positiveInfinity; + } + ++ public RawResultNodeVector addOther(RawResultNodeVector rawResultNodeVector) { ++ rawResultNodeVector.getVector().add(this); ++ return rawResultNodeVector; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/RawResultNodeVector.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/RawResultNodeVector.java +old mode 100644 +new mode 100755 +index 4bb9fc7809..8967a4291c +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/RawResultNodeVector.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/RawResultNodeVector.java +@@ -26,13 +26,8 @@ public class RawResultNodeVector extends ResultNodeVector { + public RawResultNodeVector() { + } + +- public RawResultNodeVector add(RawResultNode v) { +- vector.add(v); +- return this; +- } +- + public ResultNodeVector add(ResultNode r) { +- return add((RawResultNode)r); ++ return ((RawResultNode)r).addOther(this); + } + + public ArrayList getVector() { +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/StringBucketResultNode.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/StringBucketResultNode.java +old mode 100644 +new mode 100755 +index 279f8b17fc..899e6a6f62 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/StringBucketResultNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/StringBucketResultNode.java +@@ -111,4 +111,9 @@ public class StringBucketResultNode extends BucketResultNode { + visitor.visit("from", from); + visitor.visit("to", to); + } ++ ++ public StringBucketResultNodeVector add(StringBucketResultNodeVector stringBucketResultNodeVector) { ++ stringBucketResultNodeVector.getVector().add(this); ++ return stringBucketResultNodeVector; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/StringBucketResultNodeVector.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/StringBucketResultNodeVector.java +old mode 100644 +new mode 100755 +index 9b530164e5..d4fe2e3e69 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/StringBucketResultNodeVector.java ++++ 
b/searchlib/src/main/java/com/yahoo/searchlib/expression/StringBucketResultNodeVector.java +@@ -26,13 +26,8 @@ public class StringBucketResultNodeVector extends ResultNodeVector { + public StringBucketResultNodeVector() { + } + +- public StringBucketResultNodeVector add(StringBucketResultNode v) { +- vector.add(v); +- return this; +- } +- + public ResultNodeVector add(ResultNode r) { +- return add((StringBucketResultNode)r); ++ return ((StringBucketResultNode)r).add(this); + } + + public ArrayList getVector() { +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/StringResultNode.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/StringResultNode.java +old mode 100644 +new mode 100755 +index 40d424a275..d1eef45a35 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/StringResultNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/StringResultNode.java +@@ -173,5 +173,10 @@ public class StringResultNode extends SingleResultNode { + public static PositiveInfinityResultNode getPositiveInfinity() { + return positiveInfinity; + } ++ ++ public StringResultNodeVector addOther(StringResultNodeVector stringResultNodeVector) { ++ stringResultNodeVector.getVector().add(this); ++ return stringResultNodeVector; ++ } + } + +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/expression/StringResultNodeVector.java b/searchlib/src/main/java/com/yahoo/searchlib/expression/StringResultNodeVector.java +old mode 100644 +new mode 100755 +index 2cba466f93..432155d879 +--- a/searchlib/src/main/java/com/yahoo/searchlib/expression/StringResultNodeVector.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/expression/StringResultNodeVector.java +@@ -26,13 +26,8 @@ public class StringResultNodeVector extends ResultNodeVector { + public StringResultNodeVector() { + } + +- public StringResultNodeVector add(StringResultNode v) { +- vector.add(v); +- return this; +- } +- + public ResultNodeVector add(ResultNode r) { +- return add((StringResultNode)r); ++ return ((StringResultNode)r).addOther(this); + } + + public ArrayList getVector() { +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/evaluation/Context.java b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/evaluation/Context.java +old mode 100644 +new mode 100755 +index 4e046df11c..7cbdd61fa5 +--- a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/evaluation/Context.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/evaluation/Context.java +@@ -4,6 +4,7 @@ package com.yahoo.searchlib.rankingexpression.evaluation; + import com.yahoo.searchlib.rankingexpression.Reference; + import com.yahoo.searchlib.rankingexpression.rule.Arguments; + import com.yahoo.searchlib.rankingexpression.rule.ExpressionNode; ++import com.yahoo.searchlib.rankingexpression.rule.SetMembershipNode; + import com.yahoo.tensor.Tensor; + import com.yahoo.tensor.TensorType; + import com.yahoo.tensor.evaluation.EvaluationContext; +@@ -116,4 +117,11 @@ public abstract class Context implements EvaluationContext { + throw new UnsupportedOperationException(this + " does not support return a list of its names"); + } + ++ public boolean testMembership(Predicate test, SetMembershipNode setMembershipNode) { ++ for (ExpressionNode setValue : setMembershipNode.getSetValues()) { ++ if (test.test(setValue.evaluate(this))) ++ return true; ++ } ++ return false; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/evaluation/TensorValue.java 
b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/evaluation/TensorValue.java +old mode 100644 +new mode 100755 +index ee66dcc5a0..69926369ee +--- a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/evaluation/TensorValue.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/evaluation/TensorValue.java +@@ -122,7 +122,7 @@ public class TensorValue extends Value { + if ( ! (value instanceof TensorValue)) + throw new UnsupportedOperationException("Could not perform " + operationName + + ": The second argument must be a tensor but was " + value); +- return ((TensorValue)value).value; ++ return asTensor(); + } + + public Tensor asTensor() { return value; } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/evaluation/gbdtoptimization/GBDTOptimizer.java b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/evaluation/gbdtoptimization/GBDTOptimizer.java +old mode 100644 +new mode 100755 +index 787818b0f4..6d22d3ab0e +--- a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/evaluation/gbdtoptimization/GBDTOptimizer.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/evaluation/gbdtoptimization/GBDTOptimizer.java +@@ -117,16 +117,16 @@ public class GBDTOptimizer extends Optimizer { + if (condition instanceof ComparisonNode) { + ComparisonNode comparison = (ComparisonNode)condition; + if (comparison.getOperator() == TruthOperator.SMALLER) +- values.add(GBDTNode.MAX_LEAF_VALUE + GBDTNode.MAX_VARIABLES*0 + getVariableIndex(comparison.getLeftCondition(), context)); ++ values.add(GBDTNode.MAX_LEAF_VALUE + GBDTNode.MAX_VARIABLES*0 + comparison.getLeftCondition().getVariableIndex(context, this)); + else if (comparison.getOperator() == TruthOperator.EQUAL) +- values.add(GBDTNode.MAX_LEAF_VALUE + GBDTNode.MAX_VARIABLES*1 + getVariableIndex(comparison.getLeftCondition(), context)); ++ values.add(GBDTNode.MAX_LEAF_VALUE + GBDTNode.MAX_VARIABLES*1 + comparison.getLeftCondition().getVariableIndex(context, this)); + else + throw new IllegalArgumentException("Cannot optimize other conditions than < and ==, encountered: " + comparison.getOperator()); + values.add(toValue(comparison.getRightCondition())); + } + else if (condition instanceof SetMembershipNode) { + SetMembershipNode setMembership = (SetMembershipNode)condition; +- values.add(GBDTNode.MAX_LEAF_VALUE + GBDTNode.MAX_VARIABLES*2 + getVariableIndex(setMembership.getTestValue(),context)); ++ values.add(GBDTNode.MAX_LEAF_VALUE + GBDTNode.MAX_VARIABLES*2 + setMembership.getTestValue().getVariableIndex(context, this)); + values.add((double)setMembership.getSetValues().size()); + for (ExpressionNode setElementNode : setMembership.getSetValues()) + values.add(toValue(setElementNode)); +@@ -138,21 +138,6 @@ public class GBDTOptimizer extends Optimizer { + return values.size(); + } + +- private double getVariableIndex(ExpressionNode node, ContextIndex context) { +- if (!(node instanceof ReferenceNode)) { +- throw new IllegalArgumentException("Contained a left-hand comparison expression " + +- "which was not a feature value but was: " + node); +- } +- ReferenceNode fNode = (ReferenceNode)node; +- Integer index = context.getIndex(fNode.toString()); +- if (index == null) { +- throw new IllegalStateException("The ranking expression contained feature '" + fNode.getName() + +- "', which is not known to " + context + ": The context must be created" + +- "from the same ranking expression which is to be optimized"); +- } +- return index; +- } +- + private 
double toValue(ExpressionNode node) { + if (node instanceof ConstantNode) { + Value value = ((ConstantNode)node).getValue(); +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/rule/EmbracedNode.java b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/rule/EmbracedNode.java +index d306e067d1..3663136f6d 100755 +--- a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/rule/EmbracedNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/rule/EmbracedNode.java +@@ -4,6 +4,7 @@ package com.yahoo.searchlib.rankingexpression.rule; + import com.yahoo.searchlib.rankingexpression.Reference; + import com.yahoo.searchlib.rankingexpression.evaluation.Context; + import com.yahoo.searchlib.rankingexpression.evaluation.Value; ++import com.yahoo.searchlib.rankingexpression.transform.Simplifier; + import com.yahoo.tensor.TensorType; + import com.yahoo.tensor.evaluation.TypeContext; + +@@ -62,4 +63,9 @@ public final class EmbracedNode extends CompositeNode { + return new EmbracedNode(newChildren.get(0)); + } + ++ public boolean hasSingleUndividableChild(Simplifier simplifier) { ++ if (children().size() > 1) return false; ++ if (children().get(0) instanceof ArithmeticNode) return false; ++ return true; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/rule/ExpressionNode.java b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/rule/ExpressionNode.java +index dba0da7301..a5f086bc76 100755 +--- a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/rule/ExpressionNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/rule/ExpressionNode.java +@@ -3,7 +3,9 @@ package com.yahoo.searchlib.rankingexpression.rule; + + import com.yahoo.searchlib.rankingexpression.Reference; + import com.yahoo.searchlib.rankingexpression.evaluation.Context; ++import com.yahoo.searchlib.rankingexpression.evaluation.ContextIndex; + import com.yahoo.searchlib.rankingexpression.evaluation.Value; ++import com.yahoo.searchlib.rankingexpression.evaluation.gbdtoptimization.GBDTOptimizer; + import com.yahoo.tensor.TensorType; + import com.yahoo.tensor.evaluation.TypeContext; + +@@ -60,4 +62,18 @@ public abstract class ExpressionNode implements Serializable { + */ + public abstract Value evaluate(Context context); + ++ public double getVariableIndex(ContextIndex context, GBDTOptimizer gbdtOptimizer) { ++ if (!(this instanceof ReferenceNode)) { ++ throw new IllegalArgumentException("Contained a left-hand comparison expression " + ++ "which was not a feature value but was: " + this); ++ } ++ ReferenceNode fNode = (ReferenceNode) this; ++ Integer index = context.getIndex(fNode.toString()); ++ if (index == null) { ++ throw new IllegalStateException("The ranking expression contained feature '" + fNode.getName() + ++ "', which is not known to " + context + ": The context must be created" + ++ "from the same ranking expression which is to be optimized"); ++ } ++ return index; ++ } + } +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/rule/SetMembershipNode.java b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/rule/SetMembershipNode.java +old mode 100644 +new mode 100755 +index 9b3bd67481..f4be076c16 +--- a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/rule/SetMembershipNode.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/rule/SetMembershipNode.java +@@ -75,7 +75,7 @@ public class SetMembershipNode extends BooleanNode { + 
} + + private Value evaluateValue(Value value, Context context) { +- return new BooleanValue(testMembership(value::equals, context)); ++ return new BooleanValue(context.testMembership(value::equals, this)); + } + + private Value evaluateTensor(Tensor tensor, Context context) { +@@ -83,15 +83,7 @@ public class SetMembershipNode extends BooleanNode { + } + + private boolean contains(double value, Context context) { +- return testMembership((setValue) -> setValue.asDouble() == value, context); +- } +- +- private boolean testMembership(Predicate<Value> test, Context context) { +- for (ExpressionNode setValue : setValues) { +- if (test.test(setValue.evaluate(context))) +- return true; +- } +- return false; ++ return context.testMembership((setValue) -> setValue.asDouble() == value, this); + } + + @Override +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/transform/ConstantDereferencer.java b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/transform/ConstantDereferencer.java +old mode 100644 +new mode 100755 +index a541eac242..4484e6a525 +--- a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/transform/ConstantDereferencer.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/transform/ConstantDereferencer.java +@@ -1,7 +1,6 @@ + // Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. + package com.yahoo.searchlib.rankingexpression.transform; + +-import com.yahoo.searchlib.rankingexpression.evaluation.TensorValue; + import com.yahoo.searchlib.rankingexpression.evaluation.Value; + import com.yahoo.searchlib.rankingexpression.rule.CompositeNode; + import com.yahoo.searchlib.rankingexpression.rule.ConstantNode; +@@ -30,19 +29,11 @@ public class ConstantDereferencer extends ExpressionTransformer<TransformContext> { +- private ExpressionNode transformArguments(ReferenceNode node, TransformContext context) { +- List<ExpressionNode> arguments = node.getArguments().expressions(); +- List<ExpressionNode> transformedArguments = new ArrayList<>(arguments.size()); +- for (ExpressionNode argument : arguments) +- transformedArguments.add(transform(argument, context)); +- return node.setArguments(transformedArguments); +- } +- + private ExpressionNode transformConstantReference(ReferenceNode node, TransformContext context) { + Value value = context.constants().get(node.getName()); + if (value == null || value.type().rank() > 0) { +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/transform/Simplifier.java b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/transform/Simplifier.java +old mode 100644 +new mode 100755 +index e8e2fdf245..f137e4a1f0 +--- a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/transform/Simplifier.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/transform/Simplifier.java +@@ -28,19 +28,13 @@ public class Simplifier extends ExpressionTransformer<TransformContext> { + node = transformChildren((CompositeNode) node, context); // depth first + if (node instanceof IfNode) + node = transformIf((IfNode) node); +- if (node instanceof EmbracedNode && hasSingleUndividableChild((EmbracedNode)node)) ++ if (node instanceof EmbracedNode && ((EmbracedNode)node).hasSingleUndividableChild(this)) + node = ((EmbracedNode)node).children().get(0); + if (node instanceof ArithmeticNode) + node = transformArithmetic((ArithmeticNode) node); + return node; + } + +- private boolean hasSingleUndividableChild(EmbracedNode node) { +- if (node.children().size() > 1) return false; +- if (node.children().get(0) instanceof ArithmeticNode) return false; +- return true; +- } +- + private 
ExpressionNode transformArithmetic(ArithmeticNode node) { + if (node.children().size() > 1) { + List children = new ArrayList<>(node.children()); +diff --git a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/transform/TransformContext.java b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/transform/TransformContext.java +old mode 100644 +new mode 100755 +index 7485ce69f9..fc753cbcf4 +--- a/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/transform/TransformContext.java ++++ b/searchlib/src/main/java/com/yahoo/searchlib/rankingexpression/transform/TransformContext.java +@@ -2,6 +2,8 @@ + package com.yahoo.searchlib.rankingexpression.transform; + + import com.yahoo.searchlib.rankingexpression.evaluation.Value; ++import com.yahoo.searchlib.rankingexpression.rule.ExpressionNode; ++import com.yahoo.searchlib.rankingexpression.rule.ReferenceNode; + + import java.util.Map; + +@@ -20,4 +22,11 @@ public class TransformContext { + + public Map constants() { return constants; } + ++ public ExpressionNode transformArguments(ReferenceNode node, ConstantDereferencer constantDereferencer) { ++ List arguments = node.getArguments().expressions(); ++ List transformedArguments = new ArrayList<>(arguments.size()); ++ for (ExpressionNode argument : arguments) ++ transformedArguments.add(constantDereferencer.transform(argument, this)); ++ return node.setArguments(transformedArguments); ++ } + } +diff --git a/searchlib/src/test/java/com/yahoo/searchlib/aggregation/AggregationTestCase.java b/searchlib/src/test/java/com/yahoo/searchlib/aggregation/AggregationTestCase.java +old mode 100644 +new mode 100755 +index cbd6c02cd2..4f809ac97c +--- a/searchlib/src/test/java/com/yahoo/searchlib/aggregation/AggregationTestCase.java ++++ b/searchlib/src/test/java/com/yahoo/searchlib/aggregation/AggregationTestCase.java +@@ -123,8 +123,8 @@ public class AggregationTestCase { + assertEquals(5, a.getMaxHits()); + assertEquals(0, a.getHits().size()); + a.setExpression(new AttributeNode("attributeA")); +- a.addHit(new FS4Hit(1, createGlobalId(2), rank1)); +- a.addHit(new FS4Hit(5, createGlobalId(7), rank2)); ++ new FS4Hit(1, createGlobalId(2), rank1).addHit(a); ++ new FS4Hit(5, createGlobalId(7), rank2).addHit(a); + assertEquals(2, a.getHits().size()); + HitsAggregationResult b = (HitsAggregationResult)serializeDeserialize(a); + assertEquals(a, b); +@@ -135,23 +135,16 @@ public class AggregationTestCase { + assertEquals(1, a.getHits().size()); + assertEquals(2.0, a.getHits().get(0).getRank(), delta); + +- HitsAggregationResult hits = new HitsAggregationResult(3) +- .addHit(new FS4Hit(1, createGlobalId(3), 1)) ++ HitsAggregationResult hits = new FS4Hit(1, createGlobalId(3), 1).addHit(new HitsAggregationResult(3)) + .addHit(new FS4Hit(2, createGlobalId(2), 2)) + .addHit(new FS4Hit(3, createGlobalId(1), 3)); + Grouping request = new Grouping() +- .setRoot(new Group() +- .addAggregationResult(hits.clone()) +- .addChild(new Group() +- .addAggregationResult(hits.clone()) +- .addChild(new Group() +- .addAggregationResult(hits.clone()))) +- .addChild(new Group() +- .addAggregationResult(hits.clone()) +- .addChild(new Group() +- .addAggregationResult(hits.clone()) +- .addChild(new Group() +- .addAggregationResult(hits.clone()))))); ++ .setRoot(hits.clone().addAggregationResult(new Group()) ++ .addChild(hits.clone().addAggregationResult(new Group()) ++ .addChild(hits.clone().addAggregationResult(new Group()))) ++ .addChild(hits.clone().addAggregationResult(new Group()) ++ 
.addChild(hits.clone().addAggregationResult(new Group()) ++ .addChild(hits.clone().addAggregationResult(new Group()))))); + assertFS4Hits(request, 0, 0, 3); + assertFS4Hits(request, 1, 1, 6); + assertFS4Hits(request, 2, 2, 6); +@@ -188,27 +181,20 @@ public class AggregationTestCase { + assertEquals(5, a.getMaxHits()); + assertEquals(0, a.getHits().size()); + a.setExpression(new AttributeNode("attributeA")); +- a.addHit(new VdsHit("1", s2, rank1)); ++ new VdsHit("1", s2, rank1).addHit(a); + HitsAggregationResult b = (HitsAggregationResult)serializeDeserialize(a); + assertEquals(a, b); + +- HitsAggregationResult hits = new HitsAggregationResult(3) +- .addHit(new VdsHit("1", s3, 1)) ++ HitsAggregationResult hits = new VdsHit("1", s3, 1).addHit(new HitsAggregationResult(3)) + .addHit(new VdsHit("2", s2, 2)) + .addHit(new VdsHit("3", s1, 3)); + Grouping request = new Grouping() +- .setRoot(new Group() +- .addAggregationResult(hits.clone()) +- .addChild(new Group() +- .addAggregationResult(hits.clone()) +- .addChild(new Group() +- .addAggregationResult(hits.clone()))) +- .addChild(new Group() +- .addAggregationResult(hits.clone()) +- .addChild(new Group() +- .addAggregationResult(hits.clone()) +- .addChild(new Group() +- .addAggregationResult(hits.clone()))))); ++ .setRoot(hits.clone().addAggregationResult(new Group()) ++ .addChild(hits.clone().addAggregationResult(new Group()) ++ .addChild(hits.clone().addAggregationResult(new Group()))) ++ .addChild(hits.clone().addAggregationResult(new Group()) ++ .addChild(hits.clone().addAggregationResult(new Group()) ++ .addChild(hits.clone().addAggregationResult(new Group()))))); + assertVdsHits(request, 0, 0, 3); + assertVdsHits(request, 1, 1, 6); + assertVdsHits(request, 2, 2, 6); +@@ -269,7 +255,7 @@ public class AggregationTestCase { + public void testGroup() { + Group a = new Group(); + a.setId(new IntegerResultNode(17)); +- a.addAggregationResult(new XorAggregationResult()); ++ new XorAggregationResult().addAggregationResult(a); + serializeDeserialize1(a); + } + +@@ -281,14 +267,14 @@ public class AggregationTestCase { + + XorAggregationResult xor = new XorAggregationResult(); + xor.setExpression(new MD5BitFunctionNode(new AttributeNode("docid"), 64)); +- level.getGroupPrototype().addAggregationResult(xor); ++ xor.addAggregationResult(level.getGroupPrototype()); + + SumAggregationResult sum = new SumAggregationResult(); + MinFunctionNode min = new MinFunctionNode(); + min.addArg(new AttributeNode("attribute1")); + min.addArg(new AttributeNode("attribute2")); + sum.setExpression(min); +- level.getGroupPrototype().addAggregationResult(sum); ++ sum.addAggregationResult(level.getGroupPrototype()); + + CatFunctionNode cat = new CatFunctionNode(); + cat.addArg(new GetDocIdNamespaceSpecificFunctionNode()); +@@ -296,17 +282,17 @@ public class AggregationTestCase { + cat.addArg(new DocumentFieldNode("flags")); + XorAggregationResult xor2 = new XorAggregationResult(); + xor2.setExpression(new XorBitFunctionNode(cat, 64)); +- level.getGroupPrototype().addAggregationResult(xor2); +- a.addLevel(level); ++ xor2.addAggregationResult(level.getGroupPrototype()); ++ level.addLevel(a); + + Group g = new Group(); + g.setId(new IntegerResultNode(17)); +- g.addAggregationResult(xor); // XXX: this is BAD ++ xor.addAggregationResult(g); // XXX: this is BAD + a.getRoot().addChild(g); + serializeDeserialize1(a); + + Grouping foo = new Grouping(); +- foo.addLevel(level); ++ level.addLevel(foo); + int hashBefore = foo.hashCode(); + foo.setFirstLevel(66); + 
assertEquals(hashBefore, foo.hashCode()); +diff --git a/searchlib/src/test/java/com/yahoo/searchlib/aggregation/GroupTestCase.java b/searchlib/src/test/java/com/yahoo/searchlib/aggregation/GroupTestCase.java +old mode 100644 +new mode 100755 +index 95ab1c30a3..042bdd2c49 +--- a/searchlib/src/test/java/com/yahoo/searchlib/aggregation/GroupTestCase.java ++++ b/searchlib/src/test/java/com/yahoo/searchlib/aggregation/GroupTestCase.java +@@ -19,7 +19,7 @@ public class GroupTestCase { + public void requireThatAggregationResultsCanBeAdded() { + Group group = new Group(); + AggregationResult res = new AverageAggregationResult(); +- group.addAggregationResult(res); ++ res.addAggregationResult(group); + assertEquals(1, group.getAggregationResults().size()); + assertSame(res, group.getAggregationResults().get(0)); + } +@@ -76,7 +76,7 @@ public class GroupTestCase { + public void requireThatAddOrderByDoesNotAddDuplicateAggregationResult() { + Group group = new Group(); + AggregationResult res = new MinAggregationResult(); +- group.addAggregationResult(res); ++ res.addAggregationResult(group); + group.addOrderBy(res, true); + assertEquals(1, group.getAggregationResults().size()); + assertSame(res, group.getAggregationResults().get(0)); +@@ -87,7 +87,7 @@ public class GroupTestCase { + Group group = new Group(); + AggregationResult foo = new MinAggregationResult(); + foo.setTag(6); +- group.addAggregationResult(foo); ++ foo.addAggregationResult(group); + AggregationResult bar = new MinAggregationResult(); + bar.setTag(9); + group.addOrderBy(bar, true); +@@ -101,7 +101,7 @@ public class GroupTestCase { + Group group = new Group(); + AggregationResult foo = new MinAggregationResult(); + foo.setTag(6); +- group.addAggregationResult(foo); ++ foo.addAggregationResult(group); + AggregationResult bar = new MaxAggregationResult(); + bar.setTag(9); + group.addOrderBy(bar, true); +@@ -125,7 +125,7 @@ public class GroupTestCase { + public void requireThatAddOrderByDoesNotAddDuplicateReferencedAggregationResult() { + Group group = new Group(); + AggregationResult res = new MinAggregationResult(); +- group.addAggregationResult(res); ++ res.addAggregationResult(group); + group.addOrderBy(new AggregationRefNode(res), true); + assertEquals(1, group.getAggregationResults().size()); + assertSame(res, group.getAggregationResults().get(0)); +@@ -144,7 +144,7 @@ public class GroupTestCase { + public void requireThatAddOrderByDoesNotAddDuplicateDeepReferencedAggregationResult() { + Group group = new Group(); + AggregationResult res = new MinAggregationResult(); +- group.addAggregationResult(res); ++ res.addAggregationResult(group); + group.addOrderBy(new NegateFunctionNode(new AggregationRefNode(res)), true); + assertEquals(1, group.getAggregationResults().size()); + assertSame(res, group.getAggregationResults().get(0)); +@@ -154,7 +154,7 @@ public class GroupTestCase { + public void requireThatAddOrderByResolvesReferenceIndex() { + Group group = new Group(); + AggregationResult res = new MinAggregationResult(); +- group.addAggregationResult(res); ++ res.addAggregationResult(group); + group.addOrderBy(new AggregationRefNode(res), true); + assertEquals(1, group.getOrderByExpressions().size()); + AggregationRefNode ref = (AggregationRefNode)group.getOrderByExpressions().get(0); +@@ -166,7 +166,7 @@ public class GroupTestCase { + public void requireThatAddOrderByResolvesDeepReferenceIndex() { + Group group = new Group(); + AggregationResult res = new MinAggregationResult(); +- group.addAggregationResult(res); ++ 
res.addAggregationResult(group); + group.addOrderBy(new NegateFunctionNode(new AggregationRefNode(res)), true); + assertEquals(1, group.getOrderByExpressions().size()); + AggregationRefNode ref = (AggregationRefNode)((NegateFunctionNode)group.getOrderByExpressions().get(0)).getArg(); +diff --git a/searchlib/src/test/java/com/yahoo/searchlib/aggregation/GroupingSerializationTest.java b/searchlib/src/test/java/com/yahoo/searchlib/aggregation/GroupingSerializationTest.java +old mode 100644 +new mode 100755 +index b9d3bc3bd4..b0cece3a88 +--- a/searchlib/src/test/java/com/yahoo/searchlib/aggregation/GroupingSerializationTest.java ++++ b/searchlib/src/test/java/com/yahoo/searchlib/aggregation/GroupingSerializationTest.java +@@ -52,16 +52,12 @@ public class GroupingSerializationTest { + t.assertMatch(new RawBucketResultNode( + new RawResultNode(new byte[]{1, 0, 0}), + new RawResultNode(new byte[]{1, 1, 0}))); +- t.assertMatch(new IntegerBucketResultNodeVector() +- .add(new IntegerBucketResultNode(878, 3246823))); +- t.assertMatch(new FloatBucketResultNodeVector() +- .add(new FloatBucketResultNode(878, 3246823))); +- t.assertMatch(new StringBucketResultNodeVector() +- .add(new StringBucketResultNode("878", "3246823"))); +- t.assertMatch(new RawBucketResultNodeVector() +- .add(new RawBucketResultNode( +- new RawResultNode(new byte[]{1, 0, 0}), +- new RawResultNode(new byte[]{1, 1, 0})))); ++ t.assertMatch(new IntegerBucketResultNode(878, 3246823).add(new IntegerBucketResultNodeVector())); ++ t.assertMatch(new FloatBucketResultNode(878, 3246823).add(new FloatBucketResultNodeVector())); ++ t.assertMatch(new StringBucketResultNode("878", "3246823").add(new StringBucketResultNodeVector())); ++ t.assertMatch(new RawBucketResultNode( ++ new RawResultNode(new byte[]{1, 0, 0}), ++ new RawResultNode(new byte[]{1, 1, 0})).add(new RawBucketResultNodeVector())); + } + + } +@@ -171,24 +167,21 @@ public class GroupingSerializationTest { + t.assertMatch(new VdsHit("100", new byte[0], 50.0)); + t.assertMatch(new VdsHit("100", "rawsummary".getBytes(), 50.0)); + t.assertMatch(new HitsAggregationResult()); +- t.assertMatch(new HitsAggregationResult() +- .setMaxHits(5) +- .addHit(new FS4Hit(0, createGlobalId(10), 1.0, -1)) ++ t.assertMatch(new FS4Hit(0, createGlobalId(10), 1.0, -1).addHit(new HitsAggregationResult() ++ .setMaxHits(5)) + .addHit(new FS4Hit(0, createGlobalId(20), 2.0, -1)) + .addHit(new FS4Hit(0, createGlobalId(30), 3.0, -1)) + .addHit(new FS4Hit(0, createGlobalId(40), 4.0, -1)) + .addHit(new FS4Hit(0, createGlobalId(50), 5.0, -1)) + .setExpression(new ConstantNode(new IntegerResultNode(5)))); +- t.assertMatch(new HitsAggregationResult() +- .setMaxHits(3) +- .addHit(new FS4Hit(0, createGlobalId(10), 1.0, 100)) ++ t.assertMatch(new FS4Hit(0, createGlobalId(10), 1.0, 100).addHit(new HitsAggregationResult() ++ .setMaxHits(3)) + .addHit(new FS4Hit(0, createGlobalId(20), 2.0, 200)) + .addHit(new FS4Hit(0, createGlobalId(30), 3.0, 300)) + .setExpression(new ConstantNode(new IntegerResultNode(5)))); + //TODO Verify content +- t.assertMatch(new HitsAggregationResult() +- .setMaxHits(3) +- .addHit(new VdsHit("10", "100".getBytes(), 1.0)) ++ t.assertMatch(new VdsHit("10", "100".getBytes(), 1.0).addHit(new HitsAggregationResult() ++ .setMaxHits(3)) + .addHit(new VdsHit("20", "200".getBytes(), 2.0)) + .addHit(new VdsHit("30", "300".getBytes(), 3.0)) + .setExpression(new ConstantNode(new IntegerResultNode(5)))); +@@ -199,12 +192,11 @@ public class GroupingSerializationTest { + public void testGroupingLevel() throws 
IOException { + try (SerializationTester t = new SerializationTester("testGroupingLevel")) { + GroupingLevel groupingLevel = new GroupingLevel(); +- groupingLevel.setMaxGroups(100) +- .setExpression(createDummyExpression()) +- .getGroupPrototype() +- .addAggregationResult( +- new SumAggregationResult() +- .setExpression(createDummyExpression())); ++ new SumAggregationResult() ++ .setExpression(createDummyExpression()).addAggregationResult( ++ groupingLevel.setMaxGroups(100) ++ .setExpression(createDummyExpression()) ++ .getGroupPrototype()); + t.assertMatch(groupingLevel); + } + } +@@ -217,10 +209,9 @@ public class GroupingSerializationTest { + .setRank(10)); + t.assertMatch(new Group().setId(new IntegerResultNode(100)) + .addChild(new Group().setId(new IntegerResultNode(110))) +- .addChild(new Group().setId(new IntegerResultNode(120)) +- .setRank(20.5) +- .addAggregationResult(new SumAggregationResult() +- .setExpression(createDummyExpression())) ++ .addChild(new SumAggregationResult() ++ .setExpression(createDummyExpression()).addAggregationResult(new Group().setId(new IntegerResultNode(120)) ++ .setRank(20.5)) + .addAggregationResult(new SumAggregationResult() + .setExpression(createDummyExpression()))) + .addChild(new Group().setId(new IntegerResultNode(130)) +@@ -234,32 +225,28 @@ public class GroupingSerializationTest { + t.assertMatch(new Grouping()); + + GroupingLevel level1 = new GroupingLevel(); +- level1.setMaxGroups(100) +- .setExpression(createDummyExpression()) +- .getGroupPrototype() +- .addAggregationResult( +- new SumAggregationResult() +- .setExpression(createDummyExpression())); ++ new SumAggregationResult() ++ .setExpression(createDummyExpression()).addAggregationResult( ++ level1.setMaxGroups(100) ++ .setExpression(createDummyExpression()) ++ .getGroupPrototype()); + GroupingLevel level2 = new GroupingLevel(); +- level2.setMaxGroups(10) +- .setExpression(createDummyExpression()) +- .getGroupPrototype() +- .addAggregationResult( +- new SumAggregationResult() +- .setExpression(createDummyExpression())) ++ new SumAggregationResult() ++ .setExpression(createDummyExpression()).addAggregationResult( ++ level2.setMaxGroups(10) ++ .setExpression(createDummyExpression()) ++ .getGroupPrototype()) + .addAggregationResult( + new SumAggregationResult() + .setExpression(createDummyExpression())); +- t.assertMatch(new Grouping() +- .addLevel(level1) ++ t.assertMatch(level1.addLevel(new Grouping()) + .addLevel(level2)); + + GroupingLevel level3 = new GroupingLevel(); +- level3.setExpression(new AttributeNode("folder")) +- .getGroupPrototype() +- .addAggregationResult( +- new XorAggregationResult() +- .setExpression(new MD5BitFunctionNode(new AttributeNode("docid"), 64))) ++ new XorAggregationResult() ++ .setExpression(new MD5BitFunctionNode(new AttributeNode("docid"), 64)).addAggregationResult( ++ level3.setExpression(new AttributeNode("folder")) ++ .getGroupPrototype()) + .addAggregationResult( + new SumAggregationResult() + .setExpression(new MinFunctionNode() +@@ -272,8 +259,7 @@ public class GroupingSerializationTest { + .addArg(new GetDocIdNamespaceSpecificFunctionNode(new StringResultNode(""))) + .addArg(new DocumentFieldNode("folder")) + .addArg(new DocumentFieldNode("flags")), 64))); +- t.assertMatch(new Grouping() +- .addLevel(level3)); ++ t.assertMatch(level3.addLevel(new Grouping())); + } + } + +diff --git a/searchlib/src/test/java/com/yahoo/searchlib/aggregation/GroupingTestCase.java b/searchlib/src/test/java/com/yahoo/searchlib/aggregation/GroupingTestCase.java +old mode 
100644 +new mode 100755 +index fe5405ecb6..85ea96f638 +--- a/searchlib/src/test/java/com/yahoo/searchlib/aggregation/GroupingTestCase.java ++++ b/searchlib/src/test/java/com/yahoo/searchlib/aggregation/GroupingTestCase.java +@@ -129,18 +129,18 @@ public class GroupingTestCase { + Grouping grouping = new Grouping(); + assertEquals(Collections.emptyList(), grouping.getLevels()); + try { +- grouping.addLevel(null); ++ null.addLevel(grouping); + fail(); + } catch (NullPointerException e) { + + } + GroupingLevel level = new GroupingLevel(); +- grouping.addLevel(level); ++ level.addLevel(grouping); + assertEquals(Arrays.asList(level), grouping.getLevels()); + + Grouping other = new Grouping(); + assertFalse(grouping.equals(other)); +- other.addLevel(level); ++ level.addLevel(other); + assertEquals(grouping, other); + + assertEquals(grouping, grouping.clone()); +@@ -184,8 +184,8 @@ public class GroupingTestCase { + + @Test + public void requireThatNeedDeepResultCollectionWorks() { +- assertFalse(new Grouping().addLevel(new GroupingLevel().setGroupPrototype(new Group())).needDeepResultCollection()); +- assertTrue(new Grouping().addLevel(new GroupingLevel().setGroupPrototype(new Group().addOrderBy(new CountAggregationResult(9), true))).needDeepResultCollection()); ++ assertFalse(new GroupingLevel().setGroupPrototype(new Group()).addLevel(new Grouping()).needDeepResultCollection()); ++ assertTrue(new GroupingLevel().setGroupPrototype(new Group().addOrderBy(new CountAggregationResult(9), true)).addLevel(new Grouping()).needDeepResultCollection()); + } + + @Test +@@ -193,8 +193,8 @@ public class GroupingTestCase { + assertFalse(new Grouping().useSinglePass()); + assertFalse(new Grouping().setForceSinglePass(false).useSinglePass()); + assertTrue(new Grouping().setForceSinglePass(true).useSinglePass()); +- assertFalse(new Grouping().addLevel(new GroupingLevel().setGroupPrototype(new Group())).useSinglePass()); +- assertTrue(new Grouping().addLevel(new GroupingLevel().setGroupPrototype(new Group().addOrderBy(new CountAggregationResult(9), true))).useSinglePass()); ++ assertFalse(new GroupingLevel().setGroupPrototype(new Group()).addLevel(new Grouping()).useSinglePass()); ++ assertTrue(new GroupingLevel().setGroupPrototype(new Group().addOrderBy(new CountAggregationResult(9), true)).addLevel(new Grouping()).useSinglePass()); + } + + @Test +diff --git a/searchlib/src/test/java/com/yahoo/searchlib/aggregation/MergeTestCase.java b/searchlib/src/test/java/com/yahoo/searchlib/aggregation/MergeTestCase.java +index 322bcb426d..c6b949ced5 100755 +--- a/searchlib/src/test/java/com/yahoo/searchlib/aggregation/MergeTestCase.java ++++ b/searchlib/src/test/java/com/yahoo/searchlib/aggregation/MergeTestCase.java +@@ -30,44 +30,35 @@ public class MergeTestCase { + // Test merging of hits. 
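Throughout the test hunks in this patch, fluent calls of the form receiver.addX(argument) are rewritten as argument.addX(receiver): addHit, addAggregationResult and addLevel all get this treatment, and in GroupingTestCase above the rewrite even produces null.addLevel(grouping), which no Java compiler accepts. The sketch below uses hypothetical ToyResult/ToyHit classes, not Vespa's API, to show why the two call shapes are not interchangeable when addX returns this for chaining; testMergeHits, announced by the comment above, relies on exactly that chaining style.

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-ins for a fluent aggregation API; not Vespa classes.
final class ToyHit { }

final class ToyResult {
    private final List<ToyHit> hits = new ArrayList<>();
    // Fluent adder: returns this, so every call in a chain targets the same ToyResult.
    ToyResult addHit(ToyHit hit) { hits.add(hit); return this; }
    int size() { return hits.size(); }
}

class FluentChainDemo {
    public static void main(String[] args) {
        // Shape used on the '-' lines: the whole chain keeps operating on one result object.
        ToyResult result = new ToyResult().addHit(new ToyHit()).addHit(new ToyHit());
        System.out.println(result.size()); // prints 2
        // The '+' lines swap receiver and argument (hit.addHit(result)); with these toy
        // types that does not type-check, and even where such a method existed, the rest
        // of the chain would no longer be applied to the result object.
    }
}

The same concern applies to the Group/Grouping builder chains rewritten in the MergeTestCase hunks that follow.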
+ @Test + public void testMergeHits() { +- Grouping request = new Grouping() ++ Grouping request = new GroupingLevel().setMaxGroups(69).addLevel(new Grouping() + .setFirstLevel(0) +- .setLastLevel(1) +- .addLevel(new GroupingLevel().setMaxGroups(69)); +- +- Group expect = new Group() +- .addAggregationResult(new HitsAggregationResult() +- .setMaxHits(5) +- .addHit(new FS4Hit(30, createGlobalId(30), 30)) +- .addHit(new FS4Hit(20, createGlobalId(20), 20)) +- .addHit(new FS4Hit(10, createGlobalId(10), 10)) +- .addHit(new FS4Hit(5, createGlobalId(9), 9)) +- .addHit(new FS4Hit(6, createGlobalId(8), 8)) +- .setExpression(new ConstantNode(new IntegerResultNode(0)))); +- +- Group a = new Group() +- .addAggregationResult(new HitsAggregationResult() +- .setMaxHits(5) +- .addHit(new FS4Hit(10, createGlobalId(10), 10)) +- .addHit(new FS4Hit(1, createGlobalId(5), 5)) +- .addHit(new FS4Hit(2, createGlobalId(4), 4)) +- .setExpression(new ConstantNode( new IntegerResultNode(0)))); +- +- Group b = new Group() +- .addAggregationResult(new HitsAggregationResult() +- .setMaxHits(5) +- .addHit(new FS4Hit(20, createGlobalId(20), 20)) +- .addHit(new FS4Hit(3, createGlobalId(7), 7)) +- .addHit(new FS4Hit(4, createGlobalId(6), 6)) +- .setExpression(new ConstantNode( new IntegerResultNode(0)))); +- +- Group c = new Group() +- .addAggregationResult(new HitsAggregationResult() +- .setMaxHits(5) +- .addHit(new FS4Hit(30, createGlobalId(30), 30)) +- .addHit(new FS4Hit(5, createGlobalId(9), 9)) +- .addHit(new FS4Hit(6, createGlobalId(8), 8)) +- .setExpression(new ConstantNode( new IntegerResultNode(0)))); ++ .setLastLevel(1)); ++ ++ Group expect = new FS4Hit(30, createGlobalId(30), 30).addHit(new HitsAggregationResult() ++ .setMaxHits(5)) ++ .addHit(new FS4Hit(20, createGlobalId(20), 20)) ++ .addHit(new FS4Hit(10, createGlobalId(10), 10)) ++ .addHit(new FS4Hit(5, createGlobalId(9), 9)) ++ .addHit(new FS4Hit(6, createGlobalId(8), 8)) ++ .setExpression(new ConstantNode(new IntegerResultNode(0))).addAggregationResult(new Group()); ++ ++ Group a = new FS4Hit(10, createGlobalId(10), 10).addHit(new HitsAggregationResult() ++ .setMaxHits(5)) ++ .addHit(new FS4Hit(1, createGlobalId(5), 5)) ++ .addHit(new FS4Hit(2, createGlobalId(4), 4)) ++ .setExpression(new ConstantNode( new IntegerResultNode(0))).addAggregationResult(new Group()); ++ ++ Group b = new FS4Hit(20, createGlobalId(20), 20).addHit(new HitsAggregationResult() ++ .setMaxHits(5)) ++ .addHit(new FS4Hit(3, createGlobalId(7), 7)) ++ .addHit(new FS4Hit(4, createGlobalId(6), 6)) ++ .setExpression(new ConstantNode( new IntegerResultNode(0))).addAggregationResult(new Group()); ++ ++ Group c = new FS4Hit(30, createGlobalId(30), 30).addHit(new HitsAggregationResult() ++ .setMaxHits(5)) ++ .addHit(new FS4Hit(5, createGlobalId(9), 9)) ++ .addHit(new FS4Hit(6, createGlobalId(8), 8)) ++ .setExpression(new ConstantNode( new IntegerResultNode(0))).addAggregationResult(new Group()); + + assertMerge(request, a, b, c, expect); + assertMerge(request, a, c, b, expect); +@@ -81,21 +72,18 @@ public class MergeTestCase { + @Test + public void testMergeSimpleSum() { + Grouping lhs = new Grouping() +- .setRoot(new Group() +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(20)) +- .setExpression(new AttributeNode("foo")))); ++ .setRoot(new SumAggregationResult() ++ .setSum(new IntegerResultNode(20)) ++ .setExpression(new AttributeNode("foo")).addAggregationResult(new Group())); + + Grouping rhs = new Grouping() +- .setRoot(new Group() +- 
.addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(30)) +- .setExpression(new AttributeNode("foo")))); ++ .setRoot(new SumAggregationResult() ++ .setSum(new IntegerResultNode(30)) ++ .setExpression(new AttributeNode("foo")).addAggregationResult(new Group())); + +- Group expect = new Group() +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(50)) +- .setExpression(new AttributeNode("foo"))); ++ Group expect = new SumAggregationResult() ++ .setSum(new IntegerResultNode(50)) ++ .setExpression(new AttributeNode("foo")).addAggregationResult(new Group()); + + assertMerge(lhs, rhs, expect); + } +@@ -106,26 +94,23 @@ public class MergeTestCase { + Grouping lhs = new Grouping() + .setFirstLevel(0) + .setLastLevel(1) +- .setRoot(new Group().addChild(new Group() +- .setId(new StringResultNode("foo")) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(20)) +- .setExpression(new AttributeNode("foo"))))); ++ .setRoot(new Group().addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(20)) ++ .setExpression(new AttributeNode("foo")).addAggregationResult(new Group() ++ .setId(new StringResultNode("foo"))))); + + Grouping rhs = new Grouping() + .setFirstLevel(0) + .setLastLevel(1) +- .setRoot(new Group().addChild(new Group() +- .setId(new StringResultNode("foo")) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(30)) +- .setExpression(new AttributeNode("foo"))))); +- +- Group expect = new Group().addChild(new Group() +- .setId(new StringResultNode("foo")) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(50)) +- .setExpression(new AttributeNode("foo")))); ++ .setRoot(new Group().addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(30)) ++ .setExpression(new AttributeNode("foo")).addAggregationResult(new Group() ++ .setId(new StringResultNode("foo"))))); ++ ++ Group expect = new Group().addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(50)) ++ .setExpression(new AttributeNode("foo")).addAggregationResult(new Group() ++ .setId(new StringResultNode("foo")))); + + assertMerge(lhs, rhs, expect); + } +@@ -137,48 +122,41 @@ public class MergeTestCase { + .setFirstLevel(0) + .setLastLevel(1) + .setRoot(new Group() +- .addChild(new Group() +- .setId(new StringResultNode("foo")) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(20)) +- .setExpression(new AttributeNode("foo")))) +- .addChild(new Group() +- .setId(new StringResultNode("bar")) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(40)) +- .setExpression(new AttributeNode("foo"))))); ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(20)) ++ .setExpression(new AttributeNode("foo")).addAggregationResult(new Group() ++ .setId(new StringResultNode("foo")))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(40)) ++ .setExpression(new AttributeNode("foo")).addAggregationResult(new Group() ++ .setId(new StringResultNode("bar"))))); + + Grouping rhs = new Grouping() + .setFirstLevel(0) + .setLastLevel(1) + .setRoot(new Group() +- .addChild(new Group() +- .setId(new StringResultNode("foo")) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(30)) +- .setExpression(new AttributeNode("foo")))) +- .addChild(new Group() +- .setId(new StringResultNode("baz")) +- .addAggregationResult(new SumAggregationResult() +- 
.setSum(new IntegerResultNode(30)) +- .setExpression(new AttributeNode("foo"))))); ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(30)) ++ .setExpression(new AttributeNode("foo")).addAggregationResult(new Group() ++ .setId(new StringResultNode("foo")))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(30)) ++ .setExpression(new AttributeNode("foo")).addAggregationResult(new Group() ++ .setId(new StringResultNode("baz"))))); + + Group expect = new Group().addChild( +- new Group() +- .setId(new StringResultNode("foo")) +- .addAggregationResult(new SumAggregationResult() ++ new SumAggregationResult() + .setSum(new IntegerResultNode(50)) +- .setExpression(new AttributeNode("foo")))) +- .addChild(new Group() +- .setId(new StringResultNode("bar")) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(40)) +- .setExpression(new AttributeNode("foo")))) +- .addChild(new Group() +- .setId(new StringResultNode("baz")) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(30)) +- .setExpression(new AttributeNode("foo")))); ++ .setExpression(new AttributeNode("foo")).addAggregationResult(new Group() ++ .setId(new StringResultNode("foo")))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(40)) ++ .setExpression(new AttributeNode("foo")).addAggregationResult(new Group() ++ .setId(new StringResultNode("bar")))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(30)) ++ .setExpression(new AttributeNode("foo")).addAggregationResult(new Group() ++ .setId(new StringResultNode("baz")))); + + assertMerge(lhs, rhs, expect); + } +@@ -186,159 +164,130 @@ public class MergeTestCase { + // Verify that frozen levels are not touched during merge. 
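In testMergeLevels below, lhs and rhs are identical four-level trees whose sums are 5, 10, 15 and 20 (expressions s0 through s3). The expected trees make the comment above concrete: expectAll doubles every sum (10, 20, 30, 40), while expect0 through expect3 keep the sums at the shallower depths at one side's value and only combine the deeper ones. A minimal sketch of that windowed-merge idea follows, assuming equally shaped trees; ToyGroup and its merge method are hypothetical stand-ins and deliberately ignore everything else the real merge handles (ranks, group ids, maxGroups).

import java.util.ArrayList;
import java.util.List;

// Hypothetical ToyGroup, not Vespa's Group/Grouping: a node with a sum and children.
final class ToyGroup {
    long sum;
    final List<ToyGroup> children = new ArrayList<>();

    // Combine two equally shaped trees, summing only at depths inside [firstLevel, lastLevel];
    // "frozen" depths outside the window keep the left-hand value unchanged.
    static ToyGroup merge(ToyGroup lhs, ToyGroup rhs, int depth, int firstLevel, int lastLevel) {
        ToyGroup merged = new ToyGroup();
        boolean inWindow = depth >= firstLevel && depth <= lastLevel;
        merged.sum = inWindow ? lhs.sum + rhs.sum : lhs.sum;
        for (int i = 0; i < lhs.children.size(); i++)
            merged.children.add(merge(lhs.children.get(i), rhs.children.get(i),
                                      depth + 1, firstLevel, lastLevel));
        return merged;
    }
}

With firstLevel 0 and lastLevel 3 every depth is combined, which corresponds to the expectAll case asserted at the end of this hunk.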
+ @Test + public void testMergeLevels() { +- Grouping request = new Grouping() +- .addLevel(new GroupingLevel() +- .setExpression(new AttributeNode("c1")) +- .setGroupPrototype(new Group().addAggregationResult( +- new SumAggregationResult().setExpression(new AttributeNode("s1"))))) ++ Grouping request = new GroupingLevel() ++ .setExpression(new AttributeNode("c1")) ++ .setGroupPrototype(new SumAggregationResult().setExpression(new AttributeNode("s1")).addAggregationResult( ++ new Group())).addLevel(new Grouping()) + .addLevel(new GroupingLevel() + .setExpression(new AttributeNode("c2")) +- .setGroupPrototype(new Group().addAggregationResult( +- new SumAggregationResult().setExpression(new AttributeNode("s2"))))) ++ .setGroupPrototype(new SumAggregationResult().setExpression(new AttributeNode("s2")).addAggregationResult( ++ new Group()))) + .addLevel(new GroupingLevel() + .setExpression(new AttributeNode("c3")) +- .setGroupPrototype(new Group().addAggregationResult( +- new SumAggregationResult().setExpression(new AttributeNode("s3"))))); +- +- Group lhs = new Group() +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(5)) +- .setExpression(new AttributeNode("s0"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(10)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(10)) +- .setExpression(new AttributeNode("s1"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(20)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(15)) +- .setExpression(new AttributeNode("s2"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(30)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(20)) +- .setExpression(new AttributeNode("s3")))))); ++ .setGroupPrototype(new SumAggregationResult().setExpression(new AttributeNode("s3")).addAggregationResult( ++ new Group()))); + +- Group rhs = new Group() +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(5)) +- .setExpression(new AttributeNode("s0"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(10)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(10)) +- .setExpression(new AttributeNode("s1"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(20)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(15)) +- .setExpression(new AttributeNode("s2"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(30)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(20)) +- .setExpression(new AttributeNode("s3")))))); +- +- Group expectAll = new Group() +- .addAggregationResult(new SumAggregationResult() ++ Group lhs = new SumAggregationResult() ++ .setSum(new IntegerResultNode(5)) ++ .setExpression(new AttributeNode("s0")).addAggregationResult(new Group()) ++ .addChild(new SumAggregationResult() + .setSum(new IntegerResultNode(10)) +- .setExpression(new AttributeNode("s0"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(10)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(20)) +- .setExpression(new AttributeNode("s1"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(20)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(30)) +- .setExpression(new AttributeNode("s2"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(30)) +- .addAggregationResult(new 
SumAggregationResult() +- .setSum(new IntegerResultNode(40)) +- .setExpression(new AttributeNode("s3")))))); +- +- Group expect0 = new Group() +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(5)) +- .setExpression(new AttributeNode("s0"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(10)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(20)) +- .setExpression(new AttributeNode("s1"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(20)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(30)) +- .setExpression(new AttributeNode("s2"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(30)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(40)) +- .setExpression(new AttributeNode("s3")))))); +- +- Group expect1 = new Group() +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(5)) +- .setExpression(new AttributeNode("s0"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(10)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(10)) +- .setExpression(new AttributeNode("s1"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(20)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(30)) +- .setExpression(new AttributeNode("s2"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(30)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(40)) +- .setExpression(new AttributeNode("s3")))))); +- +- Group expect2 = new Group() +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(5)) +- .setExpression(new AttributeNode("s0"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(10)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(10)) +- .setExpression(new AttributeNode("s1"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(20)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(15)) +- .setExpression(new AttributeNode("s2"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(30)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(40)) +- .setExpression(new AttributeNode("s3")))))); +- +- Group expect3 = new Group() +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(5)) +- .setExpression(new AttributeNode("s0"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(10)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(10)) +- .setExpression(new AttributeNode("s1"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(20)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(15)) +- .setExpression(new AttributeNode("s2"))) +- .addChild(new Group() +- .setId(new IntegerResultNode(30)) +- .addAggregationResult(new SumAggregationResult() +- .setSum(new IntegerResultNode(20)) +- .setExpression(new AttributeNode("s3")))))); ++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(10))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(15)) ++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(20))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(20)) ++ .setExpression(new 
AttributeNode("s3")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(30)))))); ++ ++ Group rhs = new SumAggregationResult() ++ .setSum(new IntegerResultNode(5)) ++ .setExpression(new AttributeNode("s0")).addAggregationResult(new Group()) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(10)) ++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(10))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(15)) ++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(20))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(20)) ++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(30)))))); ++ ++ Group expectAll = new SumAggregationResult() ++ .setSum(new IntegerResultNode(10)) ++ .setExpression(new AttributeNode("s0")).addAggregationResult(new Group()) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(20)) ++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(10))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(30)) ++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(20))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(40)) ++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(30)))))); ++ ++ Group expect0 = new SumAggregationResult() ++ .setSum(new IntegerResultNode(5)) ++ .setExpression(new AttributeNode("s0")).addAggregationResult(new Group()) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(20)) ++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(10))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(30)) ++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(20))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(40)) ++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(30)))))); ++ ++ Group expect1 = new SumAggregationResult() ++ .setSum(new IntegerResultNode(5)) ++ .setExpression(new AttributeNode("s0")).addAggregationResult(new Group()) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(10)) ++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(10))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(30)) ++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(20))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(40)) ++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(30)))))); ++ ++ Group expect2 = new SumAggregationResult() ++ .setSum(new IntegerResultNode(5)) ++ .setExpression(new AttributeNode("s0")).addAggregationResult(new Group()) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(10)) ++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(10))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(15)) ++ .setExpression(new 
AttributeNode("s2")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(20))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(40)) ++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(30)))))); ++ ++ Group expect3 = new SumAggregationResult() ++ .setSum(new IntegerResultNode(5)) ++ .setExpression(new AttributeNode("s0")).addAggregationResult(new Group()) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(10)) ++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(10))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(15)) ++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(20))) ++ .addChild(new SumAggregationResult() ++ .setSum(new IntegerResultNode(20)) ++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group() ++ .setId(new IntegerResultNode(30)))))); + + request.setFirstLevel(0).setLastLevel(3); + assertMerge(request, lhs, rhs, expectAll); +@@ -356,9 +305,8 @@ public class MergeTestCase { + // highest ranked ones, and that they are sorted by group id. + @Test + public void testMergeGroups() { +- Grouping request = new Grouping() +- .addLevel(new GroupingLevel() +- .setExpression(new AttributeNode("attr"))); ++ Grouping request = new GroupingLevel() ++ .setExpression(new AttributeNode("attr")).addLevel(new Grouping()); + Group lhs = new Group() + .addChild(new Group().setId(new IntegerResultNode(5)).setRank(5)) + .addChild(new Group().setId(new IntegerResultNode(10)).setRank(5)) +@@ -412,28 +360,22 @@ public class MergeTestCase { + public void testMergeBuckets() { + Grouping lhs = new Grouping() + .setRoot(new Group().setTag(0) +- .addChild(new Group().setId(new FloatBucketResultNode(FloatResultNode.getNegativeInfinity().getFloat(), 0.4)) +- .addAggregationResult(new CountAggregationResult().setCount(1)) ++ .addChild(new CountAggregationResult().setCount(1).addAggregationResult(new Group().setId(new FloatBucketResultNode(FloatResultNode.getNegativeInfinity().getFloat(), 0.4))) + .setTag(1)) +- .addChild(new Group().setId(new FloatBucketResultNode(0, 0)) +- .addAggregationResult(new CountAggregationResult().setCount(12)) ++ .addChild(new CountAggregationResult().setCount(12).addAggregationResult(new Group().setId(new FloatBucketResultNode(0, 0))) + .setTag(1))); + + Grouping rhs = new Grouping() + .setRoot(new Group().setTag(0) +- .addChild(new Group().setId(new FloatBucketResultNode(FloatResultNode.getNegativeInfinity().getFloat(), 0.4)) +- .addAggregationResult(new CountAggregationResult().setCount(0)) ++ .addChild(new CountAggregationResult().setCount(0).addAggregationResult(new Group().setId(new FloatBucketResultNode(FloatResultNode.getNegativeInfinity().getFloat(), 0.4))) + .setTag(1)) +- .addChild(new Group().setId(new FloatBucketResultNode(0, 0)) +- .addAggregationResult(new CountAggregationResult().setCount(15)) ++ .addChild(new CountAggregationResult().setCount(15).addAggregationResult(new Group().setId(new FloatBucketResultNode(0, 0))) + .setTag(1))); + + Group expected = new Group().setTag(0) +- .addChild(new Group().setId(new FloatBucketResultNode(FloatResultNode.getNegativeInfinity().getFloat(), 0.4)) +- .addAggregationResult(new CountAggregationResult().setCount(1)) ++ .addChild(new CountAggregationResult().setCount(1).addAggregationResult(new Group().setId(new 
FloatBucketResultNode(FloatResultNode.getNegativeInfinity().getFloat(), 0.4))) + .setTag(1)) +- .addChild(new Group().setId(new FloatBucketResultNode(0, 0)) +- .addAggregationResult(new CountAggregationResult().setCount(27)) ++ .addChild(new CountAggregationResult().setCount(27).addAggregationResult(new Group().setId(new FloatBucketResultNode(0, 0))) + .setTag(1)); + assertMerge(lhs, rhs, expected); + } +@@ -441,36 +383,30 @@ public class MergeTestCase { + // Merge two trees that are ordered by an expression, and verify that the resulting order after merge is correct. + @Test + public void testMergeExpressions() { +- Grouping a = new Grouping() ++ Grouping a = new GroupingLevel().setMaxGroups(1).addLevel(new Grouping() + .setFirstLevel(0) +- .setLastLevel(1) +- .addLevel(new GroupingLevel().setMaxGroups(1)) ++ .setLastLevel(1)) + .setRoot(new Group() +- .addChild(new Group().setId(new StringResultNode("aa")) +- .addAggregationResult(new MaxAggregationResult().setMax(new IntegerResultNode(9))) ++ .addChild(new MaxAggregationResult().setMax(new IntegerResultNode(9)).addAggregationResult(new Group().setId(new StringResultNode("aa"))) + .addAggregationResult(new CountAggregationResult().setCount(2)) + .addOrderBy(new MultiplyFunctionNode().addArg(new AggregationRefNode(0)) + .addArg(new AggregationRefNode(1)), true))); +- Grouping b = new Grouping() ++ Grouping b = new GroupingLevel().setMaxGroups(1).addLevel(new Grouping() + .setFirstLevel(0) +- .setLastLevel(1) +- .addLevel(new GroupingLevel().setMaxGroups(1)) ++ .setLastLevel(1)) + .setRoot(new Group() +- .addChild(new Group().setId(new StringResultNode("ab")) +- .addAggregationResult(new MaxAggregationResult().setMax( +- new IntegerResultNode(12))) ++ .addChild(new MaxAggregationResult().setMax( ++ new IntegerResultNode(12)).addAggregationResult(new Group().setId(new StringResultNode("ab"))) + .addAggregationResult(new CountAggregationResult().setCount(1)) + .addOrderBy(new MultiplyFunctionNode().addArg(new AggregationRefNode(0)) + .addArg(new AggregationRefNode(1)), true))); + +- Grouping expected = new Grouping() ++ Grouping expected = new GroupingLevel().setMaxGroups(1).addLevel(new Grouping() + .setFirstLevel(0) +- .setLastLevel(1) +- .addLevel(new GroupingLevel().setMaxGroups(1)) ++ .setLastLevel(1)) + .setRoot(new Group() +- .addChild(new Group().setId(new StringResultNode("ab")) +- .addAggregationResult(new MaxAggregationResult().setMax( +- new IntegerResultNode(12))) ++ .addChild(new MaxAggregationResult().setMax( ++ new IntegerResultNode(12)).addAggregationResult(new Group().setId(new StringResultNode("ab"))) + .addAggregationResult(new CountAggregationResult().setCount(1)) + .addOrderBy(new MultiplyFunctionNode().addArg(new AggregationRefNode(0)) + .addArg(new AggregationRefNode(1)), true))); +@@ -484,241 +420,206 @@ public class MergeTestCase { + // Merge two relatively complex tree structures and verify that the end result is as expected. 
+ @Test
+ public void testMergeTrees() {
+- Grouping request = new Grouping()
+- .addLevel(new GroupingLevel()
+- .setMaxGroups(3)
+- .setExpression(new AttributeNode("c1"))
+- .setGroupPrototype(new Group().addAggregationResult(
+- new SumAggregationResult().setExpression(new AttributeNode("s1")))))
++ Grouping request = new GroupingLevel()
++ .setMaxGroups(3)
++ .setExpression(new AttributeNode("c1"))
++ .setGroupPrototype(new SumAggregationResult().setExpression(new AttributeNode("s1")).addAggregationResult(
++ new Group())).addLevel(new Grouping())
+ .addLevel(new GroupingLevel()
+ .setMaxGroups(2)
+ .setExpression(new AttributeNode("c2"))
+- .setGroupPrototype(new Group().addAggregationResult(
+- new SumAggregationResult().setExpression(new AttributeNode("s2")))))
++ .setGroupPrototype(new SumAggregationResult().setExpression(new AttributeNode("s2")).addAggregationResult(
++ new Group())))
+ .addLevel(new GroupingLevel()
+ .setMaxGroups(1)
+ .setExpression(new AttributeNode("c3"))
+- .setGroupPrototype(new Group().addAggregationResult(
+- new SumAggregationResult().setExpression(new AttributeNode("s3")))));
++ .setGroupPrototype(new SumAggregationResult().setExpression(new AttributeNode("s3")).addAggregationResult(
++ new Group())));
+
+- Group lhs = new Group()
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s0")))
++ Group lhs = new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s0")).addAggregationResult(new Group())
+ .addChild(new Group().setId(new IntegerResultNode(4)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(5))
+- .setRank(5) // merged with 200 rank node
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s1")))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(5))
++ .setRank(5))
+ .addChild(new Group().setId(new IntegerResultNode(4)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(5))
+- .setRank(500)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s2")))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(5))
++ .setRank(500))
+ .addChild(new Group().setId(new IntegerResultNode(4)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(5))
+- .setRank(200)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s3"))))))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(5))
++ .setRank(200)))))
+ .addChild(new Group().setId(new IntegerResultNode(9)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(10))
+- .setRank(100)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s1")))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(10))
++ .setRank(100))
+ // dummy child would be picked up here
+- .addChild(new Group()
+- .setId(new IntegerResultNode(15))
+- .setRank(200)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s2")))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(15))
++ .setRank(200))
+ .addChild(new Group().setId(new IntegerResultNode(14)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(15))
+- .setRank(300)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s3"))))))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(15))
++ .setRank(300)))))
+ .addChild(new Group().setId(new IntegerResultNode(14)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(15))
+- .setRank(300)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s1")))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(15))
++ .setRank(300))
+ .addChild(new Group().setId(new IntegerResultNode(19)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(20))
+- .setRank(100)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s2")))));
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(20))
++ .setRank(100))));
+
+- Group rhs = new Group()
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s0")))
++ Group rhs = new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s0")).addAggregationResult(new Group())
+ .addChild(new Group().setId(new IntegerResultNode(4)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(5))
+- .setRank(200)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s1")))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(5))
++ .setRank(200))
+ .addChild(new Group().setId(new IntegerResultNode(9)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(10))
+- .setRank(400)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s2")))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(10))
++ .setRank(400))
+ .addChild(new Group().setId(new IntegerResultNode(9)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(10))
+- .setRank(100)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s3"))))))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(10))
++ .setRank(100)))))
+ .addChild(new Group().setId(new IntegerResultNode(9)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(10))
+- .setRank(100)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s1")))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(10))
++ .setRank(100))
+ // dummy child would be picket up here
+- .addChild(new Group()
+- .setId(new IntegerResultNode(15))
+- .setRank(200)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s2")))))
+- .addChild(new Group().setId(new IntegerResultNode(14)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(15))
+- .setRank(5) // merged with 300 rank node
+- .addAggregationResult(new SumAggregationResult()
++ .addChild(new SumAggregationResult()
+ .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s1")))
++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(15))
++ .setRank(200))))
++ .addChild(new Group().setId(new IntegerResultNode(14)).setRank(10))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(15))
++ .setRank(5))
+ .addChild(new Group().setId(new IntegerResultNode(19)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(20))
+- .setRank(5) // merged with 100 rank node
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s2")))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(20))
++ .setRank(5))
+ .addChild(new Group().setId(new IntegerResultNode(19)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(20))
+- .setRank(500)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s3")))))
+- .addChild(new Group().setId(new IntegerResultNode(24)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(25))
+- .setRank(300)
+- .addAggregationResult(new SumAggregationResult()
++ .addChild(new SumAggregationResult()
+ .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s2")))
++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(20))
++ .setRank(500))))
++ .addChild(new Group().setId(new IntegerResultNode(24)).setRank(10))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(25))
++ .setRank(300))
+ .addChild(new Group().setId(new IntegerResultNode(24)).setRank(10))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(25))
+- .setRank(400)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s3"))))));
+-
+
+- Group expect = new Group()
+- .addAggregationResult(new SumAggregationResult()
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(25))
++ .setRank(400)))));
++
++ Group expect = new SumAggregationResult()
++ .setSum(new IntegerResultNode(200))
++ .setExpression(new AttributeNode("s0")).addAggregationResult(new Group())
++ .addChild(new SumAggregationResult()
+ .setSum(new IntegerResultNode(200))
+- .setExpression(new AttributeNode("s0")))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(5))
+- .setRank(200)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(200))
+- .setExpression(new AttributeNode("s1")))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(5))
+- .setRank(500)
+- .addAggregationResult(new SumAggregationResult()
++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(5))
++ .setRank(200))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(5))
++ .setRank(500))
++ .addChild(new SumAggregationResult()
+ .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s2")))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(5))
+- .setRank(200)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s3")))))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(10))
+- .setRank(400)
+- .addAggregationResult(new SumAggregationResult()
++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(5))
++ .setRank(200))))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(10))
++ .setRank(400))
++ .addChild(new SumAggregationResult()
+ .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s2")))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(10))
+- .setRank(100)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s3"))))))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(10))
+- .setRank(100)
+- .addAggregationResult(new SumAggregationResult()
++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(10))
++ .setRank(100)))))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(200))
++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(10))
++ .setRank(100))
++ .addChild(new SumAggregationResult()
+ .setSum(new IntegerResultNode(200))
+- .setExpression(new AttributeNode("s1")))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(15))
+- .setRank(200)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(200))
+- .setExpression(new AttributeNode("s2")))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(15))
+- .setRank(300)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s3"))))))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(15))
+- .setRank(300)
+- .addAggregationResult(new SumAggregationResult()
++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(15))
++ .setRank(200))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(15))
++ .setRank(300)))))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(200))
++ .setExpression(new AttributeNode("s1")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(15))
++ .setRank(300))
++ .addChild(new SumAggregationResult()
+ .setSum(new IntegerResultNode(200))
+- .setExpression(new AttributeNode("s1")))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(20))
+- .setRank(100)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(200))
+- .setExpression(new AttributeNode("s2")))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(20))
+- .setRank(500)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s3")))))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(25))
+- .setRank(300)
+- .addAggregationResult(new SumAggregationResult()
++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(20))
++ .setRank(100))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(20))
++ .setRank(500))))
++ .addChild(new SumAggregationResult()
++ .setSum(new IntegerResultNode(100))
++ .setExpression(new AttributeNode("s2")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(25))
++ .setRank(300))
++ .addChild(new SumAggregationResult()
+ .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s2")))
+- .addChild(new Group()
+- .setId(new IntegerResultNode(25))
+- .setRank(400)
+- .addAggregationResult(new SumAggregationResult()
+- .setSum(new IntegerResultNode(100))
+- .setExpression(new AttributeNode("s3"))))));
++ .setExpression(new AttributeNode("s3")).addAggregationResult(new Group()
++ .setId(new IntegerResultNode(25))
++ .setRank(400)))));
+
+ assertMerge(request, lhs, rhs, expect);
+ assertMerge(request, rhs, lhs, expect);
+diff --git a/searchlib/src/test/java/com/yahoo/searchlib/expression/ExpressionTestCase.java b/searchlib/src/test/java/com/yahoo/searchlib/expression/ExpressionTestCase.java
+old mode 100644
+new mode 100755
+index d282dc17aa..15fafefd90
+--- a/searchlib/src/test/java/com/yahoo/searchlib/expression/ExpressionTestCase.java
++++ b/searchlib/src/test/java/com/yahoo/searchlib/expression/ExpressionTestCase.java
+@@ -24,14 +24,14 @@ public class ExpressionTestCase {
+
+ @Test
+ public void testRangeBucketPreDefFunctionNode() {
+- assertMultiArgFunctionNode(new RangeBucketPreDefFunctionNode(new StringBucketResultNodeVector().add(new StringBucketResultNode("10", "20")), new AttributeNode("foo")));
++ assertMultiArgFunctionNode(new RangeBucketPreDefFunctionNode(new StringBucketResultNode("10", "20").add(new StringBucketResultNodeVector()), new AttributeNode("foo")));
+ assertEquals(new RangeBucketPreDefFunctionNode(), new RangeBucketPreDefFunctionNode());
+- assertEquals(new RangeBucketPreDefFunctionNode(new StringBucketResultNodeVector().add(new StringBucketResultNode("10", "20")), new AttributeNode("foo")),
+- new RangeBucketPreDefFunctionNode(new StringBucketResultNodeVector().add(new StringBucketResultNode("10", "20")), new AttributeNode("foo")));
+- assertNotEquals(new RangeBucketPreDefFunctionNode(new StringBucketResultNodeVector().add(new StringBucketResultNode("10", "20")), new AttributeNode("foo")),
+- new RangeBucketPreDefFunctionNode(new StringBucketResultNodeVector().add(new StringBucketResultNode("10", "21")), new AttributeNode("foo")));
+- assertNotEquals(new RangeBucketPreDefFunctionNode(new StringBucketResultNodeVector().add(new StringBucketResultNode("10", "20")), new AttributeNode("foo")),
+- new RangeBucketPreDefFunctionNode(new StringBucketResultNodeVector().add(new StringBucketResultNode("10", "20")), new AttributeNode("bar")));
++ assertEquals(new RangeBucketPreDefFunctionNode(new StringBucketResultNode("10", "20").add(new StringBucketResultNodeVector()), new AttributeNode("foo")),
++ new RangeBucketPreDefFunctionNode(new StringBucketResultNode("10", "20").add(new StringBucketResultNodeVector()), new AttributeNode("foo")));
++ assertNotEquals(new RangeBucketPreDefFunctionNode(new StringBucketResultNode("10", "20").add(new StringBucketResultNodeVector()), new AttributeNode("foo")),
++ new RangeBucketPreDefFunctionNode(new StringBucketResultNode("10", "21").add(new StringBucketResultNodeVector()), new AttributeNode("foo")));
++ assertNotEquals(new RangeBucketPreDefFunctionNode(new StringBucketResultNode("10", "20").add(new StringBucketResultNodeVector()), new AttributeNode("foo")),
++ new RangeBucketPreDefFunctionNode(new StringBucketResultNode("10", "20").add(new StringBucketResultNodeVector()), new AttributeNode("bar")));
+ }
+
+ @Test
+@@ -48,35 +48,35 @@ public class ExpressionTestCase {
+
+ @Test
+ public void testIntegerBucketResultNodeVector() {
+- assertResultNode(new IntegerBucketResultNodeVector().add(new IntegerBucketResultNode(10, 20)));
+- assertEquals(new IntegerBucketResultNodeVector().add(new IntegerBucketResultNode(10, 20)),
+- new IntegerBucketResultNodeVector().add(new IntegerBucketResultNode(10, 20)));
+- assertNotEquals(new IntegerBucketResultNodeVector().add(new IntegerBucketResultNode(10, 20)),
++ assertResultNode(new IntegerBucketResultNode(10, 20).add(new IntegerBucketResultNodeVector()));
++ assertEquals(new IntegerBucketResultNode(10, 20).add(new IntegerBucketResultNodeVector()),
++ new IntegerBucketResultNode(10, 20).add(new IntegerBucketResultNodeVector()));
++ assertNotEquals(new IntegerBucketResultNode(10, 20).add(new IntegerBucketResultNodeVector()),
+ new IntegerBucketResultNodeVector());
+- assertNotEquals(new IntegerBucketResultNodeVector().add(new IntegerBucketResultNode(10, 20)),
+- new IntegerBucketResultNodeVector().add(new IntegerBucketResultNode(11, 20)));
++ assertNotEquals(new IntegerBucketResultNode(10, 20).add(new IntegerBucketResultNodeVector()),
++ new IntegerBucketResultNode(11, 20).add(new IntegerBucketResultNodeVector()));
+ }
+
+ @Test
+ public void testFloatBucketResultNodeVector() {
+- assertResultNode(new FloatBucketResultNodeVector().add(new FloatBucketResultNode(10, 20)));
+- assertEquals(new FloatBucketResultNodeVector().add(new FloatBucketResultNode(10, 20)),
+- new FloatBucketResultNodeVector().add(new FloatBucketResultNode(10, 20)));
+- assertNotEquals(new FloatBucketResultNodeVector().add(new FloatBucketResultNode(10, 20)),
++ assertResultNode(new FloatBucketResultNode(10, 20).add(new FloatBucketResultNodeVector()));
++ assertEquals(new FloatBucketResultNode(10, 20).add(new FloatBucketResultNodeVector()),
++ new FloatBucketResultNode(10, 20).add(new FloatBucketResultNodeVector()));
++ assertNotEquals(new FloatBucketResultNode(10, 20).add(new FloatBucketResultNodeVector()),
+ new FloatBucketResultNodeVector());
+- assertNotEquals(new FloatBucketResultNodeVector().add(new FloatBucketResultNode(10, 20)),
+- new FloatBucketResultNodeVector().add(new FloatBucketResultNode(11, 20)));
++ assertNotEquals(new FloatBucketResultNode(10, 20).add(new FloatBucketResultNodeVector()),
++ new FloatBucketResultNode(11, 20).add(new FloatBucketResultNodeVector()));
+ }
+
+ @Test
+ public void testStringBucketResultNodeVector() {
+- assertResultNode(new StringBucketResultNodeVector().add(new StringBucketResultNode("10", "20")));
+- assertEquals(new StringBucketResultNodeVector().add(new StringBucketResultNode("10", "20")),
+- new StringBucketResultNodeVector().add(new StringBucketResultNode("10", "20")));
+- assertNotEquals(new StringBucketResultNodeVector().add(new StringBucketResultNode("10", "20")),
++ assertResultNode(new StringBucketResultNode("10", "20").add(new StringBucketResultNodeVector()));
++ assertEquals(new StringBucketResultNode("10", "20").add(new StringBucketResultNodeVector()),
++ new StringBucketResultNode("10", "20").add(new StringBucketResultNodeVector()));
++ assertNotEquals(new StringBucketResultNode("10", "20").add(new StringBucketResultNodeVector()),
+ new StringBucketResultNodeVector());
+- assertNotEquals(new StringBucketResultNodeVector().add(new StringBucketResultNode("10", "20")),
+- new StringBucketResultNodeVector().add(new StringBucketResultNode("11", "20")));
++ assertNotEquals(new StringBucketResultNode("10", "20").add(new StringBucketResultNodeVector()),
++ new StringBucketResultNode("11", "20").add(new StringBucketResultNodeVector()));
+ }
+
+ @Test
+diff --git a/searchlib/src/test/java/com/yahoo/searchlib/expression/ResultNodeVectorTestCase.java b/searchlib/src/test/java/com/yahoo/searchlib/expression/ResultNodeVectorTestCase.java
+old mode 100644
+new mode 100755
+index 2fc1771ece..fe95883f7d
+--- a/searchlib/src/test/java/com/yahoo/searchlib/expression/ResultNodeVectorTestCase.java
++++ b/searchlib/src/test/java/com/yahoo/searchlib/expression/ResultNodeVectorTestCase.java
+@@ -1,7 +1,6 @@
+ // Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
+ package com.yahoo.searchlib.expression;
+
+-import com.yahoo.vespa.objects.BufferSerializer;
+ import org.junit.Test;
+
+ import java.util.List;
+@@ -29,71 +28,71 @@ public class ResultNodeVectorTestCase extends ResultNodeTest {
+ @Test
+ public void testVectorAdd() {
+ BoolResultNodeVector b = new BoolResultNodeVector();
+- b.add(new BoolResultNode(true));
+- b.add(new BoolResultNode(false));
++ new BoolResultNode(true).add(b);
++ new BoolResultNode(false).add(b);
+ b.add((ResultNode)new BoolResultNode(false));
+ assertThat(b.getVector().size(), is(3));
+
+ Int8ResultNodeVector i8 = new Int8ResultNodeVector();
+- i8.add(new Int8ResultNode((byte)9));
+- i8.add(new Int8ResultNode((byte)2));
++ new Int8ResultNode((byte)9).addOther(i8);
++ new Int8ResultNode((byte)2).addOther(i8);
+ i8.add((ResultNode)new Int8ResultNode((byte)5));
+ assertThat(i8.getVector().size(), is(3));
+
+ Int16ResultNodeVector i16 = new Int16ResultNodeVector();
+- i16.add(new Int16ResultNode((short)9));
+- i16.add(new Int16ResultNode((short)2));
++ new Int16ResultNode((short)9).addOther(i16);
++ new Int16ResultNode((short)2).addOther(i16);
+ i16.add((ResultNode)new Int16ResultNode((short)5));
+ assertThat(i16.getVector().size(), is(3));
+
+ Int32ResultNodeVector i32 = new Int32ResultNodeVector();
+- i32.add(new Int32ResultNode(9));
+- i32.add(new Int32ResultNode(2));
++ new Int32ResultNode(9).addOther(i32);
++ new Int32ResultNode(2).addOther(i32);
+ i32.add((ResultNode)new Int32ResultNode(5));
+ assertThat(i32.getVector().size(), is(3));
+
+ IntegerResultNodeVector ieger = new IntegerResultNodeVector();
+- ieger.add(new IntegerResultNode(9));
+- ieger.add(new IntegerResultNode(2));
++ new IntegerResultNode(9).addOther(ieger);
++ new IntegerResultNode(2).addOther(ieger);
+ ieger.add((ResultNode)new IntegerResultNode(5));
+ assertThat(ieger.getVector().size(), is(3));
+
+ FloatResultNodeVector floatvec = new FloatResultNodeVector();
+- floatvec.add(new FloatResultNode(3.3));
+- floatvec.add(new FloatResultNode(3.4));
++ new FloatResultNode(3.3).addOther(floatvec);
++ new FloatResultNode(3.4).addOther(floatvec);
+ floatvec.add((ResultNode)new FloatResultNode(3.5));
+ assertThat(floatvec.getVector().size(), is(3));
+ }
+
+ @Test
+ public void testCmp() {
+- ResultNodeVector int8vec = new Int8ResultNodeVector().add(new Int8ResultNode((byte) 2));
+- ResultNodeVector int8veclarge = new Int8ResultNodeVector().add(new Int8ResultNode((byte) 2)).add(new Int8ResultNode((byte) 5));
+- ResultNodeVector int8vecsmall = new Int8ResultNodeVector().add(new Int8ResultNode((byte) 1));
++ ResultNodeVector int8vec = new Int8ResultNode((byte) 2).addOther(new Int8ResultNodeVector());
++ ResultNodeVector int8veclarge = new Int8ResultNode((byte) 2).addOther(new Int8ResultNodeVector()).addOther(new Int8ResultNode((byte) 5));
++ ResultNodeVector int8vecsmall = new Int8ResultNode((byte) 1).addOther(new Int8ResultNodeVector());
+
+- ResultNodeVector int16vec = new Int16ResultNodeVector().add(new Int16ResultNode((short) 2));
+- ResultNodeVector int16veclarge = new Int16ResultNodeVector().add(new Int16ResultNode((short) 2)).add(new Int16ResultNode((short) 5));
+- ResultNodeVector int16vecsmall = new Int16ResultNodeVector().add(new Int16ResultNode((short) 1));
++ ResultNodeVector int16vec = new Int16ResultNode((short) 2).addOther(new Int16ResultNodeVector());
++ ResultNodeVector int16veclarge = new Int16ResultNode((short) 2).addOther(new Int16ResultNodeVector()).addOther(new Int16ResultNode((short) 5));
++ ResultNodeVector int16vecsmall = new Int16ResultNode((short) 1).addOther(new Int16ResultNodeVector());
+
+- ResultNodeVector int32vec = new Int32ResultNodeVector().add(new Int32ResultNode(2));
+- ResultNodeVector int32veclarge = new Int32ResultNodeVector().add(new Int32ResultNode(2)).add(new Int32ResultNode(5));
+- ResultNodeVector int32vecsmall = new Int32ResultNodeVector().add(new Int32ResultNode(1));
++ ResultNodeVector int32vec = new Int32ResultNode(2).addOther(new Int32ResultNodeVector());
++ ResultNodeVector int32veclarge = new Int32ResultNode(2).addOther(new Int32ResultNodeVector()).addOther(new Int32ResultNode(5));
++ ResultNodeVector int32vecsmall = new Int32ResultNode(1).addOther(new Int32ResultNodeVector());
+
+- ResultNodeVector intvec = new IntegerResultNodeVector().add(new IntegerResultNode(2));
+- ResultNodeVector intveclarge = new IntegerResultNodeVector().add(new IntegerResultNode(2)).add(new IntegerResultNode(5));
+- ResultNodeVector intvecsmall = new IntegerResultNodeVector().add(new IntegerResultNode(1));
++ ResultNodeVector intvec = new IntegerResultNode(2).addOther(new IntegerResultNodeVector());
++ ResultNodeVector intveclarge = new IntegerResultNode(2).addOther(new IntegerResultNodeVector()).addOther(new IntegerResultNode(5));
++ ResultNodeVector intvecsmall = new IntegerResultNode(1).addOther(new IntegerResultNodeVector());
+
+- FloatResultNodeVector floatvec = new FloatResultNodeVector().add(new FloatResultNode(2.2));
+- FloatResultNodeVector floatveclarge = new FloatResultNodeVector().add(new FloatResultNode(2.2)).add(new FloatResultNode(5.5));
+- FloatResultNodeVector floatvecsmall = new FloatResultNodeVector().add(new FloatResultNode(1.2));
++ FloatResultNodeVector floatvec = new FloatResultNode(2.2).addOther(new FloatResultNodeVector());
++ FloatResultNodeVector floatveclarge = new FloatResultNode(2.2).addOther(new FloatResultNodeVector()).addOther(new FloatResultNode(5.5));
++ FloatResultNodeVector floatvecsmall = new FloatResultNode(1.2).addOther(new FloatResultNodeVector());
+
+- StringResultNodeVector strvec = new StringResultNodeVector().add(new StringResultNode("foo"));
+- StringResultNodeVector strveclarge = new StringResultNodeVector().add(new StringResultNode("foolio"));
+- StringResultNodeVector strvecsmall = new StringResultNodeVector().add(new StringResultNode("bario"));
++ StringResultNodeVector strvec = new StringResultNode("foo").addOther(new StringResultNodeVector());
++ StringResultNodeVector strveclarge = new StringResultNode("foolio").addOther(new StringResultNodeVector());
++ StringResultNodeVector strvecsmall = new StringResultNode("bario").addOther(new StringResultNodeVector());
+
+- RawResultNodeVector rawvec = new RawResultNodeVector().add(new RawResultNode(new byte[]{6, 9}));
+- RawResultNodeVector rawveclarge = new RawResultNodeVector().add(new RawResultNode(new byte[]{9, 6}));
+- RawResultNodeVector rawvecsmall = new RawResultNodeVector().add(new RawResultNode(new byte[]{6, 6}));
++ RawResultNodeVector rawvec = new RawResultNode(new byte[]{6, 9}).addOther(new RawResultNodeVector());
++ RawResultNodeVector rawveclarge = new RawResultNode(new byte[]{9, 6}).addOther(new RawResultNodeVector());
++ RawResultNodeVector rawvecsmall = new RawResultNode(new byte[]{6, 6}).addOther(new RawResultNodeVector());
+
+ assertClassCmp(int8vec);
+ assertClassCmp(int16vec);
+@@ -164,12 +163,12 @@ public class ResultNodeVectorTestCase extends ResultNodeTest {
+
+ @Test
+ public void testSerialize() throws InstantiationException, IllegalAccessException {
+- assertCorrectSerialization(new FloatResultNodeVector().add(new FloatResultNode(1.1)).add(new FloatResultNode(3.3)), new FloatResultNodeVector());
+- assertCorrectSerialization(new IntegerResultNodeVector().add(new IntegerResultNode(1)).add(new IntegerResultNode(3)), new IntegerResultNodeVector());
+- assertCorrectSerialization(new Int16ResultNodeVector().add(new Int16ResultNode((short) 1)).add(new Int16ResultNode((short) 3)), new Int16ResultNodeVector());
+- assertCorrectSerialization(new Int8ResultNodeVector().add(new Int8ResultNode((byte) 1)).add(new Int8ResultNode((byte) 3)), new Int8ResultNodeVector());
+- assertCorrectSerialization(new StringResultNodeVector().add(new StringResultNode("foo")).add(new StringResultNode("bar")), new StringResultNodeVector());
+- assertCorrectSerialization(new RawResultNodeVector().add(new RawResultNode(new byte[]{6, 9})).add(new RawResultNode(new byte[]{9, 6})), new RawResultNodeVector());
+- assertCorrectSerialization(new BoolResultNodeVector().add(new BoolResultNode(true)).add(new BoolResultNode(false)), new BoolResultNodeVector());
++ assertCorrectSerialization(new FloatResultNode(1.1).addOther(new FloatResultNodeVector()).addOther(new FloatResultNode(3.3)), new FloatResultNodeVector());
++ assertCorrectSerialization(new IntegerResultNode(1).addOther(new IntegerResultNodeVector()).addOther(new IntegerResultNode(3)), new IntegerResultNodeVector());
++ assertCorrectSerialization(new Int16ResultNode((short) 1).addOther(new Int16ResultNodeVector()).addOther(new Int16ResultNode((short) 3)), new Int16ResultNodeVector());
++ assertCorrectSerialization(new Int8ResultNode((byte) 1).addOther(new Int8ResultNodeVector()).addOther(new Int8ResultNode((byte) 3)), new Int8ResultNodeVector());
++ assertCorrectSerialization(new StringResultNode("foo").addOther(new StringResultNodeVector()).addOther(new StringResultNode("bar")), new StringResultNodeVector());
++ assertCorrectSerialization(new RawResultNode(new byte[]{6, 9}).addOther(new RawResultNodeVector()).addOther(new RawResultNode(new byte[]{9, 6})), new RawResultNodeVector());
++ assertCorrectSerialization(new BoolResultNode(true).add(new BoolResultNodeVector()).add(new BoolResultNode(false)), new BoolResultNodeVector());
+ }
+ }
diff --git a/vespa/vespa b/vespa/vespa
new file mode 160000
index 00000000..712a9d4e
--- /dev/null
+++ b/vespa/vespa
@@ -0,0 +1 @@
+Subproject commit 712a9d4e03d671b29a1efad5a0a865ab71058f83