Replace more uses of MapBuilder with JCL maps (#96649)
parent 0f3f066960
commit 17fd6372bc
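The pattern throughout this commit: chained `org.elasticsearch.common.collect.MapBuilder` calls are replaced with the JDK's built-in immutable map factories (`Map.of`, `Map.ofEntries`) — the "JCL maps" of the title. A minimal sketch of the before/after shape (the MapBuilder half is shown as a comment, since it needs the Elasticsearch dependency; the class name is illustrative):

```java
import java.util.Map;

public class MapOfMigrationSketch {
    public static void main(String[] args) {
        // Before: Elasticsearch's builder, removed at each call site below.
        //   Map<String, String> m = MapBuilder.<String, String>newMapBuilder()
        //       .put("a", "x-y-z")
        //       .map();

        // After: the JDK factory method (Java 9+), returning an immutable map.
        Map<String, String> m = Map.of("a", "x-y-z");
        System.out.println(m); // {a=x-y-z}
    }
}
```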
@@ -8,7 +8,6 @@

 package org.elasticsearch.dissect;

-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.test.ESTestCase;

 import java.nio.charset.StandardCharsets;
@@ -57,7 +56,7 @@ public class DissectMatchTests extends ESTestCase {
         dissectMatch.add(new DissectKey("+a"), "z");
         Map<String, String> results = dissectMatch.getResults();
         assertThat(dissectMatch.isValid(results), equalTo(true));
-        assertThat(results, equalTo(MapBuilder.newMapBuilder().put("a", "x-y-z").map()));
+        assertThat(results, equalTo(Map.of("a", "x-y-z")));
     }

     public void testAppendWithOrder() {
@@ -67,7 +66,7 @@ public class DissectMatchTests extends ESTestCase {
         dissectMatch.add(new DissectKey("+a/1"), "z");
         Map<String, String> results = dissectMatch.getResults();
         assertThat(dissectMatch.isValid(results), equalTo(true));
-        assertThat(results, equalTo(MapBuilder.newMapBuilder().put("a", "y-z-x").map()));
+        assertThat(results, equalTo(Map.of("a", "y-z-x")));
     }

     public void testReference() {
@@ -76,7 +75,7 @@ public class DissectMatchTests extends ESTestCase {
         dissectMatch.add(new DissectKey("*a"), "y");
         Map<String, String> results = dissectMatch.getResults();
         assertThat(dissectMatch.isValid(results), equalTo(true));
-        assertThat(results, equalTo(MapBuilder.newMapBuilder().put("y", "x").map()));
+        assertThat(results, equalTo(Map.of("y", "x")));
     }

 }
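These assertions keep passing after the swap because `java.util.Map` defines `equals()` structurally, over the entry set, so an immutable `Map.of` compares equal to whatever map implementation `getResults()` returns. A quick check of that contract (the `HashMap` is a stand-in for the real result):

```java
import java.util.HashMap;
import java.util.Map;

public class MapEqualsSketch {
    public static void main(String[] args) {
        Map<String, String> results = new HashMap<>(); // stand-in for getResults()
        results.put("a", "x-y-z");

        // Equality is defined by the Map interface, not the implementation.
        System.out.println(results.equals(Map.of("a", "x-y-z"))); // true
    }
}
```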
@@ -8,7 +8,6 @@

 package org.elasticsearch.ingest.common;

-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.dissect.DissectException;
 import org.elasticsearch.ingest.IngestDocument;
 import org.elasticsearch.ingest.Processor;
@@ -45,7 +44,7 @@ public class DissectProcessorTests extends ESTestCase {
             1,
             null,
             null,
-            MapBuilder.<String, Object>newMapBuilder().put("message", "foo,bar,baz").put("a", "willgetstompped").map()
+            Map.of("message", "foo,bar,baz", "a", "willgetstompped")
         );
         assertThat(ingestDocument.getFieldValue("a", String.class), equalTo("willgetstompped"));
         DissectProcessor dissectProcessor = new DissectProcessor("", null, "message", "%{a},%{b},%{c}", "", true);
@@ -10,7 +10,6 @@ import org.elasticsearch.TransportVersion;
 import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.support.tasks.BaseTasksRequest;
 import org.elasticsearch.action.support.tasks.BaseTasksResponse;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -157,7 +156,7 @@ public class GetDatafeedRunningStateAction extends ActionType<GetDatafeedRunning
         }

         public static Response fromTaskAndState(String datafeedId, RunningState runningState) {
-            return new Response(MapBuilder.<String, RunningState>newMapBuilder().put(datafeedId, runningState).map());
+            return new Response(Map.of(datafeedId, runningState));
         }

         public Response(StreamInput in) throws IOException {
@@ -14,7 +14,6 @@ import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction;
 import org.elasticsearch.action.admin.indices.rollover.RolloverAction;
 import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.transport.TcpTransport;
 import org.elasticsearch.xpack.core.ilm.action.GetLifecycleAction;
 import org.elasticsearch.xpack.core.ilm.action.PutLifecycleAction;
@@ -45,6 +44,8 @@ import java.util.Set;
 import java.util.function.BiConsumer;
 import java.util.stream.Collectors;

+import static java.util.Map.entry;
+
 public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>> {
     /** "Security Solutions" only legacy signals index */
     public static final String ALERTS_LEGACY_INDEX = ".siem-signals*";
@@ -114,9 +115,9 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
     }

     private static Map<String, RoleDescriptor> initializeReservedRoles() {
-        return MapBuilder.<String, RoleDescriptor>newMapBuilder()
-            .put("superuser", SUPERUSER_ROLE_DESCRIPTOR)
-            .put(
+        return Map.ofEntries(
+            entry("superuser", SUPERUSER_ROLE_DESCRIPTOR),
+            entry(
                 "transport_client",
                 new RoleDescriptor(
                     "transport_client",
@@ -125,13 +126,13 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     null,
                     MetadataUtils.DEFAULT_RESERVED_METADATA
                 )
-            )
-            .put("kibana_admin", kibanaAdminUser("kibana_admin", MetadataUtils.DEFAULT_RESERVED_METADATA))
-            .put(
+            ),
+            entry("kibana_admin", kibanaAdminUser("kibana_admin", MetadataUtils.DEFAULT_RESERVED_METADATA)),
+            entry(
                 "kibana_user",
                 kibanaAdminUser("kibana_user", MetadataUtils.getDeprecatedReservedMetadata("Please use the [kibana_admin] role instead"))
-            )
-            .put(
+            ),
+            entry(
                 "monitoring_user",
                 new RoleDescriptor(
                     "monitoring_user",
@@ -167,8 +168,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     : null,
                     null
                 )
-            )
-            .put(
+            ),
+            entry(
                 "remote_monitoring_agent",
                 new RoleDescriptor(
                     "remote_monitoring_agent",
@@ -190,8 +191,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     null,
                     MetadataUtils.DEFAULT_RESERVED_METADATA
                 )
-            )
-            .put(
+            ),
+            entry(
                 "remote_monitoring_collector",
                 new RoleDescriptor(
                     "remote_monitoring_collector",
@@ -209,8 +210,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     MetadataUtils.DEFAULT_RESERVED_METADATA,
                     null
                 )
-            )
-            .put(
+            ),
+            entry(
                 "ingest_admin",
                 new RoleDescriptor(
                     "ingest_admin",
@@ -219,9 +220,9 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     null,
                     MetadataUtils.DEFAULT_RESERVED_METADATA
                 )
-            )
+            ),
             // reporting_user doesn't have any privileges in Elasticsearch, and Kibana authorizes privileges based on this role
-            .put(
+            entry(
                 "reporting_user",
                 new RoleDescriptor(
                     "reporting_user",
@@ -233,9 +234,9 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     MetadataUtils.getDeprecatedReservedMetadata("Please use Kibana feature privileges instead"),
                     null
                 )
-            )
-            .put(KibanaSystemUser.ROLE_NAME, kibanaSystemRoleDescriptor(KibanaSystemUser.ROLE_NAME))
-            .put(
+            ),
+            entry(KibanaSystemUser.ROLE_NAME, kibanaSystemRoleDescriptor(KibanaSystemUser.ROLE_NAME)),
+            entry(
                 "logstash_system",
                 new RoleDescriptor(
                     "logstash_system",
@@ -244,8 +245,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     null,
                     MetadataUtils.DEFAULT_RESERVED_METADATA
                 )
-            )
-            .put(
+            ),
+            entry(
                 "beats_admin",
                 new RoleDescriptor(
                     "beats_admin",
@@ -255,8 +256,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     null,
                     MetadataUtils.DEFAULT_RESERVED_METADATA
                 )
-            )
-            .put(
+            ),
+            entry(
                 UsernamesField.BEATS_ROLE,
                 new RoleDescriptor(
                     UsernamesField.BEATS_ROLE,
@@ -269,8 +270,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     null,
                     MetadataUtils.DEFAULT_RESERVED_METADATA
                 )
-            )
-            .put(
+            ),
+            entry(
                 UsernamesField.APM_ROLE,
                 new RoleDescriptor(
                     UsernamesField.APM_ROLE,
@@ -283,8 +284,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     null,
                     MetadataUtils.DEFAULT_RESERVED_METADATA
                 )
-            )
-            .put(
+            ),
+            entry(
                 "apm_user",
                 new RoleDescriptor(
                     "apm_user",
@@ -337,8 +338,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     MetadataUtils.getDeprecatedReservedMetadata("This role will be removed in 8.0"),
                     null
                 )
-            )
-            .put(
+            ),
+            entry(
                 "machine_learning_user",
                 new RoleDescriptor(
                     "machine_learning_user",
@@ -370,8 +371,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     MetadataUtils.DEFAULT_RESERVED_METADATA,
                     null
                 )
-            )
-            .put(
+            ),
+            entry(
                 "machine_learning_admin",
                 new RoleDescriptor(
                     "machine_learning_admin",
@@ -404,9 +405,9 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     MetadataUtils.DEFAULT_RESERVED_METADATA,
                     null
                 )
-            )
+            ),
             // DEPRECATED: to be removed in 9.0.0
-            .put(
+            entry(
                 "data_frame_transforms_admin",
                 new RoleDescriptor(
                     "data_frame_transforms_admin",
@@ -431,9 +432,9 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     MetadataUtils.getDeprecatedReservedMetadata("Please use the [transform_admin] role instead"),
                     null
                 )
-            )
+            ),
             // DEPRECATED: to be removed in 9.0.0
-            .put(
+            entry(
                 "data_frame_transforms_user",
                 new RoleDescriptor(
                     "data_frame_transforms_user",
@@ -458,8 +459,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     MetadataUtils.getDeprecatedReservedMetadata("Please use the [transform_user] role instead"),
                     null
                 )
-            )
-            .put(
+            ),
+            entry(
                 "transform_admin",
                 new RoleDescriptor(
                     "transform_admin",
@@ -479,8 +480,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     MetadataUtils.DEFAULT_RESERVED_METADATA,
                     null
                 )
-            )
-            .put(
+            ),
+            entry(
                 "transform_user",
                 new RoleDescriptor(
                     "transform_user",
@@ -500,8 +501,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     MetadataUtils.DEFAULT_RESERVED_METADATA,
                     null
                 )
-            )
-            .put(
+            ),
+            entry(
                 "watcher_admin",
                 new RoleDescriptor(
                     "watcher_admin",
@@ -515,8 +516,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     null,
                     MetadataUtils.DEFAULT_RESERVED_METADATA
                 )
-            )
-            .put(
+            ),
+            entry(
                 "watcher_user",
                 new RoleDescriptor(
                     "watcher_user",
@@ -534,8 +535,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     null,
                     MetadataUtils.DEFAULT_RESERVED_METADATA
                 )
-            )
-            .put(
+            ),
+            entry(
                 "logstash_admin",
                 new RoleDescriptor(
                     "logstash_admin",
@@ -549,16 +550,16 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     null,
                     MetadataUtils.DEFAULT_RESERVED_METADATA
                 )
-            )
-            .put(
+            ),
+            entry(
                 "rollup_user",
                 new RoleDescriptor("rollup_user", new String[] { "monitor_rollup" }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)
-            )
-            .put(
+            ),
+            entry(
                 "rollup_admin",
                 new RoleDescriptor("rollup_admin", new String[] { "manage_rollup" }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)
-            )
-            .put(
+            ),
+            entry(
                 "snapshot_user",
                 new RoleDescriptor(
                     "snapshot_user",
@@ -575,8 +576,8 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     MetadataUtils.DEFAULT_RESERVED_METADATA,
                     null
                 )
-            )
-            .put(
+            ),
+            entry(
                 "enrich_user",
                 new RoleDescriptor(
                     "enrich_user",
@@ -586,10 +587,10 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
                     null,
                     MetadataUtils.DEFAULT_RESERVED_METADATA
                 )
-            )
-            .put("viewer", buildViewerRoleDescriptor())
-            .put("editor", buildEditorRoleDescriptor())
-            .immutableMap();
+            ),
+            entry("viewer", buildViewerRoleDescriptor()),
+            entry("editor", buildEditorRoleDescriptor())
+        );
     }

     private static RoleDescriptor buildViewerRoleDescriptor() {
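ReservedRolesStore is the one site that cannot use `Map.of` directly: the `Map.of` overloads stop at 10 key-value pairs, and the reserved-roles map is far larger. Hence the switch to `Map.ofEntries` with the statically imported `entry()` helper, whose shape is sketched below (values are illustrative integers, not real role descriptors):

```java
import static java.util.Map.entry;

import java.util.Map;

public class OfEntriesSketch {
    public static void main(String[] args) {
        // Map.ofEntries takes varargs of entries, so it scales past the
        // 10-pair ceiling of the fixed-arity Map.of overloads.
        Map<String, Integer> roles = Map.ofEntries(
            entry("superuser", 1),      // illustrative values
            entry("kibana_admin", 2),
            entry("transform_admin", 3)
        );
        System.out.println(roles.size()); // 3
    }
}
```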
@@ -17,7 +17,6 @@ import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.support.WriteRequest;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.core.Strings;
@@ -331,34 +330,22 @@ public class ClassificationIT extends MlNativeDataFrameAnalyticsIntegTestCase {
             Arrays.asList(
                 new OneHotEncoding(
                     ALIAS_TO_KEYWORD_FIELD,
-                    MapBuilder.<String, String>newMapBuilder()
-                        .put(KEYWORD_FIELD_VALUES.get(0), "cat_column_custom")
-                        .put(KEYWORD_FIELD_VALUES.get(1), "dog_column_custom")
-                        .map(),
+                    Map.of(KEYWORD_FIELD_VALUES.get(0), "cat_column_custom", KEYWORD_FIELD_VALUES.get(1), "dog_column_custom"),
                     true
                 ),
                 new OneHotEncoding(
                     ALIAS_TO_NESTED_FIELD,
-                    MapBuilder.<String, String>newMapBuilder()
-                        .put(KEYWORD_FIELD_VALUES.get(0), "cat_column_custom_1")
-                        .put(KEYWORD_FIELD_VALUES.get(1), "dog_column_custom_1")
-                        .map(),
+                    Map.of(KEYWORD_FIELD_VALUES.get(0), "cat_column_custom_1", KEYWORD_FIELD_VALUES.get(1), "dog_column_custom_1"),
                     true
                 ),
                 new OneHotEncoding(
                     NESTED_FIELD,
-                    MapBuilder.<String, String>newMapBuilder()
-                        .put(KEYWORD_FIELD_VALUES.get(0), "cat_column_custom_2")
-                        .put(KEYWORD_FIELD_VALUES.get(1), "dog_column_custom_2")
-                        .map(),
+                    Map.of(KEYWORD_FIELD_VALUES.get(0), "cat_column_custom_2", KEYWORD_FIELD_VALUES.get(1), "dog_column_custom_2"),
                     true
                 ),
                 new OneHotEncoding(
                     TEXT_FIELD,
-                    MapBuilder.<String, String>newMapBuilder()
-                        .put(KEYWORD_FIELD_VALUES.get(0), "cat_column_custom_3")
-                        .put(KEYWORD_FIELD_VALUES.get(1), "dog_column_custom_3")
-                        .map(),
+                    Map.of(KEYWORD_FIELD_VALUES.get(0), "cat_column_custom_3", KEYWORD_FIELD_VALUES.get(1), "dog_column_custom_3"),
                     true
                 )
             ),
@@ -1032,18 +1019,12 @@ public class ClassificationIT extends MlNativeDataFrameAnalyticsIntegTestCase {
             Arrays.asList(
                 new OneHotEncoding(
                     NESTED_FIELD,
-                    MapBuilder.<String, String>newMapBuilder()
-                        .put(KEYWORD_FIELD_VALUES.get(0), "cat_column_custom_2")
-                        .put(KEYWORD_FIELD_VALUES.get(1), "dog_column_custom_2")
-                        .map(),
+                    Map.of(KEYWORD_FIELD_VALUES.get(0), "cat_column_custom_2", KEYWORD_FIELD_VALUES.get(1), "dog_column_custom_2"),
                     true
                 ),
                 new OneHotEncoding(
                     TEXT_FIELD,
-                    MapBuilder.<String, String>newMapBuilder()
-                        .put(KEYWORD_FIELD_VALUES.get(0), "cat_column_custom_3")
-                        .put(KEYWORD_FIELD_VALUES.get(1), "dog_column_custom_3")
-                        .map(),
+                    Map.of(KEYWORD_FIELD_VALUES.get(0), "cat_column_custom_3", KEYWORD_FIELD_VALUES.get(1), "dog_column_custom_3"),
                     true
                 )
             ),
@@ -15,7 +15,6 @@ import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.support.WriteRequest;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.Strings;
 import org.elasticsearch.index.query.QueryBuilders;
@@ -130,17 +129,8 @@ public class DataFrameAnalysisCustomFeatureIT extends MlNativeDataFrameAnalytics
             new Multi(
                 new PreProcessor[] {
                     new NGram(TEXT_FIELD, "ngram", new int[] { 2 }, 0, 3, true),
-                    new FrequencyEncoding(
-                        "ngram.20",
-                        "frequency",
-                        MapBuilder.<String, Double>newMapBuilder().put("ca", 5.0).put("do", 1.0).map(),
-                        true
-                    ),
-                    new OneHotEncoding(
-                        "ngram.21",
-                        MapBuilder.<String, String>newMapBuilder().put("at", "is_cat").map(),
-                        true
-                    ) },
+                    new FrequencyEncoding("ngram.20", "frequency", Map.of("ca", 5.0, "do", 1.0), true),
+                    new OneHotEncoding("ngram.21", Map.of("at", "is_cat"), true) },
                 true
             )
         ),
@@ -32,7 +32,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.routing.allocation.AllocationService;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.breaker.CircuitBreaker;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
@@ -1604,10 +1603,12 @@ public class MachineLearning extends Plugin

     @Override
     public Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
-        return MapBuilder.<String, AnalysisProvider<CharFilterFactory>>newMapBuilder()
-            .put(FirstNonBlankLineCharFilter.NAME, FirstNonBlankLineCharFilterFactory::new)
-            .put(FirstLineWithLettersCharFilter.NAME, FirstLineWithLettersCharFilterFactory::new)
-            .map();
+        return Map.of(
+            FirstNonBlankLineCharFilter.NAME,
+            FirstNonBlankLineCharFilterFactory::new,
+            FirstLineWithLettersCharFilter.NAME,
+            FirstLineWithLettersCharFilterFactory::new
+        );
     }

     @Override
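One behavioral note on this kind of change: MapBuilder exposed both `map()` and `immutableMap()` (both appear in the removals above), while the `Map.of` / `Map.ofEntries` results are always unmodifiable, so any later mutation of a returned map would now throw; presumably these call sites only read the maps. A sketch of that difference:

```java
import java.util.Map;

public class ImmutableMapSketch {
    public static void main(String[] args) {
        Map<String, String> m = Map.of("key", "value");
        try {
            m.put("other", "value"); // the JDK factory maps reject mutation
        } catch (UnsupportedOperationException e) {
            System.out.println("Map.of results reject mutation");
        }
    }
}
```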
@@ -119,10 +119,7 @@ public class TransportStartDatafeedActionTests extends ESTestCase {
             .put("old_cluster_1", Version.V_7_0_0)
             .map();

-        Map<String, Object> field = Collections.singletonMap(
-            "runtime_field_foo",
-            MapBuilder.<String, Object>newMapBuilder().put("type", "keyword").put("script", "").map()
-        );
+        Map<String, Object> field = Map.of("runtime_field_foo", Map.of("type", "keyword", "script", ""));

         DatafeedConfig config = new DatafeedConfig.Builder(DatafeedConfigTests.createRandomizedDatafeedConfig("foo")).setRuntimeMappings(
             field
@@ -14,7 +14,6 @@ import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.client.internal.Client;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.index.query.QueryBuilder;
@@ -163,11 +162,7 @@ public class CompositeAggregationDataExtractorTests extends ESTestCase {

         TestDataExtractor extractor = new TestDataExtractor(1000L, 4000L);

-        SearchResponse response = createSearchResponse(
-            "buckets",
-            compositeBucket,
-            MapBuilder.<String, Object>newMapBuilder().put("time_bucket", 4000L).put("airline", "d").map()
-        );
+        SearchResponse response = createSearchResponse("buckets", compositeBucket, Map.of("time_bucket", 4000L, "airline", "d"));
         extractor.setNextResponse(response);

         assertThat(extractor.hasNext(), is(true));
@@ -253,11 +248,7 @@ public class CompositeAggregationDataExtractorTests extends ESTestCase {

         TestDataExtractor extractor = new TestDataExtractor(1000L, timestamp + 1000 + 1);

-        SearchResponse response = createSearchResponse(
-            "buckets",
-            buckets,
-            MapBuilder.<String, Object>newMapBuilder().put("time_bucket", 1000L).put("airline", "d").map()
-        );
+        SearchResponse response = createSearchResponse("buckets", buckets, Map.of("time_bucket", 1000L, "airline", "d"));
         extractor.setNextResponse(response);
         extractor.cancel();
         // We should have next right now as we have not yet determined if we have handled a page or not
@@ -286,11 +277,7 @@ public class CompositeAggregationDataExtractorTests extends ESTestCase {

         TestDataExtractor extractor = new TestDataExtractor(1000L, timestamp + 1000 + 1);

-        SearchResponse response = createSearchResponse(
-            "buckets",
-            buckets,
-            MapBuilder.<String, Object>newMapBuilder().put("time_bucket", 1000L).put("airline", "d").map()
-        );
+        SearchResponse response = createSearchResponse("buckets", buckets, Map.of("time_bucket", 1000L, "airline", "d"));
         extractor.setNextResponse(response);

         assertThat(extractor.hasNext(), is(true));
@@ -319,11 +306,7 @@ public class CompositeAggregationDataExtractorTests extends ESTestCase {
                 )
             );
         }
-        response = createSearchResponse(
-            "buckets",
-            buckets,
-            MapBuilder.<String, Object>newMapBuilder().put("time_bucket", 3000L).put("airline", "a").map()
-        );
+        response = createSearchResponse("buckets", buckets, Map.of("time_bucket", 3000L, "airline", "a"));
         extractor.setNextResponse(response);
         extractor.cancel();
         assertThat(extractor.hasNext(), is(true));
@@ -15,7 +15,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodeRole;
 import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.common.Randomness;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.core.Tuple;
@@ -1225,10 +1224,12 @@ public class JobNodeSelectorTests extends ESTestCase {
                 "filled_ml_node_name",
                 "filled_ml_node_id",
                 new TransportAddress(InetAddress.getLoopbackAddress(), 9301),
-                MapBuilder.<String, String>newMapBuilder()
-                    .put(MachineLearning.MAX_JVM_SIZE_NODE_ATTR, "10")
-                    .put(MachineLearning.MACHINE_MEMORY_NODE_ATTR, Long.toString(ByteSizeValue.ofGb(30).getBytes()))
-                    .map(),
+                Map.of(
+                    MachineLearning.MAX_JVM_SIZE_NODE_ATTR,
+                    "10",
+                    MachineLearning.MACHINE_MEMORY_NODE_ATTR,
+                    Long.toString(ByteSizeValue.ofGb(30).getBytes())
+                ),
                 ROLES_WITH_ML
             )
         )
@@ -1237,10 +1238,12 @@ public class JobNodeSelectorTests extends ESTestCase {
                 "not_filled_ml_node",
                 "not_filled_ml_node_id",
                 new TransportAddress(InetAddress.getLoopbackAddress(), 9302),
-                MapBuilder.<String, String>newMapBuilder()
-                    .put(MachineLearning.MAX_JVM_SIZE_NODE_ATTR, "10")
-                    .put(MachineLearning.MACHINE_MEMORY_NODE_ATTR, Long.toString(ByteSizeValue.ofGb(30).getBytes()))
-                    .map(),
+                Map.of(
+                    MachineLearning.MAX_JVM_SIZE_NODE_ATTR,
+                    "10",
+                    MachineLearning.MACHINE_MEMORY_NODE_ATTR,
+                    Long.toString(ByteSizeValue.ofGb(30).getBytes())
+                ),
                 ROLES_WITH_ML
             )
         )
@@ -1249,10 +1252,12 @@ public class JobNodeSelectorTests extends ESTestCase {
                 "not_filled_smaller_ml_node",
                 "not_filled_smaller_ml_node_id",
                 new TransportAddress(InetAddress.getLoopbackAddress(), 9303),
-                MapBuilder.<String, String>newMapBuilder()
-                    .put(MachineLearning.MAX_JVM_SIZE_NODE_ATTR, "10")
-                    .put(MachineLearning.MACHINE_MEMORY_NODE_ATTR, Long.toString(ByteSizeValue.ofGb(10).getBytes()))
-                    .map(),
+                Map.of(
+                    MachineLearning.MAX_JVM_SIZE_NODE_ATTR,
+                    "10",
+                    MachineLearning.MACHINE_MEMORY_NODE_ATTR,
+                    Long.toString(ByteSizeValue.ofGb(10).getBytes())
+                ),
                 ROLES_WITH_ML
             )
         )
@@ -11,7 +11,6 @@ import org.apache.logging.log4j.Logger;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.RefCountingRunnable;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.logging.DeprecationCategory;
 import org.elasticsearch.common.logging.DeprecationLogger;
@@ -316,10 +315,7 @@ public class Realms extends AbstractLifecycleComponent implements Iterable<Realm
         assert ReservedRealm.TYPE.equals(type) == false;
         realmMap.compute(type, (key, value) -> {
             if (value == null) {
-                return MapBuilder.<String, Object>newMapBuilder()
-                    .put("enabled", false)
-                    .put("available", isRealmTypeAvailable(licenseStateSnapshot, type))
-                    .map();
+                return Map.of("enabled", false, "available", isRealmTypeAvailable(licenseStateSnapshot, type));
             }

             assert value instanceof Map;
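Another difference worth checking in reviews like this: the JDK factories reject null keys and values outright, where a HashMap-backed builder accepted them. That looks safe in the Realms hunk above, since both values are non-null booleans, but it is a per-call-site check. A sketch:

```java
import java.util.Map;

public class NullRejectionSketch {
    public static void main(String[] args) {
        try {
            // Map.of throws NullPointerException for null keys or values.
            Map.of("enabled", (Object) null);
        } catch (NullPointerException e) {
            System.out.println("Map.of rejects nulls");
        }
    }
}
```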
@@ -9,7 +9,6 @@ package org.elasticsearch.xpack.security;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.PlainActionFuture;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.license.MockLicenseState;
@@ -149,10 +148,7 @@ public class SecurityInfoTransportActionTests extends ESTestCase {
         final boolean httpIpFilterEnabled = randomBoolean();
         final boolean transportIPFilterEnabled = randomBoolean();
         when(ipFilter.usageStats()).thenReturn(
-            MapBuilder.<String, Object>newMapBuilder()
-                .put("http", Collections.singletonMap("enabled", httpIpFilterEnabled))
-                .put("transport", Collections.singletonMap("enabled", transportIPFilterEnabled))
-                .map()
+            Map.of("http", Map.of("enabled", httpIpFilterEnabled), "transport", Map.of("enabled", transportIPFilterEnabled))
         );

         final boolean rolesStoreEnabled = randomBoolean();
@@ -32,7 +32,6 @@ import org.elasticsearch.action.update.UpdateRequestBuilder;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.UUIDs;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.core.PathUtils;
@@ -250,10 +249,12 @@ public class TransportSamlLogoutActionTests extends SamlTestCase {
     public void testLogoutInvalidatesToken() throws Exception {
         final String session = randomAlphaOfLengthBetween(12, 18);
         final String nameId = randomAlphaOfLengthBetween(6, 16);
-        final Map<String, Object> userMetadata = MapBuilder.<String, Object>newMapBuilder()
-            .put(SamlRealm.USER_METADATA_NAMEID_FORMAT, NameID.TRANSIENT)
-            .put(SamlRealm.USER_METADATA_NAMEID_VALUE, nameId)
-            .map();
+        final Map<String, Object> userMetadata = Map.of(
+            SamlRealm.USER_METADATA_NAMEID_FORMAT,
+            NameID.TRANSIENT,
+            SamlRealm.USER_METADATA_NAMEID_VALUE,
+            nameId
+        );
         final User user = new User("punisher", new String[] { "superuser" }, null, null, userMetadata, true);
         final Authentication.RealmRef realmRef = new Authentication.RealmRef(samlRealm.name(), SamlRealmSettings.TYPE, "node01");
         final Map<String, Object> tokenMetadata = samlRealm.createTokenMetadata(
@@ -12,7 +12,6 @@ import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -147,7 +146,7 @@ public class EmailActionTests extends ESTestCase {
         Map<String, Object> data = new HashMap<>();
         Payload payload = new Payload.Simple(data);

-        Map<String, Object> metadata = MapBuilder.<String, Object>newMapBuilder().put("_key", "_val").map();
+        Map<String, Object> metadata = Map.of("_key", "_val");

         ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);

@@ -589,7 +588,7 @@ public class EmailActionTests extends ESTestCase {

         ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
         Wid wid = new Wid(randomAlphaOfLength(5), now);
-        Map<String, Object> metadata = MapBuilder.<String, Object>newMapBuilder().put("_key", "_val").map();
+        Map<String, Object> metadata = Map.of("_key", "_val");
         WatchExecutionContext ctx = mockExecutionContextBuilder("watch1").wid(wid)
             .payload(new Payload.Simple())
             .time("watch1", now)
@@ -615,7 +614,7 @@ public class EmailActionTests extends ESTestCase {
     private WatchExecutionContext createWatchExecutionContext() {
         ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
         Wid wid = new Wid(randomAlphaOfLength(5), now);
-        Map<String, Object> metadata = MapBuilder.<String, Object>newMapBuilder().put("_key", "_val").map();
+        Map<String, Object> metadata = Map.of("_key", "_val");
         return mockExecutionContextBuilder("watch1").wid(wid)
             .payload(new Payload.Simple())
             .time("watch1", now)
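The Watcher test hunks also show how much ceremony the old explicit type witness added: `MapBuilder.<String, Object>newMapBuilder()` needed the witness because inference ran from the builder, whereas `Map.of` infers its type arguments from the assignment target. Sketch:

```java
import java.util.Map;

public class TargetTypingSketch {
    public static void main(String[] args) {
        // The target type Map<String, Object> drives inference, so no
        // <String, Object> witness is needed even though both arguments
        // are Strings.
        Map<String, Object> metadata = Map.of("_key", "_val");
        System.out.println(metadata);
    }
}
```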
@@ -16,7 +16,6 @@ import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.client.internal.Client;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.time.DateUtils;
 import org.elasticsearch.common.util.Maps;
@@ -186,7 +185,7 @@ public class IndexActionTests extends ESTestCase {

         // using doc_id with bulk fails regardless of using ID
         expectThrows(IllegalStateException.class, () -> {
-            final List<Map<?, ?>> idList = Arrays.asList(docWithId, MapBuilder.newMapBuilder().put("foo", "bar1").put("_id", "1").map());
+            final List<Map<?, ?>> idList = Arrays.asList(docWithId, Map.of("foo", "bar1", "_id", "1"));

             final Object list = randomFrom(
                 new Map<?, ?>[] { singletonMap("foo", "bar"), singletonMap("foo", "bar1") },
@@ -6,7 +6,6 @@
  */
 package org.elasticsearch.xpack.watcher.actions.jira;

-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.settings.MockSecureSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.script.ScriptService;
@@ -285,7 +284,7 @@ public class ExecutableJiraActionTests extends ESTestCase {
     private WatchExecutionContext createWatchExecutionContext() {
         ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
         Wid wid = new Wid(randomAlphaOfLength(5), now);
-        Map<String, Object> metadata = MapBuilder.<String, Object>newMapBuilder().put("_key", "_val").map();
+        Map<String, Object> metadata = Map.of("_key", "_val");
         return mockExecutionContextBuilder("watch1").wid(wid)
             .payload(new Payload.Simple())
             .time("watch1", now)
@@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.actions.pagerduty;

 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xcontent.ToXContent;
@@ -76,7 +75,7 @@ public class PagerDutyActionTests extends ESTestCase {
         Map<String, Object> data = new HashMap<>();
         Payload payload = new Payload.Simple(data);

-        Map<String, Object> metadata = MapBuilder.<String, Object>newMapBuilder().put("_key", "_val").map();
+        Map<String, Object> metadata = Map.of("_key", "_val");

         ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);

@@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.actions.slack;

 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xcontent.ToXContent;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -73,7 +72,7 @@ public class SlackActionTests extends ESTestCase {
         Map<String, Object> data = new HashMap<>();
         Payload payload = new Payload.Simple(data);

-        Map<String, Object> metadata = MapBuilder.<String, Object>newMapBuilder().put("_key", "_val").map();
+        Map<String, Object> metadata = Map.of("_key", "_val");

         ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);

@@ -9,7 +9,6 @@ package org.elasticsearch.xpack.watcher.input.http;
 import io.netty.handler.codec.http.HttpHeaders;

 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.test.ESTestCase;
@@ -263,9 +262,7 @@ public class HttpInputTests extends ESTestCase {
         assertThat(result.type(), equalTo(HttpInput.TYPE));
         List<String> expectedHeaderValues = new ArrayList<>();
         expectedHeaderValues.add(headerValue);
-        Map<String, Object> expectedHeaderMap = MapBuilder.<String, Object>newMapBuilder()
-            .put(headerName.toLowerCase(Locale.ROOT), expectedHeaderValues)
-            .map();
+        Map<String, Object> expectedHeaderMap = Map.of(headerName.toLowerCase(Locale.ROOT), expectedHeaderValues);
         assertThat(result.payload().data(), hasKey("_headers"));
         assertThat(result.payload().data().get("_headers"), equalTo(expectedHeaderMap));
     }
@@ -9,7 +9,6 @@ package org.elasticsearch.xpack.watcher.notification.email.attachment;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESTestCase;
@@ -179,7 +178,7 @@ public class HttpEmailAttachementParserTests extends ESTestCase {
     private WatchExecutionContext createWatchExecutionContext() {
         ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
         Wid wid = new Wid(randomAlphaOfLength(5), now);
-        Map<String, Object> metadata = MapBuilder.<String, Object>newMapBuilder().put("_key", "_val").map();
+        Map<String, Object> metadata = Map.of("_key", "_val");
         return mockExecutionContextBuilder("watch1").wid(wid)
             .payload(new Payload.Simple())
             .time("watch1", now)
@@ -61,10 +61,10 @@ public class FilterXContentTests extends ESTestCase {

     public void testNestedPayloadFiltering() throws Exception {
         Map<String, Object> data = new HashMap<>();
-        data.put("leaf1", MapBuilder.newMapBuilder().put("key1", "value1").put("key2", true).map());
+        data.put("leaf1", Map.of("key1", "value1", "key2", true));
         data.put("leaf2", MapBuilder.newMapBuilder().put("key1", "value1").put("key2", "value2").put("key3", 3).map());
-        Map<Object, Object> innerMap = MapBuilder.newMapBuilder().put("key1", "value1").put("key2", "value2").map();
-        data.put("leaf3", MapBuilder.newMapBuilder().put("key1", "value1").put("key2", innerMap).map());
+        Map<Object, Object> innerMap = Map.of("key1", "value1", "key2", "value2");
+        data.put("leaf3", Map.of("key1", "value1", "key2", innerMap));

         XContentBuilder builder = jsonBuilder().map(data);
         XContentParser parser = createParser(builder);
@@ -6,7 +6,6 @@
  */
 package org.elasticsearch.xpack.watcher.test;

-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext;
 import org.elasticsearch.xpack.core.watcher.execution.Wid;
 import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent;
@@ -47,7 +46,7 @@ public class WatchExecutionContextMockBuilder {
     }

     public WatchExecutionContextMockBuilder payload(String key, Object value) {
-        return payload(new Payload.Simple(MapBuilder.<String, Object>newMapBuilder().put(key, value).map()));
+        return payload(new Payload.Simple(Map.of(key, value)));
     }

     public WatchExecutionContextMockBuilder payload(Map<String, Object> payload) {
@@ -79,7 +78,7 @@ public class WatchExecutionContextMockBuilder {
     }

     public WatchExecutionContextMockBuilder metadata(String key, String value) {
-        return metadata(MapBuilder.<String, Object>newMapBuilder().put(key, value).map());
+        return metadata(Map.of(key, value));
     }

     public WatchExecutionContext buildMock() {
@@ -264,10 +264,7 @@ public class SamlAuthenticationIT extends ESRestTestCase {
     }

     private String verifyElasticsearchRefreshToken(String refreshToken) throws IOException {
-        final Map<String, ?> body = MapBuilder.<String, Object>newMapBuilder()
-            .put("grant_type", "refresh_token")
-            .put("refresh_token", refreshToken)
-            .map();
+        final Map<String, ?> body = Map.of("grant_type", "refresh_token", "refresh_token", refreshToken);
         final Response response = client().performRequest(buildRequest("POST", "/_security/oauth2/token", body, kibanaAuth()));
         assertOK(response);

@@ -8,7 +8,6 @@ package org.elasticsearch.xpack.security.authc.ldap;

 import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.client.internal.Client;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.SecuritySettingsSource;
 import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction;
@@ -73,10 +72,12 @@ public class ActiveDirectoryRunAsIT extends AbstractAdLdapRealmTestCase {
     }

     protected Client runAsClient(String user) {
-        final Map<String, String> headers = MapBuilder.<String, String>newMapBuilder()
-            .put(BASIC_AUTH_HEADER, UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, BOOTSTRAP_PASSWORD))
-            .put(AuthenticationServiceField.RUN_AS_USER_HEADER, user)
-            .map();
+        final Map<String, String> headers = Map.of(
+            BASIC_AUTH_HEADER,
+            UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, BOOTSTRAP_PASSWORD),
+            AuthenticationServiceField.RUN_AS_USER_HEADER,
+            user
+        );
         return client().filterWithHeader(headers);
     }

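A last difference: a builder's repeated put() silently overwrites, while Map.of fails fast on duplicate keys with an IllegalArgumentException, which can surface latent copy-paste mistakes when migrating call sites like the ones above. Sketch:

```java
import java.util.Map;

public class DuplicateKeySketch {
    public static void main(String[] args) {
        try {
            Map.of("foo", "bar", "foo", "baz");
        } catch (IllegalArgumentException e) {
            // A builder would silently overwrite; the factory fails fast.
            System.out.println("Map.of rejects duplicate keys");
        }
    }
}
```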