[8.16] Track source for objects and fields with [synthetic_source_keep:arrays] in arrays as ignored (#116065) (#116226)
* Track source for objects and fields with [synthetic_source_keep:arrays] in arrays as ignored (#116065)
* Track source for objects and fields with [synthetic_source_keep:arrays] in arrays as ignored
* Update TransportResumeFollowActionTests.java
* rest compat fixes
* rest compat fixes
* update test
(cherry picked from commit 6cf45366d5)
# Conflicts:
# rest-api-spec/build.gradle
# rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml
# server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
# server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java
# server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java
* Update DocumentParserContext.java
* fixes
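
For reference, a minimal sketch of the mapping shape this change targets, mirroring the fixtures in the YAML tests below (index and field names are the tests' own):

    indices.create:
      index: test
      body:
        mappings:
          _source:
            mode: synthetic
          properties:
            name:
              type: keyword
            path:
              properties:
                to:
                  properties:
                    some:
                      synthetic_source_keep: arrays
                      properties:
                        id:
                          type: integer

With this change, values of "some" that land inside arrays of parent objects are captured in ignored source during the first parsing pass, making the second document-parsing pass (and the index.synthetic_source.enable_second_doc_parsing_pass setting, both removed below) unnecessary.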
parent 9f98d2331b
commit e80a641f36
@@ -356,8 +356,8 @@ object param - nested object with stored array:
         sort: name
   - match: { hits.total.value: 2 }
   - match: { hits.hits.0._source.name: A }
-  - match: { hits.hits.0._source.nested_array_regular.0.b.c: [ 10, 100] }
-  - match: { hits.hits.0._source.nested_array_regular.1.b.c: [ 20, 200] }
+  - match: { hits.hits.0._source.nested_array_regular.0.b.c: [ 10, 100 ] }
+  - match: { hits.hits.0._source.nested_array_regular.1.b.c: [ 20, 200 ] }
   - match: { hits.hits.1._source.name: B }
   - match: { hits.hits.1._source.nested_array_stored.0.b.0.c: 10 }
   - match: { hits.hits.1._source.nested_array_stored.0.b.1.c: 100 }
@@ -411,55 +411,6 @@ index param - nested array within array:
   - match: { hits.hits.0._source.path.to.some.3.id: [ 1000, 2000 ] }


----
-index param - nested array within array - disabled second pass:
-  - requires:
-      cluster_features: ["mapper.synthetic_source_keep"]
-      reason: requires tracking ignored source
-
-  - do:
-      indices.create:
-        index: test
-        body:
-          settings:
-            index:
-              synthetic_source:
-                enable_second_doc_parsing_pass: false
-          mappings:
-            _source:
-              mode: synthetic
-            properties:
-              name:
-                type: keyword
-              path:
-                properties:
-                  to:
-                    properties:
-                      some:
-                        synthetic_source_keep: arrays
-                        properties:
-                          id:
-                            type: integer
-
-  - do:
-      bulk:
-        index: test
-        refresh: true
-        body:
-          - '{ "create": { } }'
-          - '{ "name": "A", "path": [ { "to": [ { "some" : [ { "id": 10 }, { "id": [1, 3, 2] } ] }, { "some": { "id": 100 } } ] }, { "to": { "some": { "id": [1000, 2000] } } } ] }'
-  - match: { errors: false }
-
-  - do:
-      search:
-        index: test
-        sort: name
-  - match: { hits.hits.0._source.name: A }
-  - length: { hits.hits.0._source.path.to.some: 2}
-  - match: { hits.hits.0._source.path.to.some.0.id: 10 }
-  - match: { hits.hits.0._source.path.to.some.1.id: [ 1, 3, 2] }
-
-
 ---
 # 112156
 stored field under object with store_array_source:
@@ -925,8 +876,10 @@ index param - root arrays:
   - match: { hits.hits.1._source.obj.1.span.id: "2" }

   - match: { hits.hits.2._source.id: 3 }
-  - match: { hits.hits.2._source.obj_default.trace.id: [aa, bb] }
-  - match: { hits.hits.2._source.obj_default.span.id: "2" }
+  - match: { hits.hits.2._source.obj_default.trace.0.id: bb }
+  - match: { hits.hits.2._source.obj_default.trace.1.id: aa }
+  - match: { hits.hits.2._source.obj_default.span.0.id: "2" }
+  - match: { hits.hits.2._source.obj_default.span.1.id: "2" }


 ---
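
To make the array-within-array case concrete, here is a condensed sketch of the scenario the retained test above exercises; the document and the assertion are reproduced from the test fixtures in this file:

    - do:
        bulk:
          index: test
          refresh: true
          body:
            - '{ "create": { } }'
            - '{ "name": "A", "path": [ { "to": [ { "some" : [ { "id": 10 }, { "id": [1, 3, 2] } ] }, { "some": { "id": 100 } } ] }, { "to": { "some": { "id": [1000, 2000] } } } ] }'

    - do:
        search:
          index: test
          sort: name

    # The flattened path.to.some retains the trailing leaf array intact:
    - match: { hits.hits.0._source.path.to.some.3.id: [ 1000, 2000 ] }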
@@ -188,7 +188,6 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
         FieldMapper.SYNTHETIC_SOURCE_KEEP_INDEX_SETTING,
         IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING,
         IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING,
-        IndexSettings.SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING,
         SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING,

         // validate that built-in similarities don't get redefined
@@ -656,13 +656,6 @@ public final class IndexSettings {
         Property.Final
     );

-    public static final Setting<Boolean> SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING = Setting.boolSetting(
-        "index.synthetic_source.enable_second_doc_parsing_pass",
-        true,
-        Property.IndexScope,
-        Property.Dynamic
-    );
-
     /**
      * Returns <code>true</code> if TSDB encoding is enabled. The default is <code>true</code>
      */

@@ -832,7 +825,6 @@ public final class IndexSettings {
     private volatile long mappingDimensionFieldsLimit;
     private volatile boolean skipIgnoredSourceWrite;
     private volatile boolean skipIgnoredSourceRead;
-    private volatile boolean syntheticSourceSecondDocParsingPassEnabled;
     private final SourceFieldMapper.Mode indexMappingSourceMode;
     private final boolean recoverySourceEnabled;

@@ -995,7 +987,6 @@ public final class IndexSettings {
         es87TSDBCodecEnabled = scopedSettings.get(TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING);
         skipIgnoredSourceWrite = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING);
         skipIgnoredSourceRead = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING);
-        syntheticSourceSecondDocParsingPassEnabled = scopedSettings.get(SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING);
         indexMappingSourceMode = scopedSettings.get(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING);
         recoverySourceEnabled = RecoverySettings.INDICES_RECOVERY_SOURCE_ENABLED_SETTING.get(nodeSettings);

@@ -1085,10 +1076,6 @@ public final class IndexSettings {
             this::setSkipIgnoredSourceWrite
         );
         scopedSettings.addSettingsUpdateConsumer(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING, this::setSkipIgnoredSourceRead);
-        scopedSettings.addSettingsUpdateConsumer(
-            SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING,
-            this::setSyntheticSourceSecondDocParsingPassEnabled
-        );
     }

     private void setSearchIdleAfter(TimeValue searchIdleAfter) {

@@ -1681,14 +1668,6 @@ public final class IndexSettings {
         this.skipIgnoredSourceRead = value;
     }

-    private void setSyntheticSourceSecondDocParsingPassEnabled(boolean syntheticSourceSecondDocParsingPassEnabled) {
-        this.syntheticSourceSecondDocParsingPassEnabled = syntheticSourceSecondDocParsingPassEnabled;
-    }
-
-    public boolean isSyntheticSourceSecondDocParsingPassEnabled() {
-        return syntheticSourceSecondDocParsingPassEnabled;
-    }
-
     public SourceFieldMapper.Mode getIndexMappingSourceMode() {
         return indexMappingSourceMode;
     }
@@ -35,16 +35,13 @@ import org.elasticsearch.xcontent.XContentType;

 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import java.util.Set;
 import java.util.function.Consumer;

 import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.MAX_DIMS_COUNT;

@@ -148,9 +145,6 @@ public final class DocumentParser {

         executeIndexTimeScripts(context);

-        // Record additional entries for {@link IgnoredSourceFieldMapper} before calling #postParse, so that they get stored.
-        addIgnoredSourceMissingValues(context);
-
         for (MetadataFieldMapper metadataMapper : metadataFieldsMappers) {
             metadataMapper.postParse(context);
         }

@@ -159,128 +153,6 @@ public final class DocumentParser {
         }
     }

-    private void addIgnoredSourceMissingValues(DocumentParserContext context) throws IOException {
-        Collection<IgnoredSourceFieldMapper.NameValue> ignoredFieldsMissingValues = context.getIgnoredFieldsMissingValues();
-        if (ignoredFieldsMissingValues.isEmpty()) {
-            return;
-        }
-
-        // Clean up any conflicting ignored values, to avoid double-printing them as array elements in synthetic source.
-        Map<String, IgnoredSourceFieldMapper.NameValue> fields = new HashMap<>(ignoredFieldsMissingValues.size());
-        for (var field : ignoredFieldsMissingValues) {
-            fields.put(field.name(), field);
-        }
-        context.deduplicateIgnoredFieldValues(fields.keySet());
-
-        assert context.mappingLookup().isSourceSynthetic();
-        try (
-            XContentParser parser = XContentHelper.createParser(
-                parserConfiguration,
-                context.sourceToParse().source(),
-                context.sourceToParse().getXContentType()
-            )
-        ) {
-            DocumentParserContext newContext = new RootDocumentParserContext(
-                context.mappingLookup(),
-                mappingParserContext,
-                context.sourceToParse(),
-                parser
-            );
-            var nameValues = parseDocForMissingValues(newContext, fields);
-            for (var nameValue : nameValues) {
-                context.addIgnoredField(nameValue);
-            }
-        }
-    }
-
-    /**
-     * Simplified parsing version for retrieving the source of a given set of fields.
-     */
-    private static List<IgnoredSourceFieldMapper.NameValue> parseDocForMissingValues(
-        DocumentParserContext context,
-        Map<String, IgnoredSourceFieldMapper.NameValue> fields
-    ) throws IOException {
-        // Generate all possible parent names for the given fields.
-        // This is used to skip processing objects that can't generate missing values.
-        Set<String> parentNames = getPossibleParentNames(fields.keySet());
-        List<IgnoredSourceFieldMapper.NameValue> result = new ArrayList<>();
-
-        XContentParser parser = context.parser();
-        XContentParser.Token currentToken = parser.nextToken();
-        List<String> path = new ArrayList<>();
-        List<Boolean> isObjectInPath = new ArrayList<>(); // Tracks if path components correspond to an object or an array.
-        String fieldName = null;
-        while (currentToken != null) {
-            while (currentToken != XContentParser.Token.FIELD_NAME) {
-                if (fieldName != null
-                    && (currentToken == XContentParser.Token.START_OBJECT || currentToken == XContentParser.Token.START_ARRAY)) {
-                    if (parentNames.contains(getCurrentPath(path, fieldName)) == false) {
-                        // No missing value under this parsing subtree, skip it.
-                        parser.skipChildren();
-                    } else {
-                        path.add(fieldName);
-                        isObjectInPath.add(currentToken == XContentParser.Token.START_OBJECT);
-                    }
-                    fieldName = null;
-                } else if (currentToken == XContentParser.Token.END_OBJECT || currentToken == XContentParser.Token.END_ARRAY) {
-                    // Remove the path, if the scope type matches the one when the path was added.
-                    if (isObjectInPath.isEmpty() == false
-                        && (isObjectInPath.get(isObjectInPath.size() - 1) && currentToken == XContentParser.Token.END_OBJECT
-                            || isObjectInPath.get(isObjectInPath.size() - 1) == false && currentToken == XContentParser.Token.END_ARRAY)) {
-                        path.remove(path.size() - 1);
-                        isObjectInPath.remove(isObjectInPath.size() - 1);
-                    }
-                    fieldName = null;
-                }
-                currentToken = parser.nextToken();
-                if (currentToken == null) {
-                    return result;
-                }
-            }
-            fieldName = parser.currentName();
-            String fullName = getCurrentPath(path, fieldName);
-            var leaf = fields.get(fullName); // There may be multiple matches for array elements, don't use #remove.
-            if (leaf != null) {
-                parser.nextToken(); // Advance the parser to the value to be read.
-                result.add(leaf.cloneWithValue(context.encodeFlattenedToken()));
-                fieldName = null;
-            }
-            currentToken = parser.nextToken();
-        }
-        return result;
-    }
-
-    private static String getCurrentPath(List<String> path, String fieldName) {
-        assert fieldName != null;
-        return path.isEmpty() ? fieldName : String.join(".", path) + "." + fieldName;
-    }
-
-    /**
-     * Generates all possible parent object names for the given full names.
-     * For instance, for input ['path.to.foo', 'another.path.to.bar'], it returns:
-     * [ 'path', 'path.to', 'another', 'another.path', 'another.path.to' ]
-     */
-    private static Set<String> getPossibleParentNames(Set<String> fullPaths) {
-        if (fullPaths.isEmpty()) {
-            return Collections.emptySet();
-        }
-        Set<String> paths = new HashSet<>();
-        for (String fullPath : fullPaths) {
-            String[] split = fullPath.split("\\.");
-            if (split.length < 2) {
-                continue;
-            }
-            StringBuilder builder = new StringBuilder(split[0]);
-            paths.add(builder.toString());
-            for (int i = 1; i < split.length - 1; i++) {
-                builder.append(".");
-                builder.append(split[i]);
-                paths.add(builder.toString());
-            }
-        }
-        return paths;
-    }
-
     private static void executeIndexTimeScripts(DocumentParserContext context) {
         List<FieldMapper> indexTimeScriptMappers = context.mappingLookup().indexTimeScriptMappers();
         if (indexTimeScriptMappers.isEmpty()) {
@@ -426,7 +298,10 @@ public final class DocumentParser {
             throwOnConcreteValue(context.parent(), currentFieldName, context);
         }

-        if (context.canAddIgnoredField() && getSourceKeepMode(context, context.parent().sourceKeepMode()) == Mapper.SourceKeepMode.ALL) {
+        var sourceKeepMode = getSourceKeepMode(context, context.parent().sourceKeepMode());
+        if (context.canAddIgnoredField()
+            && (sourceKeepMode == Mapper.SourceKeepMode.ALL
+                || (sourceKeepMode == Mapper.SourceKeepMode.ARRAYS && context.inArrayScope()))) {
             context = context.addIgnoredFieldFromContext(
                 new IgnoredSourceFieldMapper.NameValue(
                     context.parent().fullPath(),

@@ -571,9 +446,11 @@ public final class DocumentParser {
             parseObjectOrNested(context.createFlattenContext(currentFieldName));
             context.path().add(currentFieldName);
         } else {
+            var sourceKeepMode = getSourceKeepMode(context, fieldMapper.sourceKeepMode());
             if (context.canAddIgnoredField()
                 && (fieldMapper.syntheticSourceMode() == FieldMapper.SyntheticSourceMode.FALLBACK
-                    || getSourceKeepMode(context, fieldMapper.sourceKeepMode()) == Mapper.SourceKeepMode.ALL
+                    || sourceKeepMode == Mapper.SourceKeepMode.ALL
+                    || (sourceKeepMode == Mapper.SourceKeepMode.ARRAYS && context.inArrayScope())
                     || (context.isWithinCopyTo() == false && context.isCopyToDestinationField(mapper.fullPath())))) {
                 context = context.addIgnoredFieldFromContext(
                     IgnoredSourceFieldMapper.NameValue.fromContext(context, fieldMapper.fullPath(), null)

@@ -810,8 +687,8 @@ public final class DocumentParser {
         boolean objectWithFallbackSyntheticSource = false;
         if (mapper instanceof ObjectMapper objectMapper) {
             mode = getSourceKeepMode(context, objectMapper.sourceKeepMode());
-            objectWithFallbackSyntheticSource = (mode == Mapper.SourceKeepMode.ALL
-                || (mode == Mapper.SourceKeepMode.ARRAYS && objectMapper instanceof NestedObjectMapper == false));
+            objectWithFallbackSyntheticSource = mode == Mapper.SourceKeepMode.ALL
+                || (mode == Mapper.SourceKeepMode.ARRAYS && objectMapper instanceof NestedObjectMapper == false);
         }
         boolean fieldWithFallbackSyntheticSource = false;
         boolean fieldWithStoredArraySource = false;
@@ -104,15 +104,23 @@ public abstract class DocumentParserContext {
         }
     }

+    /**
+     * Defines the scope parser is currently in.
+     * This is used for synthetic source related logic during parsing.
+     */
+    private enum Scope {
+        SINGLETON,
+        ARRAY,
+        NESTED
+    }
+
     private final MappingLookup mappingLookup;
     private final MappingParserContext mappingParserContext;
     private final SourceToParse sourceToParse;

     private final Set<String> ignoredFields;
     private final List<IgnoredSourceFieldMapper.NameValue> ignoredFieldValues;
-    private final List<IgnoredSourceFieldMapper.NameValue> ignoredFieldsMissingValues;
-    private boolean inArrayScopeEnabled;
-    private boolean inArrayScope;
+    private Scope currentScope;

     private final Map<String, List<Mapper>> dynamicMappers;
     private final DynamicMapperSize dynamicMappersSize;

@@ -143,9 +151,7 @@ public abstract class DocumentParserContext {
         SourceToParse sourceToParse,
         Set<String> ignoreFields,
         List<IgnoredSourceFieldMapper.NameValue> ignoredFieldValues,
-        List<IgnoredSourceFieldMapper.NameValue> ignoredFieldsWithNoSource,
-        boolean inArrayScopeEnabled,
-        boolean inArrayScope,
+        Scope currentScope,
         Map<String, List<Mapper>> dynamicMappers,
         Map<String, ObjectMapper> dynamicObjectMappers,
         Map<String, List<RuntimeField>> dynamicRuntimeFields,

@@ -165,9 +171,7 @@ public abstract class DocumentParserContext {
         this.sourceToParse = sourceToParse;
         this.ignoredFields = ignoreFields;
         this.ignoredFieldValues = ignoredFieldValues;
-        this.ignoredFieldsMissingValues = ignoredFieldsWithNoSource;
-        this.inArrayScopeEnabled = inArrayScopeEnabled;
-        this.inArrayScope = inArrayScope;
+        this.currentScope = currentScope;
         this.dynamicMappers = dynamicMappers;
         this.dynamicObjectMappers = dynamicObjectMappers;
         this.dynamicRuntimeFields = dynamicRuntimeFields;

@@ -190,9 +194,7 @@ public abstract class DocumentParserContext {
             in.sourceToParse,
             in.ignoredFields,
             in.ignoredFieldValues,
-            in.ignoredFieldsMissingValues,
-            in.inArrayScopeEnabled,
-            in.inArrayScope,
+            in.currentScope,
             in.dynamicMappers,
             in.dynamicObjectMappers,
             in.dynamicRuntimeFields,

@@ -222,9 +224,7 @@ public abstract class DocumentParserContext {
             source,
             new HashSet<>(),
             new ArrayList<>(),
-            new ArrayList<>(),
-            mappingParserContext.getIndexSettings().isSyntheticSourceSecondDocParsingPassEnabled(),
-            false,
+            Scope.SINGLETON,
             new HashMap<>(),
             new HashMap<>(),
             new HashMap<>(),
@@ -314,13 +314,6 @@ public abstract class DocumentParserContext {
         return Collections.unmodifiableCollection(ignoredFieldValues);
     }

-    /**
-     * Remove duplicate ignored values, using the passed set of field names as reference
-     */
-    public final void deduplicateIgnoredFieldValues(final Set<String> fullNames) {
-        ignoredFieldValues.removeIf(nv -> fullNames.contains(nv.name()));
-    }
-
     /**
      * Adds an ignored field from the parser context, capturing an object or an array.
      *

@@ -335,17 +328,11 @@ public abstract class DocumentParserContext {
     public final DocumentParserContext addIgnoredFieldFromContext(IgnoredSourceFieldMapper.NameValue ignoredFieldWithNoSource)
         throws IOException {
         if (canAddIgnoredField()) {
-            if (inArrayScope) {
-                // The field is an array within an array, store all sub-array elements.
-                ignoredFieldsMissingValues.add(ignoredFieldWithNoSource);
-                return cloneWithRecordedSource();
-            } else {
-                assert ignoredFieldWithNoSource != null;
-                assert ignoredFieldWithNoSource.value() == null;
-                Tuple<DocumentParserContext, XContentBuilder> tuple = XContentDataHelper.cloneSubContext(this);
-                addIgnoredField(ignoredFieldWithNoSource.cloneWithValue(XContentDataHelper.encodeXContentBuilder(tuple.v2())));
-                return tuple.v1();
-            }
+            assert ignoredFieldWithNoSource != null;
+            assert ignoredFieldWithNoSource.value() == null;
+            Tuple<DocumentParserContext, XContentBuilder> tuple = XContentDataHelper.cloneSubContext(this);
+            addIgnoredField(ignoredFieldWithNoSource.cloneWithValue(XContentDataHelper.encodeXContentBuilder(tuple.v2())));
+            return tuple.v1();
         }
         return this;
     }

@@ -364,13 +351,6 @@ public abstract class DocumentParserContext {
         return encoded;
     }

-    /**
-     * Return the collection of fields that are missing their source values.
-     */
-    public final Collection<IgnoredSourceFieldMapper.NameValue> getIgnoredFieldsMissingValues() {
-        return Collections.unmodifiableCollection(ignoredFieldsMissingValues);
-    }
-
     /**
      * Clones the current context to mark it as an array, if it's not already marked, or restore it if it's within a nested object.
      * Applies to synthetic source only.

@@ -379,10 +359,9 @@ public abstract class DocumentParserContext {
         if (canAddIgnoredField()
             && mapper instanceof ObjectMapper
             && mapper instanceof NestedObjectMapper == false
-            && inArrayScope == false
-            && inArrayScopeEnabled) {
+            && currentScope != Scope.ARRAY) {
             DocumentParserContext subcontext = switchParser(parser());
-            subcontext.inArrayScope = true;
+            subcontext.currentScope = Scope.ARRAY;
             return subcontext;
         }
         return this;

@@ -669,6 +648,10 @@ public abstract class DocumentParserContext {
         return false;
     }

+    boolean inArrayScope() {
+        return currentScope == Scope.ARRAY;
+    }
+
     public final DocumentParserContext createChildContext(ObjectMapper parent) {
         return new Wrapper(parent, this);
     }

@@ -712,11 +695,8 @@ public abstract class DocumentParserContext {
                 return document;
             }
         };
-        // Disable tracking array scopes for ignored source, as it would be added to the parent doc.
-        // Nested documents are added to preserve object structure within arrays of objects, so the use
-        // of ignored source for arrays inside them should be mostly redundant.
-        cloned.inArrayScope = false;
-        cloned.inArrayScopeEnabled = false;
+        cloned.currentScope = Scope.NESTED;
         return cloned;
     }

@@ -642,7 +642,7 @@ public class IgnoredSourceFieldMapperTests extends MapperServiceTestCase {
             b.field("bool_value", true);
         });
         assertEquals("""
-            {"bool_value":true,"path":{"int_value":[10,20]}}""", syntheticSource);
+            {"bool_value":true,"path":{"int_value":[20,10]}}""", syntheticSource);
     }

     public void testIndexStoredArraySourceNestedValueArray() throws IOException {

@@ -706,7 +706,7 @@ public class IgnoredSourceFieldMapperTests extends MapperServiceTestCase {
             b.endObject();
         });
         assertEquals("""
-            {"path":{"bool_value":true,"int_value":[10,20,30],"obj":{"foo":[1,2]}}}""", syntheticSource);
+            {"path":{"bool_value":true,"int_value":[10,20,30],"obj":{"foo":[2,1]}}}""", syntheticSource);
     }

     public void testFieldStoredArraySourceNestedValueArray() throws IOException {
@@ -962,6 +962,94 @@ public class IgnoredSourceFieldMapperTests extends MapperServiceTestCase {
             {"path":{"to":[{"id":[1,20,3]},{"id":10},{"id":0}]}}""", syntheticSource);
     }

+    public void testObjectArrayWithinNestedObjects() throws IOException {
+        DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
+            b.startObject("path").startObject("properties");
+            {
+                b.startObject("to").field("type", "nested").startObject("properties");
+                {
+                    b.startObject("obj").startObject("properties");
+                    {
+                        b.startObject("id").field("type", "integer").field("synthetic_source_keep", "arrays").endObject();
+                    }
+                    b.endObject().endObject();
+                }
+                b.endObject().endObject();
+            }
+            b.endObject().endObject();
+        })).documentMapper();
+
+        var syntheticSource = syntheticSource(documentMapper, b -> {
+            b.startObject("path");
+            {
+                b.startObject("to");
+                {
+                    b.startArray("obj");
+                    {
+                        b.startObject().array("id", 1, 20, 3).endObject();
+                        b.startObject().field("id", 10).endObject();
+                    }
+                    b.endArray();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        });
+        assertEquals("""
+            {"path":{"to":{"obj":{"id":[1,20,3,10]}}}}""", syntheticSource);
+    }
+
+    public void testObjectArrayWithinNestedObjectsArray() throws IOException {
+        DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
+            b.startObject("path").startObject("properties");
+            {
+                b.startObject("to").field("type", "nested").startObject("properties");
+                {
+                    b.startObject("obj").startObject("properties");
+                    {
+                        b.startObject("id").field("type", "integer").field("synthetic_source_keep", "arrays").endObject();
+                    }
+                    b.endObject().endObject();
+                }
+                b.endObject().endObject();
+            }
+            b.endObject().endObject();
+        })).documentMapper();
+
+        var syntheticSource = syntheticSource(documentMapper, b -> {
+            b.startObject("path");
+            {
+                b.startArray("to");
+                {
+                    b.startObject();
+                    {
+                        b.startArray("obj");
+                        {
+                            b.startObject().array("id", 1, 20, 3).endObject();
+                            b.startObject().field("id", 10).endObject();
+                        }
+                        b.endArray();
+                    }
+                    b.endObject();
+                    b.startObject();
+                    {
+                        b.startArray("obj");
+                        {
+                            b.startObject().array("id", 200, 300, 500).endObject();
+                            b.startObject().field("id", 100).endObject();
+                        }
+                        b.endArray();
+                    }
+                    b.endObject();
+                }
+                b.endArray();
+            }
+            b.endObject();
+        });
+        assertEquals("""
+            {"path":{"to":[{"obj":{"id":[1,20,3,10]}},{"obj":{"id":[200,300,500,100]}}]}}""", syntheticSource);
+    }
+
     public void testArrayWithinArray() throws IOException {
         DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
             b.startObject("path");
@@ -333,7 +333,6 @@ public class TransportResumeFollowActionTests extends ESTestCase {
         replicatedSettings.add(IndexSettings.MAX_SHINGLE_DIFF_SETTING);
         replicatedSettings.add(IndexSettings.TIME_SERIES_END_TIME);
         replicatedSettings.add(IndexSettings.PREFER_ILM_SETTING);
-        replicatedSettings.add(IndexSettings.SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING);
         replicatedSettings.add(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING);
         replicatedSettings.add(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING);
         replicatedSettings.add(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING);