Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions docs/reference/mapping/types/date.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -133,8 +133,7 @@ The following parameters are accepted by `date` fields:
`locale`::

The locale to use when parsing dates since months do not have the same names
and/or abbreviations in all languages. The default is the
https://docs.oracle.com/javase/8/docs/api/java/util/Locale.html#ROOT[`ROOT` locale].
and/or abbreviations in all languages. The default is the
https://docs.oracle.com/javase/8/docs/api/java/util/Locale.html#ENGLISH[`ENGLISH` locale].

<<ignore-malformed,`ignore_malformed`>>::

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@

package org.elasticsearch.aggregations.bucket.histogram;

import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.aggregations.bucket.AggregationMultiBucketAggregationTestCase;
import org.elasticsearch.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo;
Expand All @@ -28,7 +27,6 @@
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.elasticsearch.test.TransportVersionUtils;

import java.io.IOException;
import java.time.Instant;
Expand Down Expand Up @@ -459,33 +457,6 @@ public void testCreateWithReplacementBuckets() {
assertThat(copy.getInterval(), equalTo(orig.getInterval()));
}

public void testSerializationPre830() throws IOException {
// we need to test without sub-aggregations, otherwise we need to also update the interval within the inner aggs
InternalAutoDateHistogram instance = createTestInstance(
randomAlphaOfLengthBetween(3, 7),
createTestMetadata(),
InternalAggregations.EMPTY
);
TransportVersion version = TransportVersionUtils.randomVersionBetween(
random(),
TransportVersions.MINIMUM_COMPATIBLE,
TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_3_0)
);
InternalAutoDateHistogram deserialized = copyInstance(instance, version);
assertEquals(1, deserialized.getBucketInnerInterval());

InternalAutoDateHistogram modified = new InternalAutoDateHistogram(
deserialized.getName(),
deserialized.getBuckets(),
deserialized.getTargetBuckets(),
deserialized.getBucketInfo(),
deserialized.getFormatter(),
deserialized.getMetadata(),
instance.getBucketInnerInterval()
);
assertEqualInstances(instance, modified);
}

public void testReadFromPre830() throws IOException {
byte[] bytes = Base64.getDecoder()
.decode(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
---
setup:
- requires:
cluster_features: ["gte_v7.7.0"]
reason: "Start of the week Monday was enabled in a backport to 7.7 PR#50916"
cluster_features: ["gte_v8.16.0"]
reason: "First day of the week was changed to Sunday in 8.16 as part of the locale changes"

- do:
indices.create:
Expand All @@ -25,7 +25,7 @@ setup:

---
# The inserted document has a field date=2009-11-15T14:12:12 which is Sunday.
# When aggregating per day of the week this should be considered as last day of the week (7)
# When aggregating per day of the week this should be considered as first day of the week (1)
# and this value should be used in 'key_as_string'
"Date aggregation per day of week":
- do:
Expand All @@ -44,4 +44,4 @@ setup:

- match: {hits.total: 1}
- length: { aggregations.test.buckets: 1 }
- match: { aggregations.test.buckets.0.key_as_string: "7" }
- match: { aggregations.test.buckets.0.key_as_string: "1" }
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ setup:
cluster_features: "gte_v8.15.0"
reason: fixed in 8.15.0
- do:
catch: /Cannot format stat \[sum\] with format \[DocValueFormat.DateTime\(format\[date_hour_minute_second_millis\] locale\[\], Z, MILLISECONDS\)\]/
catch: /Cannot format stat \[sum\] with format \[DocValueFormat.DateTime\(format\[date_hour_minute_second_millis\] locale\[(en)?\], Z, MILLISECONDS\)\]/
search:
index: test_date
body:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -234,6 +234,7 @@ static TransportVersion def(int id) {
public static final TransportVersion RRF_QUERY_REWRITE = def(8_758_00_0);
public static final TransportVersion SEARCH_FAILURE_STATS = def(8_759_00_0);
public static final TransportVersion INGEST_GEO_DATABASE_PROVIDERS = def(8_760_00_0);
public static final TransportVersion DATE_TIME_DOC_VALUES_LOCALES = def(8_761_00_0);

/*
* STOP! READ THIS FIRST! No, really,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -165,10 +165,11 @@ public void doValidate(MappingLookup lookup) {
Map<?, ?> configuredSettings = XContentHelper.convertToMap(BytesReference.bytes(builder), false, XContentType.JSON).v2();
configuredSettings = (Map<?, ?>) configuredSettings.values().iterator().next();

// Only type, meta and format attributes are allowed:
// Only type, meta, format, and locale attributes are allowed:
configuredSettings.remove("type");
configuredSettings.remove("meta");
configuredSettings.remove("format");
configuredSettings.remove("locale");

// ignoring malformed values is disallowed (see previous check),
// however if `index.mapping.ignore_malformed` has been set to true then
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -83,11 +83,16 @@ public final class DateFieldMapper extends FieldMapper {

public static final String CONTENT_TYPE = "date";
public static final String DATE_NANOS_CONTENT_TYPE = "date_nanos";
public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time||epoch_millis");
public static final Locale DEFAULT_LOCALE = Locale.ENGLISH;
// although the locale doesn't affect the results, tests still check formatter equality, which does include locale
public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time||epoch_millis")
.withLocale(DEFAULT_LOCALE);
public static final DateFormatter DEFAULT_DATE_TIME_NANOS_FORMATTER = DateFormatter.forPattern(
"strict_date_optional_time_nanos||epoch_millis"
);
private static final DateMathParser EPOCH_MILLIS_PARSER = DateFormatter.forPattern("epoch_millis").toDateMathParser();
).withLocale(DEFAULT_LOCALE);
private static final DateMathParser EPOCH_MILLIS_PARSER = DateFormatter.forPattern("epoch_millis")
.withLocale(DEFAULT_LOCALE)
.toDateMathParser();

public enum Resolution {
MILLISECONDS(CONTENT_TYPE, NumericType.DATE, DateMillisDocValuesField::new) {
Expand Down Expand Up @@ -236,7 +241,7 @@ public static final class Builder extends FieldMapper.Builder {
private final Parameter<Locale> locale = new Parameter<>(
"locale",
false,
() -> Locale.ROOT,
() -> DEFAULT_LOCALE,
(n, c, o) -> LocaleUtils.parse(o.toString()),
m -> toType(m).locale,
(xContentBuilder, n, v) -> xContentBuilder.field(n, v.toString()),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ private static class Builder extends AbstractScriptFieldType.Builder<DateFieldSc
(n, c, o) -> o == null ? null : LocaleUtils.parse(o.toString()),
RuntimeField.initializerNotSupported(),
(b, n, v) -> {
if (v != null && false == v.equals(Locale.ROOT)) {
if (v != null && false == v.equals(DateFieldMapper.DEFAULT_LOCALE)) {
b.field(n, v.toString());
}
},
Expand Down Expand Up @@ -97,7 +97,7 @@ protected AbstractScriptFieldType<?> createFieldType(
OnScriptError onScriptError
) {
String pattern = format.getValue() == null ? DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.pattern() : format.getValue();
Locale locale = this.locale.getValue() == null ? Locale.ROOT : this.locale.getValue();
Locale locale = this.locale.getValue() == null ? DateFieldMapper.DEFAULT_LOCALE : this.locale.getValue();
DateFormatter dateTimeFormatter = DateFormatter.forPattern(pattern, supportedVersion).withLocale(locale);
return new DateScriptFieldType(name, factory, dateTimeFormatter, script, meta, onScriptError);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ public class RangeFieldMapper extends FieldMapper {

public static class Defaults {
public static final DateFormatter DATE_FORMATTER = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER;
public static final Locale LOCALE = DateFieldMapper.DEFAULT_LOCALE;
}

// this is private since it has a different default
Expand All @@ -83,7 +84,7 @@ public static class Builder extends FieldMapper.Builder {
private final Parameter<Locale> locale = new Parameter<>(
"locale",
false,
() -> Locale.ROOT,
() -> Defaults.LOCALE,
(n, c, o) -> LocaleUtils.parse(o.toString()),
m -> toType(m).locale,
(xContentBuilder, n, v) -> xContentBuilder.field(n, v.toString()),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,7 @@ public static boolean parseMultiField(

public static DateFormatter parseDateTimeFormatter(Object node) {
if (node instanceof String) {
return DateFormatter.forPattern((String) node);
return DateFormatter.forPattern((String) node).withLocale(DateFieldMapper.DEFAULT_LOCALE);
}
throw new IllegalArgumentException("Invalid format: [" + node.toString() + "]: expected string value");
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.geometry.utils.Geohash;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper;
Expand Down Expand Up @@ -236,9 +237,12 @@ private DateTime(DateFormatter formatter, ZoneId timeZone, DateFieldMapper.Resol

public DateTime(StreamInput in) throws IOException {
String formatterPattern = in.readString();
Locale locale = in.getTransportVersion().onOrAfter(TransportVersions.DATE_TIME_DOC_VALUES_LOCALES)
? LocaleUtils.parse(in.readString())
: DateFieldMapper.DEFAULT_LOCALE;
String zoneId = in.readString();
this.timeZone = ZoneId.of(zoneId);
this.formatter = DateFormatter.forPattern(formatterPattern).withZone(this.timeZone);
this.formatter = DateFormatter.forPattern(formatterPattern).withZone(this.timeZone).withLocale(locale);
this.parser = formatter.toDateMathParser();
this.resolution = DateFieldMapper.Resolution.ofOrdinal(in.readVInt());
if (in.getTransportVersion().between(TransportVersions.V_7_7_0, TransportVersions.V_8_0_0)) {
Expand All @@ -259,6 +263,9 @@ public String getWriteableName() {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(formatter.pattern());
if (out.getTransportVersion().onOrAfter(TransportVersions.DATE_TIME_DOC_VALUES_LOCALES)) {
out.writeString(formatter.locale().toString());
}
out.writeString(timeZone.getId());
out.writeVInt(resolution.ordinal());
if (out.getTransportVersion().between(TransportVersions.V_7_7_0, TransportVersions.V_8_0_0)) {
Expand Down