Skip to content

Commit

Permalink
Merge branch '8.x' into backport/8.x/pr-113623
Browse files Browse the repository at this point in the history
  • Loading branch information
elasticmachine authored Oct 7, 2024
2 parents e0d729b + 5416540 commit 3884d14
Show file tree
Hide file tree
Showing 61 changed files with 1,567 additions and 1,012 deletions.
5 changes: 5 additions & 0 deletions docs/changelog/112081.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 112081
summary: "[ES|QL] Validate index name in parser"
area: ES|QL
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/113900.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 113900
summary: Fix BWC for file-settings based role mappings
area: Authentication
type: bug
issues: []
40 changes: 24 additions & 16 deletions docs/reference/connector/docs/connectors-zoom.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -63,18 +63,22 @@ To connect to Zoom you need to https://developers.zoom.us/docs/internal-apps/s2s
6. Click on the "Create" button to create the app registration.
7. After the registration is complete, you will be redirected to the app's overview page. Take note of the "App Credentials" value, as you'll need it later.
8. Navigate to the "Scopes" section and click on the "Add Scopes" button.
9. The following scopes need to be added to the app.
9. The following granular scopes need to be added to the app.
+
[source,bash]
----
user:read:admin
meeting:read:admin
chat_channel:read:admin
recording:read:admin
chat_message:read:admin
report:read:admin
user:read:list_users:admin
meeting:read:list_meetings:admin
meeting:read:list_past_participants:admin
cloud_recording:read:list_user_recordings:admin
team_chat:read:list_user_channels:admin
team_chat:read:list_user_messages:admin
----
[NOTE]
====
The connector requires a minimum scope of `user:read:list_users:admin` to ingest data into Elasticsearch.
====
+
10. Click on the "Done" button to add the selected scopes to your app.
11. Navigate to the "Activation" section and input the necessary information to activate the app.
Expand Down Expand Up @@ -220,18 +224,22 @@ To connect to Zoom you need to https://developers.zoom.us/docs/internal-apps/s2s
6. Click on the "Create" button to create the app registration.
7. After the registration is complete, you will be redirected to the app's overview page. Take note of the "App Credentials" value, as you'll need it later.
8. Navigate to the "Scopes" section and click on the "Add Scopes" button.
9. The following scopes need to be added to the app.
9. The following granular scopes need to be added to the app.
+
[source,bash]
----
user:read:admin
meeting:read:admin
chat_channel:read:admin
recording:read:admin
chat_message:read:admin
report:read:admin
user:read:list_users:admin
meeting:read:list_meetings:admin
meeting:read:list_past_participants:admin
cloud_recording:read:list_user_recordings:admin
team_chat:read:list_user_channels:admin
team_chat:read:list_user_messages:admin
----
[NOTE]
====
The connector requires a minimum scope of `user:read:list_users:admin` to ingest data into Elasticsearch.
====
+
10. Click on the "Done" button to add the selected scopes to your app.
11. Navigate to the "Activation" section and input the necessary information to activate the app.
Expand Down
3 changes: 1 addition & 2 deletions docs/reference/mapping/types/date.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -133,8 +133,7 @@ The following parameters are accepted by `date` fields:
`locale`::

The locale to use when parsing dates since months do not have the same names
and/or abbreviations in all languages. The default is the
https://docs.oracle.com/javase/8/docs/api/java/util/Locale.html#ROOT[`ROOT` locale].
and/or abbreviations in all languages. The default is the `ENGLISH` locale.

<<ignore-malformed,`ignore_malformed`>>::

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@

package org.elasticsearch.aggregations.bucket.histogram;

import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.aggregations.bucket.AggregationMultiBucketAggregationTestCase;
import org.elasticsearch.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo;
Expand All @@ -28,7 +27,6 @@
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.elasticsearch.test.TransportVersionUtils;

import java.io.IOException;
import java.time.Instant;
Expand Down Expand Up @@ -459,33 +457,6 @@ public void testCreateWithReplacementBuckets() {
assertThat(copy.getInterval(), equalTo(orig.getInterval()));
}

public void testSerializationPre830() throws IOException {
// we need to test without sub-aggregations, otherwise we need to also update the interval within the inner aggs
InternalAutoDateHistogram instance = createTestInstance(
randomAlphaOfLengthBetween(3, 7),
createTestMetadata(),
InternalAggregations.EMPTY
);
TransportVersion version = TransportVersionUtils.randomVersionBetween(
random(),
TransportVersions.MINIMUM_COMPATIBLE,
TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_3_0)
);
InternalAutoDateHistogram deserialized = copyInstance(instance, version);
assertEquals(1, deserialized.getBucketInnerInterval());

InternalAutoDateHistogram modified = new InternalAutoDateHistogram(
deserialized.getName(),
deserialized.getBuckets(),
deserialized.getTargetBuckets(),
deserialized.getBucketInfo(),
deserialized.getFormatter(),
deserialized.getMetadata(),
instance.getBucketInnerInterval()
);
assertEqualInstances(instance, modified);
}

public void testReadFromPre830() throws IOException {
byte[] bytes = Base64.getDecoder()
.decode(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
---
setup:
- requires:
cluster_features: ["gte_v7.7.0"]
reason: "Start of the week Monday was enabled in a backport to 7.7 PR#50916"
cluster_features: ["gte_v8.16.0"]
reason: "Start of the week Sunday was changed in 8.16 as part of the locale changes"

- do:
indices.create:
Expand All @@ -25,7 +25,7 @@ setup:

---
# The inserted document has a field date=2009-11-15T14:12:12 which is Sunday.
# When aggregating per day of the week this should be considered as last day of the week (7)
# When aggregating per day of the week this should be considered as first day of the week (1)
# and this value should be used in 'key_as_string'
"Date aggregation per day of week":
- do:
Expand All @@ -44,4 +44,4 @@ setup:

- match: {hits.total: 1}
- length: { aggregations.test.buckets: 1 }
- match: { aggregations.test.buckets.0.key_as_string: "7" }
- match: { aggregations.test.buckets.0.key_as_string: "1" }
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ setup:
cluster_features: "gte_v8.15.0"
reason: fixed in 8.15.0
- do:
catch: /Cannot format stat \[sum\] with format \[DocValueFormat.DateTime\(format\[date_hour_minute_second_millis\] locale\[\], Z, MILLISECONDS\)\]/
catch: /Cannot format stat \[sum\] with format \[DocValueFormat.DateTime\(format\[date_hour_minute_second_millis\] locale\[(en)?\], Z, MILLISECONDS\)\]/
search:
index: test_date
body:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ private void assertValidDatabase(DatabaseNodeService databaseNodeService, String
IpDatabase database = databaseNodeService.getDatabase(databaseFileName);
assertNotNull(database);
assertThat(database.getDatabaseType(), equalTo(databaseType));
CountryResponse countryResponse = database.getCountry("89.160.20.128");
CountryResponse countryResponse = database.getResponse("89.160.20.128", GeoIpTestUtils::getCountry);
assertNotNull(countryResponse);
Country country = countryResponse.getCountry();
assertNotNull(country);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -205,10 +205,10 @@ private static DatabaseNodeService createRegistry(Path geoIpConfigDir, Path geoI
private static void lazyLoadReaders(DatabaseNodeService databaseNodeService) throws IOException {
if (databaseNodeService.get("GeoLite2-City.mmdb") != null) {
databaseNodeService.get("GeoLite2-City.mmdb").getDatabaseType();
databaseNodeService.get("GeoLite2-City.mmdb").getCity("2.125.160.216");
databaseNodeService.get("GeoLite2-City.mmdb").getResponse("2.125.160.216", GeoIpTestUtils::getCity);
}
databaseNodeService.get("GeoLite2-City-Test.mmdb").getDatabaseType();
databaseNodeService.get("GeoLite2-City-Test.mmdb").getCity("2.125.160.216");
databaseNodeService.get("GeoLite2-City-Test.mmdb").getResponse("2.125.160.216", GeoIpTestUtils::getCity);
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@

package org.elasticsearch.ingest.geoip;

import org.elasticsearch.common.Strings;
import org.elasticsearch.core.Nullable;

import java.util.Arrays;
Expand All @@ -19,12 +18,10 @@
import java.util.Set;

/**
* A high-level representation of a kind of geoip database that is supported by the {@link GeoIpProcessor}.
* A high-level representation of a kind of ip location database that is supported by the {@link GeoIpProcessor}.
* <p>
* A database has a set of properties that are valid to use with it (see {@link Database#properties()}),
* as well as a list of default properties to use if no properties are specified (see {@link Database#defaultProperties()}).
* <p>
* See especially {@link Database#getDatabase(String, String)} which is used to obtain instances of this class.
*/
enum Database {

Expand Down Expand Up @@ -142,61 +139,6 @@ enum Database {
)
);

private static final String CITY_DB_SUFFIX = "-City";
private static final String COUNTRY_DB_SUFFIX = "-Country";
private static final String ASN_DB_SUFFIX = "-ASN";
private static final String ANONYMOUS_IP_DB_SUFFIX = "-Anonymous-IP";
private static final String CONNECTION_TYPE_DB_SUFFIX = "-Connection-Type";
private static final String DOMAIN_DB_SUFFIX = "-Domain";
private static final String ENTERPRISE_DB_SUFFIX = "-Enterprise";
private static final String ISP_DB_SUFFIX = "-ISP";

@Nullable
private static Database getMaxmindDatabase(final String databaseType) {
if (databaseType.endsWith(Database.CITY_DB_SUFFIX)) {
return Database.City;
} else if (databaseType.endsWith(Database.COUNTRY_DB_SUFFIX)) {
return Database.Country;
} else if (databaseType.endsWith(Database.ASN_DB_SUFFIX)) {
return Database.Asn;
} else if (databaseType.endsWith(Database.ANONYMOUS_IP_DB_SUFFIX)) {
return Database.AnonymousIp;
} else if (databaseType.endsWith(Database.CONNECTION_TYPE_DB_SUFFIX)) {
return Database.ConnectionType;
} else if (databaseType.endsWith(Database.DOMAIN_DB_SUFFIX)) {
return Database.Domain;
} else if (databaseType.endsWith(Database.ENTERPRISE_DB_SUFFIX)) {
return Database.Enterprise;
} else if (databaseType.endsWith(Database.ISP_DB_SUFFIX)) {
return Database.Isp;
} else {
return null; // no match was found
}
}

/**
* Parses the passed-in databaseType (presumably from the passed-in databaseFile) and return the Database instance that is
* associated with that databaseType.
*
* @param databaseType the database type String from the metadata of the database file
* @param databaseFile the database file from which the database type was obtained
* @throws IllegalArgumentException if the databaseType is not associated with a Database instance
* @return the Database instance that is associated with the databaseType
*/
public static Database getDatabase(final String databaseType, final String databaseFile) {
Database database = null;

if (Strings.hasText(databaseType)) {
database = getMaxmindDatabase(databaseType);
}

if (database == null) {
throw new IllegalArgumentException("Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]");
}

return database;
}

private final Set<Property> properties;
private final Set<Property> defaultProperties;

Expand Down
Loading

0 comments on commit 3884d14

Please sign in to comment.