();
- parameters.put("subobjects", subobjects.toString());
- if (ESTestCase.randomBoolean()) {
- parameters.put("dynamic", "true");
- }
- if (ESTestCase.randomBoolean()) {
- parameters.put("enabled", "true");
- }
- return parameters;
- });
- }
- }))
+ var specificationBuilder = DataGeneratorSpecification.builder()
+ .withFullyDynamicMapping(ESTestCase.randomBoolean())
.withPredefinedFields(
List.of(
// Customized because it always needs doc_values for aggregations.
@@ -136,11 +95,7 @@ void logsDbMapping(XContentBuilder builder) throws IOException {
}
void standardMapping(XContentBuilder builder) throws IOException {
- if (subobjects != ObjectMapper.Subobjects.ENABLED) {
- dataGenerator.writeMapping(builder, Map.of("subobjects", subobjects.toString()));
- } else {
- dataGenerator.writeMapping(builder);
- }
+ dataGenerator.writeMapping(builder);
}
void logsDbSettings(Settings.Builder builder) {
diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestGetDataStreamsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestGetDataStreamsAction.java
index da55376fb403b..3456f4b679474 100644
--- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestGetDataStreamsAction.java
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestGetDataStreamsAction.java
@@ -11,6 +11,7 @@
import org.elasticsearch.action.datastreams.GetDataStreamAction;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.DataStreamLifecycle;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.util.set.Sets;
@@ -35,14 +36,14 @@ public class RestGetDataStreamsAction extends BaseRestHandler {
Set.of(
"name",
"include_defaults",
- "timeout",
"master_timeout",
IndicesOptions.WildcardOptions.EXPAND_WILDCARDS,
IndicesOptions.ConcreteTargetOptions.IGNORE_UNAVAILABLE,
IndicesOptions.WildcardOptions.ALLOW_NO_INDICES,
IndicesOptions.GatekeeperOptions.IGNORE_THROTTLED,
"verbose"
- )
+ ),
+ DataStream.isFailureStoreFeatureFlagEnabled() ? Set.of(IndicesOptions.FailureStoreOptions.FAILURE_STORE) : Set.of()
)
);
diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle
index b50fc86282d1f..4312221b33937 100644
--- a/modules/ingest-geoip/build.gradle
+++ b/modules/ingest-geoip/build.gradle
@@ -88,3 +88,8 @@ tasks.named("dependencyLicenses").configure {
artifacts {
restTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test"))
}
+
+tasks.named("yamlRestCompatTestTransform").configure({ task ->
+ task.skipTest("ingest_geoip/40_geoip_databases/Test adding, getting, and removing geoip databases",
+ "get databases behavior began returning more results in 8.16")
+})
diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java
index 73d8976c3a4b7..786f091e0c024 100644
--- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java
+++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java
@@ -84,7 +84,7 @@ private void assertValidDatabase(DatabaseNodeService databaseNodeService, String
IpDatabase database = databaseNodeService.getDatabase(databaseFileName);
assertNotNull(database);
assertThat(database.getDatabaseType(), equalTo(databaseType));
- CountryResponse countryResponse = database.getCountry("89.160.20.128");
+ CountryResponse countryResponse = database.getResponse("89.160.20.128", GeoIpTestUtils::getCountry);
assertNotNull(countryResponse);
Country country = countryResponse.getCountry();
assertNotNull(country);
diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java
index 2c7d5fbcc56b7..b28926673069d 100644
--- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java
+++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/ReloadingDatabasesWhilePerformingGeoLookupsIT.java
@@ -205,10 +205,10 @@ private static DatabaseNodeService createRegistry(Path geoIpConfigDir, Path geoI
private static void lazyLoadReaders(DatabaseNodeService databaseNodeService) throws IOException {
if (databaseNodeService.get("GeoLite2-City.mmdb") != null) {
databaseNodeService.get("GeoLite2-City.mmdb").getDatabaseType();
- databaseNodeService.get("GeoLite2-City.mmdb").getCity("2.125.160.216");
+ databaseNodeService.get("GeoLite2-City.mmdb").getResponse("2.125.160.216", GeoIpTestUtils::getCity);
}
databaseNodeService.get("GeoLite2-City-Test.mmdb").getDatabaseType();
- databaseNodeService.get("GeoLite2-City-Test.mmdb").getCity("2.125.160.216");
+ databaseNodeService.get("GeoLite2-City-Test.mmdb").getResponse("2.125.160.216", GeoIpTestUtils::getCity);
}
}
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java
index dccda0d58cfbf..52ca5eea52c1a 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java
@@ -9,7 +9,6 @@
package org.elasticsearch.ingest.geoip;
-import org.elasticsearch.common.Strings;
import org.elasticsearch.core.Nullable;
import java.util.Arrays;
@@ -19,12 +18,10 @@
import java.util.Set;
/**
- * A high-level representation of a kind of geoip database that is supported by the {@link GeoIpProcessor}.
+ * A high-level representation of a kind of ip location database that is supported by the {@link GeoIpProcessor}.
*
* A database has a set of properties that are valid to use with it (see {@link Database#properties()}),
* as well as a list of default properties to use if no properties are specified (see {@link Database#defaultProperties()}).
- *
- * See especially {@link Database#getDatabase(String, String)} which is used to obtain instances of this class.
*/
enum Database {
@@ -142,61 +139,6 @@ enum Database {
)
);
- private static final String CITY_DB_SUFFIX = "-City";
- private static final String COUNTRY_DB_SUFFIX = "-Country";
- private static final String ASN_DB_SUFFIX = "-ASN";
- private static final String ANONYMOUS_IP_DB_SUFFIX = "-Anonymous-IP";
- private static final String CONNECTION_TYPE_DB_SUFFIX = "-Connection-Type";
- private static final String DOMAIN_DB_SUFFIX = "-Domain";
- private static final String ENTERPRISE_DB_SUFFIX = "-Enterprise";
- private static final String ISP_DB_SUFFIX = "-ISP";
-
- @Nullable
- private static Database getMaxmindDatabase(final String databaseType) {
- if (databaseType.endsWith(Database.CITY_DB_SUFFIX)) {
- return Database.City;
- } else if (databaseType.endsWith(Database.COUNTRY_DB_SUFFIX)) {
- return Database.Country;
- } else if (databaseType.endsWith(Database.ASN_DB_SUFFIX)) {
- return Database.Asn;
- } else if (databaseType.endsWith(Database.ANONYMOUS_IP_DB_SUFFIX)) {
- return Database.AnonymousIp;
- } else if (databaseType.endsWith(Database.CONNECTION_TYPE_DB_SUFFIX)) {
- return Database.ConnectionType;
- } else if (databaseType.endsWith(Database.DOMAIN_DB_SUFFIX)) {
- return Database.Domain;
- } else if (databaseType.endsWith(Database.ENTERPRISE_DB_SUFFIX)) {
- return Database.Enterprise;
- } else if (databaseType.endsWith(Database.ISP_DB_SUFFIX)) {
- return Database.Isp;
- } else {
- return null; // no match was found
- }
- }
-
- /**
- * Parses the passed-in databaseType (presumably from the passed-in databaseFile) and return the Database instance that is
- * associated with that databaseType.
- *
- * @param databaseType the database type String from the metadata of the database file
- * @param databaseFile the database file from which the database type was obtained
- * @throws IllegalArgumentException if the databaseType is not associated with a Database instance
- * @return the Database instance that is associated with the databaseType
- */
- public static Database getDatabase(final String databaseType, final String databaseFile) {
- Database database = null;
-
- if (Strings.hasText(databaseType)) {
- database = getMaxmindDatabase(databaseType);
- }
-
- if (database == null) {
- throw new IllegalArgumentException("Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]");
- }
-
- return database;
- }
-
private final Set properties;
private final Set defaultProperties;
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java
index ce15e02e6efcc..940231b12c894 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java
@@ -20,11 +20,13 @@
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.hash.MessageDigests;
import org.elasticsearch.common.logging.HeaderWarning;
import org.elasticsearch.core.CheckedConsumer;
import org.elasticsearch.core.CheckedRunnable;
import org.elasticsearch.core.IOUtils;
+import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.env.Environment;
import org.elasticsearch.gateway.GatewayService;
@@ -37,6 +39,7 @@
import org.elasticsearch.watcher.ResourceWatcherService;
import java.io.Closeable;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.FileAlreadyExistsException;
@@ -51,8 +54,10 @@
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.HashMap;
import java.util.List;
import java.util.Locale;
+import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
@@ -541,6 +546,35 @@ public Set getConfigDatabases() {
return configDatabases.getConfigDatabases().keySet();
}
+ public Map getConfigDatabasesDetail() {
+ Map allDatabases = new HashMap<>();
+ for (Map.Entry entry : configDatabases.getConfigDatabases().entrySet()) {
+ DatabaseReaderLazyLoader databaseReaderLazyLoader = entry.getValue();
+ try {
+ allDatabases.put(
+ entry.getKey(),
+ new ConfigDatabaseDetail(
+ entry.getKey(),
+ databaseReaderLazyLoader.getMd5(),
+ databaseReaderLazyLoader.getBuildDateMillis(),
+ databaseReaderLazyLoader.getDatabaseType()
+ )
+ );
+ } catch (FileNotFoundException e) {
+ /*
+ * Since there is nothing to prevent a database from being deleted while this method is running, it is possible we get an
+ * exception here because the file no longer exists. We just log it and move on -- it's preferable to synchronization.
+ */
+ logger.trace(Strings.format("Unable to get metadata for config database %s", entry.getKey()), e);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ return allDatabases;
+ }
+
+ public record ConfigDatabaseDetail(String name, @Nullable String md5, @Nullable Long buildDateInMillis, @Nullable String type) {}
+
public Set getFilesInTemp() {
try (Stream files = Files.list(geoipTmpDirectory)) {
return files.map(Path::getFileName).map(Path::toString).collect(Collectors.toSet());
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java
index dff083ea0cde8..120afe0e9e815 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java
@@ -9,18 +9,8 @@
package org.elasticsearch.ingest.geoip;
-import com.maxmind.db.DatabaseRecord;
-import com.maxmind.db.Network;
import com.maxmind.db.NoCache;
import com.maxmind.db.Reader;
-import com.maxmind.geoip2.model.AnonymousIpResponse;
-import com.maxmind.geoip2.model.AsnResponse;
-import com.maxmind.geoip2.model.CityResponse;
-import com.maxmind.geoip2.model.ConnectionTypeResponse;
-import com.maxmind.geoip2.model.CountryResponse;
-import com.maxmind.geoip2.model.DomainResponse;
-import com.maxmind.geoip2.model.EnterpriseResponse;
-import com.maxmind.geoip2.model.IspResponse;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -28,8 +18,6 @@
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.CheckedBiFunction;
import org.elasticsearch.common.CheckedSupplier;
-import org.elasticsearch.common.network.InetAddresses;
-import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.core.Booleans;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.core.Nullable;
@@ -37,19 +25,16 @@
import java.io.File;
import java.io.IOException;
-import java.net.InetAddress;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.util.List;
import java.util.Objects;
-import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Facilitates lazy loading of the database reader, so that when the geoip plugin is installed, but not used,
* no memory is being wasted on the database reader.
*/
-class DatabaseReaderLazyLoader implements IpDatabase {
+public class DatabaseReaderLazyLoader implements IpDatabase {
private static final boolean LOAD_DATABASE_ON_HEAP = Booleans.parseBoolean(System.getProperty("es.geoip.load_db_on_heap", "false"));
@@ -63,6 +48,7 @@ class DatabaseReaderLazyLoader implements IpDatabase {
// cache the database type so that we do not re-read it on every pipeline execution
final SetOnce databaseType;
+ final SetOnce buildDate;
private volatile boolean deleteDatabaseFileOnShutdown;
private final AtomicInteger currentUsages = new AtomicInteger(0);
@@ -74,6 +60,7 @@ class DatabaseReaderLazyLoader implements IpDatabase {
this.loader = createDatabaseLoader(databasePath);
this.databaseReader = new SetOnce<>();
this.databaseType = new SetOnce<>();
+ this.buildDate = new SetOnce<>();
}
/**
@@ -94,94 +81,6 @@ public final String getDatabaseType() throws IOException {
return databaseType.get();
}
- @Nullable
- @Override
- public CityResponse getCity(String ipAddress) {
- return getResponse(ipAddress, (reader, ip) -> lookup(reader, ip, CityResponse.class, CityResponse::new));
- }
-
- @Nullable
- @Override
- public CountryResponse getCountry(String ipAddress) {
- return getResponse(ipAddress, (reader, ip) -> lookup(reader, ip, CountryResponse.class, CountryResponse::new));
- }
-
- @Nullable
- @Override
- public AsnResponse getAsn(String ipAddress) {
- return getResponse(
- ipAddress,
- (reader, ip) -> lookup(
- reader,
- ip,
- AsnResponse.class,
- (response, responseIp, network, locales) -> new AsnResponse(response, responseIp, network)
- )
- );
- }
-
- @Nullable
- @Override
- public AnonymousIpResponse getAnonymousIp(String ipAddress) {
- return getResponse(
- ipAddress,
- (reader, ip) -> lookup(
- reader,
- ip,
- AnonymousIpResponse.class,
- (response, responseIp, network, locales) -> new AnonymousIpResponse(response, responseIp, network)
- )
- );
- }
-
- @Nullable
- @Override
- public ConnectionTypeResponse getConnectionType(String ipAddress) {
- return getResponse(
- ipAddress,
- (reader, ip) -> lookup(
- reader,
- ip,
- ConnectionTypeResponse.class,
- (response, responseIp, network, locales) -> new ConnectionTypeResponse(response, responseIp, network)
- )
- );
- }
-
- @Nullable
- @Override
- public DomainResponse getDomain(String ipAddress) {
- return getResponse(
- ipAddress,
- (reader, ip) -> lookup(
- reader,
- ip,
- DomainResponse.class,
- (response, responseIp, network, locales) -> new DomainResponse(response, responseIp, network)
- )
- );
- }
-
- @Nullable
- @Override
- public EnterpriseResponse getEnterprise(String ipAddress) {
- return getResponse(ipAddress, (reader, ip) -> lookup(reader, ip, EnterpriseResponse.class, EnterpriseResponse::new));
- }
-
- @Nullable
- @Override
- public IspResponse getIsp(String ipAddress) {
- return getResponse(
- ipAddress,
- (reader, ip) -> lookup(
- reader,
- ip,
- IspResponse.class,
- (response, responseIp, network, locales) -> new IspResponse(response, responseIp, network)
- )
- );
- }
-
boolean preLookup() {
return currentUsages.updateAndGet(current -> current < 0 ? current : current + 1) > 0;
}
@@ -197,14 +96,12 @@ int current() {
return currentUsages.get();
}
+ @Override
@Nullable
- private RESPONSE getResponse(
- String ipAddress,
- CheckedBiFunction, Exception> responseProvider
- ) {
+ public RESPONSE getResponse(String ipAddress, CheckedBiFunction responseProvider) {
return cache.putIfAbsent(ipAddress, databasePath.toString(), ip -> {
try {
- return responseProvider.apply(get(), ipAddress).orElse(null);
+ return responseProvider.apply(get(), ipAddress);
} catch (Exception e) {
throw ExceptionsHelper.convertToRuntime(e);
}
@@ -261,20 +158,14 @@ private static File pathToFile(Path databasePath) {
return databasePath.toFile();
}
- @FunctionalInterface
- private interface ResponseBuilder {
- RESPONSE build(RESPONSE response, String responseIp, Network network, List locales);
- }
-
- private Optional lookup(Reader reader, String ip, Class clazz, ResponseBuilder builder)
- throws IOException {
- InetAddress inetAddress = InetAddresses.forString(ip);
- DatabaseRecord record = reader.getRecord(inetAddress, clazz);
- RESPONSE result = record.getData();
- if (result == null) {
- return Optional.empty();
- } else {
- return Optional.of(builder.build(result, NetworkAddress.format(inetAddress), record.getNetwork(), List.of("en")));
+ long getBuildDateMillis() throws IOException {
+ if (buildDate.get() == null) {
+ synchronized (buildDate) {
+ if (buildDate.get() == null) {
+ buildDate.set(loader.get().getMetadata().getBuildDate().getTime());
+ }
+ }
}
+ return buildDate.get();
}
}
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java
index acc51c1bb0b53..fa46540e29f7a 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloader.java
@@ -444,7 +444,9 @@ private void scheduleNextRun(TimeValue time) {
}
private ProviderDownload downloaderFor(DatabaseConfiguration database) {
- return new MaxmindDownload(database.name(), database.maxmind());
+ assert database.provider() instanceof DatabaseConfiguration.Maxmind
: "Attempt to use maxmind downloader with a provider of type " + database.provider().getClass();
+ return new MaxmindDownload(database.name(), (DatabaseConfiguration.Maxmind) database.provider());
}
class MaxmindDownload implements ProviderDownload {
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java
index 335331ac0ab9d..d9c9c3aaf3266 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java
@@ -26,7 +26,7 @@
* cost of deserialization for each lookup (cached or not). This comes at slight expense of higher memory usage, but significant
* reduction of CPU usage.
*/
-final class GeoIpCache {
+public final class GeoIpCache {
/**
* Internal-only sentinel object for recording that a result from the geoip database was null (i.e. there was no result). By caching
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java
index ce160b060ae4c..e2b516bf5b943 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java
@@ -9,23 +9,6 @@
package org.elasticsearch.ingest.geoip;
-import com.maxmind.db.Network;
-import com.maxmind.geoip2.model.AnonymousIpResponse;
-import com.maxmind.geoip2.model.AsnResponse;
-import com.maxmind.geoip2.model.CityResponse;
-import com.maxmind.geoip2.model.ConnectionTypeResponse;
-import com.maxmind.geoip2.model.ConnectionTypeResponse.ConnectionType;
-import com.maxmind.geoip2.model.CountryResponse;
-import com.maxmind.geoip2.model.DomainResponse;
-import com.maxmind.geoip2.model.EnterpriseResponse;
-import com.maxmind.geoip2.model.IspResponse;
-import com.maxmind.geoip2.record.City;
-import com.maxmind.geoip2.record.Continent;
-import com.maxmind.geoip2.record.Country;
-import com.maxmind.geoip2.record.Location;
-import com.maxmind.geoip2.record.Subdivision;
-
-import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
@@ -34,10 +17,10 @@
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.geoip.Database.Property;
+import org.elasticsearch.ingest.geoip.IpDataLookupFactories.IpDataLookupFactory;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -60,7 +43,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
private final Supplier isValid;
private final String targetField;
private final CheckedSupplier supplier;
- private final Set properties;
+ private final IpDataLookup ipDataLookup;
private final boolean ignoreMissing;
private final boolean firstOnly;
private final String databaseFile;
@@ -73,7 +56,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
* @param supplier a supplier of a geo-IP database reader; ideally this is lazily-loaded once on first use
* @param isValid a supplier that determines if the available database files are up-to-date and license compliant
* @param targetField the target field
- * @param properties the properties; ideally this is lazily-loaded once on first use
+ * @param ipDataLookup a lookup capable of retrieving a result from an available geo-IP database reader
* @param ignoreMissing true if documents with a missing value for the field should be ignored
* @param firstOnly true if only first result should be returned in case of array
* @param databaseFile the name of the database file being queried; used only for tagging documents if the database is unavailable
@@ -85,7 +68,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
final CheckedSupplier supplier,
final Supplier isValid,
final String targetField,
- final Set properties,
+ final IpDataLookup ipDataLookup,
final boolean ignoreMissing,
final boolean firstOnly,
final String databaseFile
@@ -95,7 +78,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
this.isValid = isValid;
this.targetField = targetField;
this.supplier = supplier;
- this.properties = properties;
+ this.ipDataLookup = ipDataLookup;
this.ignoreMissing = ignoreMissing;
this.firstOnly = firstOnly;
this.databaseFile = databaseFile;
@@ -127,7 +110,7 @@ public IngestDocument execute(IngestDocument document) throws IOException {
}
if (ip instanceof String ipString) {
- Map data = getGeoData(ipDatabase, ipString);
+ Map data = ipDataLookup.getData(ipDatabase, ipString);
if (data.isEmpty() == false) {
document.setFieldValue(targetField, data);
}
@@ -138,7 +121,7 @@ public IngestDocument execute(IngestDocument document) throws IOException {
if (ipAddr instanceof String == false) {
throw new IllegalArgumentException("array in field [" + field + "] should only contain strings");
}
- Map data = getGeoData(ipDatabase, (String) ipAddr);
+ Map data = ipDataLookup.getData(ipDatabase, (String) ipAddr);
if (data.isEmpty()) {
dataList.add(null);
continue;
@@ -161,26 +144,6 @@ public IngestDocument execute(IngestDocument document) throws IOException {
return document;
}
- private Map getGeoData(IpDatabase ipDatabase, String ipAddress) throws IOException {
- final String databaseType = ipDatabase.getDatabaseType();
- final Database database;
- try {
- database = Database.getDatabase(databaseType, databaseFile);
- } catch (IllegalArgumentException e) {
- throw new ElasticsearchParseException(e.getMessage(), e);
- }
- return switch (database) {
- case City -> retrieveCityGeoData(ipDatabase, ipAddress);
- case Country -> retrieveCountryGeoData(ipDatabase, ipAddress);
- case Asn -> retrieveAsnGeoData(ipDatabase, ipAddress);
- case AnonymousIp -> retrieveAnonymousIpGeoData(ipDatabase, ipAddress);
- case ConnectionType -> retrieveConnectionTypeGeoData(ipDatabase, ipAddress);
- case Domain -> retrieveDomainGeoData(ipDatabase, ipAddress);
- case Enterprise -> retrieveEnterpriseGeoData(ipDatabase, ipAddress);
- case Isp -> retrieveIspGeoData(ipDatabase, ipAddress);
- };
- }
-
@Override
public String getType() {
return TYPE;
@@ -199,478 +162,7 @@ String getDatabaseType() throws IOException {
}
Set getProperties() {
- return properties;
- }
-
- private Map retrieveCityGeoData(IpDatabase ipDatabase, String ipAddress) {
- CityResponse response = ipDatabase.getCity(ipAddress);
- if (response == null) {
- return Map.of();
- }
- Country country = response.getCountry();
- City city = response.getCity();
- Location location = response.getLocation();
- Continent continent = response.getContinent();
- Subdivision subdivision = response.getMostSpecificSubdivision();
-
- Map geoData = new HashMap<>();
- for (Property property : this.properties) {
- switch (property) {
- case IP -> geoData.put("ip", response.getTraits().getIpAddress());
- case COUNTRY_ISO_CODE -> {
- String countryIsoCode = country.getIsoCode();
- if (countryIsoCode != null) {
- geoData.put("country_iso_code", countryIsoCode);
- }
- }
- case COUNTRY_NAME -> {
- String countryName = country.getName();
- if (countryName != null) {
- geoData.put("country_name", countryName);
- }
- }
- case CONTINENT_CODE -> {
- String continentCode = continent.getCode();
- if (continentCode != null) {
- geoData.put("continent_code", continentCode);
- }
- }
- case CONTINENT_NAME -> {
- String continentName = continent.getName();
- if (continentName != null) {
- geoData.put("continent_name", continentName);
- }
- }
- case REGION_ISO_CODE -> {
- // ISO 3166-2 code for country subdivisions.
- // See iso.org/iso-3166-country-codes.html
- String countryIso = country.getIsoCode();
- String subdivisionIso = subdivision.getIsoCode();
- if (countryIso != null && subdivisionIso != null) {
- String regionIsoCode = countryIso + "-" + subdivisionIso;
- geoData.put("region_iso_code", regionIsoCode);
- }
- }
- case REGION_NAME -> {
- String subdivisionName = subdivision.getName();
- if (subdivisionName != null) {
- geoData.put("region_name", subdivisionName);
- }
- }
- case CITY_NAME -> {
- String cityName = city.getName();
- if (cityName != null) {
- geoData.put("city_name", cityName);
- }
- }
- case TIMEZONE -> {
- String locationTimeZone = location.getTimeZone();
- if (locationTimeZone != null) {
- geoData.put("timezone", locationTimeZone);
- }
- }
- case LOCATION -> {
- Double latitude = location.getLatitude();
- Double longitude = location.getLongitude();
- if (latitude != null && longitude != null) {
- Map locationObject = new HashMap<>();
- locationObject.put("lat", latitude);
- locationObject.put("lon", longitude);
- geoData.put("location", locationObject);
- }
- }
- }
- }
- return geoData;
- }
-
- private Map retrieveCountryGeoData(IpDatabase ipDatabase, String ipAddress) {
- CountryResponse response = ipDatabase.getCountry(ipAddress);
- if (response == null) {
- return Map.of();
- }
- Country country = response.getCountry();
- Continent continent = response.getContinent();
-
- Map geoData = new HashMap<>();
- for (Property property : this.properties) {
- switch (property) {
- case IP -> geoData.put("ip", response.getTraits().getIpAddress());
- case COUNTRY_ISO_CODE -> {
- String countryIsoCode = country.getIsoCode();
- if (countryIsoCode != null) {
- geoData.put("country_iso_code", countryIsoCode);
- }
- }
- case COUNTRY_NAME -> {
- String countryName = country.getName();
- if (countryName != null) {
- geoData.put("country_name", countryName);
- }
- }
- case CONTINENT_CODE -> {
- String continentCode = continent.getCode();
- if (continentCode != null) {
- geoData.put("continent_code", continentCode);
- }
- }
- case CONTINENT_NAME -> {
- String continentName = continent.getName();
- if (continentName != null) {
- geoData.put("continent_name", continentName);
- }
- }
- }
- }
- return geoData;
- }
-
- private Map retrieveAsnGeoData(IpDatabase ipDatabase, String ipAddress) {
- AsnResponse response = ipDatabase.getAsn(ipAddress);
- if (response == null) {
- return Map.of();
- }
- Long asn = response.getAutonomousSystemNumber();
- String organizationName = response.getAutonomousSystemOrganization();
- Network network = response.getNetwork();
-
- Map geoData = new HashMap<>();
- for (Property property : this.properties) {
- switch (property) {
- case IP -> geoData.put("ip", response.getIpAddress());
- case ASN -> {
- if (asn != null) {
- geoData.put("asn", asn);
- }
- }
- case ORGANIZATION_NAME -> {
- if (organizationName != null) {
- geoData.put("organization_name", organizationName);
- }
- }
- case NETWORK -> {
- if (network != null) {
- geoData.put("network", network.toString());
- }
- }
- }
- }
- return geoData;
- }
-
- private Map retrieveAnonymousIpGeoData(IpDatabase ipDatabase, String ipAddress) {
- AnonymousIpResponse response = ipDatabase.getAnonymousIp(ipAddress);
- if (response == null) {
- return Map.of();
- }
-
- boolean isHostingProvider = response.isHostingProvider();
- boolean isTorExitNode = response.isTorExitNode();
- boolean isAnonymousVpn = response.isAnonymousVpn();
- boolean isAnonymous = response.isAnonymous();
- boolean isPublicProxy = response.isPublicProxy();
- boolean isResidentialProxy = response.isResidentialProxy();
-
- Map geoData = new HashMap<>();
- for (Property property : this.properties) {
- switch (property) {
- case IP -> geoData.put("ip", response.getIpAddress());
- case HOSTING_PROVIDER -> {
- geoData.put("hosting_provider", isHostingProvider);
- }
- case TOR_EXIT_NODE -> {
- geoData.put("tor_exit_node", isTorExitNode);
- }
- case ANONYMOUS_VPN -> {
- geoData.put("anonymous_vpn", isAnonymousVpn);
- }
- case ANONYMOUS -> {
- geoData.put("anonymous", isAnonymous);
- }
- case PUBLIC_PROXY -> {
- geoData.put("public_proxy", isPublicProxy);
- }
- case RESIDENTIAL_PROXY -> {
- geoData.put("residential_proxy", isResidentialProxy);
- }
- }
- }
- return geoData;
- }
-
- private Map retrieveConnectionTypeGeoData(IpDatabase ipDatabase, String ipAddress) {
- ConnectionTypeResponse response = ipDatabase.getConnectionType(ipAddress);
- if (response == null) {
- return Map.of();
- }
-
- ConnectionType connectionType = response.getConnectionType();
-
- Map geoData = new HashMap<>();
- for (Property property : this.properties) {
- switch (property) {
- case IP -> geoData.put("ip", response.getIpAddress());
- case CONNECTION_TYPE -> {
- if (connectionType != null) {
- geoData.put("connection_type", connectionType.toString());
- }
- }
- }
- }
- return geoData;
- }
-
- private Map retrieveDomainGeoData(IpDatabase ipDatabase, String ipAddress) {
- DomainResponse response = ipDatabase.getDomain(ipAddress);
- if (response == null) {
- return Map.of();
- }
-
- String domain = response.getDomain();
-
- Map geoData = new HashMap<>();
- for (Property property : this.properties) {
- switch (property) {
- case IP -> geoData.put("ip", response.getIpAddress());
- case DOMAIN -> {
- if (domain != null) {
- geoData.put("domain", domain);
- }
- }
- }
- }
- return geoData;
- }
-
- private Map retrieveEnterpriseGeoData(IpDatabase ipDatabase, String ipAddress) {
- EnterpriseResponse response = ipDatabase.getEnterprise(ipAddress);
- if (response == null) {
- return Map.of();
- }
-
- Country country = response.getCountry();
- City city = response.getCity();
- Location location = response.getLocation();
- Continent continent = response.getContinent();
- Subdivision subdivision = response.getMostSpecificSubdivision();
-
- Long asn = response.getTraits().getAutonomousSystemNumber();
- String organizationName = response.getTraits().getAutonomousSystemOrganization();
- Network network = response.getTraits().getNetwork();
-
- String isp = response.getTraits().getIsp();
- String ispOrganization = response.getTraits().getOrganization();
- String mobileCountryCode = response.getTraits().getMobileCountryCode();
- String mobileNetworkCode = response.getTraits().getMobileNetworkCode();
-
- boolean isHostingProvider = response.getTraits().isHostingProvider();
- boolean isTorExitNode = response.getTraits().isTorExitNode();
- boolean isAnonymousVpn = response.getTraits().isAnonymousVpn();
- boolean isAnonymous = response.getTraits().isAnonymous();
- boolean isPublicProxy = response.getTraits().isPublicProxy();
- boolean isResidentialProxy = response.getTraits().isResidentialProxy();
-
- String userType = response.getTraits().getUserType();
-
- String domain = response.getTraits().getDomain();
-
- ConnectionType connectionType = response.getTraits().getConnectionType();
-
- Map geoData = new HashMap<>();
- for (Property property : this.properties) {
- switch (property) {
- case IP -> geoData.put("ip", response.getTraits().getIpAddress());
- case COUNTRY_ISO_CODE -> {
- String countryIsoCode = country.getIsoCode();
- if (countryIsoCode != null) {
- geoData.put("country_iso_code", countryIsoCode);
- }
- }
- case COUNTRY_NAME -> {
- String countryName = country.getName();
- if (countryName != null) {
- geoData.put("country_name", countryName);
- }
- }
- case CONTINENT_CODE -> {
- String continentCode = continent.getCode();
- if (continentCode != null) {
- geoData.put("continent_code", continentCode);
- }
- }
- case CONTINENT_NAME -> {
- String continentName = continent.getName();
- if (continentName != null) {
- geoData.put("continent_name", continentName);
- }
- }
- case REGION_ISO_CODE -> {
- // ISO 3166-2 code for country subdivisions.
- // See iso.org/iso-3166-country-codes.html
- String countryIso = country.getIsoCode();
- String subdivisionIso = subdivision.getIsoCode();
- if (countryIso != null && subdivisionIso != null) {
- String regionIsoCode = countryIso + "-" + subdivisionIso;
- geoData.put("region_iso_code", regionIsoCode);
- }
- }
- case REGION_NAME -> {
- String subdivisionName = subdivision.getName();
- if (subdivisionName != null) {
- geoData.put("region_name", subdivisionName);
- }
- }
- case CITY_NAME -> {
- String cityName = city.getName();
- if (cityName != null) {
- geoData.put("city_name", cityName);
- }
- }
- case TIMEZONE -> {
- String locationTimeZone = location.getTimeZone();
- if (locationTimeZone != null) {
- geoData.put("timezone", locationTimeZone);
- }
- }
- case LOCATION -> {
- Double latitude = location.getLatitude();
- Double longitude = location.getLongitude();
- if (latitude != null && longitude != null) {
- Map locationObject = new HashMap<>();
- locationObject.put("lat", latitude);
- locationObject.put("lon", longitude);
- geoData.put("location", locationObject);
- }
- }
- case ASN -> {
- if (asn != null) {
- geoData.put("asn", asn);
- }
- }
- case ORGANIZATION_NAME -> {
- if (organizationName != null) {
- geoData.put("organization_name", organizationName);
- }
- }
- case NETWORK -> {
- if (network != null) {
- geoData.put("network", network.toString());
- }
- }
- case HOSTING_PROVIDER -> {
- geoData.put("hosting_provider", isHostingProvider);
- }
- case TOR_EXIT_NODE -> {
- geoData.put("tor_exit_node", isTorExitNode);
- }
- case ANONYMOUS_VPN -> {
- geoData.put("anonymous_vpn", isAnonymousVpn);
- }
- case ANONYMOUS -> {
- geoData.put("anonymous", isAnonymous);
- }
- case PUBLIC_PROXY -> {
- geoData.put("public_proxy", isPublicProxy);
- }
- case RESIDENTIAL_PROXY -> {
- geoData.put("residential_proxy", isResidentialProxy);
- }
- case DOMAIN -> {
- if (domain != null) {
- geoData.put("domain", domain);
- }
- }
- case ISP -> {
- if (isp != null) {
- geoData.put("isp", isp);
- }
- }
- case ISP_ORGANIZATION_NAME -> {
- if (ispOrganization != null) {
- geoData.put("isp_organization_name", ispOrganization);
- }
- }
- case MOBILE_COUNTRY_CODE -> {
- if (mobileCountryCode != null) {
- geoData.put("mobile_country_code", mobileCountryCode);
- }
- }
- case MOBILE_NETWORK_CODE -> {
- if (mobileNetworkCode != null) {
- geoData.put("mobile_network_code", mobileNetworkCode);
- }
- }
- case USER_TYPE -> {
- if (userType != null) {
- geoData.put("user_type", userType);
- }
- }
- case CONNECTION_TYPE -> {
- if (connectionType != null) {
- geoData.put("connection_type", connectionType.toString());
- }
- }
- }
- }
- return geoData;
- }
-
- private Map retrieveIspGeoData(IpDatabase ipDatabase, String ipAddress) {
- IspResponse response = ipDatabase.getIsp(ipAddress);
- if (response == null) {
- return Map.of();
- }
-
- String isp = response.getIsp();
- String ispOrganization = response.getOrganization();
- String mobileNetworkCode = response.getMobileNetworkCode();
- String mobileCountryCode = response.getMobileCountryCode();
- Long asn = response.getAutonomousSystemNumber();
- String organizationName = response.getAutonomousSystemOrganization();
- Network network = response.getNetwork();
-
- Map geoData = new HashMap<>();
- for (Property property : this.properties) {
- switch (property) {
- case IP -> geoData.put("ip", response.getIpAddress());
- case ASN -> {
- if (asn != null) {
- geoData.put("asn", asn);
- }
- }
- case ORGANIZATION_NAME -> {
- if (organizationName != null) {
- geoData.put("organization_name", organizationName);
- }
- }
- case NETWORK -> {
- if (network != null) {
- geoData.put("network", network.toString());
- }
- }
- case ISP -> {
- if (isp != null) {
- geoData.put("isp", isp);
- }
- }
- case ISP_ORGANIZATION_NAME -> {
- if (ispOrganization != null) {
- geoData.put("isp_organization_name", ispOrganization);
- }
- }
- case MOBILE_COUNTRY_CODE -> {
- if (mobileCountryCode != null) {
- geoData.put("mobile_country_code", mobileCountryCode);
- }
- }
- case MOBILE_NETWORK_CODE -> {
- if (mobileNetworkCode != null) {
- geoData.put("mobile_network_code", mobileNetworkCode);
- }
- }
- }
- }
- return geoData;
+ return ipDataLookup.getProperties();
}
/**
@@ -752,19 +244,20 @@ public Processor create(
databaseType = ipDatabase.getDatabaseType();
}
- final Database database;
+ final IpDataLookupFactory factory;
try {
- database = Database.getDatabase(databaseType, databaseFile);
+ factory = IpDataLookupFactories.get(databaseType, databaseFile);
} catch (IllegalArgumentException e) {
throw newConfigurationException(TYPE, processorTag, "database_file", e.getMessage());
}
- final Set properties;
+ final IpDataLookup ipDataLookup;
try {
- properties = database.parseProperties(propertyNames);
+ ipDataLookup = factory.create(propertyNames);
} catch (IllegalArgumentException e) {
throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage());
}
+
return new GeoIpProcessor(
processorTag,
description,
@@ -772,7 +265,7 @@ public Processor create(
new DatabaseVerifyingSupplier(ipDatabaseProvider, databaseFile, databaseType),
() -> ipDatabaseProvider.isValid(databaseFile),
targetField,
- properties,
+ ipDataLookup,
ignoreMissing,
firstOnly,
databaseFile
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java
index 721493ac5ee5d..09ed10568ce8d 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java
@@ -41,7 +41,7 @@
import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-class GeoIpTaskState implements PersistentTaskState, VersionedNamedWriteable {
+public class GeoIpTaskState implements PersistentTaskState, VersionedNamedWriteable {
private static boolean includeSha256(TransportVersion version) {
return version.isPatchFrom(TransportVersions.ENTERPRISE_GEOIP_DOWNLOADER_BACKPORT_8_15)
@@ -150,7 +150,7 @@ public void writeTo(StreamOutput out) throws IOException {
});
}
- record Metadata(long lastUpdate, int firstChunk, int lastChunk, String md5, long lastCheck, @Nullable String sha256)
+ public record Metadata(long lastUpdate, int firstChunk, int lastChunk, String md5, long lastCheck, @Nullable String sha256)
implements
ToXContentObject {
@@ -198,7 +198,7 @@ public static Metadata fromXContent(XContentParser parser) {
}
}
- Metadata {
+ public Metadata {
Objects.requireNonNull(md5);
}
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java
index c7d3db5f5b572..b6e73f3f33f7c 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadata.java
@@ -15,10 +15,9 @@
import org.elasticsearch.cluster.DiffableUtils;
import org.elasticsearch.cluster.NamedDiff;
import org.elasticsearch.cluster.metadata.Metadata;
-import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
+import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.ingest.geoip.direct.DatabaseConfigurationMetadata;
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ParseField;
@@ -91,8 +90,8 @@ public static IngestGeoIpMetadata fromXContent(XContentParser parser) throws IOE
}
@Override
- public Iterator extends ToXContent> toXContentChunked(ToXContent.Params ignored) {
- return Iterators.concat(ChunkedToXContentHelper.xContentValuesMap(DATABASES_FIELD.getPreferredName(), databases));
+ public Iterator extends ToXContent> toXContentChunked(ToXContent.Params params) {
+ return ChunkedToXContent.builder(params).xContentObjectFields(DATABASES_FIELD.getPreferredName(), databases);
}
@Override
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java
index 9551ca29294ab..f5ae869841b82 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java
@@ -31,6 +31,7 @@
import org.elasticsearch.ingest.EnterpriseGeoIpTask.EnterpriseGeoIpTaskParams;
import org.elasticsearch.ingest.IngestService;
import org.elasticsearch.ingest.Processor;
+import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration;
import org.elasticsearch.ingest.geoip.direct.DeleteDatabaseConfigurationAction;
import org.elasticsearch.ingest.geoip.direct.GetDatabaseConfigurationAction;
import org.elasticsearch.ingest.geoip.direct.PutDatabaseConfigurationAction;
@@ -232,7 +233,22 @@ public List getNamedWriteables() {
new NamedWriteableRegistry.Entry(PersistentTaskParams.class, GEOIP_DOWNLOADER, GeoIpTaskParams::new),
new NamedWriteableRegistry.Entry(PersistentTaskState.class, ENTERPRISE_GEOIP_DOWNLOADER, EnterpriseGeoIpTaskState::new),
new NamedWriteableRegistry.Entry(PersistentTaskParams.class, ENTERPRISE_GEOIP_DOWNLOADER, EnterpriseGeoIpTaskParams::new),
- new NamedWriteableRegistry.Entry(Task.Status.class, GEOIP_DOWNLOADER, GeoIpDownloaderStats::new)
+ new NamedWriteableRegistry.Entry(Task.Status.class, GEOIP_DOWNLOADER, GeoIpDownloaderStats::new),
+ new NamedWriteableRegistry.Entry(
+ DatabaseConfiguration.Provider.class,
+ DatabaseConfiguration.Maxmind.NAME,
+ DatabaseConfiguration.Maxmind::new
+ ),
+ new NamedWriteableRegistry.Entry(
+ DatabaseConfiguration.Provider.class,
+ DatabaseConfiguration.Local.NAME,
+ DatabaseConfiguration.Local::new
+ ),
+ new NamedWriteableRegistry.Entry(
+ DatabaseConfiguration.Provider.class,
+ DatabaseConfiguration.Web.NAME,
+ DatabaseConfiguration.Web::new
+ )
);
}
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookup.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookup.java
new file mode 100644
index 0000000000000..7442c8e930886
--- /dev/null
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookup.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.ingest.geoip;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Set;
+
+interface IpDataLookup {
+ /**
+ * Gets data from the provided {@code ipDatabase} for the provided {@code ip}
+ *
+ * @param ipDatabase the database from which to lookup a result
+ * @param ip the ip address
+ * @return a map of data corresponding to the configured properties
+ * @throws IOException if the implementation encounters any problem while retrieving the response
+ */
+ Map getData(IpDatabase ipDatabase, String ip) throws IOException;
+
+ /**
+ * @return the set of properties this lookup will provide
+ */
+ Set getProperties();
+}
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java
new file mode 100644
index 0000000000000..990788978a0ca
--- /dev/null
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.ingest.geoip;
+
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.core.Nullable;
+
+import java.util.List;
+import java.util.Set;
+import java.util.function.Function;
+
+final class IpDataLookupFactories {
+
+ private IpDataLookupFactories() {
+ // utility class
+ }
+
+ interface IpDataLookupFactory {
+ IpDataLookup create(List properties);
+ }
+
+ private static final String CITY_DB_SUFFIX = "-City";
+ private static final String COUNTRY_DB_SUFFIX = "-Country";
+ private static final String ASN_DB_SUFFIX = "-ASN";
+ private static final String ANONYMOUS_IP_DB_SUFFIX = "-Anonymous-IP";
+ private static final String CONNECTION_TYPE_DB_SUFFIX = "-Connection-Type";
+ private static final String DOMAIN_DB_SUFFIX = "-Domain";
+ private static final String ENTERPRISE_DB_SUFFIX = "-Enterprise";
+ private static final String ISP_DB_SUFFIX = "-ISP";
+
+ @Nullable
+ private static Database getMaxmindDatabase(final String databaseType) {
+ if (databaseType.endsWith(CITY_DB_SUFFIX)) {
+ return Database.City;
+ } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) {
+ return Database.Country;
+ } else if (databaseType.endsWith(ASN_DB_SUFFIX)) {
+ return Database.Asn;
+ } else if (databaseType.endsWith(ANONYMOUS_IP_DB_SUFFIX)) {
+ return Database.AnonymousIp;
+ } else if (databaseType.endsWith(CONNECTION_TYPE_DB_SUFFIX)) {
+ return Database.ConnectionType;
+ } else if (databaseType.endsWith(DOMAIN_DB_SUFFIX)) {
+ return Database.Domain;
+ } else if (databaseType.endsWith(ENTERPRISE_DB_SUFFIX)) {
+ return Database.Enterprise;
+ } else if (databaseType.endsWith(ISP_DB_SUFFIX)) {
+ return Database.Isp;
+ } else {
+ return null; // no match was found
+ }
+ }
+
+ /**
+ * Parses the passed-in databaseType and return the Database instance that is
+ * associated with that databaseType.
+ *
+ * @param databaseType the database type String from the metadata of the database file
+ * @return the Database instance that is associated with the databaseType
+ */
+ @Nullable
+ static Database getDatabase(final String databaseType) {
+ Database database = null;
+
+ if (Strings.hasText(databaseType)) {
+ database = getMaxmindDatabase(databaseType);
+ }
+
+ return database;
+ }
+
+ static Function, IpDataLookup> getMaxmindLookup(final Database database) {
+ return switch (database) {
+ case City -> MaxmindIpDataLookups.City::new;
+ case Country -> MaxmindIpDataLookups.Country::new;
+ case Asn -> MaxmindIpDataLookups.Asn::new;
+ case AnonymousIp -> MaxmindIpDataLookups.AnonymousIp::new;
+ case ConnectionType -> MaxmindIpDataLookups.ConnectionType::new;
+ case Domain -> MaxmindIpDataLookups.Domain::new;
+ case Enterprise -> MaxmindIpDataLookups.Enterprise::new;
+ case Isp -> MaxmindIpDataLookups.Isp::new;
+ };
+ }
+
+ static IpDataLookupFactory get(final String databaseType, final String databaseFile) {
+ final Database database = getDatabase(databaseType);
+ if (database == null) {
+ throw new IllegalArgumentException("Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]");
+ }
+
+ final Function, IpDataLookup> factoryMethod = getMaxmindLookup(database);
+
+ // note: this can't presently be null, but keep this check -- it will be useful in the near future
+ if (factoryMethod == null) {
+ throw new IllegalArgumentException("Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]");
+ }
+
+ return (properties) -> factoryMethod.apply(database.parseProperties(properties));
+ }
+}
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDatabase.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDatabase.java
index f416259a87d27..db1ffc1c682b8 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDatabase.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDatabase.java
@@ -9,15 +9,9 @@
package org.elasticsearch.ingest.geoip;
-import com.maxmind.geoip2.model.AnonymousIpResponse;
-import com.maxmind.geoip2.model.AsnResponse;
-import com.maxmind.geoip2.model.CityResponse;
-import com.maxmind.geoip2.model.ConnectionTypeResponse;
-import com.maxmind.geoip2.model.CountryResponse;
-import com.maxmind.geoip2.model.DomainResponse;
-import com.maxmind.geoip2.model.EnterpriseResponse;
-import com.maxmind.geoip2.model.IspResponse;
+import com.maxmind.db.Reader;
+import org.elasticsearch.common.CheckedBiFunction;
import org.elasticsearch.core.Nullable;
import java.io.IOException;
@@ -34,44 +28,15 @@ public interface IpDatabase extends AutoCloseable {
String getDatabaseType() throws IOException;
/**
- * @param ipAddress the IP address to look up
- * @return a response containing the city data for the given address if it exists, or null
if it could not be found
- * @throws UnsupportedOperationException may be thrown if the implementation does not support retrieving city data
- */
- @Nullable
- CityResponse getCity(String ipAddress);
-
- /**
- * @param ipAddress the IP address to look up
- * @return a response containing the country data for the given address if it exists, or null
if it could not be found
- * @throws UnsupportedOperationException may be thrown if the implementation does not support retrieving country data
- */
- @Nullable
- CountryResponse getCountry(String ipAddress);
-
- /**
- * @param ipAddress the IP address to look up
- * @return a response containing the Autonomous System Number for the given address if it exists, or null
if it could not
- * be found
- * @throws UnsupportedOperationException may be thrown if the implementation does not support retrieving ASN data
+ * Returns a response from this database's reader for the given IP address.
+ *
+ * @param ipAddress the address to lookup
+ * @param responseProvider a method for extracting a response from a {@link Reader}, usually this will be a method reference
+ * @return a possibly-null response
+ * @param the type of response that will be returned
*/
@Nullable
- AsnResponse getAsn(String ipAddress);
-
- @Nullable
- AnonymousIpResponse getAnonymousIp(String ipAddress);
-
- @Nullable
- ConnectionTypeResponse getConnectionType(String ipAddress);
-
- @Nullable
- DomainResponse getDomain(String ipAddress);
-
- @Nullable
- EnterpriseResponse getEnterprise(String ipAddress);
-
- @Nullable
- IspResponse getIsp(String ipAddress);
+ RESPONSE getResponse(String ipAddress, CheckedBiFunction responseProvider);
/**
* Releases the current database object. Called after processing a single document. Databases should be closed or returned to a
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java
new file mode 100644
index 0000000000000..5b22b3f4005a9
--- /dev/null
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java
@@ -0,0 +1,606 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.ingest.geoip;
+
+import com.maxmind.db.DatabaseRecord;
+import com.maxmind.db.Network;
+import com.maxmind.db.Reader;
+import com.maxmind.geoip2.model.AbstractResponse;
+import com.maxmind.geoip2.model.AnonymousIpResponse;
+import com.maxmind.geoip2.model.AsnResponse;
+import com.maxmind.geoip2.model.CityResponse;
+import com.maxmind.geoip2.model.ConnectionTypeResponse;
+import com.maxmind.geoip2.model.CountryResponse;
+import com.maxmind.geoip2.model.DomainResponse;
+import com.maxmind.geoip2.model.EnterpriseResponse;
+import com.maxmind.geoip2.model.IspResponse;
+import com.maxmind.geoip2.record.Continent;
+import com.maxmind.geoip2.record.Location;
+import com.maxmind.geoip2.record.Subdivision;
+
+import org.elasticsearch.common.network.InetAddresses;
+import org.elasticsearch.common.network.NetworkAddress;
+import org.elasticsearch.core.Nullable;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * A collection of {@link IpDataLookup} implementations for MaxMind databases
+ */
+final class MaxmindIpDataLookups {
+
+ private MaxmindIpDataLookups() {
+ // utility class
+ }
+
+ static class AnonymousIp extends AbstractBase {
+ AnonymousIp(final Set properties) {
+ super(
+ properties,
+ AnonymousIpResponse.class,
+ (response, ipAddress, network, locales) -> new AnonymousIpResponse(response, ipAddress, network)
+ );
+ }
+
+ @Override
+ protected Map transform(final AnonymousIpResponse response) {
+ boolean isHostingProvider = response.isHostingProvider();
+ boolean isTorExitNode = response.isTorExitNode();
+ boolean isAnonymousVpn = response.isAnonymousVpn();
+ boolean isAnonymous = response.isAnonymous();
+ boolean isPublicProxy = response.isPublicProxy();
+ boolean isResidentialProxy = response.isResidentialProxy();
+
+ Map data = new HashMap<>();
+ for (Database.Property property : this.properties) {
+ switch (property) {
+ case IP -> data.put("ip", response.getIpAddress());
+ case HOSTING_PROVIDER -> {
+ data.put("hosting_provider", isHostingProvider);
+ }
+ case TOR_EXIT_NODE -> {
+ data.put("tor_exit_node", isTorExitNode);
+ }
+ case ANONYMOUS_VPN -> {
+ data.put("anonymous_vpn", isAnonymousVpn);
+ }
+ case ANONYMOUS -> {
+ data.put("anonymous", isAnonymous);
+ }
+ case PUBLIC_PROXY -> {
+ data.put("public_proxy", isPublicProxy);
+ }
+ case RESIDENTIAL_PROXY -> {
+ data.put("residential_proxy", isResidentialProxy);
+ }
+ }
+ }
+ return data;
+ }
+ }
+
+ static class Asn extends AbstractBase {
+ Asn(Set properties) {
+ super(properties, AsnResponse.class, (response, ipAddress, network, locales) -> new AsnResponse(response, ipAddress, network));
+ }
+
+ @Override
+ protected Map transform(final AsnResponse response) {
+ Long asn = response.getAutonomousSystemNumber();
+ String organizationName = response.getAutonomousSystemOrganization();
+ Network network = response.getNetwork();
+
+ Map data = new HashMap<>();
+ for (Database.Property property : this.properties) {
+ switch (property) {
+ case IP -> data.put("ip", response.getIpAddress());
+ case ASN -> {
+ if (asn != null) {
+ data.put("asn", asn);
+ }
+ }
+ case ORGANIZATION_NAME -> {
+ if (organizationName != null) {
+ data.put("organization_name", organizationName);
+ }
+ }
+ case NETWORK -> {
+ if (network != null) {
+ data.put("network", network.toString());
+ }
+ }
+ }
+ }
+ return data;
+ }
+ }
+
+ static class City extends AbstractBase {
+ City(final Set properties) {
+ super(properties, CityResponse.class, CityResponse::new);
+ }
+
+ @Override
+ protected Map transform(final CityResponse response) {
+ com.maxmind.geoip2.record.Country country = response.getCountry();
+ com.maxmind.geoip2.record.City city = response.getCity();
+ Location location = response.getLocation();
+ Continent continent = response.getContinent();
+ Subdivision subdivision = response.getMostSpecificSubdivision();
+
+ Map data = new HashMap<>();
+ for (Database.Property property : this.properties) {
+ switch (property) {
+ case IP -> data.put("ip", response.getTraits().getIpAddress());
+ case COUNTRY_ISO_CODE -> {
+ String countryIsoCode = country.getIsoCode();
+ if (countryIsoCode != null) {
+ data.put("country_iso_code", countryIsoCode);
+ }
+ }
+ case COUNTRY_NAME -> {
+ String countryName = country.getName();
+ if (countryName != null) {
+ data.put("country_name", countryName);
+ }
+ }
+ case CONTINENT_CODE -> {
+ String continentCode = continent.getCode();
+ if (continentCode != null) {
+ data.put("continent_code", continentCode);
+ }
+ }
+ case CONTINENT_NAME -> {
+ String continentName = continent.getName();
+ if (continentName != null) {
+ data.put("continent_name", continentName);
+ }
+ }
+ case REGION_ISO_CODE -> {
+ // ISO 3166-2 code for country subdivisions.
+ // See iso.org/iso-3166-country-codes.html
+ String countryIso = country.getIsoCode();
+ String subdivisionIso = subdivision.getIsoCode();
+ if (countryIso != null && subdivisionIso != null) {
+ String regionIsoCode = countryIso + "-" + subdivisionIso;
+ data.put("region_iso_code", regionIsoCode);
+ }
+ }
+ case REGION_NAME -> {
+ String subdivisionName = subdivision.getName();
+ if (subdivisionName != null) {
+ data.put("region_name", subdivisionName);
+ }
+ }
+ case CITY_NAME -> {
+ String cityName = city.getName();
+ if (cityName != null) {
+ data.put("city_name", cityName);
+ }
+ }
+ case TIMEZONE -> {
+ String locationTimeZone = location.getTimeZone();
+ if (locationTimeZone != null) {
+ data.put("timezone", locationTimeZone);
+ }
+ }
+ case LOCATION -> {
+ Double latitude = location.getLatitude();
+ Double longitude = location.getLongitude();
+ if (latitude != null && longitude != null) {
+ Map locationObject = new HashMap<>();
+ locationObject.put("lat", latitude);
+ locationObject.put("lon", longitude);
+ data.put("location", locationObject);
+ }
+ }
+ }
+ }
+ return data;
+ }
+ }
+
+ static class ConnectionType extends AbstractBase {
+ ConnectionType(final Set properties) {
+ super(
+ properties,
+ ConnectionTypeResponse.class,
+ (response, ipAddress, network, locales) -> new ConnectionTypeResponse(response, ipAddress, network)
+ );
+ }
+
+ @Override
+ protected Map transform(final ConnectionTypeResponse response) {
+ ConnectionTypeResponse.ConnectionType connectionType = response.getConnectionType();
+
+ Map data = new HashMap<>();
+ for (Database.Property property : this.properties) {
+ switch (property) {
+ case IP -> data.put("ip", response.getIpAddress());
+ case CONNECTION_TYPE -> {
+ if (connectionType != null) {
+ data.put("connection_type", connectionType.toString());
+ }
+ }
+ }
+ }
+ return data;
+ }
+ }
+
+ static class Country extends AbstractBase {
+ Country(final Set properties) {
+ super(properties, CountryResponse.class, CountryResponse::new);
+ }
+
+ @Override
+ protected Map transform(final CountryResponse response) {
+ com.maxmind.geoip2.record.Country country = response.getCountry();
+ Continent continent = response.getContinent();
+
+ Map data = new HashMap<>();
+ for (Database.Property property : this.properties) {
+ switch (property) {
+ case IP -> data.put("ip", response.getTraits().getIpAddress());
+ case COUNTRY_ISO_CODE -> {
+ String countryIsoCode = country.getIsoCode();
+ if (countryIsoCode != null) {
+ data.put("country_iso_code", countryIsoCode);
+ }
+ }
+ case COUNTRY_NAME -> {
+ String countryName = country.getName();
+ if (countryName != null) {
+ data.put("country_name", countryName);
+ }
+ }
+ case CONTINENT_CODE -> {
+ String continentCode = continent.getCode();
+ if (continentCode != null) {
+ data.put("continent_code", continentCode);
+ }
+ }
+ case CONTINENT_NAME -> {
+ String continentName = continent.getName();
+ if (continentName != null) {
+ data.put("continent_name", continentName);
+ }
+ }
+ }
+ }
+ return data;
+ }
+ }
+
+ static class Domain extends AbstractBase {
+ Domain(final Set properties) {
+ super(
+ properties,
+ DomainResponse.class,
+ (response, ipAddress, network, locales) -> new DomainResponse(response, ipAddress, network)
+ );
+ }
+
+ @Override
+ protected Map transform(final DomainResponse response) {
+ String domain = response.getDomain();
+
+ Map data = new HashMap<>();
+ for (Database.Property property : this.properties) {
+ switch (property) {
+ case IP -> data.put("ip", response.getIpAddress());
+ case DOMAIN -> {
+ if (domain != null) {
+ data.put("domain", domain);
+ }
+ }
+ }
+ }
+ return data;
+ }
+ }
+
+ static class Enterprise extends AbstractBase {
+ Enterprise(final Set properties) {
+ super(properties, EnterpriseResponse.class, EnterpriseResponse::new);
+ }
+
+ @Override
+ protected Map transform(final EnterpriseResponse response) {
+ com.maxmind.geoip2.record.Country country = response.getCountry();
+ com.maxmind.geoip2.record.City city = response.getCity();
+ Location location = response.getLocation();
+ Continent continent = response.getContinent();
+ Subdivision subdivision = response.getMostSpecificSubdivision();
+
+ Long asn = response.getTraits().getAutonomousSystemNumber();
+ String organizationName = response.getTraits().getAutonomousSystemOrganization();
+ Network network = response.getTraits().getNetwork();
+
+ String isp = response.getTraits().getIsp();
+ String ispOrganization = response.getTraits().getOrganization();
+ String mobileCountryCode = response.getTraits().getMobileCountryCode();
+ String mobileNetworkCode = response.getTraits().getMobileNetworkCode();
+
+ boolean isHostingProvider = response.getTraits().isHostingProvider();
+ boolean isTorExitNode = response.getTraits().isTorExitNode();
+ boolean isAnonymousVpn = response.getTraits().isAnonymousVpn();
+ boolean isAnonymous = response.getTraits().isAnonymous();
+ boolean isPublicProxy = response.getTraits().isPublicProxy();
+ boolean isResidentialProxy = response.getTraits().isResidentialProxy();
+
+ String userType = response.getTraits().getUserType();
+
+ String domain = response.getTraits().getDomain();
+
+ ConnectionTypeResponse.ConnectionType connectionType = response.getTraits().getConnectionType();
+
+ Map<String, Object> data = new HashMap<>();
+ for (Database.Property property : this.properties) {
+ switch (property) {
+ case IP -> data.put("ip", response.getTraits().getIpAddress());
+ case COUNTRY_ISO_CODE -> {
+ String countryIsoCode = country.getIsoCode();
+ if (countryIsoCode != null) {
+ data.put("country_iso_code", countryIsoCode);
+ }
+ }
+ case COUNTRY_NAME -> {
+ String countryName = country.getName();
+ if (countryName != null) {
+ data.put("country_name", countryName);
+ }
+ }
+ case CONTINENT_CODE -> {
+ String continentCode = continent.getCode();
+ if (continentCode != null) {
+ data.put("continent_code", continentCode);
+ }
+ }
+ case CONTINENT_NAME -> {
+ String continentName = continent.getName();
+ if (continentName != null) {
+ data.put("continent_name", continentName);
+ }
+ }
+ case REGION_ISO_CODE -> {
+ // ISO 3166-2 code for country subdivisions.
+ // See iso.org/iso-3166-country-codes.html
+ String countryIso = country.getIsoCode();
+ String subdivisionIso = subdivision.getIsoCode();
+ if (countryIso != null && subdivisionIso != null) {
+ String regionIsoCode = countryIso + "-" + subdivisionIso;
+ data.put("region_iso_code", regionIsoCode);
+ }
+ }
+ case REGION_NAME -> {
+ String subdivisionName = subdivision.getName();
+ if (subdivisionName != null) {
+ data.put("region_name", subdivisionName);
+ }
+ }
+ case CITY_NAME -> {
+ String cityName = city.getName();
+ if (cityName != null) {
+ data.put("city_name", cityName);
+ }
+ }
+ case TIMEZONE -> {
+ String locationTimeZone = location.getTimeZone();
+ if (locationTimeZone != null) {
+ data.put("timezone", locationTimeZone);
+ }
+ }
+ case LOCATION -> {
+ Double latitude = location.getLatitude();
+ Double longitude = location.getLongitude();
+ if (latitude != null && longitude != null) {
+ Map<String, Object> locationObject = new HashMap<>();
+ locationObject.put("lat", latitude);
+ locationObject.put("lon", longitude);
+ data.put("location", locationObject);
+ }
+ }
+ case ASN -> {
+ if (asn != null) {
+ data.put("asn", asn);
+ }
+ }
+ case ORGANIZATION_NAME -> {
+ if (organizationName != null) {
+ data.put("organization_name", organizationName);
+ }
+ }
+ case NETWORK -> {
+ if (network != null) {
+ data.put("network", network.toString());
+ }
+ }
+ case HOSTING_PROVIDER -> {
+ data.put("hosting_provider", isHostingProvider);
+ }
+ case TOR_EXIT_NODE -> {
+ data.put("tor_exit_node", isTorExitNode);
+ }
+ case ANONYMOUS_VPN -> {
+ data.put("anonymous_vpn", isAnonymousVpn);
+ }
+ case ANONYMOUS -> {
+ data.put("anonymous", isAnonymous);
+ }
+ case PUBLIC_PROXY -> {
+ data.put("public_proxy", isPublicProxy);
+ }
+ case RESIDENTIAL_PROXY -> {
+ data.put("residential_proxy", isResidentialProxy);
+ }
+ case DOMAIN -> {
+ if (domain != null) {
+ data.put("domain", domain);
+ }
+ }
+ case ISP -> {
+ if (isp != null) {
+ data.put("isp", isp);
+ }
+ }
+ case ISP_ORGANIZATION_NAME -> {
+ if (ispOrganization != null) {
+ data.put("isp_organization_name", ispOrganization);
+ }
+ }
+ case MOBILE_COUNTRY_CODE -> {
+ if (mobileCountryCode != null) {
+ data.put("mobile_country_code", mobileCountryCode);
+ }
+ }
+ case MOBILE_NETWORK_CODE -> {
+ if (mobileNetworkCode != null) {
+ data.put("mobile_network_code", mobileNetworkCode);
+ }
+ }
+ case USER_TYPE -> {
+ if (userType != null) {
+ data.put("user_type", userType);
+ }
+ }
+ case CONNECTION_TYPE -> {
+ if (connectionType != null) {
+ data.put("connection_type", connectionType.toString());
+ }
+ }
+ }
+ }
+ return data;
+ }
+ }
+
+ static class Isp extends AbstractBase<IspResponse> {
+ Isp(final Set<Database.Property> properties) {
+ super(properties, IspResponse.class, (response, ipAddress, network, locales) -> new IspResponse(response, ipAddress, network));
+ }
+
+ @Override
+ protected Map<String, Object> transform(final IspResponse response) {
+ String isp = response.getIsp();
+ String ispOrganization = response.getOrganization();
+ String mobileNetworkCode = response.getMobileNetworkCode();
+ String mobileCountryCode = response.getMobileCountryCode();
+ Long asn = response.getAutonomousSystemNumber();
+ String organizationName = response.getAutonomousSystemOrganization();
+ Network network = response.getNetwork();
+
+ Map<String, Object> data = new HashMap<>();
+ for (Database.Property property : this.properties) {
+ switch (property) {
+ case IP -> data.put("ip", response.getIpAddress());
+ case ASN -> {
+ if (asn != null) {
+ data.put("asn", asn);
+ }
+ }
+ case ORGANIZATION_NAME -> {
+ if (organizationName != null) {
+ data.put("organization_name", organizationName);
+ }
+ }
+ case NETWORK -> {
+ if (network != null) {
+ data.put("network", network.toString());
+ }
+ }
+ case ISP -> {
+ if (isp != null) {
+ data.put("isp", isp);
+ }
+ }
+ case ISP_ORGANIZATION_NAME -> {
+ if (ispOrganization != null) {
+ data.put("isp_organization_name", ispOrganization);
+ }
+ }
+ case MOBILE_COUNTRY_CODE -> {
+ if (mobileCountryCode != null) {
+ data.put("mobile_country_code", mobileCountryCode);
+ }
+ }
+ case MOBILE_NETWORK_CODE -> {
+ if (mobileNetworkCode != null) {
+ data.put("mobile_network_code", mobileNetworkCode);
+ }
+ }
+ }
+ }
+ return data;
+ }
+ }
+
+ /**
+ * As an internal detail, the {@code com.maxmind.geoip2.model} classes that are populated by
+ * {@link Reader#getRecord(InetAddress, Class)} are kinda half-populated and need to go through a second round of construction
+ * with context from the querying caller. This method gives us a place to do that additional binding. Cleverly, the signature
+ * here matches the constructor for many of these model classes exactly, so an appropriate implementation can 'just' be a method
+ * reference in some cases (in other cases it needs to be a lambda).
+ */
+ @FunctionalInterface
+ private interface ResponseBuilder<RESPONSE extends AbstractResponse> {
+ RESPONSE build(RESPONSE resp, String address, Network network, List<String> locales);
+ }
+
+ /**
+ * The {@link MaxmindIpDataLookups.AbstractBase} is an abstract base implementation of {@link IpDataLookup} that
+ * provides common functionality for getting a specific kind of {@link AbstractResponse} from a {@link IpDatabase}.
+ *
+ * @param <RESPONSE> the intermediate type of {@link AbstractResponse}
+ */
+ private abstract static class AbstractBase<RESPONSE extends AbstractResponse> implements IpDataLookup {
+
+ protected final Set<Database.Property> properties;
+ protected final Class<RESPONSE> clazz;
+ protected final ResponseBuilder<RESPONSE> builder;
+
+ AbstractBase(final Set<Database.Property> properties, final Class<RESPONSE> clazz, final ResponseBuilder<RESPONSE> builder) {
+ this.properties = Set.copyOf(properties);
+ this.clazz = clazz;
+ this.builder = builder;
+ }
+
+ @Override
+ public Set<Database.Property> getProperties() {
+ return this.properties;
+ }
+
+ @Override
+ public final Map<String, Object> getData(final IpDatabase ipDatabase, final String ipAddress) {
+ final RESPONSE response = ipDatabase.getResponse(ipAddress, this::lookup);
+ return (response == null) ? Map.of() : transform(response);
+ }
+
+ @Nullable
+ private RESPONSE lookup(final Reader reader, final String ipAddress) throws IOException {
+ final InetAddress ip = InetAddresses.forString(ipAddress);
+ final DatabaseRecord<RESPONSE> record = reader.getRecord(ip, clazz);
+ final RESPONSE data = record.getData();
+ return (data == null) ? null : builder.build(data, NetworkAddress.format(ip), record.getNetwork(), List.of("en"));
+ }
+
+ /**
+ * Extract the configured properties from the retrieved response
+ * @param response the non-null response that was retrieved
+ * @return a mapping of properties for the ip from the response
+ */
+ protected abstract Map<String, Object> transform(RESPONSE response);
+ }
+}
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java
index b8b48e0f738a5..3399b71879e26 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfiguration.java
@@ -9,13 +9,16 @@
package org.elasticsearch.ingest.geoip.direct;
+import org.elasticsearch.TransportVersions;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.cluster.metadata.MetadataCreateIndexService;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.ObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
@@ -34,19 +37,19 @@
* That is, it has an id e.g. "my_db_config_1" and it says "download the file named XXXX from SomeCompany, and here's the
* magic token to use to do that."
*/
-public record DatabaseConfiguration(String id, String name, Maxmind maxmind) implements Writeable, ToXContentObject {
+public record DatabaseConfiguration(String id, String name, Provider provider) implements Writeable, ToXContentObject {
// id is a user selected signifier like 'my_domain_db'
// name is the name of a file that can be downloaded (like 'GeoIP2-Domain')
- // a configuration will have a 'type' like "maxmind", and that might have some more details,
+ // a configuration will have a 'provider' like "maxmind", and that might have some more details,
// for now, though the important thing is that the json has to have it even though we don't model it meaningfully in this class
public DatabaseConfiguration {
// these are invariants, not actual validation
Objects.requireNonNull(id);
Objects.requireNonNull(name);
- Objects.requireNonNull(maxmind);
+ Objects.requireNonNull(provider);
}
/**
@@ -76,25 +79,49 @@ public record DatabaseConfiguration(String id, String name, Maxmind maxmind) imp
);
private static final ParseField NAME = new ParseField("name");
- private static final ParseField MAXMIND = new ParseField("maxmind");
+ private static final ParseField MAXMIND = new ParseField(Maxmind.NAME);
+ private static final ParseField WEB = new ParseField(Web.NAME);
+ private static final ParseField LOCAL = new ParseField(Local.NAME);
private static final ConstructingObjectParser<DatabaseConfiguration, String> PARSER = new ConstructingObjectParser<>(
"database",
false,
(a, id) -> {
String name = (String) a[0];
- Maxmind maxmind = (Maxmind) a[1];
- return new DatabaseConfiguration(id, name, maxmind);
+ Provider provider;
+ if (a[1] != null) {
+ provider = (Maxmind) a[1];
+ } else if (a[2] != null) {
+ provider = (Web) a[2];
+ } else {
+ provider = (Local) a[3];
+ }
+ return new DatabaseConfiguration(id, name, provider);
}
);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
- PARSER.declareObject(ConstructingObjectParser.constructorArg(), (parser, id) -> Maxmind.PARSER.apply(parser, null), MAXMIND);
+ PARSER.declareObject(
+ ConstructingObjectParser.optionalConstructorArg(),
+ (parser, id) -> Maxmind.PARSER.apply(parser, null),
+ MAXMIND
+ );
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (parser, id) -> Web.PARSER.apply(parser, null), WEB);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (parser, id) -> Local.PARSER.apply(parser, null), LOCAL);
}
public DatabaseConfiguration(StreamInput in) throws IOException {
- this(in.readString(), in.readString(), new Maxmind(in));
+ this(in.readString(), in.readString(), readProvider(in));
+ }
+
+ private static Provider readProvider(StreamInput in) throws IOException {
+ if (in.getTransportVersion().onOrAfter(TransportVersions.INGEST_GEO_DATABASE_PROVIDERS)) {
+ return in.readNamedWriteable(Provider.class);
+ } else {
+ // prior to the above version, everything was always a maxmind, so this half of the if is logical
+ return new Maxmind(in.readString());
+ }
}
public static DatabaseConfiguration parse(XContentParser parser, String id) {
@@ -105,14 +132,27 @@ public static DatabaseConfiguration parse(XContentParser parser, String id) {
public void writeTo(StreamOutput out) throws IOException {
out.writeString(id);
out.writeString(name);
- maxmind.writeTo(out);
+ if (out.getTransportVersion().onOrAfter(TransportVersions.INGEST_GEO_DATABASE_PROVIDERS)) {
+ out.writeNamedWriteable(provider);
+ } else {
+ if (provider instanceof Maxmind maxmind) {
+ out.writeString(maxmind.accountId);
+ } else {
+ /*
+ * The existence of non-Maxmind providers is gated on the feature get_database_configuration_action.multi_node, and
+ * get_database_configuration_action.multi_node is only available on or after
+ * TransportVersions.INGEST_GEO_DATABASE_PROVIDERS.
+ */
+ assert false : "non-maxmind DatabaseConfiguration.Provider [" + provider.getWriteableName() + "]";
+ }
+ }
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("name", name);
- builder.field("maxmind", maxmind);
+ builder.field(provider.getWriteableName(), provider);
builder.endObject();
return builder;
}
@@ -168,7 +208,24 @@ public ActionRequestValidationException validate() {
return err.validationErrors().isEmpty() ? null : err;
}
- public record Maxmind(String accountId) implements Writeable, ToXContentObject {
+ public boolean isReadOnly() {
+ return provider.isReadOnly();
+ }
+
+ /**
+ * A marker interface that all providers need to implement.
+ */
+ public interface Provider extends NamedWriteable, ToXContentObject {
+ boolean isReadOnly();
+ }
+
+ public record Maxmind(String accountId) implements Provider {
+ public static final String NAME = "maxmind";
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
public Maxmind {
// this is an invariant, not actual validation
@@ -206,5 +263,90 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.endObject();
return builder;
}
+
+ @Override
+ public boolean isReadOnly() {
+ return false;
+ }
+ }
+
+ public record Local(String type) implements Provider {
+ public static final String NAME = "local";
+
+ private static final ParseField TYPE = new ParseField("type");
+
+ private static final ConstructingObjectParser<Local, Void> PARSER = new ConstructingObjectParser<>("database", false, (a, id) -> {
+ String type = (String) a[0];
+ return new Local(type);
+ });
+
+ static {
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), TYPE);
+ }
+
+ public Local(StreamInput in) throws IOException {
+ this(in.readString());
+ }
+
+ public static Local parse(XContentParser parser) {
+ return PARSER.apply(parser, null);
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(type);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field("type", type);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ public boolean isReadOnly() {
+ return true;
+ }
+ }
+
+ public record Web() implements Provider {
+ public static final String NAME = "web";
+
+ private static final ObjectParser<Web, Void> PARSER = new ObjectParser<>("database", Web::new);
+
+ public Web(StreamInput in) throws IOException {
+ this();
+ }
+
+ public static Web parse(XContentParser parser) {
+ return PARSER.apply(parser, null);
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {}
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ public boolean isReadOnly() {
+ return true;
+ }
}
}
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/PutDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/PutDatabaseConfigurationAction.java
index 41be25987a31b..b5343f17e47b6 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/PutDatabaseConfigurationAction.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/PutDatabaseConfigurationAction.java
@@ -49,7 +49,12 @@ public DatabaseConfiguration getDatabase() {
}
public static Request parseRequest(TimeValue masterNodeTimeout, TimeValue ackTimeout, String id, XContentParser parser) {
- return new Request(masterNodeTimeout, ackTimeout, DatabaseConfiguration.parse(parser, id));
+ DatabaseConfiguration database = DatabaseConfiguration.parse(parser, id);
+ if (database.isReadOnly()) {
+ throw new IllegalArgumentException("Database " + id + " is read only");
+ } else {
+ return new Request(masterNodeTimeout, ackTimeout, database);
+ }
}
@Override
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportDeleteDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportDeleteDatabaseConfigurationAction.java
index 088cea04cef87..b73b2fd4beb08 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportDeleteDatabaseConfigurationAction.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportDeleteDatabaseConfigurationAction.java
@@ -91,6 +91,8 @@ protected void masterOperation(Task task, Request request, ClusterState state, A
final IngestGeoIpMetadata geoIpMeta = state.metadata().custom(IngestGeoIpMetadata.TYPE, IngestGeoIpMetadata.EMPTY);
if (geoIpMeta.getDatabases().containsKey(id) == false) {
throw new ResourceNotFoundException("Database configuration not found: {}", id);
+ } else if (geoIpMeta.getDatabases().get(id).database().isReadOnly()) {
+ throw new IllegalArgumentException("Database " + id + " is read only");
}
deleteDatabaseConfigurationTaskQueue.submitTask(
Strings.format("delete-geoip-database-configuration-[%s]", id),
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java
index 0660a9ff0491d..c83c40e56b749 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java
@@ -9,7 +9,6 @@
package org.elasticsearch.ingest.geoip.direct;
-import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.ActionFilters;
@@ -19,19 +18,28 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.features.FeatureService;
+import org.elasticsearch.ingest.geoip.DatabaseNodeService;
+import org.elasticsearch.ingest.geoip.GeoIpTaskState;
import org.elasticsearch.ingest.geoip.IngestGeoIpMetadata;
import org.elasticsearch.injection.guice.Inject;
+import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Base64;
+import java.util.Collection;
+import java.util.Comparator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.Set;
+import java.util.stream.Collectors;
import static org.elasticsearch.ingest.IngestGeoIpFeatures.GET_DATABASE_CONFIGURATION_ACTION_MULTI_NODE;
@@ -43,6 +51,7 @@ public class TransportGetDatabaseConfigurationAction extends TransportNodesActio
List> {
private final FeatureService featureService;
+ private final DatabaseNodeService databaseNodeService;
@Inject
public TransportGetDatabaseConfigurationAction(
@@ -50,7 +59,8 @@ public TransportGetDatabaseConfigurationAction(
ClusterService clusterService,
ThreadPool threadPool,
ActionFilters actionFilters,
- FeatureService featureService
+ FeatureService featureService,
+ DatabaseNodeService databaseNodeService
) {
super(
GetDatabaseConfigurationAction.NAME,
@@ -61,6 +71,7 @@ public TransportGetDatabaseConfigurationAction(
threadPool.executor(ThreadPool.Names.MANAGEMENT)
);
this.featureService = featureService;
+ this.databaseNodeService = databaseNodeService;
}
@Override
@@ -74,9 +85,19 @@ protected void doExecute(
* TransportGetDatabaseConfigurationAction used to be a TransportMasterNodeAction, and not all nodes in the cluster have been
* updated. So we don't want to send node requests to the other nodes because they will blow up. Instead, we just return
* the information that we used to return from the master node (it doesn't make any difference that this might not be the master
- * node, because we're only reading the cluster state).
+ * node, because we're only reading the cluster state). Because older nodes only know about the Maxmind provider type, we filter
+ * out all others here to avoid causing problems on those nodes.
*/
- newResponseAsync(task, request, createActionContext(task, request), List.of(), List.of(), listener);
+ newResponseAsync(
+ task,
+ request,
+ createActionContext(task, request).stream()
+ .filter(database -> database.database().provider() instanceof DatabaseConfiguration.Maxmind)
+ .toList(),
+ List.of(),
+ List.of(),
+ listener
+ );
} else {
super.doExecute(task, request, listener);
}
@@ -97,28 +118,79 @@ protected List createActionContext(Task task, Get
);
}
- final IngestGeoIpMetadata geoIpMeta = clusterService.state().metadata().custom(IngestGeoIpMetadata.TYPE, IngestGeoIpMetadata.EMPTY);
List results = new ArrayList<>();
-
+ PersistentTasksCustomMetadata tasksMetadata = PersistentTasksCustomMetadata.getPersistentTasksCustomMetadata(
+ clusterService.state()
+ );
for (String id : ids) {
- if (Regex.isSimpleMatchPattern(id)) {
- for (Map.Entry entry : geoIpMeta.getDatabases().entrySet()) {
- if (Regex.simpleMatch(id, entry.getKey())) {
- results.add(entry.getValue());
+ results.addAll(getWebDatabases(tasksMetadata, id));
+ results.addAll(getMaxmindDatabases(clusterService, id));
+ }
+ return results;
+ }
+
+ /*
+ * This returns read-only database information about the databases managed by the standard downloader
+ */
+ private static Collection getWebDatabases(PersistentTasksCustomMetadata tasksMetadata, String id) {
+ List webDatabases = new ArrayList<>();
+ if (tasksMetadata != null) {
+ PersistentTasksCustomMetadata.PersistentTask<?> maybeGeoIpTask = tasksMetadata.getTask("geoip-downloader");
+ if (maybeGeoIpTask != null) {
+ GeoIpTaskState geoIpTaskState = (GeoIpTaskState) maybeGeoIpTask.getState();
+ if (geoIpTaskState != null) {
+ Map<String, GeoIpTaskState.Metadata> databases = geoIpTaskState.getDatabases();
+ for (String databaseFileName : databases.keySet()) {
+ String databaseName = getDatabaseNameForFileName(databaseFileName);
+ String databaseId = getDatabaseIdForFileName(DatabaseConfiguration.Web.NAME, databaseFileName);
+ if ((Regex.isSimpleMatchPattern(id) && Regex.simpleMatch(id, databaseId)) || id.equals(databaseId)) {
+ webDatabases.add(
+ new DatabaseConfigurationMetadata(
+ new DatabaseConfiguration(databaseId, databaseName, new DatabaseConfiguration.Web()),
+ -1,
+ databases.get(databaseFileName).lastUpdate()
+ )
+ );
+ }
}
}
- } else {
- DatabaseConfigurationMetadata meta = geoIpMeta.getDatabases().get(id);
- if (meta == null) {
- throw new ResourceNotFoundException("database configuration not found: {}", id);
- } else {
- results.add(meta);
+ }
+ }
+ return webDatabases;
+ }
+
+ private static String getDatabaseIdForFileName(String providerType, String databaseFileName) {
+ return "_" + providerType + "_" + Base64.getEncoder().encodeToString(databaseFileName.getBytes(StandardCharsets.UTF_8));
+ }
+
+ private static String getDatabaseNameForFileName(String databaseFileName) {
+ return databaseFileName.endsWith(".mmdb")
+ ? databaseFileName.substring(0, databaseFileName.length() - ".mmdb".length())
+ : databaseFileName;
+ }
+
+ /*
+ * This returns information about databases that are downloaded from maxmind.
+ */
+ private static Collection getMaxmindDatabases(ClusterService clusterService, String id) {
+ List maxmindDatabases = new ArrayList<>();
+ final IngestGeoIpMetadata geoIpMeta = clusterService.state().metadata().custom(IngestGeoIpMetadata.TYPE, IngestGeoIpMetadata.EMPTY);
+ if (Regex.isSimpleMatchPattern(id)) {
+ for (Map.Entry entry : geoIpMeta.getDatabases().entrySet()) {
+ if (Regex.simpleMatch(id, entry.getKey())) {
+ maxmindDatabases.add(entry.getValue());
}
}
+ } else {
+ DatabaseConfigurationMetadata meta = geoIpMeta.getDatabases().get(id);
+ if (meta != null) {
+ maxmindDatabases.add(meta);
+ }
}
- return results;
+ return maxmindDatabases;
}
+ @Override
protected void newResponseAsync(
Task task,
GetDatabaseConfigurationAction.Request request,
@@ -127,13 +199,47 @@ protected void newResponseAsync(
List failures,
ActionListener listener
) {
- ActionListener.run(
- listener,
- l -> ActionListener.respondAndRelease(
+ ActionListener.run(listener, l -> {
+ List combinedResults = new ArrayList<>(results);
+ combinedResults.addAll(
+ deduplicateNodeResponses(responses, results.stream().map(result -> result.database().name()).collect(Collectors.toSet()))
+ );
+ ActionListener.respondAndRelease(
l,
- new GetDatabaseConfigurationAction.Response(results, clusterService.getClusterName(), responses, failures)
+ new GetDatabaseConfigurationAction.Response(combinedResults, clusterService.getClusterName(), responses, failures)
+ );
+ });
+ }
+
+ /*
+ * This deduplicates the nodeResponses by name, favoring the most recent. This is because each node is reporting the local databases
+ * that it has, and we don't want to report duplicates to the user. It also filters out any that already exist in the set of
+ * preExistingNames. This is because the non-local databases take precedence, so any local database with the same name as a non-local
+ * one will not be used.
+ * Non-private for unit testing
+ */
+ static Collection deduplicateNodeResponses(
+ List nodeResponses,
+ Set preExistingNames
+ ) {
+ /*
+ * Each node reports the list of databases that are in its config/ingest-geoip directory. For the sake of this API we assume all
+ * local databases with the same name are the same database, and deduplicate by name and just return the newest.
+ */
+ return nodeResponses.stream()
+ .flatMap(response -> response.getDatabases().stream())
+ .collect(
+ Collectors.groupingBy(
+ database -> database.database().name(),
+ Collectors.maxBy(Comparator.comparing(DatabaseConfigurationMetadata::modifiedDate))
+ )
)
- );
+ .values()
+ .stream()
+ .filter(Optional::isPresent)
+ .map(Optional::get)
+ .filter(database -> preExistingNames.contains(database.database().name()) == false)
+ .toList();
}
@Override
@@ -157,7 +263,48 @@ protected GetDatabaseConfigurationAction.NodeResponse newNodeResponse(StreamInpu
@Override
protected GetDatabaseConfigurationAction.NodeResponse nodeOperation(GetDatabaseConfigurationAction.NodeRequest request, Task task) {
- return new GetDatabaseConfigurationAction.NodeResponse(transportService.getLocalNode(), List.of());
+ final Set ids;
+ if (request.getDatabaseIds().length == 0) {
+ // if we did not ask for a specific name, then return all databases
+ ids = Set.of("*");
+ } else {
+ ids = new LinkedHashSet<>(Arrays.asList(request.getDatabaseIds()));
+ }
+ if (ids.size() > 1 && ids.stream().anyMatch(Regex::isSimpleMatchPattern)) {
+ throw new IllegalArgumentException(
+ "wildcard only supports a single value, please use comma-separated values or a single wildcard value"
+ );
+ }
+
+ List results = new ArrayList<>();
+ for (String id : ids) {
+ results.addAll(getLocalDatabases(databaseNodeService, id));
+ }
+ return new GetDatabaseConfigurationAction.NodeResponse(transportService.getLocalNode(), results);
}
+ /*
+ * This returns information about the databases that users have put in the config/ingest-geoip directory on the node.
+ */
+ private static List getLocalDatabases(DatabaseNodeService databaseNodeService, String id) {
+ List localDatabases = new ArrayList<>();
+ Map configDatabases = databaseNodeService.getConfigDatabasesDetail();
+ for (DatabaseNodeService.ConfigDatabaseDetail configDatabase : configDatabases.values()) {
+ String databaseId = getDatabaseIdForFileName(DatabaseConfiguration.Local.NAME, configDatabase.name());
+ if ((Regex.isSimpleMatchPattern(id) && Regex.simpleMatch(id, databaseId)) || id.equals(databaseId)) {
+ localDatabases.add(
+ new DatabaseConfigurationMetadata(
+ new DatabaseConfiguration(
+ databaseId,
+ getDatabaseNameForFileName(configDatabase.name()),
+ new DatabaseConfiguration.Local(configDatabase.type())
+ ),
+ -1,
+ configDatabase.buildDateInMillis() == null ? -1 : configDatabase.buildDateInMillis()
+ )
+ );
+ }
+ }
+ return localDatabases;
+ }
}
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/ConfigDatabasesTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/ConfigDatabasesTests.java
index 83b3d2cfbbc27..7f38a37b43edf 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/ConfigDatabasesTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/ConfigDatabasesTests.java
@@ -126,7 +126,7 @@ public void testDatabasesUpdateExistingConfDatabase() throws Exception {
DatabaseReaderLazyLoader loader = configDatabases.getDatabase("GeoLite2-City.mmdb");
assertThat(loader.getDatabaseType(), equalTo("GeoLite2-City"));
- CityResponse cityResponse = loader.getCity("89.160.20.128");
+ CityResponse cityResponse = loader.getResponse("89.160.20.128", GeoIpTestUtils::getCity);
assertThat(cityResponse.getCity().getName(), equalTo("Tumba"));
assertThat(cache.count(), equalTo(1));
}
@@ -138,7 +138,7 @@ public void testDatabasesUpdateExistingConfDatabase() throws Exception {
DatabaseReaderLazyLoader loader = configDatabases.getDatabase("GeoLite2-City.mmdb");
assertThat(loader.getDatabaseType(), equalTo("GeoLite2-City"));
- CityResponse cityResponse = loader.getCity("89.160.20.128");
+ CityResponse cityResponse = loader.getResponse("89.160.20.128", GeoIpTestUtils::getCity);
assertThat(cityResponse.getCity().getName(), equalTo("Linköping"));
assertThat(cache.count(), equalTo(1));
});
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java
index f5c3c08579855..793754ec316b2 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java
@@ -14,7 +14,6 @@
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.RandomDocumentPicks;
-import org.elasticsearch.ingest.geoip.Database.Property;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
@@ -25,7 +24,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument;
@@ -40,7 +38,9 @@
public class GeoIpProcessorTests extends ESTestCase {
- private static final Set ALL_PROPERTIES = Set.of(Property.values());
+ private static IpDataLookup ipDataLookupAll(final Database database) {
+ return IpDataLookupFactories.getMaxmindLookup(database).apply(database.properties());
+ }
// a temporary directory that mmdb files can be copied to and read from
private Path tmpDir;
@@ -82,7 +82,7 @@ public void testCity() throws Exception {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
false,
"filename"
@@ -115,7 +115,7 @@ public void testNullValueWithIgnoreMissing() throws Exception {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
true,
false,
"filename"
@@ -137,7 +137,7 @@ public void testNonExistentWithIgnoreMissing() throws Exception {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
true,
false,
"filename"
@@ -156,7 +156,7 @@ public void testNullWithoutIgnoreMissing() {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
false,
"filename"
@@ -178,7 +178,7 @@ public void testNonExistentWithoutIgnoreMissing() {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
false,
"filename"
@@ -198,7 +198,7 @@ public void testCity_withIpV6() throws Exception {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
false,
"filename"
@@ -235,7 +235,7 @@ public void testCityWithMissingLocation() throws Exception {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
false,
"filename"
@@ -263,7 +263,7 @@ public void testCountry() throws Exception {
loader("GeoLite2-Country.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.Country),
false,
false,
"filename"
@@ -295,7 +295,7 @@ public void testCountryWithMissingLocation() throws Exception {
loader("GeoLite2-Country.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.Country),
false,
false,
"filename"
@@ -323,7 +323,7 @@ public void testAsn() throws Exception {
loader("GeoLite2-ASN.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.Asn),
false,
false,
"filename"
@@ -354,7 +354,7 @@ public void testAnonymmousIp() throws Exception {
loader("GeoIP2-Anonymous-IP-Test.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.AnonymousIp),
false,
false,
"filename"
@@ -388,7 +388,7 @@ public void testConnectionType() throws Exception {
loader("GeoIP2-Connection-Type-Test.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.ConnectionType),
false,
false,
"filename"
@@ -417,7 +417,7 @@ public void testDomain() throws Exception {
loader("GeoIP2-Domain-Test.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.Domain),
false,
false,
"filename"
@@ -446,7 +446,7 @@ public void testEnterprise() throws Exception {
loader("GeoIP2-Enterprise-Test.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.Enterprise),
false,
false,
"filename"
@@ -497,7 +497,7 @@ public void testIsp() throws Exception {
loader("GeoIP2-ISP-Test.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.Isp),
false,
false,
"filename"
@@ -531,7 +531,7 @@ public void testAddressIsNotInTheDatabase() throws Exception {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
false,
"filename"
@@ -555,7 +555,7 @@ public void testInvalid() {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
false,
"filename"
@@ -576,7 +576,7 @@ public void testListAllValid() throws Exception {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
false,
"filename"
@@ -603,7 +603,7 @@ public void testListPartiallyValid() throws Exception {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
false,
"filename"
@@ -630,7 +630,7 @@ public void testListNoMatches() throws Exception {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
false,
"filename"
@@ -650,7 +650,7 @@ public void testListDatabaseReferenceCounting() throws Exception {
GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", () -> {
loader.preLookup();
return loader;
- }, () -> true, "target_field", ALL_PROPERTIES, false, false, "filename");
+ }, () -> true, "target_field", ipDataLookupAll(Database.City), false, false, "filename");
Map document = new HashMap<>();
document.put("source_field", List.of("8.8.8.8", "82.171.64.0"));
@@ -678,7 +678,7 @@ public void testListFirstOnly() throws Exception {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
true,
"filename"
@@ -703,7 +703,7 @@ public void testListFirstOnlyNoMatches() throws Exception {
loader("GeoLite2-City.mmdb"),
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
true,
"filename"
@@ -725,7 +725,7 @@ public void testInvalidDatabase() throws Exception {
loader("GeoLite2-City.mmdb"),
() -> false,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
true,
"filename"
@@ -748,7 +748,7 @@ public void testNoDatabase() throws Exception {
() -> null,
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
false,
false,
"GeoLite2-City"
@@ -771,7 +771,7 @@ public void testNoDatabase_ignoreMissing() throws Exception {
() -> null,
() -> true,
"target_field",
- ALL_PROPERTIES,
+ ipDataLookupAll(Database.City),
true,
false,
"GeoLite2-City"
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpTestUtils.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpTestUtils.java
index 461983bb24488..160671fd39001 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpTestUtils.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpTestUtils.java
@@ -9,6 +9,13 @@
package org.elasticsearch.ingest.geoip;
+import com.maxmind.db.DatabaseRecord;
+import com.maxmind.db.Reader;
+import com.maxmind.geoip2.model.CityResponse;
+import com.maxmind.geoip2.model.CountryResponse;
+
+import org.elasticsearch.common.CheckedBiFunction;
+import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.core.SuppressForbidden;
import java.io.FileNotFoundException;
@@ -17,6 +24,7 @@
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.util.List;
import java.util.Set;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
@@ -58,4 +66,28 @@ public static void copyDefaultDatabases(final Path directory, ConfigDatabases co
configDatabases.updateDatabase(directory.resolve(database), true);
}
}
+
+ /**
+ * A static city-specific responseProvider for use with {@link IpDatabase#getResponse(String, CheckedBiFunction)} in
+ * tests.
+ *
+ * Like this: {@code CityResponse city = loader.getResponse("some.ip.address", GeoIpTestUtils::getCity);}
+ */
+ public static CityResponse getCity(Reader reader, String ip) throws IOException {
+ DatabaseRecord record = reader.getRecord(InetAddresses.forString(ip), CityResponse.class);
+ CityResponse data = record.getData();
+ return data == null ? null : new CityResponse(data, ip, record.getNetwork(), List.of("en"));
+ }
+
+ /**
+ * A static country-specific responseProvider for use with {@link IpDatabase#getResponse(String, CheckedBiFunction)} in
+ * tests.
+ *
+ * Like this: {@code CountryResponse country = loader.getResponse("some.ip.address", GeoIpTestUtils::getCountry);}
+ */
+ public static CountryResponse getCountry(Reader reader, String ip) throws IOException {
+ DatabaseRecord record = reader.getRecord(InetAddresses.forString(ip), CountryResponse.class);
+ CountryResponse data = record.getData();
+ return data == null ? null : new CountryResponse(data, ip, record.getNetwork(), List.of("en"));
+ }
}
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadataTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadataTests.java
index 231a2a856815c..6a98cd532604b 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadataTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpMetadataTests.java
@@ -9,6 +9,7 @@
package org.elasticsearch.ingest.geoip;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration;
import org.elasticsearch.ingest.geoip.direct.DatabaseConfigurationMetadata;
@@ -21,6 +22,12 @@
import java.util.Map;
public class IngestGeoIpMetadataTests extends AbstractChunkedSerializingTestCase {
+
+ @Override
+ protected NamedWriteableRegistry getNamedWriteableRegistry() {
+ return new NamedWriteableRegistry(new IngestGeoIpPlugin().getNamedWriteables());
+ }
+
@Override
protected IngestGeoIpMetadata doParseInstance(XContentParser parser) throws IOException {
return IngestGeoIpMetadata.fromXContent(parser);
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java
index f1c7d809b98fe..46a34c2cdad56 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MMDBUtilTests.java
@@ -116,6 +116,6 @@ public void testDatabaseTypeParsing() throws IOException {
}
private Database parseDatabaseFromType(String databaseFile) throws IOException {
- return Database.getDatabase(MMDBUtil.getDatabaseType(tmpDir.resolve(databaseFile)), null);
+ return IpDataLookupFactories.getDatabase(MMDBUtil.getDatabaseType(tmpDir.resolve(databaseFile)));
}
}
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java
index ec05054615bd8..84ea5fd584352 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java
@@ -468,36 +468,6 @@ public void testUnknownMaxMindResponseClassess() {
);
}
- /*
- * This tests that this test has a mapping in TYPE_TO_MAX_MIND_CLASS for all MaxMind classes exposed through IpDatabase.
- */
- public void testUsedMaxMindResponseClassesAreAccountedFor() {
- Set> usedMaxMindResponseClasses = getUsedMaxMindResponseClasses();
- Set> supportedMaxMindClasses = new HashSet<>(TYPE_TO_MAX_MIND_CLASS.values());
- Set> usedButNotSupportedMaxMindResponseClasses = Sets.difference(
- usedMaxMindResponseClasses,
- supportedMaxMindClasses
- );
- assertThat(
- "IpDatabase exposes MaxMind response classes that this test does not know what to do with. Add mappings to "
- + "TYPE_TO_MAX_MIND_CLASS for the following: "
- + usedButNotSupportedMaxMindResponseClasses,
- usedButNotSupportedMaxMindResponseClasses,
- empty()
- );
- Set> supportedButNotUsedMaxMindClasses = Sets.difference(
- supportedMaxMindClasses,
- usedMaxMindResponseClasses
- );
- assertThat(
- "This test claims to support MaxMind response classes that are not exposed in IpDatabase. Remove the following from "
- + "TYPE_TO_MAX_MIND_CLASS: "
- + supportedButNotUsedMaxMindClasses,
- supportedButNotUsedMaxMindClasses,
- empty()
- );
- }
-
/*
* This is the list of field types that causes us to stop recursing. That is, fields of these types are the lowest-level fields that
* we care about.
@@ -616,23 +586,4 @@ private static String getFormattedList(Set fields) {
}
return result.toString();
}
-
- /*
- * This returns all AbstractResponse classes that are returned from getter methods on IpDatabase.
- */
- private static Set> getUsedMaxMindResponseClasses() {
- Set> result = new HashSet<>();
- Method[] methods = IpDatabase.class.getMethods();
- for (Method method : methods) {
- if (method.getName().startsWith("get")) {
- Class> returnType = method.getReturnType();
- try {
- result.add(returnType.asSubclass(AbstractResponse.class));
- } catch (ClassCastException ignore) {
- // This is not what we were looking for, move on
- }
- }
- }
- return result;
- }
}
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadataTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadataTests.java
index 847f9c5bf7d4a..476a30d86ee05 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadataTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationMetadataTests.java
@@ -9,7 +9,9 @@
package org.elasticsearch.ingest.geoip.direct;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.ingest.geoip.IngestGeoIpPlugin;
import org.elasticsearch.test.AbstractXContentSerializingTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentParser;
@@ -21,6 +23,11 @@
public class DatabaseConfigurationMetadataTests extends AbstractXContentSerializingTestCase {
+ @Override
+ protected NamedWriteableRegistry getNamedWriteableRegistry() {
+ return new NamedWriteableRegistry(new IngestGeoIpPlugin().getNamedWriteables());
+ }
+
private String id;
@Override
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationTests.java
index bb11f71b26d03..33356ad4235dc 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/DatabaseConfigurationTests.java
@@ -9,8 +9,12 @@
package org.elasticsearch.ingest.geoip.direct;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.ingest.geoip.IngestGeoIpPlugin;
+import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration.Local;
import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration.Maxmind;
+import org.elasticsearch.ingest.geoip.direct.DatabaseConfiguration.Web;
import org.elasticsearch.test.AbstractXContentSerializingTestCase;
import org.elasticsearch.xcontent.XContentParser;
@@ -21,6 +25,11 @@
public class DatabaseConfigurationTests extends AbstractXContentSerializingTestCase {
+ @Override
+ protected NamedWriteableRegistry getNamedWriteableRegistry() {
+ return new NamedWriteableRegistry(new IngestGeoIpPlugin().getNamedWriteables());
+ }
+
private String id;
@Override
@@ -35,26 +44,39 @@ protected DatabaseConfiguration createTestInstance() {
}
public static DatabaseConfiguration randomDatabaseConfiguration(String id) {
- return new DatabaseConfiguration(id, randomFrom(MAXMIND_NAMES), new Maxmind(randomAlphaOfLength(5)));
+ DatabaseConfiguration.Provider provider = switch (between(0, 2)) {
+ case 0 -> new Maxmind(randomAlphaOfLength(5));
+ case 1 -> new Web();
+ case 2 -> new Local(randomAlphaOfLength(10));
+ default -> throw new AssertionError("failure, got illegal switch case");
+ };
+ return new DatabaseConfiguration(id, randomFrom(MAXMIND_NAMES), provider);
}
@Override
protected DatabaseConfiguration mutateInstance(DatabaseConfiguration instance) {
switch (between(0, 2)) {
case 0:
- return new DatabaseConfiguration(instance.id() + randomAlphaOfLength(2), instance.name(), instance.maxmind());
+ return new DatabaseConfiguration(instance.id() + randomAlphaOfLength(2), instance.name(), instance.provider());
case 1:
return new DatabaseConfiguration(
instance.id(),
randomValueOtherThan(instance.name(), () -> randomFrom(MAXMIND_NAMES)),
- instance.maxmind()
+ instance.provider()
);
case 2:
- return new DatabaseConfiguration(
- instance.id(),
- instance.name(),
- new Maxmind(instance.maxmind().accountId() + randomAlphaOfLength(2))
- );
+ DatabaseConfiguration.Provider provider = instance.provider();
+ DatabaseConfiguration.Provider modifiedProvider;
+ if (provider instanceof Maxmind maxmind) {
+ modifiedProvider = new Maxmind(((Maxmind) instance.provider()).accountId() + randomAlphaOfLength(2));
+ } else if (provider instanceof Web) {
+ modifiedProvider = new Maxmind(randomAlphaOfLength(20)); // a Web provider has no fields to mutate, so swap in a different provider type
+ } else if (provider instanceof Local local) {
+ modifiedProvider = new Local(local.type() + randomAlphaOfLength(2));
+ } else {
+ throw new AssertionError("Unexpected provider type: " + provider.getClass());
+ }
+ return new DatabaseConfiguration(instance.id(), instance.name(), modifiedProvider);
default:
throw new AssertionError("failure, got illegal switch case");
}
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationActionTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationActionTests.java
new file mode 100644
index 0000000000000..988b50311186d
--- /dev/null
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationActionTests.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.ingest.geoip.direct;
+
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.elasticsearch.ingest.geoip.direct.GetDatabaseConfigurationAction.NodeResponse;
+import static org.hamcrest.Matchers.equalTo;
+import static org.mockito.Mockito.mock;
+
+public class TransportGetDatabaseConfigurationActionTests extends ESTestCase {
+ public void testDeduplicateNodeResponses() {
+ {
+ List nodeResponses = new ArrayList<>();
+ Set preExistingNames = Set.of();
+ Collection deduplicated = TransportGetDatabaseConfigurationAction.deduplicateNodeResponses(
+ nodeResponses,
+ preExistingNames
+ );
+ assertTrue(deduplicated.isEmpty());
+ }
+ {
+ List nodeResponses = List.of(
+ generateTestNodeResponse(List.of()),
+ generateTestNodeResponse(List.of()),
+ generateTestNodeResponse(List.of())
+ );
+ Set preExistingNames = Set.of();
+ Collection deduplicated = TransportGetDatabaseConfigurationAction.deduplicateNodeResponses(
+ nodeResponses,
+ preExistingNames
+ );
+ assertTrue(deduplicated.isEmpty());
+ }
+ {
+ // 3 nodes with 3 overlapping responses. We expect the deduplicated collection to include 1, 2, 3, and 4.
+ List nodeResponses = List.of(
+ generateTestNodeResponse(List.of("1", "2", "3")),
+ generateTestNodeResponse(List.of("1", "2", "3")),
+ generateTestNodeResponse(List.of("1", "4"))
+ );
+ Set preExistingNames = Set.of();
+ Collection deduplicated = TransportGetDatabaseConfigurationAction.deduplicateNodeResponses(
+ nodeResponses,
+ preExistingNames
+ );
+ assertThat(deduplicated.size(), equalTo(4));
+ assertThat(
+ deduplicated.stream().map(database -> database.database().name()).collect(Collectors.toSet()),
+ equalTo(Set.of("1", "2", "3", "4"))
+ );
+ }
+ {
+ /*
+ * 3 nodes with 3 overlapping responses, but this time we're also passing in a set of pre-existing names that overlap with
+ * two of them. So we expect the deduplicated collection to include 1 and 4.
+ */
+ List nodeResponses = List.of(
+ generateTestNodeResponse(List.of("1", "2", "3")),
+ generateTestNodeResponse(List.of("1", "2", "3")),
+ generateTestNodeResponse(List.of("1", "4"))
+ );
+ Set preExistingNames = Set.of("2", "3", "5");
+ Collection deduplicated = TransportGetDatabaseConfigurationAction.deduplicateNodeResponses(
+ nodeResponses,
+ preExistingNames
+ );
+ assertThat(deduplicated.size(), equalTo(2));
+ assertThat(
+ deduplicated.stream().map(database -> database.database().name()).collect(Collectors.toSet()),
+ equalTo(Set.of("1", "4"))
+ );
+ }
+ {
+ /*
+ * Here 3 nodes report the same database, but with different modified dates and versions. We expect the one with the highest
+ * modified date to win out.
+ */
+ List nodeResponses = List.of(
+ generateTestNodeResponseFromDatabases(List.of(generateTestDatabase("1", 1))),
+ generateTestNodeResponseFromDatabases(List.of(generateTestDatabase("1", 1000))),
+ generateTestNodeResponseFromDatabases(List.of(generateTestDatabase("1", 3)))
+ );
+ Set preExistingNames = Set.of("2", "3", "5");
+ Collection deduplicated = TransportGetDatabaseConfigurationAction.deduplicateNodeResponses(
+ nodeResponses,
+ preExistingNames
+ );
+ assertThat(deduplicated.size(), equalTo(1));
+ DatabaseConfigurationMetadata result = deduplicated.iterator().next();
+ assertThat(result, equalTo(nodeResponses.get(1).getDatabases().get(0)));
+ }
+ }
+
+ private NodeResponse generateTestNodeResponse(List databaseNames) {
+ List databases = databaseNames.stream().map(this::generateTestDatabase).toList();
+ return generateTestNodeResponseFromDatabases(databases);
+ }
+
+ private NodeResponse generateTestNodeResponseFromDatabases(List databases) {
+ DiscoveryNode discoveryNode = mock(DiscoveryNode.class);
+ return new NodeResponse(discoveryNode, databases);
+ }
+
+ private DatabaseConfigurationMetadata generateTestDatabase(String databaseName) {
+ return generateTestDatabase(databaseName, randomLongBetween(0, Long.MAX_VALUE));
+ }
+
+ private DatabaseConfigurationMetadata generateTestDatabase(String databaseName, long modifiedDate) {
+ DatabaseConfiguration databaseConfiguration = new DatabaseConfiguration(
+ randomAlphaOfLength(50),
+ databaseName,
+ new DatabaseConfiguration.Local(randomAlphaOfLength(20))
+ );
+ return new DatabaseConfigurationMetadata(databaseConfiguration, randomLongBetween(0, Long.MAX_VALUE), modifiedDate);
+ }
+}
diff --git a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml
index 6809443fdfbc3..04fd2ac6a8189 100644
--- a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml
+++ b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/40_geoip_databases.yml
@@ -1,7 +1,7 @@
setup:
- requires:
- cluster_features: ["geoip.downloader.database.configuration"]
- reason: "geoip downloader database configuration APIs added in 8.15"
+ cluster_features: ["geoip.downloader.database.configuration", "get_database_configuration_action.multi_node"]
+ reason: "geoip downloader database configuration APIs added in 8.15, and updated in 8.16 to return more results"
---
"Test adding, getting, and removing geoip databases":
@@ -41,6 +41,17 @@ setup:
}
- match: { acknowledged: true }
+ - do:
+ catch: /illegal_argument_exception/
+ ingest.put_geoip_database:
+ id: "_web_TXlDdXN0b21HZW9MaXRlMi1DaXR5Lm1tZGI="
+ body: >
+ {
+ "name": "GeoIP2-City",
+ "web": {
+ }
+ }
+
- do:
ingest.get_geoip_database:
id: "my_database_1"
@@ -52,19 +63,37 @@ setup:
- do:
ingest.get_geoip_database: {}
- - length: { databases: 2 }
+ - length: { databases: 6 }
- do:
ingest.get_geoip_database:
id: "my_database_1,my_database_2"
- length: { databases: 2 }
+ - do:
+ ingest.get_geoip_database:
+ id: "_web_TXlDdXN0b21HZW9MaXRlMi1DaXR5Lm1tZGI="
+ - length: { databases: 1 }
+ - match: { databases.0.id: "_web_TXlDdXN0b21HZW9MaXRlMi1DaXR5Lm1tZGI=" }
+ - gte: { databases.0.modified_date_millis: -1 }
+ - match: { databases.0.database.name: "MyCustomGeoLite2-City" }
+
- do:
ingest.delete_geoip_database:
id: "my_database_1"
+ - do:
+ catch: /resource_not_found_exception/
+ ingest.delete_geoip_database:
+ id: "_web_TXlDdXN0b21HZW9MaXRlMi1DaXR5Lm1tZGI="
+
- do:
ingest.get_geoip_database: {}
+ - length: { databases: 5 }
+
+ - do:
+ ingest.get_geoip_database:
+ id: "my_database_2"
- length: { databases: 1 }
- match: { databases.0.id: "my_database_2" }
- gte: { databases.0.modified_date_millis: 0 }
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java
index ecc84ddca2e13..9bb80d5688b5f 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java
@@ -107,7 +107,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
void innerToXContent(XContentBuilder builder, Params params) throws IOException {
if (hasResponse()) {
- ChunkedToXContent.wrapAsToXContent(p -> response.innerToXContentChunked(p)).toXContent(builder, params);
+ ChunkedToXContent.wrapAsToXContent(response::innerToXContentChunked).toXContent(builder, params);
} else {
// we can assume the template is always json as we convert it before compiling it
try (InputStream stream = source.streamInput()) {
diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java
index 429a81b02bd5e..6b4dd5ed86e2d 100644
--- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java
+++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java
@@ -235,7 +235,6 @@ public void testAbortRequestStats() throws Exception {
}
@TestIssueLogging(issueUrl = "https://github.com/elastic/elasticsearch/issues/101608", value = "com.amazonaws.request:DEBUG")
- @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101608")
public void testMetrics() throws Exception {
// Create the repository and perform some activities
final String repository = createRepository(randomRepositoryName(), false);
diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java
index f919284d8e897..af385eeac6a5b 100644
--- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java
+++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java
@@ -140,6 +140,11 @@ class S3Repository extends MeteredBlobStoreRepository {
MAX_FILE_SIZE_USING_MULTIPART
);
+ /**
+ * Maximum number of parts for a multipart upload (see https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html)
+ */
+ static final Setting MAX_MULTIPART_PARTS = Setting.intSetting("max_multipart_parts", 10_000, 1, 10_000);
+
/**
* Sets the S3 storage class type for the backup files. Values may be standard, reduced_redundancy,
* standard_ia, onezone_ia and intelligent_tiering. Defaults to standard.
@@ -253,7 +258,9 @@ class S3Repository extends MeteredBlobStoreRepository {
}
this.bufferSize = BUFFER_SIZE_SETTING.get(metadata.settings());
- this.chunkSize = CHUNK_SIZE_SETTING.get(metadata.settings());
+ var maxChunkSize = CHUNK_SIZE_SETTING.get(metadata.settings());
+ var maxPartsNum = MAX_MULTIPART_PARTS.get(metadata.settings());
+ this.chunkSize = objectSizeLimit(maxChunkSize, bufferSize, maxPartsNum);
// We make sure that chunkSize is bigger or equal than/to bufferSize
if (this.chunkSize.getBytes() < bufferSize.getBytes()) {
@@ -302,6 +309,20 @@ private static Map buildLocation(RepositoryMetadata metadata) {
return Map.of("base_path", BASE_PATH_SETTING.get(metadata.settings()), "bucket", BUCKET_SETTING.get(metadata.settings()));
}
+ /**
+ * Calculates the S3 object size limit based on two constraints: the maximum object (chunk) size
+ * and maximum number of parts for multipart upload.
+ * https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html
+ *
+ * @param chunkSize s3 object size
+ * @param bufferSize s3 multipart upload part size
+ * @param maxPartsNum s3 multipart upload max parts number
+ */
+ private static ByteSizeValue objectSizeLimit(ByteSizeValue chunkSize, ByteSizeValue bufferSize, int maxPartsNum) {
+ var bytes = Math.min(chunkSize.getBytes(), bufferSize.getBytes() * maxPartsNum);
+ return ByteSizeValue.ofBytes(bytes);
+ }
+
/**
* Holds a reference to delayed repository operation {@link Scheduler.Cancellable} so it can be cancelled should the repository be
* closed concurrently.
diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java
index 1eab59ebb0eb7..3817af4def888 100644
--- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java
+++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java
@@ -175,4 +175,37 @@ public void testAnalysisFailureDetail() {
}
}
+ // ensures that chunkSize is limited to chunk_size setting, when buffer_size * parts_num is bigger
+ public void testChunkSizeLimit() {
+ var meta = new RepositoryMetadata(
+ "dummy-repo",
+ "mock",
+ Settings.builder()
+ .put(S3Repository.BUCKET_SETTING.getKey(), "bucket")
+ .put(S3Repository.CHUNK_SIZE_SETTING.getKey(), "1GB")
+ .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), "100MB")
+ .put(S3Repository.MAX_MULTIPART_PARTS.getKey(), 10_000) // ~1TB
+ .build()
+ );
+ try (var repo = createS3Repo(meta)) {
+ assertEquals(ByteSizeValue.ofGb(1), repo.chunkSize());
+ }
+ }
+
+ // ensures that chunkSize is limited to buffer_size * parts_num, when chunk_size setting is bigger
+ public void testPartsNumLimit() {
+ var meta = new RepositoryMetadata(
+ "dummy-repo",
+ "mock",
+ Settings.builder()
+ .put(S3Repository.BUCKET_SETTING.getKey(), "bucket")
+ .put(S3Repository.CHUNK_SIZE_SETTING.getKey(), "5TB")
+ .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), "100MB")
+ .put(S3Repository.MAX_MULTIPART_PARTS.getKey(), 10_000)
+ .build()
+ );
+ try (var repo = createS3Repo(meta)) {
+ assertEquals(ByteSizeValue.ofMb(1_000_000), repo.chunkSize());
+ }
+ }
}
diff --git a/muted-tests.yml b/muted-tests.yml
index 4305ebe3d2e02..8b756adce5457 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -26,9 +26,6 @@ tests:
- class: org.elasticsearch.index.store.FsDirectoryFactoryTests
method: testPreload
issue: https://github.com/elastic/elasticsearch/issues/110211
-- class: org.elasticsearch.upgrades.SecurityIndexRolesMetadataMigrationIT
- method: testMetadataMigratedAfterUpgrade
- issue: https://github.com/elastic/elasticsearch/issues/110232
- class: org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT
method: testMinVersionAsNewVersion
issue: https://github.com/elastic/elasticsearch/issues/95384
@@ -121,9 +118,6 @@ tests:
- class: org.elasticsearch.xpack.ml.integration.MlJobIT
method: testMultiIndexDelete
issue: https://github.com/elastic/elasticsearch/issues/112381
-- class: org.elasticsearch.search.retriever.RankDocRetrieverBuilderIT
- method: testRankDocsRetrieverWithNestedQuery
- issue: https://github.com/elastic/elasticsearch/issues/112421
- class: org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroidTests
method: "testAggregateIntermediate {TestCase= #2}"
issue: https://github.com/elastic/elasticsearch/issues/112461
@@ -278,9 +272,6 @@ tests:
- class: org.elasticsearch.xpack.ml.integration.MlJobIT
method: testCreateJobsWithIndexNameOption
issue: https://github.com/elastic/elasticsearch/issues/113528
-- class: org.elasticsearch.validation.DotPrefixClientYamlTestSuiteIT
- method: test {p0=dot_prefix/10_basic/Deprecated index template with a dot prefix index pattern}
- issue: https://github.com/elastic/elasticsearch/issues/113529
- class: org.elasticsearch.xpack.ml.integration.MlJobIT
method: testCantCreateJobWithSameID
issue: https://github.com/elastic/elasticsearch/issues/113581
@@ -351,6 +342,41 @@ tests:
- class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT
method: testPutE5WithTrainedModelAndInference
issue: https://github.com/elastic/elasticsearch/issues/114023
+- class: org.elasticsearch.xpack.rank.rrf.RRFRetrieverBuilderIT
+ method: testRRFWithCollapse
+ issue: https://github.com/elastic/elasticsearch/issues/114074
+- class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT
+ method: testPutE5Small_withPlatformSpecificVariant
+ issue: https://github.com/elastic/elasticsearch/issues/113950
+- class: org.elasticsearch.xpack.inference.services.openai.OpenAiServiceTests
+ method: testInfer_StreamRequest_ErrorResponse
+ issue: https://github.com/elastic/elasticsearch/issues/114105
+- class: org.elasticsearch.xpack.inference.InferenceCrudIT
+ method: testGet
+ issue: https://github.com/elastic/elasticsearch/issues/114135
+- class: org.elasticsearch.action.bulk.IncrementalBulkIT
+ method: testIncrementalBulkHighWatermarkBackOff
+ issue: https://github.com/elastic/elasticsearch/issues/114073
+- class: org.elasticsearch.xpack.esql.expression.function.aggregate.AvgTests
+ method: "testFold {TestCase= #7}"
+ issue: https://github.com/elastic/elasticsearch/issues/114175
+- class: org.elasticsearch.action.bulk.IncrementalBulkIT
+ method: testMultipleBulkPartsWithBackoff
+ issue: https://github.com/elastic/elasticsearch/issues/114181
+- class: org.elasticsearch.action.bulk.IncrementalBulkIT
+ method: testIncrementalBulkLowWatermarkBackOff
+ issue: https://github.com/elastic/elasticsearch/issues/114182
+- class: org.elasticsearch.aggregations.AggregationsClientYamlTestSuiteIT
+ method: test {yaml=aggregations/stats_metric_fail_formatting/fail formatting}
+ issue: https://github.com/elastic/elasticsearch/issues/114187
+- class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT
+ issue: https://github.com/elastic/elasticsearch/issues/114194
+- class: org.elasticsearch.xpack.ilm.ExplainLifecycleIT
+ method: testStepInfoPreservedOnAutoRetry
+ issue: https://github.com/elastic/elasticsearch/issues/114220
+- class: org.elasticsearch.xpack.inference.services.openai.OpenAiServiceTests
+ method: testInfer_StreamRequest
+ issue: https://github.com/elastic/elasticsearch/issues/114232
# Examples:
#
diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/EC2RetriesTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/EC2RetriesTests.java
index aa669a45bc0c7..78ea619d81f84 100644
--- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/EC2RetriesTests.java
+++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/EC2RetriesTests.java
@@ -101,6 +101,7 @@ public void testEC2DiscoveryRetriesOnRateLimiting() throws IOException {
exchange.getResponseHeaders().set("Content-Type", "text/xml; charset=UTF-8");
exchange.sendResponseHeaders(HttpStatus.SC_OK, responseBody.length);
exchange.getResponseBody().write(responseBody);
+ exchange.getResponseBody().flush();
return;
}
}
diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
index 8a6f6b84fec0d..135ddcee8da44 100644
--- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
+++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
@@ -59,7 +59,7 @@ public class Ec2DiscoveryTests extends AbstractEC2MockAPITestCase {
private static final String PREFIX_PUBLIC_IP = "8.8.8.";
private static final String PREFIX_PRIVATE_IP = "10.0.0.";
- private Map poorMansDNS = new ConcurrentHashMap<>();
+ private final Map poorMansDNS = new ConcurrentHashMap<>();
protected MockTransportService createTransportService() {
final Transport transport = new Netty4Transport(
@@ -133,7 +133,7 @@ protected List buildDynamicHosts(Settings nodeSettings, int no
.stream()
.filter(t -> t.getKey().equals(entry.getKey()))
.map(Tag::getValue)
- .collect(Collectors.toList())
+ .toList()
.containsAll(entry.getValue())
)
)
@@ -144,6 +144,7 @@ protected List buildDynamicHosts(Settings nodeSettings, int no
exchange.getResponseHeaders().set("Content-Type", "text/xml; charset=UTF-8");
exchange.sendResponseHeaders(HttpStatus.SC_OK, responseBody.length);
exchange.getResponseBody().write(responseBody);
+ exchange.getResponseBody().flush();
return;
}
}
@@ -160,14 +161,14 @@ protected List buildDynamicHosts(Settings nodeSettings, int no
}
}
- public void testDefaultSettings() throws InterruptedException {
+ public void testDefaultSettings() {
int nodes = randomInt(10);
Settings nodeSettings = Settings.builder().build();
List discoveryNodes = buildDynamicHosts(nodeSettings, nodes);
assertThat(discoveryNodes, hasSize(nodes));
}
- public void testPrivateIp() throws InterruptedException {
+ public void testPrivateIp() {
int nodes = randomInt(10);
for (int i = 0; i < nodes; i++) {
poorMansDNS.put(PREFIX_PRIVATE_IP + (i + 1), buildNewFakeTransportAddress());
@@ -183,7 +184,7 @@ public void testPrivateIp() throws InterruptedException {
}
}
- public void testPublicIp() throws InterruptedException {
+ public void testPublicIp() {
int nodes = randomInt(10);
for (int i = 0; i < nodes; i++) {
poorMansDNS.put(PREFIX_PUBLIC_IP + (i + 1), buildNewFakeTransportAddress());
@@ -199,7 +200,7 @@ public void testPublicIp() throws InterruptedException {
}
}
- public void testPrivateDns() throws InterruptedException {
+ public void testPrivateDns() {
int nodes = randomInt(10);
for (int i = 0; i < nodes; i++) {
String instanceId = "node" + (i + 1);
@@ -217,7 +218,7 @@ public void testPrivateDns() throws InterruptedException {
}
}
- public void testPublicDns() throws InterruptedException {
+ public void testPublicDns() {
int nodes = randomInt(10);
for (int i = 0; i < nodes; i++) {
String instanceId = "node" + (i + 1);
@@ -235,14 +236,14 @@ public void testPublicDns() throws InterruptedException {
}
}
- public void testInvalidHostType() throws InterruptedException {
+ public void testInvalidHostType() {
Settings nodeSettings = Settings.builder().put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "does_not_exist").build();
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { buildDynamicHosts(nodeSettings, 1); });
assertThat(exception.getMessage(), containsString("does_not_exist is unknown for discovery.ec2.host_type"));
}
- public void testFilterByTags() throws InterruptedException {
+ public void testFilterByTags() {
int nodes = randomIntBetween(5, 10);
Settings nodeSettings = Settings.builder().put(AwsEc2Service.TAG_SETTING.getKey() + "stage", "prod").build();
@@ -265,7 +266,7 @@ public void testFilterByTags() throws InterruptedException {
assertThat(dynamicHosts, hasSize(prodInstances));
}
- public void testFilterByMultipleTags() throws InterruptedException {
+ public void testFilterByMultipleTags() {
int nodes = randomIntBetween(5, 10);
Settings nodeSettings = Settings.builder().putList(AwsEc2Service.TAG_SETTING.getKey() + "stage", "prod", "preprod").build();
diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
index 9255281e7e5da..8570662f7b523 100644
--- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
+++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
@@ -1602,10 +1602,6 @@ public void testResize() throws Exception {
@SuppressWarnings("unchecked")
public void testSystemIndexMetadataIsUpgraded() throws Exception {
-
- @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // assumeTrue can be removed (condition always true)
- var originalClusterTaskIndexIsSystemIndex = oldClusterHasFeature(RestTestLegacyFeatures.TASK_INDEX_SYSTEM_INDEX);
- assumeTrue(".tasks became a system index in 7.10.0", originalClusterTaskIndexIsSystemIndex);
final String systemIndexWarning = "this request accesses system indices: [.tasks], but in a future major version, direct "
+ "access to system indices will be prevented by default";
if (isRunningAgainstOldCluster()) {
@@ -1665,29 +1661,6 @@ public void testSystemIndexMetadataIsUpgraded() throws Exception {
throw new AssertionError(".tasks index does not exist yet");
}
});
-
- // If we are on 7.x create an alias that includes both a system index and a non-system index so we can be sure it gets
- // upgraded properly. If we're already on 8.x, skip this part of the test.
- if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_ENFORCED) == false) {
- // Create an alias to make sure it gets upgraded properly
- Request putAliasRequest = newXContentRequest(HttpMethod.POST, "/_aliases", (builder, params) -> {
- builder.startArray("actions");
- for (var index : List.of(".tasks", "test_index_reindex")) {
- builder.startObject()
- .startObject("add")
- .field("index", index)
- .field("alias", "test-system-alias")
- .endObject()
- .endObject();
- }
- return builder.endArray();
- });
- putAliasRequest.setOptions(expectVersionSpecificWarnings(v -> {
- v.current(systemIndexWarning);
- v.compatible(systemIndexWarning);
- }));
- assertThat(client().performRequest(putAliasRequest).getStatusLine().getStatusCode(), is(200));
- }
} else {
assertBusy(() -> {
Request clusterStateRequest = new Request("GET", "/_cluster/state/metadata");
diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
index f588b78c78cc8..2a3e0c16fdc2f 100644
--- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
+++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
@@ -170,7 +170,9 @@ public void test012SecurityCanBeDisabled() throws Exception {
public void test020PluginsListWithNoPlugins() {
assumeTrue(
"Only applies to non-Cloud images",
- distribution.packaging != Packaging.DOCKER_CLOUD && distribution().packaging != Packaging.DOCKER_CLOUD_ESS
+ distribution.packaging != Packaging.DOCKER_CLOUD
+ && distribution().packaging != Packaging.DOCKER_CLOUD_ESS
+ && distribution().packaging != Packaging.DOCKER_WOLFI_ESS
);
final Installation.Executables bin = installation.executables();
@@ -201,7 +203,10 @@ public void test021InstallPlugin() {
* Checks that ESS images can install plugins from the local archive.
*/
public void test022InstallPluginsFromLocalArchive() {
- assumeTrue("Only ESS images have a local archive", distribution().packaging == Packaging.DOCKER_CLOUD_ESS);
+ assumeTrue(
+ "Only ESS images have a local archive",
+ distribution().packaging == Packaging.DOCKER_CLOUD_ESS || distribution().packaging == Packaging.DOCKER_WOLFI_ESS
+ );
final String plugin = "analysis-icu";
final Installation.Executables bin = installation.executables();
@@ -254,7 +259,10 @@ public void test023InstallPluginUsingConfigFile() {
* Checks that ESS images can manage plugins from the local archive by deploying a plugins config file.
*/
public void test024InstallPluginFromArchiveUsingConfigFile() {
- assumeTrue("Only ESS image has a plugin archive", distribution().packaging == Packaging.DOCKER_CLOUD_ESS);
+ assumeTrue(
+ "Only ESS image has a plugin archive",
+ distribution().packaging == Packaging.DOCKER_CLOUD_ESS || distribution().packaging == Packaging.DOCKER_WOLFI_ESS
+ );
final String filename = "elasticsearch-plugins.yml";
append(tempDir.resolve(filename), """
@@ -386,7 +394,7 @@ public void test040JavaUsesTheOsProvidedKeystore() {
if (distribution.packaging == Packaging.DOCKER_UBI || distribution.packaging == Packaging.DOCKER_IRON_BANK) {
// In these images, the `cacerts` file ought to be a symlink here
assertThat(path, equalTo("/etc/pki/ca-trust/extracted/java/cacerts"));
- } else if (distribution.packaging == Packaging.DOCKER_WOLFI) {
+ } else if (distribution.packaging == Packaging.DOCKER_WOLFI || distribution.packaging == Packaging.DOCKER_WOLFI_ESS) {
// In these images, the `cacerts` file ought to be a symlink here
assertThat(path, equalTo("/etc/ssl/certs/java/cacerts"));
} else {
@@ -1113,8 +1121,10 @@ public void test170DefaultShellIsBash() {
*/
public void test171AdditionalCliOptionsAreForwarded() throws Exception {
assumeTrue(
- "Does not apply to Cloud images, because they don't use the default entrypoint",
- distribution.packaging != Packaging.DOCKER_CLOUD && distribution().packaging != Packaging.DOCKER_CLOUD_ESS
+ "Does not apply to Cloud and wolfi ess images, because they don't use the default entrypoint",
+ distribution.packaging != Packaging.DOCKER_CLOUD
+ && distribution().packaging != Packaging.DOCKER_CLOUD_ESS
+ && distribution().packaging != Packaging.DOCKER_WOLFI_ESS
);
runContainer(distribution(), builder().runArgs("bin/elasticsearch", "-Ecluster.name=kimchy").envVar("ELASTIC_PASSWORD", PASSWORD));
@@ -1201,7 +1211,11 @@ public void test310IronBankImageHasNoAdditionalLabels() throws Exception {
* Check that the Cloud image contains the required Beats
*/
public void test400CloudImageBundlesBeats() {
- assumeTrue(distribution.packaging == Packaging.DOCKER_CLOUD || distribution.packaging == Packaging.DOCKER_CLOUD_ESS);
+ assumeTrue(
+ distribution.packaging == Packaging.DOCKER_CLOUD
+ || distribution.packaging == Packaging.DOCKER_CLOUD_ESS
+ || distribution.packaging == Packaging.DOCKER_WOLFI_ESS
+ );
final List contents = listContents("/opt");
assertThat("Expected beats in /opt", contents, hasItems("filebeat", "metricbeat"));
diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java
index a988a446f561f..2aff1f258ed65 100644
--- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java
+++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java
@@ -436,7 +436,7 @@ private void verifyKeystorePermissions() {
switch (distribution.packaging) {
case TAR, ZIP -> assertThat(keystore, file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660));
case DEB, RPM -> assertThat(keystore, file(File, "root", "elasticsearch", p660));
- case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> assertThat(
+ case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI, DOCKER_WOLFI_ESS -> assertThat(
keystore,
DockerFileMatcher.file(p660)
);
diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java
index 644990105f60f..487a00bdecac9 100644
--- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java
+++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java
@@ -245,7 +245,7 @@ protected static void install() throws Exception {
installation = Packages.installPackage(sh, distribution);
Packages.verifyPackageInstallation(installation, distribution, sh);
}
- case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> {
+ case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI, DOCKER_WOLFI_ESS -> {
installation = Docker.runContainer(distribution);
Docker.verifyContainerInstallation(installation);
}
@@ -338,6 +338,7 @@ public Shell.Result runElasticsearchStartCommand(String password, boolean daemon
case DOCKER_CLOUD:
case DOCKER_CLOUD_ESS:
case DOCKER_WOLFI:
+ case DOCKER_WOLFI_ESS:
// nothing, "installing" docker image is running it
return Shell.NO_OP;
default:
@@ -361,6 +362,7 @@ public void stopElasticsearch() throws Exception {
case DOCKER_CLOUD:
case DOCKER_CLOUD_ESS:
case DOCKER_WOLFI:
+ case DOCKER_WOLFI_ESS:
// nothing, "installing" docker image is running it
break;
default:
@@ -373,7 +375,8 @@ public void awaitElasticsearchStartup(Shell.Result result) throws Exception {
switch (distribution.packaging) {
case TAR, ZIP -> Archives.assertElasticsearchStarted(installation);
case DEB, RPM -> Packages.assertElasticsearchStarted(sh, installation);
- case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> Docker.waitForElasticsearchToStart();
+ case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI, DOCKER_WOLFI_ESS -> Docker
+ .waitForElasticsearchToStart();
default -> throw new IllegalStateException("Unknown Elasticsearch packaging type.");
}
}
diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java
index 05cef4a0818ba..d63d956dc5199 100644
--- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java
+++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java
@@ -39,6 +39,8 @@ public Distribution(Path path) {
this.packaging = Packaging.DOCKER_CLOUD_ESS;
} else if (filename.endsWith(".wolfi.tar")) {
this.packaging = Packaging.DOCKER_WOLFI;
+ } else if (filename.endsWith(".wolfi-ess.tar")) {
+ this.packaging = Packaging.DOCKER_WOLFI_ESS;
} else {
int lastDot = filename.lastIndexOf('.');
this.packaging = Packaging.valueOf(filename.substring(lastDot + 1).toUpperCase(Locale.ROOT));
@@ -63,7 +65,7 @@ public boolean isPackage() {
*/
public boolean isDocker() {
return switch (packaging) {
- case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> true;
+ case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD, DOCKER_CLOUD_ESS, DOCKER_WOLFI, DOCKER_WOLFI_ESS -> true;
default -> false;
};
}
@@ -79,7 +81,8 @@ public enum Packaging {
DOCKER_IRON_BANK(".ironbank.tar", Platforms.isDocker()),
DOCKER_CLOUD(".cloud.tar", Platforms.isDocker()),
DOCKER_CLOUD_ESS(".cloud-ess.tar", Platforms.isDocker()),
- DOCKER_WOLFI(".wolfi.tar", Platforms.isDocker());
+ DOCKER_WOLFI(".wolfi.tar", Platforms.isDocker()),
+ DOCKER_WOLFI_ESS(".wolfi-ess.tar", Platforms.isDocker());
/** The extension of this distribution's file */
public final String extension;
diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java
index c38eaa58f0552..6f7827663d46c 100644
--- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java
+++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java
@@ -532,7 +532,9 @@ public static void verifyContainerInstallation(Installation es) {
)
);
- if (es.distribution.packaging == Packaging.DOCKER_CLOUD || es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS) {
+ if (es.distribution.packaging == Packaging.DOCKER_CLOUD
+ || es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS
+ || es.distribution.packaging == Packaging.DOCKER_WOLFI_ESS) {
verifyCloudContainerInstallation(es);
}
}
@@ -541,7 +543,7 @@ private static void verifyCloudContainerInstallation(Installation es) {
final String pluginArchive = "/opt/plugins/archive";
final List plugins = listContents(pluginArchive);
- if (es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS) {
+ if (es.distribution.packaging == Packaging.DOCKER_CLOUD_ESS || es.distribution.packaging == Packaging.DOCKER_WOLFI_ESS) {
assertThat("ESS image should come with plugins in " + pluginArchive, plugins, not(empty()));
final List