From 9f568858efbf11175fed4633aaeb05237fa119dc Mon Sep 17 00:00:00 2001
From: Kashif Faraz
Date: Tue, 19 Dec 2023 13:14:04 +0530
Subject: [PATCH] Add logging implementation for AuditManager and audit more endpoints (#15480)

Changes
- Add `log` implementation for `AuditManager` alongside `SQLAuditManager`
- `LoggingAuditManager` simply logs the audit event. Thus, it returns empty results for all `fetchAuditHistory` calls.
- Add new config `druid.audit.manager.type` which can take values `log`, `sql` (default)
- Add new config `druid.audit.manager.logLevel` which can take values `DEBUG`, `INFO`, `WARN`. This takes effect only if `type` is `log`.
- Remove usage of `ConfigSerde` from `AuditManager` as auditing is not limited to configs
- Add `AuditSerdeHelper` to provide a single implementation of audit payload serialization/deserialization and other utility methods.
---
 .../endpoint/BasicAuthenticatorResource.java | 56 ++-
 .../endpoint/BasicAuthorizerResource.java | 115 ++++-
 ...dinatorBasicAuthenticatorResourceTest.java | 112 +++--
 ...oordinatorBasicAuthorizerResourceTest.java | 10 +-
 .../BasicAuthenticatorResourceTest.java | 2 +-
 .../endpoint/BasicAuthorizerResourceTest.java | 2 +-
 .../mysql/MySQLMetadataStorageModuleTest.java | 2 +
 .../overlord/http/OverlordResource.java | 47 +-
 .../overlord/http/OverlordResourceTest.java | 57 ++-
 .../indexing/overlord/http/OverlordTest.java | 15 +-
 .../ITBasicAuthConfigurationTest.java | 135 ++---
 .../org/apache/druid/audit/AuditEntry.java | 158 ++++--
 .../org/apache/druid/audit/AuditInfo.java | 24 +-
 .../org/apache/druid/audit/AuditManager.java | 65 +--
 .../org/apache/druid/audit/RequestInfo.java | 107 ++++
 .../druid/common/config/ConfigSerde.java | 9 -
 .../common/config/JacksonConfigManager.java | 36 +-
 .../druid/java/util/common/logger/Logger.java | 5 -
 .../org/apache/druid/audit/AuditInfoTest.java | 54 +-
 .../common/config/ConfigManagerTest.java | 8 +-
 .../config/JacksonConfigManagerTest.java | 76 +--
 .../java/util/metrics/StubServiceEmitter.java | 17 +
 .../guice/SQLMetadataStorageDruidModule.java | 39 +-
 .../druid/guice/StartupLoggingModule.java | 8 +
 .../metadata/SQLMetadataRuleManager.java | 6 +-
 .../SQLMetadataRuleManagerProvider.java | 3 +-
 .../metadata/SqlSegmentsMetadataQuery.java | 4 +-
 .../druid/server/audit/AuditLogger.java | 68 +++
 ...rProvider.java => AuditManagerConfig.java} | 19 +-
 .../druid/server/audit/AuditSerdeHelper.java | 148 ++++++
 .../server/audit/LoggingAuditManager.java | 50 +-
 .../audit/LoggingAuditManagerConfig.java | 82 +++
 .../druid/server/audit/SQLAuditManager.java | 145 +++---
 .../server/audit/SQLAuditManagerConfig.java | 40 +-
 .../server/audit/SQLAuditManagerProvider.java | 86 ----
 .../duty/KillCompactionConfig.java | 1 +
 .../CoordinatorCompactionConfigsResource.java | 17 +-
 .../CoordinatorDynamicConfigsResource.java | 8 +-
 .../server/http/DataSourcesResource.java | 51 +-
 .../http/LookupCoordinatorResource.java | 20 +-
 .../druid/server/http/RulesResource.java | 7 +-
 .../cache/LookupCoordinatorManager.java | 2 +-
 .../server/security/AuthorizationUtils.java | 55 ++
 .../metadata/SQLMetadataRuleManagerTest.java | 19 +-
 .../server/audit/AuditManagerConfigTest.java | 142 ++++++
 .../server/audit/SQLAuditManagerTest.java | 473 +++++++-----------
 .../CoordinatorSimulationBuilder.java | 2 +-
 ...rdinatorCompactionConfigsResourceTest.java | 26 -
 .../server/http/DataSourcesResourceTest.java | 170 ++++---
 .../http/LookupCoordinatorResourceTest.java | 187 ++-----
 .../druid/server/http/RulesResourceTest.java | 108 ++--
.../cache/LookupCoordinatorManagerTest.java | 15 +- .../org/apache/druid/cli/CliCoordinator.java | 6 - .../org/apache/druid/cli/CliOverlord.java | 6 - 54 files changed, 1830 insertions(+), 1295 deletions(-) create mode 100644 processing/src/main/java/org/apache/druid/audit/RequestInfo.java create mode 100644 server/src/main/java/org/apache/druid/server/audit/AuditLogger.java rename server/src/main/java/org/apache/druid/server/audit/{AuditManagerProvider.java => AuditManagerConfig.java} (60%) create mode 100644 server/src/main/java/org/apache/druid/server/audit/AuditSerdeHelper.java rename processing/src/test/java/org/apache/druid/audit/NoopAuditManager.java => server/src/main/java/org/apache/druid/server/audit/LoggingAuditManager.java (50%) create mode 100644 server/src/main/java/org/apache/druid/server/audit/LoggingAuditManagerConfig.java delete mode 100644 server/src/main/java/org/apache/druid/server/audit/SQLAuditManagerProvider.java create mode 100644 server/src/test/java/org/apache/druid/server/audit/AuditManagerConfigTest.java diff --git a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/endpoint/BasicAuthenticatorResource.java b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/endpoint/BasicAuthenticatorResource.java index 23d8316771c4..fd0b029530c0 100644 --- a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/endpoint/BasicAuthenticatorResource.java +++ b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/endpoint/BasicAuthenticatorResource.java @@ -22,10 +22,14 @@ import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes; import com.google.inject.Inject; import com.sun.jersey.spi.container.ResourceFilters; +import org.apache.druid.audit.AuditEntry; +import org.apache.druid.audit.AuditManager; import org.apache.druid.guice.LazySingleton; +import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.security.basic.BasicSecurityResourceFilter; import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorCredentialUpdate; import org.apache.druid.server.security.AuthValidator; +import org.apache.druid.server.security.AuthorizationUtils; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; @@ -45,15 +49,18 @@ public class BasicAuthenticatorResource { private final BasicAuthenticatorResourceHandler handler; private final AuthValidator authValidator; + private final AuditManager auditManager; @Inject public BasicAuthenticatorResource( BasicAuthenticatorResourceHandler handler, - AuthValidator authValidator + AuthValidator authValidator, + AuditManager auditManager ) { this.handler = handler; this.authValidator = authValidator; + this.auditManager = auditManager; } /** @@ -151,7 +158,11 @@ public Response createUser( ) { authValidator.validateAuthenticatorName(authenticatorName); - return handler.createUser(authenticatorName, userName); + + final Response response = handler.createUser(authenticatorName, userName); + performAuditIfSuccess(authenticatorName, req, response, "Create user[%s]", userName); + + return response; } /** @@ -174,7 +185,10 @@ public Response deleteUser( ) { authValidator.validateAuthenticatorName(authenticatorName); - return handler.deleteUser(authenticatorName, userName); + final Response response = handler.deleteUser(authenticatorName, userName); + performAuditIfSuccess(authenticatorName, req, response, "Delete 
user[%s]", userName); + + return response; } /** @@ -198,7 +212,10 @@ public Response updateUserCredentials( ) { authValidator.validateAuthenticatorName(authenticatorName); - return handler.updateUserCredentials(authenticatorName, userName, update); + final Response response = handler.updateUserCredentials(authenticatorName, userName, update); + performAuditIfSuccess(authenticatorName, req, response, "Update credentials for user[%s]", userName); + + return response; } /** @@ -237,4 +254,35 @@ public Response authenticatorUpdateListener( authValidator.validateAuthenticatorName(authenticatorName); return handler.authenticatorUserUpdateListener(authenticatorName, serializedUserMap); } + + private boolean isSuccess(Response response) + { + if (response == null) { + return false; + } + + int responseCode = response.getStatus(); + return responseCode >= 200 && responseCode < 300; + } + + private void performAuditIfSuccess( + String authenticatorName, + HttpServletRequest request, + Response response, + String payloadFormat, + Object... payloadArgs + ) + { + if (isSuccess(response)) { + auditManager.doAudit( + AuditEntry.builder() + .key(authenticatorName) + .type("basic.authenticator") + .auditInfo(AuthorizationUtils.buildAuditInfo(request)) + .request(AuthorizationUtils.buildRequestInfo("coordinator", request)) + .payload(StringUtils.format(payloadFormat, payloadArgs)) + .build() + ); + } + } } diff --git a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/endpoint/BasicAuthorizerResource.java b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/endpoint/BasicAuthorizerResource.java index cb8a9fa2a240..700d86b3b040 100644 --- a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/endpoint/BasicAuthorizerResource.java +++ b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/endpoint/BasicAuthorizerResource.java @@ -22,10 +22,14 @@ import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes; import com.google.inject.Inject; import com.sun.jersey.spi.container.ResourceFilters; +import org.apache.druid.audit.AuditEntry; +import org.apache.druid.audit.AuditManager; import org.apache.druid.guice.LazySingleton; +import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.security.basic.BasicSecurityResourceFilter; import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerGroupMapping; import org.apache.druid.server.security.AuthValidator; +import org.apache.druid.server.security.AuthorizationUtils; import org.apache.druid.server.security.ResourceAction; import javax.servlet.http.HttpServletRequest; @@ -48,15 +52,18 @@ public class BasicAuthorizerResource { private final BasicAuthorizerResourceHandler resourceHandler; private final AuthValidator authValidator; + private final AuditManager auditManager; @Inject public BasicAuthorizerResource( BasicAuthorizerResourceHandler resourceHandler, - AuthValidator authValidator + AuthValidator authValidator, + AuditManager auditManager ) { this.resourceHandler = resourceHandler; this.authValidator = authValidator; + this.auditManager = auditManager; } /** @@ -198,7 +205,11 @@ public Response createUser( ) { authValidator.validateAuthorizerName(authorizerName); - return resourceHandler.createUser(authorizerName, userName); + + final Response response = resourceHandler.createUser(authorizerName, userName); + performAuditIfSuccess(authorizerName, req, 
response, "Create user[%s]", userName); + + return response; } /** @@ -221,7 +232,11 @@ public Response deleteUser( ) { authValidator.validateAuthorizerName(authorizerName); - return resourceHandler.deleteUser(authorizerName, userName); + + final Response response = resourceHandler.deleteUser(authorizerName, userName); + performAuditIfSuccess(authorizerName, req, response, "Delete user[%s]", userName); + + return response; } /** @@ -245,10 +260,21 @@ public Response createGroupMapping( ) { authValidator.validateAuthorizerName(authorizerName); - return resourceHandler.createGroupMapping( + final Response response = resourceHandler.createGroupMapping( authorizerName, new BasicAuthorizerGroupMapping(groupMappingName, groupMapping.getGroupPattern(), groupMapping.getRoles()) ); + performAuditIfSuccess( + authorizerName, + req, + response, + "Create groupMapping[%s] with pattern[%s], roles[%s]", + groupMappingName, + groupMapping.getGroupPattern(), + groupMapping.getRoles() + ); + + return response; } /** @@ -271,7 +297,11 @@ public Response deleteGroupMapping( ) { authValidator.validateAuthorizerName(authorizerName); - return resourceHandler.deleteGroupMapping(authorizerName, groupMappingName); + + final Response response = resourceHandler.deleteGroupMapping(authorizerName, groupMappingName); + performAuditIfSuccess(authorizerName, req, response, "Delete groupMapping[%s]", groupMappingName); + + return response; } /** @@ -338,7 +368,11 @@ public Response createRole( ) { authValidator.validateAuthorizerName(authorizerName); - return resourceHandler.createRole(authorizerName, roleName); + + final Response response = resourceHandler.createRole(authorizerName, roleName); + performAuditIfSuccess(authorizerName, req, response, "Create role[%s]", roleName); + + return response; } /** @@ -361,7 +395,11 @@ public Response deleteRole( ) { authValidator.validateAuthorizerName(authorizerName); - return resourceHandler.deleteRole(authorizerName, roleName); + + final Response response = resourceHandler.deleteRole(authorizerName, roleName); + performAuditIfSuccess(authorizerName, req, response, "Delete role[%s]", roleName); + + return response; } /** @@ -386,7 +424,11 @@ public Response assignRoleToUser( ) { authValidator.validateAuthorizerName(authorizerName); - return resourceHandler.assignRoleToUser(authorizerName, userName, roleName); + + final Response response = resourceHandler.assignRoleToUser(authorizerName, userName, roleName); + performAuditIfSuccess(authorizerName, req, response, "Assign role[%s] to user[%s]", roleName, userName); + + return response; } /** @@ -411,7 +453,11 @@ public Response unassignRoleFromUser( ) { authValidator.validateAuthorizerName(authorizerName); - return resourceHandler.unassignRoleFromUser(authorizerName, userName, roleName); + + final Response response = resourceHandler.unassignRoleFromUser(authorizerName, userName, roleName); + performAuditIfSuccess(authorizerName, req, response, "Unassign role[%s] from user[%s]", roleName, userName); + + return response; } /** @@ -436,7 +482,12 @@ public Response assignRoleToGroupMapping( ) { authValidator.validateAuthorizerName(authorizerName); - return resourceHandler.assignRoleToGroupMapping(authorizerName, groupMappingName, roleName); + final Response response = resourceHandler.assignRoleToGroupMapping(authorizerName, groupMappingName, roleName); + + String msgFormat = "Assign role[%s] to groupMapping[%s]"; + performAuditIfSuccess(authorizerName, req, response, msgFormat, roleName, groupMappingName); + + return response; } /** @@ 
-461,7 +512,12 @@ public Response unassignRoleFromGroupMapping( ) { authValidator.validateAuthorizerName(authorizerName); - return resourceHandler.unassignRoleFromGroupMapping(authorizerName, groupMappingName, roleName); + + final Response response = resourceHandler.unassignRoleFromGroupMapping(authorizerName, groupMappingName, roleName); + String msgFormat = "Unassign role[%s] from groupMapping[%s]"; + performAuditIfSuccess(authorizerName, req, response, msgFormat, roleName, groupMappingName); + + return response; } /** @@ -486,7 +542,11 @@ public Response setRolePermissions( ) { authValidator.validateAuthorizerName(authorizerName); - return resourceHandler.setRolePermissions(authorizerName, roleName, permissions); + + final Response response = resourceHandler.setRolePermissions(authorizerName, roleName, permissions); + performAuditIfSuccess(authorizerName, req, response, "Set permissions[%s] for role[%s]", permissions, roleName); + + return response; } /** @@ -607,4 +667,35 @@ public Response authorizerGroupMappingUpdateListener( authValidator.validateAuthorizerName(authorizerName); return resourceHandler.authorizerGroupMappingUpdateListener(authorizerName, serializedGroupMappingAndRoleMap); } + + private boolean isSuccess(Response response) + { + if (response == null) { + return false; + } + + int responseCode = response.getStatus(); + return responseCode >= 200 && responseCode < 300; + } + + private void performAuditIfSuccess( + String authorizerName, + HttpServletRequest request, + Response response, + String msgFormat, + Object... args + ) + { + if (isSuccess(response)) { + auditManager.doAudit( + AuditEntry.builder() + .key(authorizerName) + .type("basic.authorizer") + .auditInfo(AuthorizationUtils.buildAuditInfo(request)) + .request(AuthorizationUtils.buildRequestInfo("coordinator", request)) + .payload(StringUtils.format(msgFormat, args)) + .build() + ); + } + } } diff --git a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/authentication/CoordinatorBasicAuthenticatorResourceTest.java b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/authentication/CoordinatorBasicAuthenticatorResourceTest.java index b383a6d3d92f..5dec8646573b 100644 --- a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/authentication/CoordinatorBasicAuthenticatorResourceTest.java +++ b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/authentication/CoordinatorBasicAuthenticatorResourceTest.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import org.apache.druid.audit.AuditManager; import org.apache.druid.metadata.DefaultPasswordProvider; import org.apache.druid.metadata.MetadataStorageTablesConfig; import org.apache.druid.metadata.TestDerbyConnector; @@ -35,7 +36,9 @@ import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorCredentialUpdate; import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorCredentials; import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorUser; +import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthValidator; +import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.AuthenticatorMapper; import org.easymock.EasyMock; import org.junit.After; @@ -50,6 +53,7 @@ import 
javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.Response; +import java.util.Collections; import java.util.Map; import java.util.Set; @@ -68,6 +72,9 @@ public class CoordinatorBasicAuthenticatorResourceTest @Mock private AuthValidator authValidator; + + @Mock + private AuditManager auditManager; private BasicAuthenticatorResource resource; private CoordinatorBasicAuthenticatorMetadataStorageUpdater storageUpdater; private HttpServletRequest req; @@ -77,6 +84,13 @@ public class CoordinatorBasicAuthenticatorResourceTest public void setUp() { req = EasyMock.createStrictMock(HttpServletRequest.class); + EasyMock.expect(req.getHeader(AuditManager.X_DRUID_AUTHOR)).andReturn("author").anyTimes(); + EasyMock.expect(req.getHeader(AuditManager.X_DRUID_COMMENT)).andReturn("comment").anyTimes(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn( + new AuthenticationResult("id", "authorizer", "authBy", Collections.emptyMap()) + ).anyTimes(); + EasyMock.expect(req.getRemoteAddr()).andReturn("127.0.0.1").anyTimes(); + EasyMock.replay(req); objectMapper = new ObjectMapper(new SmileFactory()); TestDerbyConnector connector = derbyConnectorRule.getConnector(); @@ -145,7 +159,8 @@ public void setUp() authenticatorMapper, objectMapper ), - authValidator + authValidator, + auditManager ); storageUpdater.start(); @@ -155,12 +170,15 @@ public void setUp() public void tearDown() { storageUpdater.stop(); + if (req != null) { + EasyMock.verify(req); + } } @Test public void testInvalidAuthenticator() { - Response response = resource.getAllUsers(req, "invalidName"); + Response response = resource.getAllUsers(mockHttpRequestNoAudit(), "invalidName"); Assert.assertEquals(400, response.getStatus()); Assert.assertEquals( errorMapWithMsg("Basic authenticator with name [invalidName] does not exist."), @@ -171,13 +189,13 @@ public void testInvalidAuthenticator() @Test public void testGetAllUsers() { - Response response = resource.getAllUsers(req, AUTHENTICATOR_NAME); + Response response = resource.getAllUsers(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(ImmutableSet.of(BasicAuthUtils.ADMIN_NAME, BasicAuthUtils.INTERNAL_USER_NAME), response.getEntity()); - resource.createUser(req, AUTHENTICATOR_NAME, "druid"); - resource.createUser(req, AUTHENTICATOR_NAME, "druid2"); - resource.createUser(req, AUTHENTICATOR_NAME, "druid3"); + resource.createUser(mockHttpRequest(), AUTHENTICATOR_NAME, "druid"); + resource.createUser(mockHttpRequest(), AUTHENTICATOR_NAME, "druid2"); + resource.createUser(mockHttpRequest(), AUTHENTICATOR_NAME, "druid3"); Set expectedUsers = ImmutableSet.of( BasicAuthUtils.ADMIN_NAME, @@ -187,12 +205,12 @@ public void testGetAllUsers() "druid3" ); - response = resource.getAllUsers(req, AUTHENTICATOR_NAME); + response = resource.getAllUsers(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUsers, response.getEntity()); // Verify cached user map is also getting updated - response = resource.getCachedSerializedUserMap(req, AUTHENTICATOR_NAME); + response = resource.getCachedSerializedUserMap(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertTrue(response.getEntity() instanceof byte[]); Map cachedUserMap = BasicAuthUtils.deserializeAuthenticatorUserMap(objectMapper, (byte[]) response.getEntity()); @@ -211,17 +229,17 @@ public void testGetAllUsers() @Test public void 
testGetAllUsersSeparateDatabaseTables() { - Response response = resource.getAllUsers(req, AUTHENTICATOR_NAME); + Response response = resource.getAllUsers(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(ImmutableSet.of(BasicAuthUtils.ADMIN_NAME, BasicAuthUtils.INTERNAL_USER_NAME), response.getEntity()); - resource.createUser(req, AUTHENTICATOR_NAME, "druid"); - resource.createUser(req, AUTHENTICATOR_NAME, "druid2"); - resource.createUser(req, AUTHENTICATOR_NAME, "druid3"); + resource.createUser(mockHttpRequest(), AUTHENTICATOR_NAME, "druid"); + resource.createUser(mockHttpRequest(), AUTHENTICATOR_NAME, "druid2"); + resource.createUser(mockHttpRequest(), AUTHENTICATOR_NAME, "druid3"); - resource.createUser(req, AUTHENTICATOR_NAME2, "druid4"); - resource.createUser(req, AUTHENTICATOR_NAME2, "druid5"); - resource.createUser(req, AUTHENTICATOR_NAME2, "druid6"); + resource.createUser(mockHttpRequest(), AUTHENTICATOR_NAME2, "druid4"); + resource.createUser(mockHttpRequest(), AUTHENTICATOR_NAME2, "druid5"); + resource.createUser(mockHttpRequest(), AUTHENTICATOR_NAME2, "druid6"); Set expectedUsers = ImmutableSet.of( BasicAuthUtils.ADMIN_NAME, @@ -239,12 +257,12 @@ public void testGetAllUsersSeparateDatabaseTables() "druid6" ); - response = resource.getAllUsers(req, AUTHENTICATOR_NAME); + response = resource.getAllUsers(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUsers, response.getEntity()); // Verify cached user map for AUTHENTICATOR_NAME authenticator is also getting updated - response = resource.getCachedSerializedUserMap(req, AUTHENTICATOR_NAME); + response = resource.getCachedSerializedUserMap(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertTrue(response.getEntity() instanceof byte[]); @@ -260,12 +278,12 @@ public void testGetAllUsersSeparateDatabaseTables() Assert.assertNotNull(cachedUserMap.get("druid3")); Assert.assertEquals(cachedUserMap.get("druid3").getName(), "druid3"); - response = resource.getAllUsers(req, AUTHENTICATOR_NAME2); + response = resource.getAllUsers(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME2); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUsers2, response.getEntity()); // Verify cached user map for each AUTHENTICATOR_NAME2 is also getting updated - response = resource.getCachedSerializedUserMap(req, AUTHENTICATOR_NAME2); + response = resource.getCachedSerializedUserMap(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME2); Assert.assertEquals(200, response.getStatus()); Assert.assertTrue(response.getEntity() instanceof byte[]); @@ -285,29 +303,29 @@ public void testGetAllUsersSeparateDatabaseTables() @Test public void testCreateDeleteUser() { - Response response = resource.createUser(req, AUTHENTICATOR_NAME, "druid"); + Response response = resource.createUser(mockHttpRequest(), AUTHENTICATOR_NAME, "druid"); Assert.assertEquals(200, response.getStatus()); - response = resource.getUser(req, AUTHENTICATOR_NAME, "druid"); + response = resource.getUser(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME, "druid"); Assert.assertEquals(200, response.getStatus()); BasicAuthenticatorUser expectedUser = new BasicAuthenticatorUser("druid", null); Assert.assertEquals(expectedUser, response.getEntity()); - response = resource.deleteUser(req, AUTHENTICATOR_NAME, "druid"); + response = resource.deleteUser(mockHttpRequest(), AUTHENTICATOR_NAME, "druid"); 
Assert.assertEquals(200, response.getStatus()); - response = resource.getCachedSerializedUserMap(req, AUTHENTICATOR_NAME); + response = resource.getCachedSerializedUserMap(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertTrue(response.getEntity() instanceof byte[]); Map cachedUserMap = BasicAuthUtils.deserializeAuthenticatorUserMap(objectMapper, (byte[]) response.getEntity()); Assert.assertNotNull(cachedUserMap); Assert.assertNull(cachedUserMap.get("druid")); - response = resource.deleteUser(req, AUTHENTICATOR_NAME, "druid"); + response = resource.deleteUser(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME, "druid"); Assert.assertEquals(400, response.getStatus()); Assert.assertEquals(errorMapWithMsg("User [druid] does not exist."), response.getEntity()); - response = resource.getUser(req, AUTHENTICATOR_NAME, "druid"); + response = resource.getUser(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME, "druid"); Assert.assertEquals(400, response.getStatus()); Assert.assertEquals(errorMapWithMsg("User [druid] does not exist."), response.getEntity()); } @@ -315,18 +333,18 @@ public void testCreateDeleteUser() @Test public void testUserCredentials() { - Response response = resource.createUser(req, AUTHENTICATOR_NAME, "druid"); + Response response = resource.createUser(mockHttpRequest(), AUTHENTICATOR_NAME, "druid"); Assert.assertEquals(200, response.getStatus()); response = resource.updateUserCredentials( - req, + mockHttpRequest(), AUTHENTICATOR_NAME, "druid", new BasicAuthenticatorCredentialUpdate("helloworld", null) ); Assert.assertEquals(200, response.getStatus()); - response = resource.getUser(req, AUTHENTICATOR_NAME, "druid"); + response = resource.getUser(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME, "druid"); Assert.assertEquals(200, response.getStatus()); BasicAuthenticatorUser actualUser = (BasicAuthenticatorUser) response.getEntity(); Assert.assertEquals("druid", actualUser.getName()); @@ -346,7 +364,7 @@ public void testUserCredentials() ); Assert.assertArrayEquals(recalculatedHash, hash); - response = resource.getCachedSerializedUserMap(req, AUTHENTICATOR_NAME); + response = resource.getCachedSerializedUserMap(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertTrue(response.getEntity() instanceof byte[]); Map cachedUserMap = BasicAuthUtils.deserializeAuthenticatorUserMap(objectMapper, (byte[]) response.getEntity()); @@ -369,15 +387,15 @@ public void testUserCredentials() ); Assert.assertArrayEquals(recalculatedHash, hash); - response = resource.deleteUser(req, AUTHENTICATOR_NAME, "druid"); + response = resource.deleteUser(mockHttpRequest(), AUTHENTICATOR_NAME, "druid"); Assert.assertEquals(200, response.getStatus()); - response = resource.getUser(req, AUTHENTICATOR_NAME, "druid"); + response = resource.getUser(mockHttpRequestNoAudit(), AUTHENTICATOR_NAME, "druid"); Assert.assertEquals(400, response.getStatus()); Assert.assertEquals(errorMapWithMsg("User [druid] does not exist."), response.getEntity()); response = resource.updateUserCredentials( - req, + mockHttpRequestNoAudit(), AUTHENTICATOR_NAME, "druid", new BasicAuthenticatorCredentialUpdate("helloworld", null) @@ -386,6 +404,36 @@ public void testUserCredentials() Assert.assertEquals(errorMapWithMsg("User [druid] does not exist."), response.getEntity()); } + private HttpServletRequest mockHttpRequestNoAudit() + { + if (req != null) { + EasyMock.verify(req); + } + req = EasyMock.createStrictMock(HttpServletRequest.class); + 
EasyMock.replay(req); + return req; + } + + private HttpServletRequest mockHttpRequest() + { + if (req != null) { + EasyMock.verify(req); + } + req = EasyMock.createStrictMock(HttpServletRequest.class); + EasyMock.expect(req.getHeader(AuditManager.X_DRUID_AUTHOR)).andReturn("author").once(); + EasyMock.expect(req.getHeader(AuditManager.X_DRUID_COMMENT)).andReturn("comment").once(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn( + new AuthenticationResult("id", "authorizer", "authBy", Collections.emptyMap()) + ).once(); + EasyMock.expect(req.getRemoteAddr()).andReturn("127.0.0.1").once(); + EasyMock.expect(req.getMethod()).andReturn("GET").once(); + EasyMock.expect(req.getRequestURI()).andReturn("uri").once(); + EasyMock.expect(req.getQueryString()).andReturn("a=b").once(); + EasyMock.replay(req); + + return req; + } + private static Map errorMapWithMsg(String errorMsg) { return ImmutableMap.of("error", errorMsg); diff --git a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/authorization/CoordinatorBasicAuthorizerResourceTest.java b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/authorization/CoordinatorBasicAuthorizerResourceTest.java index 746d3339f974..225424daf4e2 100644 --- a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/authorization/CoordinatorBasicAuthorizerResourceTest.java +++ b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/authorization/CoordinatorBasicAuthorizerResourceTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; +import org.apache.druid.audit.AuditManager; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.concurrent.Execs; import org.apache.druid.metadata.MetadataStorageTablesConfig; @@ -55,7 +56,6 @@ import org.junit.Before; import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; @@ -78,15 +78,14 @@ public class CoordinatorBasicAuthorizerResourceTest private static final String AUTHORIZER_NAME2 = "test2"; private static final String AUTHORIZER_NAME3 = "test3"; - @Rule - public ExpectedException expectedException = ExpectedException.none(); - @Rule public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule(); @Mock private AuthValidator authValidator; @Mock private HttpServletRequest req; + @Mock + private AuditManager auditManager; private TestDerbyConnector connector; private MetadataStorageTablesConfig tablesConfig; @@ -154,7 +153,8 @@ public void setUp() authorizerMapper, new ObjectMapper(new SmileFactory()) ), - authValidator + authValidator, + auditManager ); storageUpdater.start(); diff --git a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authentication/endpoint/BasicAuthenticatorResourceTest.java b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authentication/endpoint/BasicAuthenticatorResourceTest.java index fc403b08ca30..6545b880a065 100644 --- a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authentication/endpoint/BasicAuthenticatorResourceTest.java +++ 
b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authentication/endpoint/BasicAuthenticatorResourceTest.java @@ -58,7 +58,7 @@ public void setUp() .when(authValidator) .validateAuthenticatorName(INVALID_AUTHENTICATOR_NAME); - target = new BasicAuthenticatorResource(handler, authValidator); + target = new BasicAuthenticatorResource(handler, authValidator, null); } @Test diff --git a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authorization/endpoint/BasicAuthorizerResourceTest.java b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authorization/endpoint/BasicAuthorizerResourceTest.java index de0bf1db4e32..6350d3f91399 100644 --- a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authorization/endpoint/BasicAuthorizerResourceTest.java +++ b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authorization/endpoint/BasicAuthorizerResourceTest.java @@ -68,7 +68,7 @@ public void setUp() .when(authValidator) .validateAuthorizerName(INVALID_AUTHORIZER_NAME); - target = new BasicAuthorizerResource(resourceHandler, authValidator); + target = new BasicAuthorizerResource(resourceHandler, authValidator, null); } @Test diff --git a/extensions-core/mysql-metadata-storage/src/test/java/org/apache/druid/metadata/storage/mysql/MySQLMetadataStorageModuleTest.java b/extensions-core/mysql-metadata-storage/src/test/java/org/apache/druid/metadata/storage/mysql/MySQLMetadataStorageModuleTest.java index 0e2d6c359b5b..b1c4922ea643 100644 --- a/extensions-core/mysql-metadata-storage/src/test/java/org/apache/druid/metadata/storage/mysql/MySQLMetadataStorageModuleTest.java +++ b/extensions-core/mysql-metadata-storage/src/test/java/org/apache/druid/metadata/storage/mysql/MySQLMetadataStorageModuleTest.java @@ -32,6 +32,7 @@ import org.apache.druid.guice.LifecycleModule; import org.apache.druid.guice.MetadataConfigModule; import org.apache.druid.guice.annotations.Json; +import org.apache.druid.guice.security.EscalatorModule; import org.apache.druid.java.util.emitter.core.NoopEmitter; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.junit.Assert; @@ -111,6 +112,7 @@ private Injector createInjector() MySQLMetadataStorageModule module = new MySQLMetadataStorageModule(); Injector injector = GuiceInjectors.makeStartupInjectorWithModules( ImmutableList.of( + new EscalatorModule(), new MetadataConfigModule(), new LifecycleModule(), module, diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordResource.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordResource.java index 01af55eadb31..7afaaa905264 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordResource.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordResource.java @@ -24,12 +24,12 @@ import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.inject.Inject; import com.sun.jersey.spi.container.ResourceFilters; import org.apache.druid.audit.AuditEntry; -import org.apache.druid.audit.AuditInfo; import org.apache.druid.audit.AuditManager; import org.apache.druid.client.indexing.ClientTaskQuery; import 
org.apache.druid.common.config.ConfigManager.SetResult; @@ -37,6 +37,7 @@ import org.apache.druid.error.DruidException; import org.apache.druid.error.ErrorResponse; import org.apache.druid.indexer.RunnerTaskState; +import org.apache.druid.indexer.TaskIdentifier; import org.apache.druid.indexer.TaskInfo; import org.apache.druid.indexer.TaskLocation; import org.apache.druid.indexer.TaskStatus; @@ -93,7 +94,6 @@ import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; -import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; @@ -139,6 +139,8 @@ public class OverlordResource private AtomicReference workerConfigRef = null; private static final List API_TASK_STATES = ImmutableList.of("pending", "waiting", "running", "complete"); + private static final Set AUDITED_TASK_TYPES + = ImmutableSet.of("index", "index_parallel", "compact", "index_hadoop", "kill"); private enum TaskStateLookup { @@ -193,7 +195,10 @@ public OverlordResource( @Path("/task") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) - public Response taskPost(final Task task, @Context final HttpServletRequest req) + public Response taskPost( + final Task task, + @Context final HttpServletRequest req + ) { final Set resourceActions; try { @@ -201,13 +206,8 @@ public Response taskPost(final Task task, @Context final HttpServletRequest req) } catch (UOE e) { return Response.status(Response.Status.BAD_REQUEST) - .entity( - ImmutableMap.of( - "error", - e.getMessage() - ) - ) - .build(); + .entity(ImmutableMap.of("error", e.getMessage())) + .build(); } Access authResult = AuthorizationUtils.authorizeAllResourceActions( @@ -225,18 +225,31 @@ public Response taskPost(final Task task, @Context final HttpServletRequest req) taskQueue -> { try { taskQueue.add(task); + + if (AUDITED_TASK_TYPES.contains(task.getType())) { + auditManager.doAudit( + AuditEntry.builder() + .key(task.getDataSource()) + .type("task") + .request(AuthorizationUtils.buildRequestInfo("overlord", req)) + .payload(new TaskIdentifier(task.getId(), task.getGroupId(), task.getType())) + .auditInfo(AuthorizationUtils.buildAuditInfo(req)) + .build() + ); + } + return Response.ok(ImmutableMap.of("task", task.getId())).build(); } catch (DruidException e) { return Response - .status(e.getStatusCode()) - .entity(new ErrorResponse(e)) - .build(); + .status(e.getStatusCode()) + .entity(new ErrorResponse(e)) + .build(); } catch (org.apache.druid.common.exception.DruidException e) { return Response.status(e.getResponseCode()) - .entity(ImmutableMap.of("error", e.getMessage())) - .build(); + .entity(ImmutableMap.of("error", e.getMessage())) + .build(); } } ); @@ -541,15 +554,13 @@ public Response getTotalWorkerCapacity() @ResourceFilters(ConfigResourceFilter.class) public Response setWorkerConfig( final WorkerBehaviorConfig workerBehaviorConfig, - @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author, - @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment, @Context final HttpServletRequest req ) { final SetResult setResult = configManager.set( WorkerBehaviorConfig.CONFIG_KEY, workerBehaviorConfig, - new AuditInfo(author, comment, req.getRemoteAddr()) + AuthorizationUtils.buildAuditInfo(req) ); if (setResult.isOk()) { log.info("Updating Worker configs: %s", workerBehaviorConfig); diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/OverlordResourceTest.java 
b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/OverlordResourceTest.java index 6eed9e32df38..561e8dd9b199 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/OverlordResourceTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/OverlordResourceTest.java @@ -25,6 +25,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import org.apache.druid.audit.AuditEntry; +import org.apache.druid.audit.AuditManager; import org.apache.druid.common.config.JacksonConfigManager; import org.apache.druid.indexer.RunnerTaskState; import org.apache.druid.indexer.TaskInfo; @@ -32,6 +34,7 @@ import org.apache.druid.indexer.TaskState; import org.apache.druid.indexer.TaskStatus; import org.apache.druid.indexer.TaskStatusPlus; +import org.apache.druid.indexing.common.task.KillUnusedSegmentsTask; import org.apache.druid.indexing.common.task.NoopTask; import org.apache.druid.indexing.common.task.Task; import org.apache.druid.indexing.overlord.ImmutableWorkerInfo; @@ -67,6 +70,7 @@ import org.apache.druid.server.security.Resource; import org.apache.druid.server.security.ResourceAction; import org.apache.druid.server.security.ResourceType; +import org.easymock.Capture; import org.easymock.EasyMock; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.joda.time.DateTime; @@ -104,7 +108,9 @@ public class OverlordResourceTest private IndexerMetadataStorageAdapter indexerMetadataStorageAdapter; private HttpServletRequest req; private TaskRunner taskRunner; + private TaskQueue taskQueue; private WorkerTaskRunnerQueryAdapter workerTaskRunnerQueryAdapter; + private AuditManager auditManager; @Rule public ExpectedException expectedException = ExpectedException.none(); @@ -113,6 +119,7 @@ public class OverlordResourceTest public void setUp() { taskRunner = EasyMock.createMock(TaskRunner.class); + taskQueue = EasyMock.createMock(TaskQueue.class); configManager = EasyMock.createMock(JacksonConfigManager.class); provisioningStrategy = EasyMock.createMock(ProvisioningStrategy.class); authConfig = EasyMock.createMock(AuthConfig.class); @@ -121,6 +128,7 @@ public void setUp() indexerMetadataStorageAdapter = EasyMock.createStrictMock(IndexerMetadataStorageAdapter.class); req = EasyMock.createStrictMock(HttpServletRequest.class); workerTaskRunnerQueryAdapter = EasyMock.createStrictMock(WorkerTaskRunnerQueryAdapter.class); + auditManager = EasyMock.createMock(AuditManager.class); EasyMock.expect(taskMaster.getTaskRunner()).andReturn( Optional.of(taskRunner) @@ -165,7 +173,7 @@ public Access authorize(AuthenticationResult authenticationResult, Resource reso indexerMetadataStorageAdapter, null, configManager, - null, + auditManager, authMapper, workerTaskRunnerQueryAdapter, provisioningStrategy, @@ -880,6 +888,53 @@ public void testSecuredTaskPost() overlordResource.taskPost(task, req); } + @Test + public void testKillTaskIsAudited() + { + EasyMock.expect(authConfig.isEnableInputSourceSecurity()).andReturn(false); + + final String username = Users.DRUID; + expectAuthorizationTokenCheck(username); + EasyMock.expect(req.getMethod()).andReturn("POST").once(); + EasyMock.expect(req.getRequestURI()).andReturn("/indexer/v2/task").once(); + EasyMock.expect(req.getQueryString()).andReturn("").once(); + EasyMock.expect(req.getHeader(AuditManager.X_DRUID_AUTHOR)).andReturn(username).once(); + 
EasyMock.expect(req.getHeader(AuditManager.X_DRUID_COMMENT)).andReturn("killing segments").once(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(new AuthenticationResult(username, "druid", null, null)) + .once(); + EasyMock.expect(req.getRemoteAddr()).andReturn("127.0.0.1").once(); + + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).once(); + + final Capture auditEntryCapture = EasyMock.newCapture(); + auditManager.doAudit(EasyMock.capture(auditEntryCapture)); + EasyMock.expectLastCall().once(); + + EasyMock.replay( + taskRunner, + taskMaster, + taskQueue, + taskStorageQueryAdapter, + indexerMetadataStorageAdapter, + req, + workerTaskRunnerQueryAdapter, + authConfig, + auditManager + ); + + Task task = new KillUnusedSegmentsTask("kill_all", "allow", Intervals.ETERNITY, null, false, 10, null); + overlordResource.taskPost(task, req); + + Assert.assertTrue(auditEntryCapture.hasCaptured()); + AuditEntry auditEntry = auditEntryCapture.getValue(); + Assert.assertEquals(username, auditEntry.getAuditInfo().getAuthor()); + Assert.assertEquals("killing segments", auditEntry.getAuditInfo().getComment()); + Assert.assertEquals("druid", auditEntry.getAuditInfo().getIdentity()); + Assert.assertEquals("127.0.0.1", auditEntry.getAuditInfo().getIp()); + } + @Test public void testTaskPostDeniesDatasourceReadUser() { diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/OverlordTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/OverlordTest.java index f9ce36df1815..b0df15ce4ef5 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/OverlordTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/OverlordTest.java @@ -31,6 +31,7 @@ import org.apache.curator.retry.RetryOneTime; import org.apache.curator.test.TestingServer; import org.apache.curator.test.Timing; +import org.apache.druid.audit.AuditManager; import org.apache.druid.curator.PotentiallyGzippedCompressionProvider; import org.apache.druid.curator.discovery.LatchableServiceAnnouncer; import org.apache.druid.discovery.DruidLeaderSelector; @@ -148,11 +149,18 @@ private void tearDownServerAndCurator() public void setUp() throws Exception { req = EasyMock.createMock(HttpServletRequest.class); - EasyMock.expect(req.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes(); - EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).anyTimes(); + EasyMock.expect(req.getHeader(AuditManager.X_DRUID_AUTHOR)).andReturn("author").once(); + EasyMock.expect(req.getHeader(AuditManager.X_DRUID_COMMENT)).andReturn("comment").once(); + EasyMock.expect(req.getRemoteAddr()).andReturn("127.0.0.1").once(); EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn( new AuthenticationResult("druid", "druid", null, null) ).anyTimes(); + EasyMock.expect(req.getMethod()).andReturn("GET").anyTimes(); + EasyMock.expect(req.getRequestURI()).andReturn("/request/uri").anyTimes(); + EasyMock.expect(req.getQueryString()).andReturn("query=string").anyTimes(); + + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).anyTimes(); req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); 
EasyMock.expectLastCall().anyTimes(); supervisorManager = EasyMock.createMock(SupervisorManager.class); @@ -260,13 +268,14 @@ public void testOverlordRun() throws Exception final TaskStorageQueryAdapter taskStorageQueryAdapter = new TaskStorageQueryAdapter(taskStorage, taskLockbox, taskMaster); final WorkerTaskRunnerQueryAdapter workerTaskRunnerQueryAdapter = new WorkerTaskRunnerQueryAdapter(taskMaster, null); // Test Overlord resource stuff + AuditManager auditManager = EasyMock.createNiceMock(AuditManager.class); overlordResource = new OverlordResource( taskMaster, taskStorageQueryAdapter, new IndexerMetadataStorageAdapter(taskStorageQueryAdapter, null), null, null, - null, + auditManager, AuthTestUtils.TEST_AUTHORIZER_MAPPER, workerTaskRunnerQueryAdapter, null, diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java index 410e4a9202d2..17123c02352f 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java @@ -36,6 +36,7 @@ import org.testng.annotations.Guice; import org.testng.annotations.Test; +import java.io.IOException; import java.util.List; import java.util.Properties; @@ -80,7 +81,6 @@ public void test_druid99User_hasNodeAccess() protected void setupDatasourceOnlyUser() throws Exception { createUserAndRoleWithPermissions( - getHttpClient(User.ADMIN), "datasourceOnlyUser", "helloworld", "datasourceOnlyRole", @@ -92,7 +92,6 @@ protected void setupDatasourceOnlyUser() throws Exception protected void setupDatasourceAndContextParamsUser() throws Exception { createUserAndRoleWithPermissions( - getHttpClient(User.ADMIN), "datasourceAndContextParamsUser", "helloworld", "datasourceAndContextParamsRole", @@ -104,7 +103,6 @@ protected void setupDatasourceAndContextParamsUser() throws Exception protected void setupDatasourceAndSysTableUser() throws Exception { createUserAndRoleWithPermissions( - getHttpClient(User.ADMIN), "datasourceAndSysUser", "helloworld", "datasourceAndSysRole", @@ -116,7 +114,6 @@ protected void setupDatasourceAndSysTableUser() throws Exception protected void setupDatasourceAndSysAndStateUser() throws Exception { createUserAndRoleWithPermissions( - getHttpClient(User.ADMIN), "datasourceWithStateUser", "helloworld", "datasourceWithStateRole", @@ -128,7 +125,6 @@ protected void setupDatasourceAndSysAndStateUser() throws Exception protected void setupSysTableAndStateOnlyUser() throws Exception { createUserAndRoleWithPermissions( - getHttpClient(User.ADMIN), "stateOnlyUser", "helloworld", "stateOnlyRole", @@ -141,7 +137,6 @@ protected void setupTestSpecificHttpClients() throws Exception { // create a new user+role that can read /status createUserAndRoleWithPermissions( - getHttpClient(User.ADMIN), "druid", "helloworld", "druidrole", @@ -150,37 +145,18 @@ protected void setupTestSpecificHttpClients() throws Exception // create 100 users for (int i = 0; i < 100; i++) { - HttpUtil.makeRequest( - getHttpClient(User.ADMIN), - HttpMethod.POST, - config.getCoordinatorUrl() + "/druid-ext/basic-security/authentication/db/basic/users/druid" + i, - null - ); - - HttpUtil.makeRequest( - getHttpClient(User.ADMIN), - HttpMethod.POST, - config.getCoordinatorUrl() + "/druid-ext/basic-security/authorization/db/basic/users/druid" + i, - null - ); - - LOG.info("Finished creating user 
druid" + i); + final String username = "druid" + i; + postAsAdmin(null, "/authentication/db/basic/users/%s", username); + postAsAdmin(null, "/authorization/db/basic/users/%s", username); + LOG.info("Created user[%s]", username); } // setup the last of 100 users and check that it works - HttpUtil.makeRequest( - getHttpClient(User.ADMIN), - HttpMethod.POST, - config.getCoordinatorUrl() + "/druid-ext/basic-security/authentication/db/basic/users/druid99/credentials", - jsonMapper.writeValueAsBytes(new BasicAuthenticatorCredentialUpdate("helloworld", 5000)) - ); - - HttpUtil.makeRequest( - getHttpClient(User.ADMIN), - HttpMethod.POST, - config.getCoordinatorUrl() + "/druid-ext/basic-security/authorization/db/basic/users/druid99/roles/druidrole", - null + postAsAdmin( + new BasicAuthenticatorCredentialUpdate("helloworld", 5000), + "/authentication/db/basic/users/druid99/credentials" ); + postAsAdmin(null, "/authorization/db/basic/users/druid99/roles/druidrole"); druid99 = new CredentialedHttpClient( new BasicCredentials("druid99", "helloworld"), @@ -231,74 +207,41 @@ protected Properties getAvaticaConnectionPropertiesForUser(User user) } private void createUserAndRoleWithPermissions( - HttpClient adminClient, String user, String password, String role, List permissions ) throws Exception { - HttpUtil.makeRequest( - adminClient, - HttpMethod.POST, - StringUtils.format( - "%s/druid-ext/basic-security/authentication/db/basic/users/%s", - config.getCoordinatorUrl(), - user - ), - null - ); - HttpUtil.makeRequest( - adminClient, - HttpMethod.POST, - StringUtils.format( - "%s/druid-ext/basic-security/authentication/db/basic/users/%s/credentials", - config.getCoordinatorUrl(), - user - ), - jsonMapper.writeValueAsBytes(new BasicAuthenticatorCredentialUpdate(password, 5000)) - ); - HttpUtil.makeRequest( - adminClient, - HttpMethod.POST, - StringUtils.format( - "%s/druid-ext/basic-security/authorization/db/basic/users/%s", - config.getCoordinatorUrl(), - user - ), - null - ); - HttpUtil.makeRequest( - adminClient, - HttpMethod.POST, - StringUtils.format( - "%s/druid-ext/basic-security/authorization/db/basic/roles/%s", - config.getCoordinatorUrl(), - role - ), - null - ); - HttpUtil.makeRequest( - adminClient, - HttpMethod.POST, - StringUtils.format( - "%s/druid-ext/basic-security/authorization/db/basic/users/%s/roles/%s", - config.getCoordinatorUrl(), - user, - role - ), - null - ); - byte[] permissionsBytes = jsonMapper.writeValueAsBytes(permissions); - HttpUtil.makeRequest( - adminClient, - HttpMethod.POST, - StringUtils.format( - "%s/druid-ext/basic-security/authorization/db/basic/roles/%s/permissions", - config.getCoordinatorUrl(), - role - ), - permissionsBytes - ); + // Setup authentication by creating user and password + postAsAdmin(null, "/authentication/db/basic/users/%s", user); + + final BasicAuthenticatorCredentialUpdate credentials + = new BasicAuthenticatorCredentialUpdate(password, 5000); + postAsAdmin(credentials, "/authentication/db/basic/users/%s/credentials", user); + + // Setup authorization by assigning a role to the user + postAsAdmin(null, "/authorization/db/basic/users/%s", user); + postAsAdmin(null, "/authorization/db/basic/roles/%s", role); + postAsAdmin(null, "/authorization/db/basic/users/%s/roles/%s", user, role); + postAsAdmin(permissions, "/authorization/db/basic/roles/%s/permissions", role); + } + + private void postAsAdmin( + Object payload, + String pathFormat, + Object... 
pathParams + ) throws IOException + { + HttpClient adminClient = getHttpClient(User.ADMIN); + + byte[] payloadBytes = payload == null ? null : jsonMapper.writeValueAsBytes(payload); + String url = getBaseUrl() + StringUtils.format(pathFormat, pathParams); + HttpUtil.makeRequest(adminClient, HttpMethod.POST, url, payloadBytes); + } + + private String getBaseUrl() + { + return config.getCoordinatorUrl() + "/druid-ext/basic-security"; } } diff --git a/processing/src/main/java/org/apache/druid/audit/AuditEntry.java b/processing/src/main/java/org/apache/druid/audit/AuditEntry.java index 2aeb77279953..3a1789ae46db 100644 --- a/processing/src/main/java/org/apache/druid/audit/AuditEntry.java +++ b/processing/src/main/java/org/apache/druid/audit/AuditEntry.java @@ -21,19 +21,25 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.base.Preconditions; +import org.apache.druid.error.DruidException; import org.apache.druid.java.util.common.DateTimes; import org.joda.time.DateTime; +import java.util.Objects; + /** - * An Entry in Audit Table. + * Serializable record of an audit event that can be persisted, logged or sent + * over REST APIs. */ public class AuditEntry { private final String key; private final String type; private final AuditInfo auditInfo; - private final String payload; + private final RequestInfo request; + private final Payload payload; private final DateTime auditTime; @JsonCreator @@ -41,7 +47,8 @@ public AuditEntry( @JsonProperty("key") String key, @JsonProperty("type") String type, @JsonProperty("auditInfo") AuditInfo authorInfo, - @JsonProperty("payload") String payload, + @JsonProperty("request") RequestInfo request, + @JsonProperty("payload") Payload payload, @JsonProperty("auditTime") DateTime auditTime ) { @@ -51,8 +58,9 @@ public AuditEntry( this.key = key; this.type = type; this.auditInfo = authorInfo; + this.request = request; this.auditTime = auditTime == null ? DateTimes.nowUtc() : auditTime; - this.payload = payload; + this.payload = payload == null ? Payload.fromString("") : payload; } @JsonProperty @@ -74,17 +82,26 @@ public AuditInfo getAuditInfo() } /** - * @return returns payload as String - */ + * Details of the REST API request associated with this audit, if any. + */ @JsonProperty - public String getPayload() + public RequestInfo getRequest() + { + return request; + } + + /** + * Non-null payload of the audit event. 
+ */ + @JsonProperty + public Payload getPayload() { return payload; } /** - * @return audit time as DateTime - */ + * @return audit time as DateTime + */ @JsonProperty public DateTime getAuditTime() { @@ -106,36 +123,19 @@ public boolean equals(Object o) return false; } - AuditEntry entry = (AuditEntry) o; - - if (!auditTime.equals(entry.auditTime)) { - return false; - } - if (!auditInfo.equals(entry.auditInfo)) { - return false; - } - if (!key.equals(entry.key)) { - return false; - } - if (!payload.equals(entry.payload)) { - return false; - } - if (!type.equals(entry.type)) { - return false; - } - - return true; + AuditEntry that = (AuditEntry) o; + return Objects.equals(this.auditTime, that.auditTime) + && Objects.equals(this.key, that.key) + && Objects.equals(this.type, that.type) + && Objects.equals(this.auditInfo, that.auditInfo) + && Objects.equals(this.request, that.request) + && Objects.equals(this.payload, that.payload); } @Override public int hashCode() { - int result = key.hashCode(); - result = 31 * result + type.hashCode(); - result = 31 * result + auditInfo.hashCode(); - result = 31 * result + payload.hashCode(); - result = 31 * result + auditTime.hashCode(); - return result; + return Objects.hash(key, type, auditInfo, request, payload, auditTime); } public static class Builder @@ -143,14 +143,14 @@ public static class Builder private String key; private String type; private AuditInfo auditInfo; - private String payload; + private RequestInfo requestInfo; + private Object payload; + private String serializedPayload; + private DateTime auditTime; private Builder() { - this.key = null; - this.auditInfo = null; - this.payload = null; this.auditTime = DateTimes.nowUtc(); } @@ -172,7 +172,19 @@ public Builder auditInfo(AuditInfo auditInfo) return this; } - public Builder payload(String payload) + public Builder request(RequestInfo requestInfo) + { + this.requestInfo = requestInfo; + return this; + } + + public Builder serializedPayload(String serializedPayload) + { + this.serializedPayload = serializedPayload; + return this; + } + + public Builder payload(Object payload) { this.payload = payload; return this; @@ -186,9 +198,73 @@ public Builder auditTime(DateTime auditTime) public AuditEntry build() { - return new AuditEntry(key, type, auditInfo, payload, auditTime); - } + if (payload != null && serializedPayload != null) { + throw DruidException.defensive( + "Either payload[%s] or serializedPayload[%s] must be specified, not both.", + payload, serializedPayload + ); + } + return new AuditEntry(key, type, auditInfo, requestInfo, new Payload(serializedPayload, payload), auditTime); + } } + /** + * Payload of an {@link AuditEntry} that may be specified {@link #raw()} or {@link #serialized()}. + */ + public static class Payload + { + private final String serialized; + private final Object raw; + + @JsonCreator + public static Payload fromString(String serialized) + { + return new Payload(serialized, null); + } + + @JsonValue + @Override + public String toString() + { + return serialized == null ? 
"" : serialized; + } + + private Payload(String serialized, Object raw) + { + this.serialized = serialized; + this.raw = raw; + } + + public String serialized() + { + return serialized; + } + + public Object raw() + { + return raw; + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + Payload that = (Payload) o; + return Objects.equals(this.serialized, that.serialized) + && Objects.equals(this.raw, that.raw); + } + + @Override + public int hashCode() + { + return Objects.hash(serialized, raw); + } + } } diff --git a/processing/src/main/java/org/apache/druid/audit/AuditInfo.java b/processing/src/main/java/org/apache/druid/audit/AuditInfo.java index cab62c9a0289..f5e009c53837 100644 --- a/processing/src/main/java/org/apache/druid/audit/AuditInfo.java +++ b/processing/src/main/java/org/apache/druid/audit/AuditInfo.java @@ -24,20 +24,26 @@ import java.util.Objects; +/** + * Contains information about the author who performed an audited operation. + */ public class AuditInfo { private final String author; + private final String identity; private final String comment; private final String ip; @JsonCreator public AuditInfo( @JsonProperty("author") String author, + @JsonProperty("identity") String identity, @JsonProperty("comment") String comment, @JsonProperty("ip") String ip ) { this.author = author; + this.identity = identity; this.comment = comment; this.ip = ip; } @@ -48,6 +54,12 @@ public String getAuthor() return author; } + @JsonProperty + public String getIdentity() + { + return identity; + } + @JsonProperty public String getComment() { @@ -69,16 +81,17 @@ public boolean equals(Object o) if (o == null || getClass() != o.getClass()) { return false; } - AuditInfo auditInfo = (AuditInfo) o; - return Objects.equals(author, auditInfo.author) - && Objects.equals(comment, auditInfo.comment) - && Objects.equals(ip, auditInfo.ip); + AuditInfo that = (AuditInfo) o; + return Objects.equals(this.author, that.author) + && Objects.equals(this.identity, that.identity) + && Objects.equals(this.comment, that.comment) + && Objects.equals(this.ip, that.ip); } @Override public int hashCode() { - return Objects.hash(author, comment, ip); + return Objects.hash(author, identity, comment, ip); } @Override @@ -86,6 +99,7 @@ public String toString() { return "AuditInfo{" + "author='" + author + '\'' + + ", identity='" + identity + '\'' + ", comment='" + comment + '\'' + ", ip='" + ip + '\'' + '}'; diff --git a/processing/src/main/java/org/apache/druid/audit/AuditManager.java b/processing/src/main/java/org/apache/druid/audit/AuditManager.java index f403423c6321..3ab126e93373 100644 --- a/processing/src/main/java/org/apache/druid/audit/AuditManager.java +++ b/processing/src/main/java/org/apache/druid/audit/AuditManager.java @@ -20,7 +20,6 @@ package org.apache.druid.audit; -import org.apache.druid.common.config.ConfigSerde; import org.joda.time.Interval; import org.skife.jdbi.v2.Handle; @@ -29,66 +28,54 @@ public interface AuditManager { - /** - * This String is the default message stored instead of the actual audit payload if the audit payload size - * exceeded the maximum size limit configuration - */ - String PAYLOAD_SKIP_MSG_FORMAT = "Payload was not stored as its size exceeds the limit [%d] configured by druid.audit.manager.maxPayloadSizeBytes"; String X_DRUID_AUTHOR = "X-Druid-Author"; - String X_DRUID_COMMENT = "X-Druid-Comment"; - /** - * inserts an audit entry in the Audit Table - * @param key of 
the audit entry - * @param type of the audit entry - * @param auditInfo of the audit entry - * @param payload of the audit entry - * @param configSerde of the payload of the audit entry - */ - void doAudit(String key, String type, AuditInfo auditInfo, T payload, ConfigSerde configSerde); + void doAudit(AuditEntry event); /** - * inserts an audit Entry in audit table using the handler provided - * used to do the audit in same transaction as the config changes - * @param auditEntry - * @param handler - * @throws IOException + * Inserts an audit entry in audit table using the provided JDBI handle. + * This method can be used to perform the audit in the same transaction as the + * audited changes. Only SQL-based implementations need to implement this method, + * other implementations call {@link #doAudit} by default. + * + * @param event Event to audit + * @param handle JDBI Handle representing connection to the database */ - void doAudit(AuditEntry auditEntry, Handle handler) throws IOException; + default void doAudit(AuditEntry event, Handle handle) throws IOException + { + doAudit(event); + } /** - * provides audit history for given key, type and interval - * @param key - * @param type - * @param interval - * @return list of AuditEntries satisfying the passed parameters + * Fetches audit entries made for the given key, type and interval. Implementations + * that do not maintain an audit history should return an empty list. + * + * @return List of recorded audit events satisfying the passed parameters. */ List fetchAuditHistory(String key, String type, Interval interval); /** - * provides audit history for given type and interval - * @param type type of auditEntry - * @param interval interval for which to fetch auditHistory - * @return list of AuditEntries satisfying the passed parameters + * Fetches audit entries of a type whose audit time lies in the given interval. + * + * @param type Type of audit entry + * @param interval Eligible interval for audit time + * @return List of recorded audit events satisfying the passed parameters. */ List fetchAuditHistory(String type, Interval interval); /** * Provides last N entries of audit history for given key, type - * @param key - * @param type - * @param limit - * @return list of AuditEntries satisfying the passed parameters + * + * @return list of recorded audit events satisfying the passed parameters */ List fetchAuditHistory(String key, String type, int limit); /** * Provides last N entries of audit history for given type - * @param type type of auditEntry - * @param limit - * @return list of AuditEntries satisfying the passed parameters + * + * @return List of recorded audit events satisfying the passed parameters. */ List fetchAuditHistory(String type, int limit); @@ -96,7 +83,7 @@ public interface AuditManager * Remove audit logs created older than the given timestamp. * * @param timestamp timestamp in milliseconds - * @return number of audit logs removed + * @return Number of audit logs removed */ int removeAuditLogsOlderThan(long timestamp); } diff --git a/processing/src/main/java/org/apache/druid/audit/RequestInfo.java b/processing/src/main/java/org/apache/druid/audit/RequestInfo.java new file mode 100644 index 000000000000..706fdca2f01a --- /dev/null +++ b/processing/src/main/java/org/apache/druid/audit/RequestInfo.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.audit; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.Objects; + +/** + * Contains information about a REST API request that was audited. + */ +public class RequestInfo +{ + private final String service; + private final String method; + private final String uri; + private final String queryParams; + + @JsonCreator + public RequestInfo( + @JsonProperty("service") String service, + @JsonProperty("method") String method, + @JsonProperty("uri") String uri, + @JsonProperty("queryParams") String queryParams + ) + { + this.service = service; + this.method = method; + this.uri = uri; + this.queryParams = queryParams; + } + + @JsonProperty + public String getService() + { + return service; + } + + @JsonProperty + public String getMethod() + { + return method; + } + + @JsonProperty + public String getUri() + { + return uri; + } + + @JsonProperty + public String getQueryParams() + { + return queryParams; + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RequestInfo that = (RequestInfo) o; + return Objects.equals(this.service, that.service) + && Objects.equals(this.method, that.method) + && Objects.equals(this.uri, that.uri) + && Objects.equals(this.queryParams, that.queryParams); + } + + @Override + public int hashCode() + { + return Objects.hash(service, method, uri, queryParams); + } + + @Override + public String toString() + { + return "RequestInfo{" + + "service='" + service + '\'' + + ", method='" + method + '\'' + + ", path='" + uri + '\'' + + ", queryParams='" + queryParams + '\'' + + '}'; + } +} diff --git a/processing/src/main/java/org/apache/druid/common/config/ConfigSerde.java b/processing/src/main/java/org/apache/druid/common/config/ConfigSerde.java index 708d16d8b190..a4c06e1371e0 100644 --- a/processing/src/main/java/org/apache/druid/common/config/ConfigSerde.java +++ b/processing/src/main/java/org/apache/druid/common/config/ConfigSerde.java @@ -24,14 +24,5 @@ public interface ConfigSerde { byte[] serialize(T obj); - /** - * Serialize object to String - * - * @param obj to be serialize - * @param skipNull if true, then skip serialization of any field with null value. - * This can be used to reduce the size of the resulting String. 
- * @return String serialization of the input - */ - String serializeToString(T obj, boolean skipNull); T deserialize(byte[] bytes); } diff --git a/processing/src/main/java/org/apache/druid/common/config/JacksonConfigManager.java b/processing/src/main/java/org/apache/druid/common/config/JacksonConfigManager.java index 7cf78a5ca0bf..92a9ea0ebee7 100644 --- a/processing/src/main/java/org/apache/druid/common/config/JacksonConfigManager.java +++ b/processing/src/main/java/org/apache/druid/common/config/JacksonConfigManager.java @@ -24,11 +24,11 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.inject.Inject; +import org.apache.druid.audit.AuditEntry; import org.apache.druid.audit.AuditInfo; import org.apache.druid.audit.AuditManager; import org.apache.druid.common.config.ConfigManager.SetResult; import org.apache.druid.guice.annotations.Json; -import org.apache.druid.guice.annotations.JsonNonNull; import org.apache.druid.java.util.common.jackson.JacksonUtils; import javax.annotation.Nullable; @@ -41,21 +41,18 @@ public class JacksonConfigManager { private final ConfigManager configManager; private final ObjectMapper jsonMapper; - private final ObjectMapper jsonMapperSkipNull; private final AuditManager auditManager; @Inject public JacksonConfigManager( ConfigManager configManager, @Json ObjectMapper jsonMapper, - @JsonNonNull ObjectMapper jsonMapperOnlyNonNullValue, AuditManager auditManager ) { this.configManager = configManager; this.jsonMapper = jsonMapper; this.auditManager = auditManager; - this.jsonMapperSkipNull = jsonMapperOnlyNonNullValue; } public AtomicReference watch(String key, Class clazz) @@ -119,7 +116,14 @@ public SetResult set( ConfigSerde configSerde = create(newValue.getClass(), null); // Audit and actual config change are done in separate transactions // there can be phantom audits and reOrdering in audit changes as well. - auditManager.doAudit(key, key, auditInfo, newValue, configSerde); + auditManager.doAudit( + AuditEntry.builder() + .key(key) + .type(key) + .auditInfo(auditInfo) + .payload(newValue) + .build() + ); return configManager.set(key, configSerde, oldValue, newValue); } @@ -139,17 +143,6 @@ public byte[] serialize(T obj) } } - @Override - public String serializeToString(T obj, boolean skipNull) - { - try { - return skipNull ? jsonMapperSkipNull.writeValueAsString(obj) : jsonMapper.writeValueAsString(obj); - } - catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - @Override public T deserialize(byte[] bytes) { @@ -178,17 +171,6 @@ public byte[] serialize(T obj) } } - @Override - public String serializeToString(T obj, boolean skipNull) - { - try { - return skipNull ? jsonMapperSkipNull.writeValueAsString(obj) : jsonMapper.writeValueAsString(obj); - } - catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - @Override public T deserialize(byte[] bytes) { diff --git a/processing/src/main/java/org/apache/druid/java/util/common/logger/Logger.java b/processing/src/main/java/org/apache/druid/java/util/common/logger/Logger.java index 1639bf5378f8..59f2eb839b98 100644 --- a/processing/src/main/java/org/apache/druid/java/util/common/logger/Logger.java +++ b/processing/src/main/java/org/apache/druid/java/util/common/logger/Logger.java @@ -220,11 +220,6 @@ public void error(Throwable t, String message, Object... 
formatArgs) logException(log::error, t, StringUtils.nonStrictFormat(message, formatArgs)); } - public void assertionError(String message, Object... formatArgs) - { - log.error("ASSERTION_ERROR: " + message, formatArgs); - } - public void debugSegments(@Nullable final Collection segments, @Nullable String preamble) { if (log.isDebugEnabled()) { diff --git a/processing/src/test/java/org/apache/druid/audit/AuditInfoTest.java b/processing/src/test/java/org/apache/druid/audit/AuditInfoTest.java index c61b50ec6a12..e481391c4547 100644 --- a/processing/src/test/java/org/apache/druid/audit/AuditInfoTest.java +++ b/processing/src/test/java/org/apache/druid/audit/AuditInfoTest.java @@ -29,32 +29,62 @@ public class AuditInfoTest { + private final ObjectMapper mapper = new DefaultObjectMapper(); + @Test public void testAuditInfoEquality() { - final AuditInfo auditInfo1 = new AuditInfo("druid", "test equality", "127.0.0.1"); - final AuditInfo auditInfo2 = new AuditInfo("druid", "test equality", "127.0.0.1"); + final AuditInfo auditInfo1 = new AuditInfo("druid", "id", "test equality", "127.0.0.1"); + final AuditInfo auditInfo2 = new AuditInfo("druid", "id", "test equality", "127.0.0.1"); Assert.assertEquals(auditInfo1, auditInfo2); Assert.assertEquals(auditInfo1.hashCode(), auditInfo2.hashCode()); } + @Test + public void testAuditInfoSerde() throws IOException + { + final AuditInfo auditInfo = new AuditInfo("author", null, "comment", "ip"); + AuditInfo deserialized = mapper.readValue(mapper.writeValueAsString(auditInfo), AuditInfo.class); + Assert.assertEquals(auditInfo, deserialized); + + final AuditInfo auditInfoWithIdentity = new AuditInfo("author", "identity", "comment", "ip"); + deserialized = mapper.readValue(mapper.writeValueAsString(auditInfoWithIdentity), AuditInfo.class); + Assert.assertEquals(auditInfoWithIdentity, deserialized); + + Assert.assertNotEquals(auditInfo, auditInfoWithIdentity); + } + @Test(timeout = 60_000L) public void testAuditEntrySerde() throws IOException { - AuditEntry entry = new AuditEntry( + final AuditEntry original = new AuditEntry( "testKey", "testType", - new AuditInfo( - "testAuthor", - "testComment", - "127.0.0.1" - ), - "testPayload", + new AuditInfo("testAuthor", "testIdentity", "testComment", "127.0.0.1"), + new RequestInfo("overlord", "GET", "/segments", "?abc=1"), + AuditEntry.Payload.fromString("testPayload"), DateTimes.of("2013-01-01T00:00:00Z") ); - ObjectMapper mapper = new DefaultObjectMapper(); - AuditEntry serde = mapper.readValue(mapper.writeValueAsString(entry), AuditEntry.class); - Assert.assertEquals(entry, serde); + AuditEntry deserialized = mapper.readValue(mapper.writeValueAsString(original), AuditEntry.class); + Assert.assertEquals(original, deserialized); + } + + @Test + public void testAuditEntrySerdeIsBackwardsCompatible() throws IOException + { + final String json = "{\"key\": \"a\", \"type\": \"b\", \"auditInfo\": {}, \"payload\":\"Truncated\"}"; + AuditEntry entry = mapper.readValue(json, AuditEntry.class); + Assert.assertEquals("a", entry.getKey()); + Assert.assertEquals("b", entry.getType()); + Assert.assertEquals(AuditEntry.Payload.fromString("Truncated"), entry.getPayload()); + } + + @Test + public void testRequestInfoEquality() throws IOException + { + RequestInfo requestInfo = new RequestInfo("overlord", "GET", "/uri", "a=b"); + RequestInfo deserialized = mapper.readValue(mapper.writeValueAsString(requestInfo), RequestInfo.class); + Assert.assertEquals(requestInfo, deserialized); } } diff --git 
a/processing/src/test/java/org/apache/druid/common/config/ConfigManagerTest.java b/processing/src/test/java/org/apache/druid/common/config/ConfigManagerTest.java index 69c319f1bcf2..0f6cb4e7c4e7 100644 --- a/processing/src/test/java/org/apache/druid/common/config/ConfigManagerTest.java +++ b/processing/src/test/java/org/apache/druid/common/config/ConfigManagerTest.java @@ -20,12 +20,9 @@ package org.apache.druid.common.config; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Suppliers; -import org.apache.druid.audit.AuditManager; -import org.apache.druid.audit.NoopAuditManager; import org.apache.druid.metadata.MetadataCASUpdate; import org.apache.druid.metadata.MetadataStorageConnector; import org.apache.druid.metadata.MetadataStorageTablesConfig; @@ -37,7 +34,6 @@ import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; -@SuppressWarnings("ALL") public class ConfigManagerTest { private static final String CONFIG_KEY = "configX"; @@ -47,7 +43,6 @@ public class ConfigManagerTest private MetadataStorageConnector dbConnector; private MetadataStorageTablesConfig metadataStorageTablesConfig; - private AuditManager mockAuditManager; private TestConfigManagerConfig configManagerConfig; private ConfigSerde configConfigSerdeFromClass; @@ -79,8 +74,7 @@ public String getConfigTable() jacksonConfigManager = new JacksonConfigManager( configManager, new ObjectMapper(), - new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL), - new NoopAuditManager() + null ); configConfigSerdeFromClass = jacksonConfigManager.create(TestConfig.class, null); } diff --git a/processing/src/test/java/org/apache/druid/common/config/JacksonConfigManagerTest.java b/processing/src/test/java/org/apache/druid/common/config/JacksonConfigManagerTest.java index 4a4aaf72cd4b..0d0393459445 100644 --- a/processing/src/test/java/org/apache/druid/common/config/JacksonConfigManagerTest.java +++ b/processing/src/test/java/org/apache/druid/common/config/JacksonConfigManagerTest.java @@ -20,20 +20,17 @@ package org.apache.druid.common.config; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.druid.audit.AuditEntry; import org.apache.druid.audit.AuditInfo; import org.apache.druid.audit.AuditManager; import org.junit.Assert; import org.junit.Before; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; -import org.mockito.ArgumentMatchers; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; @@ -51,68 +48,16 @@ public class JacksonConfigManagerTest private JacksonConfigManager jacksonConfigManager; - @Rule - public ExpectedException exception = ExpectedException.none(); - @Before public void setUp() { jacksonConfigManager = new JacksonConfigManager( mockConfigManager, new ObjectMapper(), - new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL), mockAuditManager ); } - @Test - public void testSerializeToStringWithSkipNullTrue() - { - ConfigSerde configConfigSerdeFromTypeReference = jacksonConfigManager.create(new 
TypeReference() - { - }, null); - ConfigSerde configConfigSerdeFromClass = jacksonConfigManager.create(TestConfig.class, null); - TestConfig config = new TestConfig("version", null, 3); - String actual = configConfigSerdeFromTypeReference.serializeToString(config, true); - Assert.assertEquals("{\"version\":\"version\",\"settingInt\":3}", actual); - actual = configConfigSerdeFromClass.serializeToString(config, true); - Assert.assertEquals("{\"version\":\"version\",\"settingInt\":3}", actual); - } - - @Test - public void testSerializeToStringWithSkipNullFalse() - { - ConfigSerde configConfigSerdeFromTypeReference = jacksonConfigManager.create(new TypeReference() - { - }, null); - ConfigSerde configConfigSerdeFromClass = jacksonConfigManager.create(TestConfig.class, null); - TestConfig config = new TestConfig("version", null, 3); - String actual = configConfigSerdeFromTypeReference.serializeToString(config, false); - Assert.assertEquals("{\"version\":\"version\",\"settingString\":null,\"settingInt\":3}", actual); - actual = configConfigSerdeFromClass.serializeToString(config, false); - Assert.assertEquals("{\"version\":\"version\",\"settingString\":null,\"settingInt\":3}", actual); - } - - @Test - public void testSerializeToStringWithInvalidConfigForConfigSerdeFromTypeReference() - { - ConfigSerde configConfigSerdeFromTypeReference = jacksonConfigManager.create(new TypeReference() - { - }, null); - exception.expect(RuntimeException.class); - exception.expectMessage("InvalidDefinitionException"); - configConfigSerdeFromTypeReference.serializeToString(new ClassThatJacksonCannotSerialize(), false); - } - - @Test - public void testSerializeToStringWithInvalidConfigForConfigSerdeFromClass() - { - ConfigSerde configConfigSerdeFromClass = jacksonConfigManager.create(ClassThatJacksonCannotSerialize.class, null); - exception.expect(RuntimeException.class); - exception.expectMessage("InvalidDefinitionException"); - configConfigSerdeFromClass.serializeToString(new ClassThatJacksonCannotSerialize(), false); - } - @Test public void testSet() { @@ -120,22 +65,16 @@ public void testSet() TestConfig val = new TestConfig("version", "string", 3); AuditInfo auditInfo = new AuditInfo( "testAuthor", + "testIdentity", "testComment", "127.0.0.1" ); jacksonConfigManager.set(key, val, auditInfo); - ArgumentCaptor configSerdeCapture = ArgumentCaptor.forClass( - ConfigSerde.class); - Mockito.verify(mockAuditManager).doAudit( - ArgumentMatchers.eq(key), - ArgumentMatchers.eq(key), - ArgumentMatchers.eq(auditInfo), - ArgumentMatchers.eq(val), - configSerdeCapture.capture() - ); - Assert.assertNotNull(configSerdeCapture.getValue()); + ArgumentCaptor auditCapture = ArgumentCaptor.forClass(AuditEntry.class); + Mockito.verify(mockAuditManager).doAudit(auditCapture.capture()); + Assert.assertNotNull(auditCapture.getValue()); } @Test @@ -214,9 +153,4 @@ public int hashCode() return Objects.hash(version, settingString, settingInt); } } - - static class ClassThatJacksonCannotSerialize - { - - } } diff --git a/processing/src/test/java/org/apache/druid/java/util/metrics/StubServiceEmitter.java b/processing/src/test/java/org/apache/druid/java/util/metrics/StubServiceEmitter.java index 52049c6956f6..40826af80473 100644 --- a/processing/src/test/java/org/apache/druid/java/util/metrics/StubServiceEmitter.java +++ b/processing/src/test/java/org/apache/druid/java/util/metrics/StubServiceEmitter.java @@ -30,6 +30,10 @@ import java.util.List; import java.util.Map; +/** + * Test implementation of {@link ServiceEmitter} that collects 
emitted metrics + * and alerts in lists. + */ public class StubServiceEmitter extends ServiceEmitter implements MetricsVerifier { private final List events = new ArrayList<>(); @@ -62,6 +66,19 @@ public List getEvents() return events; } + /** + * Gets all the metric events emitted since the previous {@link #flush()}. + * + * @return Map from metric name to list of events emitted for that metric. + */ + public Map> getMetricEvents() + { + return metricEvents; + } + + /** + * Gets all the alerts emitted since the previous {@link #flush()}. + */ public List getAlerts() { return alertEvents; diff --git a/server/src/main/java/org/apache/druid/guice/SQLMetadataStorageDruidModule.java b/server/src/main/java/org/apache/druid/guice/SQLMetadataStorageDruidModule.java index 6bfe1e25eedc..7ece79ad40f9 100644 --- a/server/src/main/java/org/apache/druid/guice/SQLMetadataStorageDruidModule.java +++ b/server/src/main/java/org/apache/druid/guice/SQLMetadataStorageDruidModule.java @@ -45,10 +45,9 @@ import org.apache.druid.metadata.SegmentsMetadataManagerProvider; import org.apache.druid.metadata.SqlSegmentsMetadataManager; import org.apache.druid.metadata.SqlSegmentsMetadataManagerProvider; -import org.apache.druid.server.audit.AuditManagerProvider; +import org.apache.druid.server.audit.AuditManagerConfig; +import org.apache.druid.server.audit.AuditSerdeHelper; import org.apache.druid.server.audit.SQLAuditManager; -import org.apache.druid.server.audit.SQLAuditManagerConfig; -import org.apache.druid.server.audit.SQLAuditManagerProvider; public class SQLMetadataStorageDruidModule implements Module { @@ -82,9 +81,9 @@ public void createBindingChoices(Binder binder, String defaultValue) PolyBind.createChoiceWithDefault(binder, prop, Key.get(IndexerMetadataStorageCoordinator.class), defaultValue); PolyBind.createChoiceWithDefault(binder, prop, Key.get(MetadataStorageActionHandlerFactory.class), defaultValue); PolyBind.createChoiceWithDefault(binder, prop, Key.get(MetadataStorageUpdaterJobHandler.class), defaultValue); - PolyBind.createChoiceWithDefault(binder, prop, Key.get(AuditManager.class), defaultValue); - PolyBind.createChoiceWithDefault(binder, prop, Key.get(AuditManagerProvider.class), defaultValue); PolyBind.createChoiceWithDefault(binder, prop, Key.get(MetadataSupervisorManager.class), defaultValue); + + configureAuditManager(binder); } @Override @@ -130,21 +129,27 @@ public void configure(Binder binder) .to(SQLMetadataStorageUpdaterJobHandler.class) .in(LazySingleton.class); - JsonConfigProvider.bind(binder, "druid.audit.manager", SQLAuditManagerConfig.class); - - PolyBind.optionBinder(binder, Key.get(AuditManager.class)) - .addBinding(type) - .to(SQLAuditManager.class) - .in(LazySingleton.class); - - PolyBind.optionBinder(binder, Key.get(AuditManagerProvider.class)) - .addBinding(type) - .to(SQLAuditManagerProvider.class) - .in(LazySingleton.class); - PolyBind.optionBinder(binder, Key.get(MetadataSupervisorManager.class)) .addBinding(type) .to(SQLMetadataSupervisorManager.class) .in(LazySingleton.class); } + + private void configureAuditManager(Binder binder) + { + JsonConfigProvider.bind(binder, "druid.audit.manager", AuditManagerConfig.class); + + PolyBind.createChoice( + binder, + "druid.audit.manager.type", + Key.get(AuditManager.class), + Key.get(SQLAuditManager.class) + ); + PolyBind.optionBinder(binder, Key.get(AuditManager.class)) + .addBinding("sql") + .to(SQLAuditManager.class) + .in(LazySingleton.class); + + binder.bind(AuditSerdeHelper.class).in(LazySingleton.class); + } } diff --git 
a/server/src/main/java/org/apache/druid/guice/StartupLoggingModule.java b/server/src/main/java/org/apache/druid/guice/StartupLoggingModule.java index c75b0901a88f..db25a0906c38 100644 --- a/server/src/main/java/org/apache/druid/guice/StartupLoggingModule.java +++ b/server/src/main/java/org/apache/druid/guice/StartupLoggingModule.java @@ -20,7 +20,10 @@ package org.apache.druid.guice; import com.google.inject.Binder; +import com.google.inject.Key; import com.google.inject.Module; +import org.apache.druid.audit.AuditManager; +import org.apache.druid.server.audit.LoggingAuditManager; import org.apache.druid.server.log.StartupLoggingConfig; public class StartupLoggingModule implements Module @@ -29,5 +32,10 @@ public class StartupLoggingModule implements Module public void configure(Binder binder) { JsonConfigProvider.bind(binder, "druid.startup.logging", StartupLoggingConfig.class); + + PolyBind.optionBinder(binder, Key.get(AuditManager.class)) + .addBinding("log") + .to(LoggingAuditManager.class) + .in(LazySingleton.class); } } diff --git a/server/src/main/java/org/apache/druid/metadata/SQLMetadataRuleManager.java b/server/src/main/java/org/apache/druid/metadata/SQLMetadataRuleManager.java index d0f9799e6aab..b83f77183aa5 100644 --- a/server/src/main/java/org/apache/druid/metadata/SQLMetadataRuleManager.java +++ b/server/src/main/java/org/apache/druid/metadata/SQLMetadataRuleManager.java @@ -336,10 +336,10 @@ public boolean overrideRule(final String dataSource, final List newRules, final String ruleString; try { ruleString = jsonMapper.writeValueAsString(newRules); - log.info("Updating [%s] with rules [%s] as per [%s]", dataSource, ruleString, auditInfo); + log.info("Updating datasource[%s] with rules[%s] as per [%s]", dataSource, ruleString, auditInfo); } catch (JsonProcessingException e) { - log.error(e, "Unable to write rules as string for [%s]", dataSource); + log.error(e, "Unable to write rules as string for datasource[%s]", dataSource); return false; } synchronized (lock) { @@ -352,7 +352,7 @@ public boolean overrideRule(final String dataSource, final List newRules, .key(dataSource) .type("rules") .auditInfo(auditInfo) - .payload(ruleString) + .serializedPayload(ruleString) .auditTime(auditTime) .build(), handle diff --git a/server/src/main/java/org/apache/druid/metadata/SQLMetadataRuleManagerProvider.java b/server/src/main/java/org/apache/druid/metadata/SQLMetadataRuleManagerProvider.java index ba3d05c131dc..d11d33ba5b17 100644 --- a/server/src/main/java/org/apache/druid/metadata/SQLMetadataRuleManagerProvider.java +++ b/server/src/main/java/org/apache/druid/metadata/SQLMetadataRuleManagerProvider.java @@ -23,7 +23,6 @@ import com.google.inject.Inject; import org.apache.druid.audit.AuditManager; import org.apache.druid.java.util.common.lifecycle.Lifecycle; -import org.apache.druid.server.audit.SQLAuditManager; import org.skife.jdbi.v2.IDBI; /** @@ -45,7 +44,7 @@ public SQLMetadataRuleManagerProvider( MetadataStorageTablesConfig dbTables, SQLMetadataConnector connector, Lifecycle lifecycle, - SQLAuditManager auditManager + AuditManager auditManager ) { this.jsonMapper = jsonMapper; diff --git a/server/src/main/java/org/apache/druid/metadata/SqlSegmentsMetadataQuery.java b/server/src/main/java/org/apache/druid/metadata/SqlSegmentsMetadataQuery.java index 63bfdbf53dec..545acb84507e 100644 --- a/server/src/main/java/org/apache/druid/metadata/SqlSegmentsMetadataQuery.java +++ b/server/src/main/java/org/apache/druid/metadata/SqlSegmentsMetadataQuery.java @@ -504,8 +504,8 @@ private static 
int computeNumChangedSegments(List segmentIds, int[] segm for (int i = 0; i < segmentChanges.length; i++) { int numUpdatedRows = segmentChanges[i]; if (numUpdatedRows < 0) { - log.assertionError( - "Negative number of rows updated for segment id [%s]: %d", + log.error( + "ASSERTION_ERROR: Negative number of rows updated for segment id [%s]: %d", segmentIds.get(i), numUpdatedRows ); diff --git a/server/src/main/java/org/apache/druid/server/audit/AuditLogger.java b/server/src/main/java/org/apache/druid/server/audit/AuditLogger.java new file mode 100644 index 000000000000..1293575d79b4 --- /dev/null +++ b/server/src/main/java/org/apache/druid/server/audit/AuditLogger.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.server.audit; + +import org.apache.druid.audit.AuditEntry; +import org.apache.druid.java.util.common.logger.Logger; + +public class AuditLogger +{ + public enum Level + { + DEBUG, INFO, WARN + } + + private static final String MSG_FORMAT + = "User[%s], identity[%s], IP[%s] performed action of type[%s] on key[%s]" + + " with comment[%s], request[%s], payload[%s]."; + + private final Level level; + private final Logger logger = new Logger(AuditLogger.class); + + public AuditLogger(Level level) + { + this.level = level; + } + + public void log(AuditEntry entry) + { + Object[] args = { + entry.getAuditInfo().getAuthor(), + entry.getAuditInfo().getIdentity(), + entry.getAuditInfo().getIp(), + entry.getType(), + entry.getKey(), + entry.getAuditInfo().getComment(), + entry.getRequest(), + entry.getPayload() + }; + switch (level) { + case DEBUG: + logger.debug(MSG_FORMAT, args); + break; + case INFO: + logger.info(MSG_FORMAT, args); + break; + case WARN: + logger.warn(MSG_FORMAT, args); + break; + } + } +} diff --git a/server/src/main/java/org/apache/druid/server/audit/AuditManagerProvider.java b/server/src/main/java/org/apache/druid/server/audit/AuditManagerConfig.java similarity index 60% rename from server/src/main/java/org/apache/druid/server/audit/AuditManagerProvider.java rename to server/src/main/java/org/apache/druid/server/audit/AuditManagerConfig.java index 7607d90e3ebd..6f174da2db12 100644 --- a/server/src/main/java/org/apache/druid/server/audit/AuditManagerProvider.java +++ b/server/src/main/java/org/apache/druid/server/audit/AuditManagerConfig.java @@ -19,11 +19,20 @@ package org.apache.druid.server.audit; -import com.google.inject.Provider; -import org.apache.druid.audit.AuditManager; +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonSubTypes.Type; +import com.fasterxml.jackson.annotation.JsonTypeInfo; -public interface AuditManagerProvider extends Provider +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", 
defaultImpl = SQLAuditManagerConfig.class) +@JsonSubTypes(value = { + @Type(name = "log", value = LoggingAuditManagerConfig.class), + @Type(name = "sql", value = SQLAuditManagerConfig.class) +}) +public interface AuditManagerConfig { - @Override - AuditManager get(); + boolean isSkipNullField(); + + long getMaxPayloadSizeBytes(); + + boolean isAuditSystemRequests(); } diff --git a/server/src/main/java/org/apache/druid/server/audit/AuditSerdeHelper.java b/server/src/main/java/org/apache/druid/server/audit/AuditSerdeHelper.java new file mode 100644 index 000000000000..385811d055ce --- /dev/null +++ b/server/src/main/java/org/apache/druid/server/audit/AuditSerdeHelper.java @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.server.audit; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.inject.Inject; +import org.apache.druid.audit.AuditEntry; +import org.apache.druid.guice.annotations.Json; +import org.apache.druid.guice.annotations.JsonNonNull; +import org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.java.util.common.logger.Logger; +import org.apache.druid.server.security.Escalator; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +/** + * Audit utility class that can be used by different implementations of + * {@link org.apache.druid.audit.AuditManager} to serialize/deserialize audit + * payloads based on the values configured in {@link AuditManagerConfig}. + */ +public class AuditSerdeHelper +{ + /** + * Default message stored instead of the actual audit payload if the audit + * payload size exceeds the maximum size limit. + */ + private static final String PAYLOAD_TRUNCATED_MSG = + "Payload truncated as it exceeds 'druid.audit.manager.maxPayloadSizeBytes'"; + private static final String SERIALIZE_ERROR_MSG = + "Error serializing payload. Check logs for details."; + private static final Logger log = new Logger(AuditSerdeHelper.class); + + private final ObjectMapper jsonMapper; + private final ObjectMapper jsonMapperSkipNulls; + + private final String systemIdentity; + private final AuditManagerConfig config; + + @Inject + public AuditSerdeHelper( + AuditManagerConfig config, + Escalator escalator, + @Json ObjectMapper jsonMapper, + @JsonNonNull ObjectMapper jsonMapperSkipNulls + ) + { + this.config = config; + this.jsonMapper = jsonMapper; + this.jsonMapperSkipNulls = jsonMapperSkipNulls; + this.systemIdentity = escalator == null + ? null : escalator.createEscalatedAuthenticationResult().getIdentity(); + } + + /** + * Checks if the given audit event needs to be handled. + * + * @return true only if the event was not initiated by the Druid system OR if + * system requests should be audited too. 
+ */ + public boolean shouldProcessAuditEntry(AuditEntry entry) + { + final boolean isSystemRequest = systemIdentity != null + && systemIdentity.equals(entry.getAuditInfo().getIdentity()); + return config.isAuditSystemRequests() || !isSystemRequest; + } + + /** + * Processes the given AuditEntry for further use such as logging or persistence. + * This involves serializing and truncating the payload based on the values + * configured in {@link AuditManagerConfig}. + * + * @return A new AuditEntry with a serialized payload that can be used for + * logging or persistence. + */ + public AuditEntry processAuditEntry(AuditEntry entry) + { + final AuditEntry.Payload payload = entry.getPayload(); + final String serialized = payload.serialized() == null + ? serializePayloadToString(payload.raw()) + : payload.serialized(); + + final AuditEntry.Payload processedPayload = AuditEntry.Payload.fromString( + truncateSerializedAuditPayload(serialized) + ); + return new AuditEntry( + entry.getKey(), + entry.getType(), + entry.getAuditInfo(), + entry.getRequest(), + processedPayload, + entry.getAuditTime() + ); + } + + private String serializePayloadToString(Object rawPayload) + { + if (rawPayload == null) { + return ""; + } + + try { + return config.isSkipNullField() + ? jsonMapperSkipNulls.writeValueAsString(rawPayload) + : jsonMapper.writeValueAsString(rawPayload); + } + catch (IOException e) { + // Do not throw exception, only log error + log.error(e, "Could not serialize audit payload[%s]", rawPayload); + return SERIALIZE_ERROR_MSG; + } + } + + /** + * Truncates the audit payload string if it exceeds + * {@link AuditManagerConfig#getMaxPayloadSizeBytes()}. + */ + private String truncateSerializedAuditPayload(String serializedPayload) + { + if (serializedPayload == null || config.getMaxPayloadSizeBytes() < 0) { + return serializedPayload; + } + + int payloadSize = serializedPayload.getBytes(StandardCharsets.UTF_8).length; + if (payloadSize > config.getMaxPayloadSizeBytes()) { + return PAYLOAD_TRUNCATED_MSG + StringUtils.format("[%s].", config.getMaxPayloadSizeBytes()); + } else { + return serializedPayload; + } + } +} diff --git a/processing/src/test/java/org/apache/druid/audit/NoopAuditManager.java b/server/src/main/java/org/apache/druid/server/audit/LoggingAuditManager.java similarity index 50% rename from processing/src/test/java/org/apache/druid/audit/NoopAuditManager.java rename to server/src/main/java/org/apache/druid/server/audit/LoggingAuditManager.java index 593a9c73eaca..65d3427b8ff5 100644 --- a/processing/src/test/java/org/apache/druid/audit/NoopAuditManager.java +++ b/server/src/main/java/org/apache/druid/server/audit/LoggingAuditManager.java @@ -17,55 +17,77 @@ * under the License. */ -package org.apache.druid.audit; +package org.apache.druid.server.audit; -import org.apache.druid.common.config.ConfigSerde; +import com.google.inject.Inject; +import org.apache.druid.audit.AuditEntry; +import org.apache.druid.audit.AuditManager; +import org.apache.druid.error.DruidException; import org.joda.time.Interval; -import org.skife.jdbi.v2.Handle; +import java.util.Collections; import java.util.List; -public class NoopAuditManager implements AuditManager +/** + * Audit manager that logs audited events at the level specified in + * {@link LoggingAuditManagerConfig}. 
+ */ +public class LoggingAuditManager implements AuditManager { - @Override - public void doAudit(String key, String type, AuditInfo auditInfo, T payload, ConfigSerde configSerde) + private final AuditLogger auditLogger; + private final LoggingAuditManagerConfig managerConfig; + private final AuditSerdeHelper serdeHelper; + + @Inject + public LoggingAuditManager( + AuditManagerConfig config, + AuditSerdeHelper serdeHelper + ) { - throw new UnsupportedOperationException(); + if (!(config instanceof LoggingAuditManagerConfig)) { + throw DruidException.defensive("Config[%s] is not an instance of LoggingAuditManagerConfig", config); + } + + this.managerConfig = (LoggingAuditManagerConfig) config; + this.serdeHelper = serdeHelper; + this.auditLogger = new AuditLogger(managerConfig.getLogLevel()); } @Override - public void doAudit(AuditEntry auditEntry, Handle handler) + public void doAudit(AuditEntry entry) { - throw new UnsupportedOperationException(); + if (serdeHelper.shouldProcessAuditEntry(entry)) { + auditLogger.log(serdeHelper.processAuditEntry(entry)); + } } @Override public List fetchAuditHistory(String key, String type, Interval interval) { - throw new UnsupportedOperationException(); + return Collections.emptyList(); } @Override public List fetchAuditHistory(String type, Interval interval) { - throw new UnsupportedOperationException(); + return Collections.emptyList(); } @Override public List fetchAuditHistory(String key, String type, int limit) { - throw new UnsupportedOperationException(); + return Collections.emptyList(); } @Override public List fetchAuditHistory(String type, int limit) { - throw new UnsupportedOperationException(); + return Collections.emptyList(); } @Override public int removeAuditLogsOlderThan(long timestamp) { - throw new UnsupportedOperationException(); + return 0; } } diff --git a/server/src/main/java/org/apache/druid/server/audit/LoggingAuditManagerConfig.java b/server/src/main/java/org/apache/druid/server/audit/LoggingAuditManagerConfig.java new file mode 100644 index 000000000000..35f525ba8bd4 --- /dev/null +++ b/server/src/main/java/org/apache/druid/server/audit/LoggingAuditManagerConfig.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.server.audit; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.apache.druid.common.config.Configs; +import org.apache.druid.java.util.common.HumanReadableBytes; +import org.apache.druid.java.util.common.HumanReadableBytesRange; + +public class LoggingAuditManagerConfig implements AuditManagerConfig +{ + @JsonProperty + private final AuditLogger.Level logLevel; + + @JsonProperty + @HumanReadableBytesRange( + min = -1, + message = "maxPayloadSizeBytes must either be -1 (for disabling the check) or a non negative number" + ) + private final HumanReadableBytes maxPayloadSizeBytes; + + @JsonProperty + private final boolean skipNullField; + + @JsonProperty + private final boolean auditSystemRequests; + + @JsonCreator + public LoggingAuditManagerConfig( + @JsonProperty("logLevel") AuditLogger.Level logLevel, + @JsonProperty("auditSystemRequests") Boolean auditSystemRequests, + @JsonProperty("maxPayloadSizeBytes") HumanReadableBytes maxPayloadSizeBytes, + @JsonProperty("skipNullField") Boolean skipNullField + ) + { + this.logLevel = Configs.valueOrDefault(logLevel, AuditLogger.Level.INFO); + this.auditSystemRequests = Configs.valueOrDefault(auditSystemRequests, true); + this.maxPayloadSizeBytes = Configs.valueOrDefault(maxPayloadSizeBytes, HumanReadableBytes.valueOf(-1)); + this.skipNullField = Configs.valueOrDefault(skipNullField, false); + } + + @Override + public boolean isSkipNullField() + { + return skipNullField; + } + + @Override + public long getMaxPayloadSizeBytes() + { + return maxPayloadSizeBytes.getBytes(); + } + + @Override + public boolean isAuditSystemRequests() + { + return auditSystemRequests; + } + + public AuditLogger.Level getLogLevel() + { + return logLevel; + } +} diff --git a/server/src/main/java/org/apache/druid/server/audit/SQLAuditManager.java b/server/src/main/java/org/apache/druid/server/audit/SQLAuditManager.java index 37163f5d7a27..13c4f167e098 100644 --- a/server/src/main/java/org/apache/druid/server/audit/SQLAuditManager.java +++ b/server/src/main/java/org/apache/druid/server/audit/SQLAuditManager.java @@ -20,18 +20,18 @@ package org.apache.druid.server.audit; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Supplier; import com.google.inject.Inject; import org.apache.druid.audit.AuditEntry; -import org.apache.druid.audit.AuditInfo; import org.apache.druid.audit.AuditManager; -import org.apache.druid.common.config.ConfigSerde; +import org.apache.druid.error.DruidException; import org.apache.druid.guice.ManageLifecycle; import org.apache.druid.guice.annotations.Json; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.jackson.JacksonUtils; +import org.apache.druid.java.util.common.lifecycle.LifecycleStart; +import org.apache.druid.java.util.common.lifecycle.LifecycleStop; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; import org.apache.druid.metadata.MetadataStorageTablesConfig; @@ -41,10 +41,13 @@ import org.skife.jdbi.v2.Handle; import org.skife.jdbi.v2.IDBI; import org.skife.jdbi.v2.Query; +import org.skife.jdbi.v2.StatementContext; import org.skife.jdbi.v2.Update; -import org.skife.jdbi.v2.tweak.HandleCallback; +import org.skife.jdbi.v2.tweak.ResultSetMapper; import java.io.IOException; 
+import java.sql.ResultSet; +import java.sql.SQLException; import java.util.List; import java.util.Map; @@ -52,111 +55,114 @@ public class SQLAuditManager implements AuditManager { private final IDBI dbi; + private final SQLMetadataConnector connector; private final Supplier dbTables; private final ServiceEmitter emitter; private final ObjectMapper jsonMapper; private final SQLAuditManagerConfig config; + private final AuditSerdeHelper serdeHelper; + + private final ResultSetMapper resultMapper; @Inject public SQLAuditManager( + AuditManagerConfig config, + AuditSerdeHelper serdeHelper, SQLMetadataConnector connector, Supplier dbTables, ServiceEmitter emitter, - @Json ObjectMapper jsonMapper, - SQLAuditManagerConfig config + @Json ObjectMapper jsonMapper ) { this.dbi = connector.getDBI(); + this.connector = connector; this.dbTables = dbTables; this.emitter = emitter; this.jsonMapper = jsonMapper; - this.config = config; + this.serdeHelper = serdeHelper; + this.resultMapper = new AuditEntryMapper(); + + if (!(config instanceof SQLAuditManagerConfig)) { + throw DruidException.defensive("Config[%s] is not an instance of SQLAuditManagerConfig", config); + } + this.config = (SQLAuditManagerConfig) config; + } + + @LifecycleStart + public void start() + { + connector.createAuditTable(); + } + + @LifecycleStop + public void stop() + { + // Do nothing } - public String getAuditTable() + private String getAuditTable() { return dbTables.get().getAuditTable(); } @Override - public void doAudit(String key, String type, AuditInfo auditInfo, T payload, ConfigSerde configSerde) + public void doAudit(AuditEntry event) { - AuditEntry auditEntry = AuditEntry.builder() - .key(key) - .type(type) - .auditInfo(auditInfo) - .payload(configSerde.serializeToString(payload, config.isSkipNullField())) - .build(); - dbi.withHandle( - new HandleCallback() - { - @Override - public Void withHandle(Handle handle) throws Exception - { - doAudit(auditEntry, handle); - return null; - } + handle -> { + doAudit(event, handle); + return 0; } ); } - @VisibleForTesting - ServiceMetricEvent.Builder getAuditMetricEventBuilder(AuditEntry auditEntry) + private ServiceMetricEvent.Builder createMetricEventBuilder(AuditEntry entry) { ServiceMetricEvent.Builder builder = new ServiceMetricEvent.Builder() - .setDimension("key", auditEntry.getKey()) - .setDimension("type", auditEntry.getType()) - .setDimension("author", auditEntry.getAuditInfo().getAuthor()) - .setDimension("comment", auditEntry.getAuditInfo().getComment()) - .setDimension("remote_address", auditEntry.getAuditInfo().getIp()) - .setDimension("created_date", auditEntry.getAuditTime().toString()); + .setDimension("key", entry.getKey()) + .setDimension("type", entry.getType()) + .setDimension("author", entry.getAuditInfo().getAuthor()) + .setDimension("comment", entry.getAuditInfo().getComment()) + .setDimension("remote_address", entry.getAuditInfo().getIp()) + .setDimension("created_date", entry.getAuditTime().toString()); - if (config.getIncludePayloadAsDimensionInMetric()) { - builder.setDimension("payload", auditEntry.getPayload()); + if (config.isIncludePayloadAsDimensionInMetric()) { + builder.setDimension("payload", entry.getPayload().serialized()); } return builder; } @Override - public void doAudit(AuditEntry auditEntry, Handle handle) throws IOException + public void doAudit(AuditEntry event, Handle handle) throws IOException { - emitter.emit(getAuditMetricEventBuilder(auditEntry).setMetric("config/audit", 1)); - - AuditEntry auditEntryToStore = auditEntry; - if 
(config.getMaxPayloadSizeBytes() >= 0) { - int payloadSize = jsonMapper.writeValueAsBytes(auditEntry.getPayload()).length; - if (payloadSize > config.getMaxPayloadSizeBytes()) { - auditEntryToStore = AuditEntry.builder() - .key(auditEntry.getKey()) - .type(auditEntry.getType()) - .auditInfo(auditEntry.getAuditInfo()) - .payload(StringUtils.format(PAYLOAD_SKIP_MSG_FORMAT, config.getMaxPayloadSizeBytes())) - .auditTime(auditEntry.getAuditTime()) - .build(); - } + if (!serdeHelper.shouldProcessAuditEntry(event)) { + return; } + emitter.emit(createMetricEventBuilder(event).setMetric("config/audit", 1)); + + final AuditEntry record = serdeHelper.processAuditEntry(event); handle.createStatement( StringUtils.format( - "INSERT INTO %s ( audit_key, type, author, comment, created_date, payload) VALUES (:audit_key, :type, :author, :comment, :created_date, :payload)", + "INSERT INTO %s (audit_key, type, author, comment, created_date, payload)" + + " VALUES (:audit_key, :type, :author, :comment, :created_date, :payload)", getAuditTable() ) ) - .bind("audit_key", auditEntry.getKey()) - .bind("type", auditEntry.getType()) - .bind("author", auditEntry.getAuditInfo().getAuthor()) - .bind("comment", auditEntry.getAuditInfo().getComment()) - .bind("created_date", auditEntry.getAuditTime().toString()) - .bind("payload", jsonMapper.writeValueAsBytes(auditEntryToStore)) + .bind("audit_key", record.getKey()) + .bind("type", record.getType()) + .bind("author", record.getAuditInfo().getAuthor()) + .bind("comment", record.getAuditInfo().getComment()) + .bind("created_date", record.getAuditTime().toString()) + .bind("payload", jsonMapper.writeValueAsBytes(record)) .execute(); } @Override public List fetchAuditHistory(final String key, final String type, Interval interval) { - final Interval theInterval = getIntervalOrDefault(interval); + final Interval theInterval = createAuditHistoryIntervalIfNull(interval); return dbi.withHandle( (Handle handle) -> handle .createQuery( @@ -170,21 +176,19 @@ public List fetchAuditHistory(final String key, final String type, I .bind("type", type) .bind("start_date", theInterval.getStart().toString()) .bind("end_date", theInterval.getEnd().toString()) - .map((index, r, ctx) -> JacksonUtils.readValue(jsonMapper, r.getBytes("payload"), AuditEntry.class)) + .map(resultMapper) .list() ); } - private Interval getIntervalOrDefault(Interval interval) + private Interval createAuditHistoryIntervalIfNull(Interval interval) { - final Interval theInterval; if (interval == null) { DateTime now = DateTimes.nowUtc(); - theInterval = new Interval(now.minus(config.getAuditHistoryMillis()), now); + return new Interval(now.minus(config.getAuditHistoryMillis()), now); } else { - theInterval = interval; + return interval; } - return theInterval; } private int getLimit(int limit) throws IllegalArgumentException @@ -198,7 +202,7 @@ private int getLimit(int limit) throws IllegalArgumentException @Override public List fetchAuditHistory(final String type, Interval interval) { - final Interval theInterval = getIntervalOrDefault(interval); + final Interval theInterval = createAuditHistoryIntervalIfNull(interval); return dbi.withHandle( (Handle handle) -> handle .createQuery( @@ -211,7 +215,7 @@ public List fetchAuditHistory(final String type, Interval interval) .bind("type", type) .bind("start_date", theInterval.getStart().toString()) .bind("end_date", theInterval.getEnd().toString()) - .map((index, r, ctx) -> JacksonUtils.readValue(jsonMapper, r.getBytes("payload"), AuditEntry.class)) + .map(resultMapper) 
.list() ); } @@ -268,10 +272,19 @@ private List fetchAuditHistoryLastEntries(final String key, final St return query .bind("type", type) .setMaxRows(theLimit) - .map((index, r, ctx) -> JacksonUtils.readValue(jsonMapper, r.getBytes("payload"), AuditEntry.class)) + .map(resultMapper) .list(); } ); } + private class AuditEntryMapper implements ResultSetMapper + { + @Override + public AuditEntry map(int index, ResultSet r, StatementContext ctx) throws SQLException + { + return JacksonUtils.readValue(jsonMapper, r.getBytes("payload"), AuditEntry.class); + } + } + } diff --git a/server/src/main/java/org/apache/druid/server/audit/SQLAuditManagerConfig.java b/server/src/main/java/org/apache/druid/server/audit/SQLAuditManagerConfig.java index 8509e06c1029..e1ac78ece178 100644 --- a/server/src/main/java/org/apache/druid/server/audit/SQLAuditManagerConfig.java +++ b/server/src/main/java/org/apache/druid/server/audit/SQLAuditManagerConfig.java @@ -19,48 +19,76 @@ package org.apache.druid.server.audit; +import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; +import org.apache.druid.common.config.Configs; import org.apache.druid.java.util.common.HumanReadableBytes; import org.apache.druid.java.util.common.HumanReadableBytesRange; /** */ -public class SQLAuditManagerConfig +public class SQLAuditManagerConfig implements AuditManagerConfig { @JsonProperty - private long auditHistoryMillis = 7 * 24 * 60 * 60 * 1000L; // 1 WEEK + private final long auditHistoryMillis; @JsonProperty - private boolean includePayloadAsDimensionInMetric = false; + private final boolean includePayloadAsDimensionInMetric; @JsonProperty @HumanReadableBytesRange( min = -1, message = "maxPayloadSizeBytes must either be -1 (for disabling the check) or a non negative number" ) - private HumanReadableBytes maxPayloadSizeBytes = HumanReadableBytes.valueOf(-1); + private final HumanReadableBytes maxPayloadSizeBytes; @JsonProperty - private boolean skipNullField = false; + private final boolean skipNullField; + + @JsonProperty + private final boolean auditSystemRequests; + + @JsonCreator + public SQLAuditManagerConfig( + @JsonProperty("auditSystemRequests") Boolean auditSystemRequests, + @JsonProperty("maxPayloadSizeBytes") HumanReadableBytes maxPayloadSizeBytes, + @JsonProperty("skipNullField") Boolean skipNullField, + @JsonProperty("auditHistoryMillis") Long auditHistoryMillis, + @JsonProperty("includePayloadAsDimensionInMetric") Boolean includePayloadAsDimensionInMetric + ) + { + this.auditSystemRequests = Configs.valueOrDefault(auditSystemRequests, true); + this.maxPayloadSizeBytes = Configs.valueOrDefault(maxPayloadSizeBytes, HumanReadableBytes.valueOf(-1)); + this.skipNullField = Configs.valueOrDefault(skipNullField, false); + this.auditHistoryMillis = Configs.valueOrDefault(auditHistoryMillis, 7 * 24 * 60 * 60 * 1000L); + this.includePayloadAsDimensionInMetric = Configs.valueOrDefault(includePayloadAsDimensionInMetric, false); + } public long getAuditHistoryMillis() { return auditHistoryMillis; } - public boolean getIncludePayloadAsDimensionInMetric() + public boolean isIncludePayloadAsDimensionInMetric() { return includePayloadAsDimensionInMetric; } + @Override public long getMaxPayloadSizeBytes() { return maxPayloadSizeBytes.getBytes(); } + @Override public boolean isSkipNullField() { return skipNullField; } + @Override + public boolean isAuditSystemRequests() + { + return auditSystemRequests; + } } diff --git 
a/server/src/main/java/org/apache/druid/server/audit/SQLAuditManagerProvider.java b/server/src/main/java/org/apache/druid/server/audit/SQLAuditManagerProvider.java deleted file mode 100644 index c1d87399f591..000000000000 --- a/server/src/main/java/org/apache/druid/server/audit/SQLAuditManagerProvider.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.server.audit; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Supplier; -import com.google.inject.Inject; -import org.apache.druid.audit.AuditManager; -import org.apache.druid.guice.annotations.Json; -import org.apache.druid.java.util.common.lifecycle.Lifecycle; -import org.apache.druid.java.util.emitter.service.ServiceEmitter; -import org.apache.druid.metadata.MetadataStorageTablesConfig; -import org.apache.druid.metadata.SQLMetadataConnector; - -public class SQLAuditManagerProvider implements AuditManagerProvider -{ - private final Supplier dbTables; - private final SQLMetadataConnector connector; - private final Lifecycle lifecycle; - private final ServiceEmitter emitter; - private final ObjectMapper mapper; - private final SQLAuditManagerConfig config; - - @Inject - public SQLAuditManagerProvider( - Supplier dbTables, - SQLMetadataConnector connector, - Lifecycle lifecycle, - ServiceEmitter emitter, - @Json ObjectMapper mapper, - SQLAuditManagerConfig config - ) - { - this.dbTables = dbTables; - this.connector = connector; - this.lifecycle = lifecycle; - this.emitter = emitter; - this.mapper = mapper; - this.config = config; - } - - @Override - public AuditManager get() - { - try { - lifecycle.addMaybeStartHandler( - new Lifecycle.Handler() - { - @Override - public void start() - { - connector.createAuditTable(); - } - - @Override - public void stop() - { - - } - } - ); - } - catch (Exception e) { - throw new RuntimeException(e); - } - - return new SQLAuditManager(connector, dbTables, emitter, mapper, config); - } -} diff --git a/server/src/main/java/org/apache/druid/server/coordinator/duty/KillCompactionConfig.java b/server/src/main/java/org/apache/druid/server/coordinator/duty/KillCompactionConfig.java index e039632d30df..304b54e7cf84 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/duty/KillCompactionConfig.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/duty/KillCompactionConfig.java @@ -133,6 +133,7 @@ private int tryDeleteCompactionConfigs() throws RetryableException return updated; }, new AuditInfo( + "KillCompactionConfig", "KillCompactionConfig", "CoordinatorDuty for automatic deletion of compaction config", "" diff --git a/server/src/main/java/org/apache/druid/server/http/CoordinatorCompactionConfigsResource.java 
b/server/src/main/java/org/apache/druid/server/http/CoordinatorCompactionConfigsResource.java index d8ae8e3aaa3d..3939b8669771 100644 --- a/server/src/main/java/org/apache/druid/server/http/CoordinatorCompactionConfigsResource.java +++ b/server/src/main/java/org/apache/druid/server/http/CoordinatorCompactionConfigsResource.java @@ -35,14 +35,13 @@ import org.apache.druid.server.coordinator.DataSourceCompactionConfig; import org.apache.druid.server.coordinator.DataSourceCompactionConfigHistory; import org.apache.druid.server.http.security.ConfigResourceFilter; +import org.apache.druid.server.security.AuthorizationUtils; import org.joda.time.Interval; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; -import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; -import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; @@ -95,8 +94,6 @@ public Response setCompactionTaskLimit( @QueryParam("ratio") Double compactionTaskSlotRatio, @QueryParam("max") Integer maxCompactionTaskSlots, @QueryParam("useAutoScaleSlots") Boolean useAutoScaleSlots, - @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author, - @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment, @Context HttpServletRequest req ) { @@ -107,15 +104,13 @@ public Response setCompactionTaskLimit( maxCompactionTaskSlots, useAutoScaleSlots ); - return updateConfigHelper(operator, new AuditInfo(author, comment, req.getRemoteAddr())); + return updateConfigHelper(operator, AuthorizationUtils.buildAuditInfo(req)); } @POST @Consumes(MediaType.APPLICATION_JSON) public Response addOrUpdateCompactionConfig( final DataSourceCompactionConfig newConfig, - @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author, - @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment, @Context HttpServletRequest req ) { @@ -132,7 +127,7 @@ public Response addOrUpdateCompactionConfig( }; return updateConfigHelper( callable, - new AuditInfo(author, comment, req.getRemoteAddr()) + AuthorizationUtils.buildAuditInfo(req) ); } @@ -183,7 +178,7 @@ public Response getCompactionConfigHistory( DataSourceCompactionConfigHistory history = new DataSourceCompactionConfigHistory(dataSource); for (AuditEntry audit : auditEntries) { CoordinatorCompactionConfig coordinatorCompactionConfig = configManager.convertBytesToCompactionConfig( - audit.getPayload().getBytes(StandardCharsets.UTF_8) + audit.getPayload().serialized().getBytes(StandardCharsets.UTF_8) ); history.add(coordinatorCompactionConfig, audit.getAuditInfo(), audit.getAuditTime()); } @@ -201,8 +196,6 @@ public Response getCompactionConfigHistory( @Produces(MediaType.APPLICATION_JSON) public Response deleteCompactionConfig( @PathParam("dataSource") String dataSource, - @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author, - @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment, @Context HttpServletRequest req ) { @@ -219,7 +212,7 @@ public Response deleteCompactionConfig( return CoordinatorCompactionConfig.from(current, ImmutableList.copyOf(configs.values())); }; - return updateConfigHelper(callable, new AuditInfo(author, comment, req.getRemoteAddr())); + return updateConfigHelper(callable, AuthorizationUtils.buildAuditInfo(req)); } private Response updateConfigHelper( diff --git a/server/src/main/java/org/apache/druid/server/http/CoordinatorDynamicConfigsResource.java 
b/server/src/main/java/org/apache/druid/server/http/CoordinatorDynamicConfigsResource.java index aab0a69632cb..a1ec2ea30e1b 100644 --- a/server/src/main/java/org/apache/druid/server/http/CoordinatorDynamicConfigsResource.java +++ b/server/src/main/java/org/apache/druid/server/http/CoordinatorDynamicConfigsResource.java @@ -20,7 +20,6 @@ package org.apache.druid.server.http; import com.sun.jersey.spi.container.ResourceFilters; -import org.apache.druid.audit.AuditInfo; import org.apache.druid.audit.AuditManager; import org.apache.druid.common.config.ConfigManager.SetResult; import org.apache.druid.common.utils.ServletResourceUtils; @@ -28,14 +27,13 @@ import org.apache.druid.server.coordinator.CoordinatorConfigManager; import org.apache.druid.server.coordinator.CoordinatorDynamicConfig; import org.apache.druid.server.http.security.ConfigResourceFilter; +import org.apache.druid.server.security.AuthorizationUtils; import org.joda.time.Interval; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; -import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; -import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; @@ -75,8 +73,6 @@ public Response getDynamicConfigs() @Consumes(MediaType.APPLICATION_JSON) public Response setDynamicConfigs( final CoordinatorDynamicConfig.Builder dynamicConfigBuilder, - @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author, - @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment, @Context HttpServletRequest req ) { @@ -85,7 +81,7 @@ public Response setDynamicConfigs( final SetResult setResult = manager.setDynamicConfig( dynamicConfigBuilder.build(current), - new AuditInfo(author, comment, req.getRemoteAddr()) + AuthorizationUtils.buildAuditInfo(req) ); if (setResult.isOk()) { diff --git a/server/src/main/java/org/apache/druid/server/http/DataSourcesResource.java b/server/src/main/java/org/apache/druid/server/http/DataSourcesResource.java index 79237f507a54..51e8a3d93c3f 100644 --- a/server/src/main/java/org/apache/druid/server/http/DataSourcesResource.java +++ b/server/src/main/java/org/apache/druid/server/http/DataSourcesResource.java @@ -30,6 +30,8 @@ import com.sun.jersey.spi.container.ResourceFilters; import it.unimi.dsi.fastutil.objects.Object2LongMap; import org.apache.commons.lang.StringUtils; +import org.apache.druid.audit.AuditEntry; +import org.apache.druid.audit.AuditManager; import org.apache.druid.client.CoordinatorServerView; import org.apache.druid.client.DruidDataSource; import org.apache.druid.client.DruidServer; @@ -56,6 +58,7 @@ import org.apache.druid.server.coordinator.rules.LoadRule; import org.apache.druid.server.coordinator.rules.Rule; import org.apache.druid.server.http.security.DatasourceResourceFilter; +import org.apache.druid.server.security.AuthorizationUtils; import org.apache.druid.server.security.AuthorizerMapper; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.SegmentId; @@ -110,6 +113,7 @@ public class DataSourcesResource private final OverlordClient overlordClient; private final AuthorizerMapper authorizerMapper; private final DruidCoordinator coordinator; + private final AuditManager auditManager; @Inject public DataSourcesResource( @@ -118,7 +122,8 @@ public DataSourcesResource( MetadataRuleManager metadataRuleManager, @Nullable OverlordClient overlordClient, AuthorizerMapper authorizerMapper, - DruidCoordinator coordinator + DruidCoordinator 
coordinator, + AuditManager auditManager ) { this.serverInventoryView = serverInventoryView; @@ -127,6 +132,7 @@ public DataSourcesResource( this.overlordClient = overlordClient; this.authorizerMapper = authorizerMapper; this.coordinator = coordinator; + this.auditManager = auditManager; } @GET @@ -220,13 +226,17 @@ public Response markAsUsedNonOvershadowedSegments( @Consumes(MediaType.APPLICATION_JSON) public Response markSegmentsAsUnused( @PathParam("dataSourceName") final String dataSourceName, - final MarkDataSourceSegmentsPayload payload + final MarkDataSourceSegmentsPayload payload, + @Context final HttpServletRequest req ) { MarkSegments markSegments = () -> { final Interval interval = payload.getInterval(); + final int numUpdatedSegments; + final Object auditPayload; if (interval != null) { - return segmentsMetadataManager.markAsUnusedSegmentsInInterval(dataSourceName, interval); + numUpdatedSegments = segmentsMetadataManager.markAsUnusedSegmentsInInterval(dataSourceName, interval); + auditPayload = Collections.singletonMap("interval", interval); } else { final Set segmentIds = payload.getSegmentIds() @@ -236,12 +246,23 @@ public Response markSegmentsAsUnused( .collect(Collectors.toSet()); // Note: segments for the "wrong" datasource are ignored. - return segmentsMetadataManager.markSegmentsAsUnused( + numUpdatedSegments = segmentsMetadataManager.markSegmentsAsUnused( segmentIds.stream() .filter(segmentId -> segmentId.getDataSource().equals(dataSourceName)) .collect(Collectors.toSet()) ); + auditPayload = Collections.singletonMap("segmentIds", segmentIds); } + auditManager.doAudit( + AuditEntry.builder() + .key(dataSourceName) + .type("segment.markUnused") + .payload(auditPayload) + .auditInfo(AuthorizationUtils.buildAuditInfo(req)) + .request(AuthorizationUtils.buildRequestInfo("coordinator", req)) + .build() + ); + return numUpdatedSegments; }; return doMarkSegmentsWithPayload("markSegmentsAsUnused", dataSourceName, payload, markSegments); } @@ -312,7 +333,8 @@ private static Response doMarkSegments(String method, String dataSourceName, Mar public Response markAsUnusedAllSegmentsOrKillUnusedSegmentsInInterval( @PathParam("dataSourceName") final String dataSourceName, @QueryParam("kill") final String kill, - @QueryParam("interval") final String interval + @QueryParam("interval") final String interval, + @Context HttpServletRequest req ) { if (overlordClient == null) { @@ -321,7 +343,7 @@ public Response markAsUnusedAllSegmentsOrKillUnusedSegmentsInInterval( boolean killSegments = kill != null && Boolean.valueOf(kill); if (killSegments) { - return killUnusedSegmentsInInterval(dataSourceName, interval); + return killUnusedSegmentsInInterval(dataSourceName, interval, req); } else { MarkSegments markSegments = () -> segmentsMetadataManager.markAsUnusedAllSegmentsInDataSource(dataSourceName); return doMarkSegments("markAsUnusedAllSegments", dataSourceName, markSegments); @@ -334,7 +356,8 @@ public Response markAsUnusedAllSegmentsOrKillUnusedSegmentsInInterval( @Produces(MediaType.APPLICATION_JSON) public Response killUnusedSegmentsInInterval( @PathParam("dataSourceName") final String dataSourceName, - @PathParam("interval") final String interval + @PathParam("interval") final String interval, + @Context final HttpServletRequest req ) { if (overlordClient == null) { @@ -345,7 +368,19 @@ public Response killUnusedSegmentsInInterval( } final Interval theInterval = Intervals.of(interval.replace('_', '/')); try { - FutureUtils.getUnchecked(overlordClient.runKillTask("api-issued", 
dataSourceName, theInterval, null), true); + final String killTaskId = FutureUtils.getUnchecked( + overlordClient.runKillTask("api-issued", dataSourceName, theInterval, null), + true + ); + auditManager.doAudit( + AuditEntry.builder() + .key(dataSourceName) + .type("segment.kill") + .payload(ImmutableMap.of("killTaskId", killTaskId, "interval", theInterval)) + .auditInfo(AuthorizationUtils.buildAuditInfo(req)) + .request(AuthorizationUtils.buildRequestInfo("coordinator", req)) + .build() + ); return Response.ok().build(); } catch (Exception e) { diff --git a/server/src/main/java/org/apache/druid/server/http/LookupCoordinatorResource.java b/server/src/main/java/org/apache/druid/server/http/LookupCoordinatorResource.java index 8bc21caed54a..9e435e72f952 100644 --- a/server/src/main/java/org/apache/druid/server/http/LookupCoordinatorResource.java +++ b/server/src/main/java/org/apache/druid/server/http/LookupCoordinatorResource.java @@ -29,8 +29,6 @@ import com.google.common.net.HostAndPort; import com.google.inject.Inject; import com.sun.jersey.spi.container.ResourceFilters; -import org.apache.druid.audit.AuditInfo; -import org.apache.druid.audit.AuditManager; import org.apache.druid.common.utils.ServletResourceUtils; import org.apache.druid.guice.annotations.Json; import org.apache.druid.guice.annotations.Smile; @@ -41,6 +39,7 @@ import org.apache.druid.server.http.security.ConfigResourceFilter; import org.apache.druid.server.lookup.cache.LookupCoordinatorManager; import org.apache.druid.server.lookup.cache.LookupExtractorFactoryMapContainer; +import org.apache.druid.server.security.AuthorizationUtils; import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; @@ -48,7 +47,6 @@ import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; -import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; @@ -149,8 +147,6 @@ public Response getAllLookupSpecs() @Consumes({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE}) public Response updateAllLookups( InputStream in, - @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author, - @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment, @Context HttpServletRequest req ) { @@ -166,7 +162,7 @@ public Response updateAllLookups( catch (IOException e) { return Response.status(Response.Status.BAD_REQUEST).entity(ServletResourceUtils.sanitizeException(e)).build(); } - if (lookupCoordinatorManager.updateLookups(map, new AuditInfo(author, comment, req.getRemoteAddr()))) { + if (lookupCoordinatorManager.updateLookups(map, AuthorizationUtils.buildAuditInfo(req))) { return Response.status(Response.Status.ACCEPTED).entity(map).build(); } else { throw new RuntimeException("Unknown error updating configuration"); @@ -183,8 +179,6 @@ public Response updateAllLookups( @Path("/config/{tier}") public Response deleteTier( @PathParam("tier") String tier, - @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author, - @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment, @Context HttpServletRequest req ) { @@ -195,7 +189,7 @@ public Response deleteTier( .build(); } - if (lookupCoordinatorManager.deleteTier(tier, new AuditInfo(author, comment, req.getRemoteAddr()))) { + if (lookupCoordinatorManager.deleteTier(tier, AuthorizationUtils.buildAuditInfo(req))) { return Response.status(Response.Status.ACCEPTED).build(); } else { return 
Response.status(Response.Status.NOT_FOUND).build(); @@ -213,8 +207,6 @@ public Response deleteTier( public Response deleteLookup( @PathParam("tier") String tier, @PathParam("lookup") String lookup, - @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author, - @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment, @Context HttpServletRequest req ) { @@ -231,7 +223,7 @@ public Response deleteLookup( .build(); } - if (lookupCoordinatorManager.deleteLookup(tier, lookup, new AuditInfo(author, comment, req.getRemoteAddr()))) { + if (lookupCoordinatorManager.deleteLookup(tier, lookup, AuthorizationUtils.buildAuditInfo(req))) { return Response.status(Response.Status.ACCEPTED).build(); } else { return Response.status(Response.Status.NOT_FOUND).build(); @@ -249,8 +241,6 @@ public Response deleteLookup( public Response createOrUpdateLookup( @PathParam("tier") String tier, @PathParam("lookup") String lookup, - @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author, - @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment, InputStream in, @Context HttpServletRequest req ) @@ -280,7 +270,7 @@ public Response createOrUpdateLookup( tier, lookup, lookupSpec, - new AuditInfo(author, comment, req.getRemoteAddr()) + AuthorizationUtils.buildAuditInfo(req) )) { return Response.status(Response.Status.ACCEPTED).build(); } else { diff --git a/server/src/main/java/org/apache/druid/server/http/RulesResource.java b/server/src/main/java/org/apache/druid/server/http/RulesResource.java index beb223a6cc65..63769a3e76c3 100644 --- a/server/src/main/java/org/apache/druid/server/http/RulesResource.java +++ b/server/src/main/java/org/apache/druid/server/http/RulesResource.java @@ -30,13 +30,12 @@ import org.apache.druid.server.coordinator.rules.Rule; import org.apache.druid.server.http.security.RulesResourceFilter; import org.apache.druid.server.http.security.StateResourceFilter; +import org.apache.druid.server.security.AuthorizationUtils; import org.joda.time.Interval; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; -import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; -import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; @@ -102,13 +101,11 @@ public Response getDatasourceRules( public Response setDatasourceRules( @PathParam("dataSourceName") final String dataSourceName, final List rules, - @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author, - @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment, @Context HttpServletRequest req ) { try { - final AuditInfo auditInfo = new AuditInfo(author, comment, req.getRemoteAddr()); + final AuditInfo auditInfo = AuthorizationUtils.buildAuditInfo(req); if (databaseRuleManager.overrideRule(dataSourceName, rules, auditInfo)) { return Response.ok().build(); } else { diff --git a/server/src/main/java/org/apache/druid/server/lookup/cache/LookupCoordinatorManager.java b/server/src/main/java/org/apache/druid/server/lookup/cache/LookupCoordinatorManager.java index 2662de6b981b..fcb1a81fd5fe 100644 --- a/server/src/main/java/org/apache/druid/server/lookup/cache/LookupCoordinatorManager.java +++ b/server/src/main/java/org/apache/druid/server/lookup/cache/LookupCoordinatorManager.java @@ -519,7 +519,7 @@ private void initializeLookupsConfigWatcher() configManager.set( LOOKUP_CONFIG_KEY, converted, - new AuditInfo("autoConversion", "autoConversion", 
"127.0.0.1") + new AuditInfo("autoConversion", "autoConversion", "autoConversion", "127.0.0.1") ); } } diff --git a/server/src/main/java/org/apache/druid/server/security/AuthorizationUtils.java b/server/src/main/java/org/apache/druid/server/security/AuthorizationUtils.java index 2b2afa9a9cdd..431819da8a42 100644 --- a/server/src/main/java/org/apache/druid/server/security/AuthorizationUtils.java +++ b/server/src/main/java/org/apache/druid/server/security/AuthorizationUtils.java @@ -22,6 +22,9 @@ import com.google.common.base.Function; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import org.apache.druid.audit.AuditInfo; +import org.apache.druid.audit.AuditManager; +import org.apache.druid.audit.RequestInfo; import org.apache.druid.error.DruidException; import org.apache.druid.java.util.common.ISE; @@ -89,6 +92,58 @@ public static AuthenticationResult authenticationResultFromRequest(final HttpSer return authenticationResult; } + /** + * Extracts the identity from the authentication result if set as an atrribute + * of this request. + */ + public static String getAuthenticatedIdentity(HttpServletRequest request) + { + final AuthenticationResult authenticationResult = (AuthenticationResult) request.getAttribute( + AuthConfig.DRUID_AUTHENTICATION_RESULT + ); + + if (authenticationResult == null) { + return null; + } else { + return authenticationResult.getIdentity(); + } + } + + /** + * Builds an AuditInfo for the given request by extracting the following from + * it: + *
<ul>
+ *   <li>Header {@link AuditManager#X_DRUID_AUTHOR}</li>
+ *   <li>Header {@link AuditManager#X_DRUID_COMMENT}</li>
+ *   <li>Attribute {@link AuthConfig#DRUID_AUTHENTICATION_RESULT}</li>
+ *   <li>IP address using {@link HttpServletRequest#getRemoteAddr()}</li>
+ * </ul>
+ */ + public static AuditInfo buildAuditInfo(HttpServletRequest request) + { + final String author = request.getHeader(AuditManager.X_DRUID_AUTHOR); + final String comment = request.getHeader(AuditManager.X_DRUID_COMMENT); + return new AuditInfo( + author == null ? "" : author, + getAuthenticatedIdentity(request), + comment == null ? "" : comment, + request.getRemoteAddr() + ); + } + + /** + * Builds a RequestInfo object that can be used for auditing purposes. + */ + public static RequestInfo buildRequestInfo(String service, HttpServletRequest request) + { + return new RequestInfo( + service, + request.getMethod(), + request.getRequestURI(), + request.getQueryString() + ); + } + /** * Check a list of resource-actions to be performed by the identity represented by authenticationResult. * diff --git a/server/src/test/java/org/apache/druid/metadata/SQLMetadataRuleManagerTest.java b/server/src/test/java/org/apache/druid/metadata/SQLMetadataRuleManagerTest.java index 20ffdb81188a..f1b7855e3d1f 100644 --- a/server/src/test/java/org/apache/druid/metadata/SQLMetadataRuleManagerTest.java +++ b/server/src/test/java/org/apache/druid/metadata/SQLMetadataRuleManagerTest.java @@ -19,7 +19,6 @@ package org.apache.druid.metadata; - import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Suppliers; @@ -34,7 +33,7 @@ import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.segment.TestHelper; +import org.apache.druid.server.audit.AuditSerdeHelper; import org.apache.druid.server.audit.SQLAuditManager; import org.apache.druid.server.audit.SQLAuditManagerConfig; import org.apache.druid.server.coordinator.rules.IntervalLoadRule; @@ -65,7 +64,6 @@ public class SQLMetadataRuleManagerTest private AuditManager auditManager; private SQLMetadataSegmentPublisher publisher; private final ObjectMapper mapper = new DefaultObjectMapper(); - private final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); @Before public void setUp() @@ -73,12 +71,15 @@ public void setUp() connector = derbyConnectorRule.getConnector(); tablesConfig = derbyConnectorRule.metadataTablesConfigSupplier().get(); connector.createAuditTable(); + + final SQLAuditManagerConfig auditManagerConfig = new SQLAuditManagerConfig(null, null, null, null, null); auditManager = new SQLAuditManager( + auditManagerConfig, + new AuditSerdeHelper(auditManagerConfig, null, mapper, mapper), connector, Suppliers.ofInstance(tablesConfig), new NoopServiceEmitter(), - mapper, - new SQLAuditManagerConfig() + mapper ); connector.createRulesTable(); @@ -86,7 +87,7 @@ public void setUp() ruleManager = new SQLMetadataRuleManager(mapper, managerConfig, tablesConfig, connector, auditManager); connector.createSegmentTable(); publisher = new SQLMetadataSegmentPublisher( - jsonMapper, + mapper, derbyConnectorRule.metadataTablesConfigSupplier().get(), connector ); @@ -190,7 +191,7 @@ public void testAuditEntryCreated() throws Exception Assert.assertEquals( rules, - mapper.readValue(entry.getPayload(), new TypeReference>() {}) + mapper.readValue(entry.getPayload().serialized(), new TypeReference>() {}) ); Assert.assertEquals(auditInfo, entry.getAuditInfo()); Assert.assertEquals(DATASOURCE, entry.getKey()); @@ -223,7 +224,7 @@ public void testFetchAuditEntriesForAllDataSources() throws Exception for (AuditEntry entry : auditEntries) { Assert.assertEquals( rules, - 
mapper.readValue(entry.getPayload(), new TypeReference>() {}) + mapper.readValue(entry.getPayload().serialized(), new TypeReference>() {}) ); Assert.assertEquals(auditInfo, entry.getAuditInfo()); } @@ -367,7 +368,7 @@ private void dropTable(final String tableName) private AuditInfo createAuditInfo(String comment) { - return new AuditInfo("test", comment, "127.0.0.1"); + return new AuditInfo("test", "id", comment, "127.0.0.1"); } } diff --git a/server/src/test/java/org/apache/druid/server/audit/AuditManagerConfigTest.java b/server/src/test/java/org/apache/druid/server/audit/AuditManagerConfigTest.java new file mode 100644 index 000000000000..2ed4d85856f6 --- /dev/null +++ b/server/src/test/java/org/apache/druid/server/audit/AuditManagerConfigTest.java @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.server.audit; + +import com.google.common.collect.ImmutableList; +import com.google.inject.Injector; +import org.apache.druid.guice.GuiceInjectors; +import org.apache.druid.guice.JsonConfigProvider; +import org.apache.druid.guice.JsonConfigurator; +import org.junit.Assert; +import org.junit.Test; + +import java.util.Properties; + +public class AuditManagerConfigTest +{ + private static final String CONFIG_BASE = "druid.audit.manager"; + + @Test + public void testDefaultAuditConfig() + { + final Injector injector = createInjector(); + final JsonConfigProvider provider = JsonConfigProvider.of( + CONFIG_BASE, + AuditManagerConfig.class + ); + + provider.inject(new Properties(), injector.getInstance(JsonConfigurator.class)); + final AuditManagerConfig config = provider.get(); + Assert.assertTrue(config instanceof SQLAuditManagerConfig); + + final SQLAuditManagerConfig sqlAuditConfig = (SQLAuditManagerConfig) config; + Assert.assertTrue(sqlAuditConfig.isAuditSystemRequests()); + Assert.assertFalse(sqlAuditConfig.isSkipNullField()); + Assert.assertFalse(sqlAuditConfig.isIncludePayloadAsDimensionInMetric()); + Assert.assertEquals(-1, sqlAuditConfig.getMaxPayloadSizeBytes()); + Assert.assertEquals(7 * 86400 * 1000, sqlAuditConfig.getAuditHistoryMillis()); + } + + @Test + public void testLogAuditConfigWithDefaults() + { + final Injector injector = createInjector(); + final JsonConfigProvider provider = JsonConfigProvider.of( + CONFIG_BASE, + AuditManagerConfig.class + ); + + final Properties props = new Properties(); + props.setProperty("druid.audit.manager.type", "log"); + + provider.inject(props, injector.getInstance(JsonConfigurator.class)); + final AuditManagerConfig config = provider.get(); + Assert.assertTrue(config instanceof LoggingAuditManagerConfig); + + final LoggingAuditManagerConfig logAuditConfig = (LoggingAuditManagerConfig) config; + 
Assert.assertTrue(logAuditConfig.isAuditSystemRequests()); + Assert.assertFalse(logAuditConfig.isSkipNullField()); + Assert.assertEquals(-1, logAuditConfig.getMaxPayloadSizeBytes()); + Assert.assertEquals(AuditLogger.Level.INFO, logAuditConfig.getLogLevel()); + } + + @Test + public void testLogAuditConfigWithOverrides() + { + final Injector injector = createInjector(); + final JsonConfigProvider provider = JsonConfigProvider.of( + CONFIG_BASE, + AuditManagerConfig.class + ); + + final Properties props = new Properties(); + props.setProperty("druid.audit.manager.type", "log"); + props.setProperty("druid.audit.manager.logLevel", "WARN"); + props.setProperty("druid.audit.manager.auditSystemRequests", "true"); + + provider.inject(props, injector.getInstance(JsonConfigurator.class)); + + final AuditManagerConfig config = provider.get(); + Assert.assertTrue(config instanceof LoggingAuditManagerConfig); + + final LoggingAuditManagerConfig logAuditConfig = (LoggingAuditManagerConfig) config; + Assert.assertTrue(logAuditConfig.isAuditSystemRequests()); + Assert.assertFalse(logAuditConfig.isSkipNullField()); + Assert.assertEquals(-1, logAuditConfig.getMaxPayloadSizeBytes()); + Assert.assertEquals(AuditLogger.Level.WARN, logAuditConfig.getLogLevel()); + } + + @Test + public void testSqlAuditConfigWithOverrides() + { + final Injector injector = createInjector(); + final JsonConfigProvider provider = JsonConfigProvider.of( + CONFIG_BASE, + AuditManagerConfig.class + ); + + final Properties props = new Properties(); + props.setProperty("druid.audit.manager.type", "sql"); + props.setProperty("druid.audit.manager.skipNullField", "true"); + props.setProperty("druid.audit.manager.maxPayloadSizeBytes", "100"); + props.setProperty("druid.audit.manager.auditHistoryMillis", "1000"); + props.setProperty("druid.audit.manager.includePayloadAsDimensionInMetric", "true"); + + provider.inject(props, injector.getInstance(JsonConfigurator.class)); + + final AuditManagerConfig config = provider.get(); + Assert.assertTrue(config instanceof SQLAuditManagerConfig); + + final SQLAuditManagerConfig sqlAuditConfig = (SQLAuditManagerConfig) config; + Assert.assertTrue(sqlAuditConfig.isSkipNullField()); + Assert.assertTrue(sqlAuditConfig.isIncludePayloadAsDimensionInMetric()); + Assert.assertEquals(100, sqlAuditConfig.getMaxPayloadSizeBytes()); + Assert.assertEquals(1000L, sqlAuditConfig.getAuditHistoryMillis()); + } + + private Injector createInjector() + { + return GuiceInjectors.makeStartupInjectorWithModules( + ImmutableList.of( + binder -> JsonConfigProvider.bind(binder, CONFIG_BASE, AuditManagerConfig.class) + ) + ); + } +} diff --git a/server/src/test/java/org/apache/druid/server/audit/SQLAuditManagerTest.java b/server/src/test/java/org/apache/druid/server/audit/SQLAuditManagerTest.java index 7e003c346211..a12722088159 100644 --- a/server/src/test/java/org/apache/druid/server/audit/SQLAuditManagerTest.java +++ b/server/src/test/java/org/apache/druid/server/audit/SQLAuditManagerTest.java @@ -19,299 +19,200 @@ package org.apache.druid.server.audit; -import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.druid.audit.AuditEntry; import org.apache.druid.audit.AuditInfo; -import org.apache.druid.audit.AuditManager; -import org.apache.druid.common.config.ConfigSerde; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.DateTimes; +import 
org.apache.druid.java.util.common.HumanReadableBytes; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.jackson.JacksonUtils; import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; +import org.apache.druid.java.util.metrics.StubServiceEmitter; import org.apache.druid.metadata.TestDerbyConnector; -import org.apache.druid.server.metrics.NoopServiceEmitter; +import org.joda.time.DateTime; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; -import org.mockito.ArgumentMatchers; -import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import java.io.IOException; -import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.TreeMap; @RunWith(MockitoJUnitRunner.class) public class SQLAuditManagerTest { @Rule - public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule(); + public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule + = new TestDerbyConnector.DerbyConnectorRule(); private TestDerbyConnector connector; - private AuditManager auditManager; - private ConfigSerde stringConfigSerde; + private SQLAuditManager auditManager; + private StubServiceEmitter serviceEmitter; private final ObjectMapper mapper = new DefaultObjectMapper(); + private final ObjectMapper mapperSkipNull + = new DefaultObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); @Before public void setUp() { + serviceEmitter = new StubServiceEmitter("audit-test", "localhost"); connector = derbyConnectorRule.getConnector(); connector.createAuditTable(); - auditManager = new SQLAuditManager( + auditManager = createAuditManager(new SQLAuditManagerConfig(null, null, null, null, null)); + } + + private SQLAuditManager createAuditManager(SQLAuditManagerConfig config) + { + return new SQLAuditManager( + config, + new AuditSerdeHelper(config, null, mapper, mapperSkipNull), connector, derbyConnectorRule.metadataTablesConfigSupplier(), - new NoopServiceEmitter(), - mapper, - new SQLAuditManagerConfig() + serviceEmitter, + mapper ); - stringConfigSerde = new ConfigSerde() - { - @Override - public byte[] serialize(String obj) - { - try { - return mapper.writeValueAsBytes(obj); - } - catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - - @Override - public String serializeToString(String obj, boolean skipNull) - { - // In our test, payload Object is already a String - // So to serialize to String, we just return a String - return obj; - } - - @Override - public String deserialize(byte[] bytes) - { - return JacksonUtils.readValue(mapper, bytes, String.class); - } - }; } @Test - public void testAuditMetricEventBuilderConfig() + public void testAuditMetricEventWithPayload() throws IOException { - AuditEntry entry = new AuditEntry( - "testKey", - "testType", - new AuditInfo("testAuthor", "testComment", "127.0.0.1"), - "testPayload", - DateTimes.of("2013-01-01T00:00:00Z") + SQLAuditManager auditManager = createAuditManager( + new SQLAuditManagerConfig(null, null, null, null, true) ); - SQLAuditManager auditManagerWithPayloadAsDimension = new SQLAuditManager( - connector, - derbyConnectorRule.metadataTablesConfigSupplier(), - new NoopServiceEmitter(), - mapper, - new SQLAuditManagerConfig() - { - @Override - public boolean getIncludePayloadAsDimensionInMetric() - { - return true; - } 
- } - ); + final AuditEntry entry = createAuditEntry("testKey", "testType", DateTimes.nowUtc()); + auditManager.doAudit(entry); + + Map> metricEvents = serviceEmitter.getMetricEvents(); + Assert.assertEquals(1, metricEvents.size()); - ServiceMetricEvent.Builder auditEntryBuilder = ((SQLAuditManager) auditManager).getAuditMetricEventBuilder(entry); - final String payloadDimensionKey = "payload"; - Assert.assertNull(auditEntryBuilder.getDimension(payloadDimensionKey)); + List auditMetricEvents = metricEvents.get("config/audit"); + Assert.assertNotNull(auditMetricEvents); + Assert.assertEquals(1, auditMetricEvents.size()); - ServiceMetricEvent.Builder auditEntryBuilderWithPayload = auditManagerWithPayloadAsDimension.getAuditMetricEventBuilder(entry); - Assert.assertEquals("testPayload", auditEntryBuilderWithPayload.getDimension(payloadDimensionKey)); + ServiceMetricEvent metric = auditMetricEvents.get(0); + + final AuditEntry dbEntry = lookupAuditEntryForKey("testKey"); + Assert.assertNotNull(dbEntry); + Assert.assertEquals(dbEntry.getKey(), metric.getUserDims().get("key")); + Assert.assertEquals(dbEntry.getType(), metric.getUserDims().get("type")); + Assert.assertEquals(dbEntry.getPayload().serialized(), metric.getUserDims().get("payload")); + Assert.assertEquals(dbEntry.getAuditInfo().getAuthor(), metric.getUserDims().get("author")); + Assert.assertEquals(dbEntry.getAuditInfo().getComment(), metric.getUserDims().get("comment")); + Assert.assertEquals(dbEntry.getAuditInfo().getIp(), metric.getUserDims().get("remote_address")); } @Test(timeout = 60_000L) public void testCreateAuditEntry() throws IOException { - String entry1Key = "testKey"; - String entry1Type = "testType"; - AuditInfo entry1AuditInfo = new AuditInfo("testAuthor", "testComment", "127.0.0.1"); - String entry1Payload = "testPayload"; - - auditManager.doAudit(entry1Key, entry1Type, entry1AuditInfo, entry1Payload, stringConfigSerde); - - byte[] payload = connector.lookup( - derbyConnectorRule.metadataTablesConfigSupplier().get().getAuditTable(), - "audit_key", - "payload", - "testKey" - ); - AuditEntry dbEntry = mapper.readValue(payload, AuditEntry.class); - Assert.assertEquals(entry1Key, dbEntry.getKey()); - Assert.assertEquals(entry1Payload, dbEntry.getPayload()); - Assert.assertEquals(entry1Type, dbEntry.getType()); - Assert.assertEquals(entry1AuditInfo, dbEntry.getAuditInfo()); + final AuditEntry entry = createAuditEntry("key1", "type1", DateTimes.nowUtc()); + auditManager.doAudit(entry); + + AuditEntry dbEntry = lookupAuditEntryForKey(entry.getKey()); + Assert.assertEquals(entry, dbEntry); + + // Verify emitted metrics + Map> metricEvents = serviceEmitter.getMetricEvents(); + Assert.assertEquals(1, metricEvents.size()); + + List auditMetricEvents = metricEvents.get("config/audit"); + Assert.assertNotNull(auditMetricEvents); + Assert.assertEquals(1, auditMetricEvents.size()); + + ServiceMetricEvent metric = auditMetricEvents.get(0); + Assert.assertEquals(dbEntry.getKey(), metric.getUserDims().get("key")); + Assert.assertEquals(dbEntry.getType(), metric.getUserDims().get("type")); + Assert.assertNull(metric.getUserDims().get("payload")); + Assert.assertEquals(dbEntry.getAuditInfo().getAuthor(), metric.getUserDims().get("author")); + Assert.assertEquals(dbEntry.getAuditInfo().getComment(), metric.getUserDims().get("comment")); + Assert.assertEquals(dbEntry.getAuditInfo().getIp(), metric.getUserDims().get("remote_address")); } @Test(timeout = 60_000L) public void testFetchAuditHistory() { - String entry1Key = "testKey"; - String 
entry1Type = "testType"; - AuditInfo entry1AuditInfo = new AuditInfo("testAuthor", "testComment", "127.0.0.1"); - String entry1Payload = "testPayload"; - - auditManager.doAudit(entry1Key, entry1Type, entry1AuditInfo, entry1Payload, stringConfigSerde); - auditManager.doAudit(entry1Key, entry1Type, entry1AuditInfo, entry1Payload, stringConfigSerde); + final AuditEntry event = createAuditEntry("testKey", "testType", DateTimes.nowUtc()); + auditManager.doAudit(event); + auditManager.doAudit(event); List auditEntries = auditManager.fetchAuditHistory( "testKey", "testType", Intervals.of("2000-01-01T00:00:00Z/2100-01-03T00:00:00Z") ); - Assert.assertEquals(2, auditEntries.size()); - Assert.assertEquals(entry1Key, auditEntries.get(0).getKey()); - Assert.assertEquals(entry1Payload, auditEntries.get(0).getPayload()); - Assert.assertEquals(entry1Type, auditEntries.get(0).getType()); - Assert.assertEquals(entry1AuditInfo, auditEntries.get(0).getAuditInfo()); - - Assert.assertEquals(entry1Key, auditEntries.get(1).getKey()); - Assert.assertEquals(entry1Payload, auditEntries.get(1).getPayload()); - Assert.assertEquals(entry1Type, auditEntries.get(1).getType()); - Assert.assertEquals(entry1AuditInfo, auditEntries.get(1).getAuditInfo()); + Assert.assertEquals(2, auditEntries.size()); + Assert.assertEquals(event, auditEntries.get(0)); + Assert.assertEquals(event, auditEntries.get(1)); } @Test(timeout = 60_000L) public void testFetchAuditHistoryByKeyAndTypeWithLimit() { - String entry1Key = "testKey1"; - String entry1Type = "testType"; - AuditInfo entry1AuditInfo = new AuditInfo("testAuthor", "testComment", "127.0.0.1"); - String entry1Payload = "testPayload"; - - String entry2Key = "testKey2"; - String entry2Type = "testType"; - AuditInfo entry2AuditInfo = new AuditInfo("testAuthor", "testComment", "127.0.0.1"); - String entry2Payload = "testPayload"; - - auditManager.doAudit(entry1Key, entry1Type, entry1AuditInfo, entry1Payload, stringConfigSerde); - auditManager.doAudit(entry2Key, entry2Type, entry2AuditInfo, entry2Payload, stringConfigSerde); - List auditEntries = auditManager.fetchAuditHistory("testKey1", "testType", 1); + final AuditEntry entry1 = createAuditEntry("key1", "type1", DateTimes.nowUtc()); + final AuditEntry entry2 = createAuditEntry("key2", "type2", DateTimes.nowUtc()); + + auditManager.doAudit(entry1); + auditManager.doAudit(entry2); + + List auditEntries = auditManager.fetchAuditHistory(entry1.getKey(), entry1.getType(), 1); Assert.assertEquals(1, auditEntries.size()); - Assert.assertEquals(entry1Key, auditEntries.get(0).getKey()); - Assert.assertEquals(entry1Payload, auditEntries.get(0).getPayload()); - Assert.assertEquals(entry1Type, auditEntries.get(0).getType()); - Assert.assertEquals(entry1AuditInfo, auditEntries.get(0).getAuditInfo()); + Assert.assertEquals(entry1, auditEntries.get(0)); } @Test(timeout = 60_000L) public void testRemoveAuditLogsOlderThanWithEntryOlderThanTime() throws IOException { - String entry1Key = "testKey"; - String entry1Type = "testType"; - AuditInfo entry1AuditInfo = new AuditInfo("testAuthor", "testComment", "127.0.0.1"); - String entry1Payload = "testPayload"; + final AuditEntry entry = createAuditEntry("key1", "type1", DateTimes.nowUtc()); + auditManager.doAudit(entry); - auditManager.doAudit(entry1Key, entry1Type, entry1AuditInfo, entry1Payload, stringConfigSerde); - byte[] payload = connector.lookup( - derbyConnectorRule.metadataTablesConfigSupplier().get().getAuditTable(), - "audit_key", - "payload", - "testKey" - ); - AuditEntry dbEntry = 
mapper.readValue(payload, AuditEntry.class); - Assert.assertEquals(entry1Key, dbEntry.getKey()); - Assert.assertEquals(entry1Payload, dbEntry.getPayload()); - Assert.assertEquals(entry1Type, dbEntry.getType()); - Assert.assertEquals(entry1AuditInfo, dbEntry.getAuditInfo()); + AuditEntry dbEntry = lookupAuditEntryForKey(entry.getKey()); + Assert.assertEquals(entry, dbEntry); - // Do delete + // Verify that the audit entry gets deleted auditManager.removeAuditLogsOlderThan(System.currentTimeMillis()); - // Verify the delete - payload = connector.lookup( - derbyConnectorRule.metadataTablesConfigSupplier().get().getAuditTable(), - "audit_key", - "payload", - "testKey" - ); - Assert.assertNull(payload); + Assert.assertNull(lookupAuditEntryForKey(entry.getKey())); } @Test(timeout = 60_000L) public void testRemoveAuditLogsOlderThanWithEntryNotOlderThanTime() throws IOException { - String entry1Key = "testKey"; - String entry1Type = "testType"; - AuditInfo entry1AuditInfo = new AuditInfo("testAuthor", "testComment", "127.0.0.1"); - String entry1Payload = "testPayload"; + AuditEntry entry = createAuditEntry("key", "type", DateTimes.nowUtc()); + auditManager.doAudit(entry); - auditManager.doAudit(entry1Key, entry1Type, entry1AuditInfo, entry1Payload, stringConfigSerde); - byte[] payload = connector.lookup( - derbyConnectorRule.metadataTablesConfigSupplier().get().getAuditTable(), - "audit_key", - "payload", - "testKey" - ); - AuditEntry dbEntry = mapper.readValue(payload, AuditEntry.class); - Assert.assertEquals(entry1Key, dbEntry.getKey()); - Assert.assertEquals(entry1Payload, dbEntry.getPayload()); - Assert.assertEquals(entry1Type, dbEntry.getType()); - Assert.assertEquals(entry1AuditInfo, dbEntry.getAuditInfo()); - // Do delete + AuditEntry dbEntry = lookupAuditEntryForKey(entry.getKey()); + Assert.assertEquals(entry, dbEntry); + + // Delete old audit logs auditManager.removeAuditLogsOlderThan(DateTimes.of("2012-01-01T00:00:00Z").getMillis()); - // Verify that entry was not delete - payload = connector.lookup( - derbyConnectorRule.metadataTablesConfigSupplier().get().getAuditTable(), - "audit_key", - "payload", - "testKey" - ); - dbEntry = mapper.readValue(payload, AuditEntry.class); - Assert.assertEquals(entry1Key, dbEntry.getKey()); - Assert.assertEquals(entry1Payload, dbEntry.getPayload()); - Assert.assertEquals(entry1Type, dbEntry.getType()); - Assert.assertEquals(entry1AuditInfo, dbEntry.getAuditInfo()); + + dbEntry = lookupAuditEntryForKey(entry.getKey()); + Assert.assertEquals(entry, dbEntry); } @Test(timeout = 60_000L) public void testFetchAuditHistoryByTypeWithLimit() { - String entry1Key = "testKey"; - String entry1Type = "testType"; - AuditInfo entry1AuditInfo = new AuditInfo("testAuthor", "testComment", "127.0.0.1"); - String entry1Payload = "testPayload1"; - - String entry2Key = "testKey"; - String entry2Type = "testType"; - AuditInfo entry2AuditInfo = new AuditInfo("testAuthor", "testComment", "127.0.0.1"); - String entry2Payload = "testPayload2"; + final AuditEntry entry1 = createAuditEntry("testKey", "testType", DateTimes.of("2022-01")); + final AuditEntry entry2 = createAuditEntry("testKey", "testType", DateTimes.of("2022-03")); + final AuditEntry entry3 = createAuditEntry("testKey", "testType", DateTimes.of("2022-02")); - String entry3Key = "testKey"; - String entry3Type = "testType"; - AuditInfo entry3AuditInfo = new AuditInfo("testAuthor", "testComment", "127.0.0.1"); - String entry3Payload = "testPayload3"; - - auditManager.doAudit(entry1Key, entry1Type, entry1AuditInfo, 
entry1Payload, stringConfigSerde); - auditManager.doAudit(entry2Key, entry2Type, entry2AuditInfo, entry2Payload, stringConfigSerde); - auditManager.doAudit(entry3Key, entry3Type, entry3AuditInfo, entry3Payload, stringConfigSerde); + auditManager.doAudit(entry1); + auditManager.doAudit(entry2); + auditManager.doAudit(entry3); List auditEntries = auditManager.fetchAuditHistory("testType", 2); Assert.assertEquals(2, auditEntries.size()); - Assert.assertEquals(entry3Key, auditEntries.get(0).getKey()); - Assert.assertEquals(entry3Payload, auditEntries.get(0).getPayload()); - Assert.assertEquals(entry3Type, auditEntries.get(0).getType()); - Assert.assertEquals(entry3AuditInfo, auditEntries.get(0).getAuditInfo()); - - Assert.assertEquals(entry2Key, auditEntries.get(1).getKey()); - Assert.assertEquals(entry2Payload, auditEntries.get(1).getPayload()); - Assert.assertEquals(entry2Type, auditEntries.get(1).getType()); - Assert.assertEquals(entry2AuditInfo, auditEntries.get(1).getAuditInfo()); + Assert.assertEquals(entry2, auditEntries.get(0)); + Assert.assertEquals(entry3, auditEntries.get(1)); } @Test(expected = IllegalArgumentException.class, timeout = 10_000L) @@ -329,123 +230,64 @@ public void testFetchAuditHistoryLimitZero() @Test(timeout = 60_000L) public void testCreateAuditEntryWithPayloadOverSkipPayloadLimit() throws IOException { - int maxPayloadSize = 10; - SQLAuditManager auditManagerWithMaxPayloadSizeBytes = new SQLAuditManager( - connector, - derbyConnectorRule.metadataTablesConfigSupplier(), - new NoopServiceEmitter(), - mapper, - new SQLAuditManagerConfig() - { - @Override - public long getMaxPayloadSizeBytes() - { - return maxPayloadSize; - } - } + final SQLAuditManager auditManager = createAuditManager( + new SQLAuditManagerConfig(null, HumanReadableBytes.valueOf(10), null, null, null) ); - String entry1Key = "testKey"; - String entry1Type = "testType"; - AuditInfo entry1AuditInfo = new AuditInfo("testAuthor", "testComment", "127.0.0.1"); - String entry1Payload = "payload audit to store"; - - auditManagerWithMaxPayloadSizeBytes.doAudit( - entry1Key, - entry1Type, - entry1AuditInfo, - entry1Payload, - stringConfigSerde - ); + final AuditEntry entry = createAuditEntry("key", "type", DateTimes.nowUtc()); + auditManager.doAudit(entry); - byte[] payload = connector.lookup( - derbyConnectorRule.metadataTablesConfigSupplier().get().getAuditTable(), - "audit_key", - "payload", - "testKey" + // Verify that all the fields are the same except for the payload + AuditEntry dbEntry = lookupAuditEntryForKey(entry.getKey()); + Assert.assertEquals(entry.getKey(), dbEntry.getKey()); + // Assert.assertNotEquals(entry.getPayload(), dbEntry.getPayload()); + Assert.assertEquals( + "Payload truncated as it exceeds 'druid.audit.manager.maxPayloadSizeBytes'[10].", + dbEntry.getPayload().serialized() ); - - AuditEntry dbEntry = mapper.readValue(payload, AuditEntry.class); - Assert.assertEquals(entry1Key, dbEntry.getKey()); - Assert.assertNotEquals(entry1Payload, dbEntry.getPayload()); - Assert.assertEquals(StringUtils.format(AuditManager.PAYLOAD_SKIP_MSG_FORMAT, maxPayloadSize), dbEntry.getPayload()); - Assert.assertEquals(entry1Type, dbEntry.getType()); - Assert.assertEquals(entry1AuditInfo, dbEntry.getAuditInfo()); + Assert.assertEquals(entry.getType(), dbEntry.getType()); + Assert.assertEquals(entry.getAuditInfo(), dbEntry.getAuditInfo()); } @Test(timeout = 60_000L) public void testCreateAuditEntryWithPayloadUnderSkipPayloadLimit() throws IOException { - SQLAuditManager auditManagerWithMaxPayloadSizeBytes 
= new SQLAuditManager( - connector, - derbyConnectorRule.metadataTablesConfigSupplier(), - new NoopServiceEmitter(), - mapper, - new SQLAuditManagerConfig() - { - @Override - public long getMaxPayloadSizeBytes() - { - return 500; - } - } - ); - String entry1Key = "testKey"; - String entry1Type = "testType"; - AuditInfo entry1AuditInfo = new AuditInfo("testAuthor", "testComment", "127.0.0.1"); - String entry1Payload = "payload audit to store"; - - auditManagerWithMaxPayloadSizeBytes.doAudit( - entry1Key, - entry1Type, - entry1AuditInfo, - entry1Payload, - stringConfigSerde + SQLAuditManager auditManager = createAuditManager( + new SQLAuditManagerConfig(null, HumanReadableBytes.valueOf(500), null, null, null) ); - byte[] payload = connector.lookup( - derbyConnectorRule.metadataTablesConfigSupplier().get().getAuditTable(), - "audit_key", - "payload", - "testKey" - ); + final AuditEntry entry = createAuditEntry("key", "type", DateTimes.nowUtc()); + auditManager.doAudit(entry); - AuditEntry dbEntry = mapper.readValue(payload, AuditEntry.class); - Assert.assertEquals(entry1Key, dbEntry.getKey()); - Assert.assertEquals(entry1Payload, dbEntry.getPayload()); - Assert.assertEquals(entry1Type, dbEntry.getType()); - Assert.assertEquals(entry1AuditInfo, dbEntry.getAuditInfo()); + // Verify that the actual payload has been persisted + AuditEntry dbEntry = lookupAuditEntryForKey(entry.getKey()); + Assert.assertEquals(entry, dbEntry); } @Test(timeout = 60_000L) - public void testCreateAuditEntryWithSkipNullConfigTrue() + public void testCreateAuditEntryWithSkipNullsInPayload() throws IOException { - ConfigSerde> mockConfigSerde = Mockito.mock(ConfigSerde.class); - SQLAuditManager auditManagerWithSkipNull = new SQLAuditManager( - connector, - derbyConnectorRule.metadataTablesConfigSupplier(), - new NoopServiceEmitter(), - mapper, - new SQLAuditManagerConfig() - { - @Override - public boolean isSkipNullField() - { - return true; - } - } + final SQLAuditManager auditManagerSkipNull = createAuditManager( + new SQLAuditManagerConfig(null, null, true, null, null) ); - String entry1Key = "test1Key"; - String entry1Type = "test1Type"; - AuditInfo entry1AuditInfo = new AuditInfo("testAuthor", "testComment", "127.0.0.1"); - // Entry 1 payload has a null field for one of the property - Map entryPayload1WithNull = new HashMap<>(); - entryPayload1WithNull.put("version", "x"); - entryPayload1WithNull.put("something", null); + AuditInfo auditInfo = new AuditInfo("testAuthor", "testIdentity", "testComment", "127.0.0.1"); + + final Map payloadMap = new TreeMap<>(); + payloadMap.put("version", "x"); + payloadMap.put("something", null); + + auditManager.doAudit( + AuditEntry.builder().key("key1").type("type1").auditInfo(auditInfo).payload(payloadMap).build() + ); + AuditEntry entryWithNulls = lookupAuditEntryForKey("key1"); + Assert.assertEquals("{\"something\":null,\"version\":\"x\"}", entryWithNulls.getPayload().serialized()); - auditManagerWithSkipNull.doAudit(entry1Key, entry1Type, entry1AuditInfo, entryPayload1WithNull, mockConfigSerde); - Mockito.verify(mockConfigSerde).serializeToString(ArgumentMatchers.eq(entryPayload1WithNull), ArgumentMatchers.eq(true)); + auditManagerSkipNull.doAudit( + AuditEntry.builder().key("key2").type("type2").auditInfo(auditInfo).payload(payloadMap).build() + ); + AuditEntry entryWithoutNulls = lookupAuditEntryForKey("key2"); + Assert.assertEquals("{\"version\":\"x\"}", entryWithoutNulls.getPayload().serialized()); } @After @@ -456,12 +298,37 @@ public void cleanup() private void 
dropTable(final String tableName) { - Assert.assertEquals( - 0, - connector.getDBI().withHandle( - handle -> handle.createStatement(StringUtils.format("DROP TABLE %s", tableName)) + int rowsAffected = connector.getDBI().withHandle( + handle -> handle.createStatement(StringUtils.format("DROP TABLE %s", tableName)) .execute() - ).intValue() ); + Assert.assertEquals(0, rowsAffected); + } + + private AuditEntry lookupAuditEntryForKey(String key) throws IOException + { + byte[] payload = connector.lookup( + derbyConnectorRule.metadataTablesConfigSupplier().get().getAuditTable(), + "audit_key", + "payload", + key + ); + + if (payload == null) { + return null; + } else { + return mapper.readValue(payload, AuditEntry.class); + } + } + + private AuditEntry createAuditEntry(String key, String type, DateTime auditTime) + { + return AuditEntry.builder() + .key(key) + .type(type) + .serializedPayload(StringUtils.format("Test payload for key[%s], type[%s]", key, type)) + .auditInfo(new AuditInfo("author", "identity", "comment", "127.0.0.1")) + .auditTime(auditTime) + .build(); } } diff --git a/server/src/test/java/org/apache/druid/server/coordinator/simulate/CoordinatorSimulationBuilder.java b/server/src/test/java/org/apache/druid/server/coordinator/simulate/CoordinatorSimulationBuilder.java index b5174c8f02f3..f341ebf5df99 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/simulate/CoordinatorSimulationBuilder.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/simulate/CoordinatorSimulationBuilder.java @@ -335,7 +335,7 @@ public void setRetentionRules(String datasource, Rule... rules) env.ruleManager.overrideRule( datasource, Arrays.asList(rules), - new AuditInfo("sim", "sim", "localhost") + new AuditInfo("sim", "sim", "sim", "localhost") ); } diff --git a/server/src/test/java/org/apache/druid/server/http/CoordinatorCompactionConfigsResourceTest.java b/server/src/test/java/org/apache/druid/server/http/CoordinatorCompactionConfigsResourceTest.java index 8d9b02401905..c31364ac9fd9 100644 --- a/server/src/test/java/org/apache/druid/server/http/CoordinatorCompactionConfigsResourceTest.java +++ b/server/src/test/java/org/apache/druid/server/http/CoordinatorCompactionConfigsResourceTest.java @@ -148,14 +148,10 @@ public void testSetCompactionTaskLimitWithExistingConfig() double compactionTaskSlotRatio = 0.5; int maxCompactionTaskSlots = 9; - String author = "maytas"; - String comment = "hello"; Response result = coordinatorCompactionConfigsResource.setCompactionTaskLimit( compactionTaskSlotRatio, maxCompactionTaskSlots, true, - author, - comment, mockHttpServletRequest ); Assert.assertEquals(Response.Status.OK.getStatusCode(), result.getStatus()); @@ -195,12 +191,8 @@ public void testAddOrUpdateCompactionConfigWithExistingConfig() null, ImmutableMap.of("key", "val") ); - String author = "maytas"; - String comment = "hello"; Response result = coordinatorCompactionConfigsResource.addOrUpdateCompactionConfig( newConfig, - author, - comment, mockHttpServletRequest ); Assert.assertEquals(Response.Status.OK.getStatusCode(), result.getStatus()); @@ -248,12 +240,8 @@ public void testDeleteCompactionConfigWithExistingConfig() ) ).thenReturn(originalConfig); - String author = "maytas"; - String comment = "hello"; Response result = coordinatorCompactionConfigsResource.deleteCompactionConfig( datasourceName, - author, - comment, mockHttpServletRequest ); Assert.assertEquals(Response.Status.OK.getStatusCode(), result.getStatus()); @@ -277,8 +265,6 @@ public void 
testUpdateShouldRetryIfRetryableException() coordinatorCompactionConfigsResource.addOrUpdateCompactionConfig( NEW_CONFIG, - "author", - "test", mockHttpServletRequest ); @@ -308,8 +294,6 @@ public void testUpdateShouldNotRetryIfNotRetryableException() coordinatorCompactionConfigsResource.addOrUpdateCompactionConfig( NEW_CONFIG, - "author", - "test", mockHttpServletRequest ); @@ -351,14 +335,10 @@ public void testSetCompactionTaskLimitWithoutExistingConfig() double compactionTaskSlotRatio = 0.5; int maxCompactionTaskSlots = 9; - String author = "maytas"; - String comment = "hello"; Response result = coordinatorCompactionConfigsResource.setCompactionTaskLimit( compactionTaskSlotRatio, maxCompactionTaskSlots, true, - author, - comment, mockHttpServletRequest ); Assert.assertEquals(Response.Status.OK.getStatusCode(), result.getStatus()); @@ -414,8 +394,6 @@ public void testAddOrUpdateCompactionConfigWithoutExistingConfig() String comment = "hello"; Response result = coordinatorCompactionConfigsResource.addOrUpdateCompactionConfig( newConfig, - author, - comment, mockHttpServletRequest ); Assert.assertEquals(Response.Status.OK.getStatusCode(), result.getStatus()); @@ -441,12 +419,8 @@ public void testDeleteCompactionConfigWithoutExistingConfigShouldFailAsDatasourc ArgumentMatchers.eq(CoordinatorCompactionConfig.empty()) ) ).thenReturn(CoordinatorCompactionConfig.empty()); - String author = "maytas"; - String comment = "hello"; Response result = coordinatorCompactionConfigsResource.deleteCompactionConfig( DATASOURCE_NOT_EXISTS, - author, - comment, mockHttpServletRequest ); Assert.assertEquals(Response.Status.NOT_FOUND.getStatusCode(), result.getStatus()); diff --git a/server/src/test/java/org/apache/druid/server/http/DataSourcesResourceTest.java b/server/src/test/java/org/apache/druid/server/http/DataSourcesResourceTest.java index c33fffc80830..386a486c1638 100644 --- a/server/src/test/java/org/apache/druid/server/http/DataSourcesResourceTest.java +++ b/server/src/test/java/org/apache/druid/server/http/DataSourcesResourceTest.java @@ -28,6 +28,7 @@ import com.google.common.util.concurrent.Futures; import it.unimi.dsi.fastutil.objects.Object2LongMap; import it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap; +import org.apache.druid.audit.AuditManager; import org.apache.druid.client.CoordinatorServerView; import org.apache.druid.client.DruidDataSource; import org.apache.druid.client.DruidServer; @@ -89,6 +90,7 @@ public class DataSourcesResourceTest private List dataSegmentList; private HttpServletRequest request; private SegmentsMetadataManager segmentsMetadataManager; + private AuditManager auditManager; @Before public void setUp() @@ -96,6 +98,7 @@ public void setUp() request = EasyMock.createStrictMock(HttpServletRequest.class); inventoryView = EasyMock.createStrictMock(CoordinatorServerView.class); server = EasyMock.niceMock(DruidServer.class); + auditManager = EasyMock.niceMock(AuditManager.class); dataSegmentList = new ArrayList<>(); dataSegmentList.add( new DataSegment( @@ -180,8 +183,7 @@ public void testGetFullQueryableDataSources() EasyMock.expectLastCall().times(1); EasyMock.replay(inventoryView, server, request); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, null, null, null, AuthTestUtils.TEST_AUTHORIZER_MAPPER, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.getQueryableDataSources("full", null, request); Set result = (Set) response.getEntity(); Assert.assertEquals(200, 
response.getStatus()); @@ -255,7 +257,8 @@ public Access authorize(AuthenticationResult authenticationResult1, Resource res } }; - DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, null, null, null, authMapper, null); + DataSourcesResource dataSourcesResource = + new DataSourcesResource(inventoryView, null, null, null, authMapper, null, auditManager); Response response = dataSourcesResource.getQueryableDataSources("full", null, request); Set result = (Set) response.getEntity(); @@ -293,8 +296,7 @@ public void testGetSimpleQueryableDataSources() EasyMock.expectLastCall().times(1); EasyMock.replay(inventoryView, server, request); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, null, null, null, AuthTestUtils.TEST_AUTHORIZER_MAPPER, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.getQueryableDataSources(null, "simple", request); Assert.assertEquals(200, response.getStatus()); List> results = (List>) response.getEntity(); @@ -317,8 +319,7 @@ public void testFullGetTheDataSource() EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).atLeastOnce(); EasyMock.replay(inventoryView, server); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, null, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.getDataSource("datasource1", "full"); ImmutableDruidDataSource result = (ImmutableDruidDataSource) response.getEntity(); Assert.assertEquals(200, response.getStatus()); @@ -333,8 +334,7 @@ public void testNullGetTheDataSource() EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).atLeastOnce(); EasyMock.replay(inventoryView, server); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, null, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Assert.assertEquals(204, dataSourcesResource.getDataSource("none", null).getStatus()); EasyMock.verify(inventoryView, server); } @@ -351,8 +351,7 @@ public void testSimpleGetTheDataSource() EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).atLeastOnce(); EasyMock.replay(inventoryView, server); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, null, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.getDataSource("datasource1", null); Assert.assertEquals(200, response.getStatus()); Map> result = (Map>) response.getEntity(); @@ -385,7 +384,7 @@ public void testSimpleGetTheDataSourceManyTiers() EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server, server2, server3)).atLeastOnce(); EasyMock.replay(inventoryView, server, server2, server3); - DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, null, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.getDataSource("datasource1", null); Assert.assertEquals(200, response.getStatus()); Map> result = (Map>) response.getEntity(); @@ -423,7 +422,7 @@ public void testSimpleGetTheDataSourceWithReplicatedSegments() EasyMock.replay(inventoryView); - DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, null, null, null, null, null); + DataSourcesResource dataSourcesResource = 
createResource(); Response response = dataSourcesResource.getDataSource("datasource1", null); Assert.assertEquals(200, response.getStatus()); Map> result1 = (Map>) response.getEntity(); @@ -467,15 +466,14 @@ public void testGetSegmentDataSourceIntervals() List expectedIntervals = new ArrayList<>(); expectedIntervals.add(Intervals.of("2010-01-22T00:00:00.000Z/2010-01-23T00:00:00.000Z")); expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, null, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.getIntervalsWithServedSegmentsOrAllServedSegmentsPerIntervals( "invalidDataSource", null, null ); - Assert.assertEquals(response.getEntity(), null); + Assert.assertNull(response.getEntity()); response = dataSourcesResource.getIntervalsWithServedSegmentsOrAllServedSegmentsPerIntervals( "datasource1", @@ -527,15 +525,14 @@ public void testGetServedSegmentsInIntervalInDataSource() EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).atLeastOnce(); EasyMock.replay(inventoryView); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, null, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.getServedSegmentsInInterval( "invalidDataSource", "2010-01-01/P1D", null, null ); - Assert.assertEquals(null, response.getEntity()); + Assert.assertNull(response.getEntity()); response = dataSourcesResource.getServedSegmentsInInterval( "datasource1", @@ -589,17 +586,18 @@ public void testGetServedSegmentsInIntervalInDataSource() @Test public void testKillSegmentsInIntervalInDataSource() { - String interval = "2010-01-01_P1D"; + String interval = "2010-01-01/P1D"; Interval theInterval = Intervals.of(interval.replace('_', '/')); OverlordClient overlordClient = EasyMock.createStrictMock(OverlordClient.class); EasyMock.expect(overlordClient.runKillTask("api-issued", "datasource1", theInterval, null)) - .andReturn(Futures.immediateFuture(null)); + .andReturn(Futures.immediateFuture("kill_task_1")); EasyMock.replay(overlordClient, server); DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, null, null, overlordClient, null, null); - Response response = dataSourcesResource.killUnusedSegmentsInInterval("datasource1", interval); + new DataSourcesResource(inventoryView, null, null, overlordClient, null, null, auditManager); + prepareRequestForAudit(); + Response response = dataSourcesResource.killUnusedSegmentsInInterval("datasource1", interval, request); Assert.assertEquals(200, response.getStatus()); Assert.assertNull(response.getEntity()); @@ -612,10 +610,10 @@ public void testMarkAsUnusedAllSegmentsInDataSource() OverlordClient overlordClient = EasyMock.createStrictMock(OverlordClient.class); EasyMock.replay(overlordClient, server); DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, null, null, overlordClient, null, null); + new DataSourcesResource(inventoryView, null, null, overlordClient, null, null, auditManager); try { Response response = - dataSourcesResource.markAsUnusedAllSegmentsOrKillUnusedSegmentsInInterval("datasource", "true", "???"); + dataSourcesResource.markAsUnusedAllSegmentsOrKillUnusedSegmentsInInterval("datasource", "true", "???", request); // 400 (Bad Request) or an IllegalArgumentException is expected. 
Assert.assertEquals(400, response.getStatus()); Assert.assertNotNull(response.getEntity()); @@ -635,7 +633,7 @@ public void testIsHandOffComplete() Rule loadRule = new IntervalLoadRule(Intervals.of("2013-01-02T00:00:00Z/2013-01-03T00:00:00Z"), null, null); Rule dropRule = new IntervalDropRule(Intervals.of("2013-01-01T00:00:00Z/2013-01-02T00:00:00Z")); DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, null, databaseRuleManager, null, null, null); + new DataSourcesResource(inventoryView, null, databaseRuleManager, null, null, null, auditManager); // test dropped EasyMock.expect(databaseRuleManager.getRulesWithDefault("dataSource1")) @@ -704,7 +702,7 @@ public void testMarkSegmentAsUsed() EasyMock.expect(segmentsMetadataManager.markSegmentAsUsed(segment.getId().toString())).andReturn(true).once(); EasyMock.replay(segmentsMetadataManager); - DataSourcesResource dataSourcesResource = new DataSourcesResource(null, segmentsMetadataManager, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markSegmentAsUsed(segment.getDataSource(), segment.getId().toString()); Assert.assertEquals(200, response.getStatus()); @@ -718,7 +716,7 @@ public void testMarkSegmentAsUsedNoChange() EasyMock.expect(segmentsMetadataManager.markSegmentAsUsed(segment.getId().toString())).andReturn(false).once(); EasyMock.replay(segmentsMetadataManager); - DataSourcesResource dataSourcesResource = new DataSourcesResource(null, segmentsMetadataManager, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markSegmentAsUsed(segment.getDataSource(), segment.getId().toString()); Assert.assertEquals(200, response.getStatus()); @@ -738,9 +736,7 @@ public void testMarkAsUsedNonOvershadowedSegmentsInterval() EasyMock.expect(server.getDataSource("datasource1")).andReturn(dataSource).once(); EasyMock.replay(segmentsMetadataManager, inventoryView, server); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); - + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( "datasource1", new DataSourcesResource.MarkDataSourceSegmentsPayload(interval, null) @@ -761,8 +757,7 @@ public void testMarkAsUsedNonOvershadowedSegmentsIntervalNoneUpdated() EasyMock.expect(server.getDataSource("datasource1")).andReturn(dataSource).once(); EasyMock.replay(segmentsMetadataManager, inventoryView, server); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( "datasource1", @@ -784,8 +779,7 @@ public void testMarkAsUsedNonOvershadowedSegmentsSet() throws UnknownSegmentIdsE EasyMock.expect(server.getDataSource("datasource1")).andReturn(dataSource).once(); EasyMock.replay(segmentsMetadataManager, inventoryView, server); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( "datasource1", @@ -807,8 +801,7 @@ public void testMarkAsUsedNonOvershadowedSegmentsIntervalException() 
EasyMock.expect(server.getDataSource("datasource1")).andReturn(dataSource).once(); EasyMock.replay(segmentsMetadataManager, inventoryView, server); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( "datasource1", @@ -825,8 +818,7 @@ public void testMarkAsUsedNonOvershadowedSegmentsNoDataSource() EasyMock.expect(server.getDataSource("datasource1")).andReturn(null).once(); EasyMock.replay(segmentsMetadataManager, inventoryView, server); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( "datasource1", @@ -839,8 +831,7 @@ public void testMarkAsUsedNonOvershadowedSegmentsNoDataSource() @Test public void testMarkAsUsedNonOvershadowedSegmentsInvalidPayloadNoArguments() { - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( "datasource1", @@ -852,8 +843,7 @@ public void testMarkAsUsedNonOvershadowedSegmentsInvalidPayloadNoArguments() @Test public void testMarkAsUsedNonOvershadowedSegmentsInvalidPayloadBothArguments() { - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( "datasource1", @@ -865,8 +855,7 @@ public void testMarkAsUsedNonOvershadowedSegmentsInvalidPayloadBothArguments() @Test public void testMarkAsUsedNonOvershadowedSegmentsInvalidPayloadEmptyArray() { - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( "datasource1", @@ -878,8 +867,7 @@ public void testMarkAsUsedNonOvershadowedSegmentsInvalidPayloadEmptyArray() @Test public void testMarkAsUsedNonOvershadowedSegmentsNoPayload() { - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments("datasource1", null); Assert.assertEquals(400, response.getStatus()); @@ -1038,9 +1026,9 @@ public void testMarkSegmentsAsUnused() .collect(Collectors.toSet()) ); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload); + DataSourcesResource dataSourcesResource = createResource(); + prepareRequestForAudit(); + Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(ImmutableMap.of("numChangedSegments", 1), response.getEntity()); EasyMock.verify(segmentsMetadataManager, inventoryView, server); @@ 
-1069,9 +1057,9 @@ public void testMarkSegmentsAsUnusedNoChanges() .collect(Collectors.toSet()) ); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload); + DataSourcesResource dataSourcesResource = createResource(); + prepareRequestForAudit(); + Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity()); EasyMock.verify(segmentsMetadataManager, inventoryView, server); @@ -1103,8 +1091,8 @@ public void testMarkSegmentsAsUnusedException() ); DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload); + createResource(); + Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); Assert.assertEquals(500, response.getStatus()); Assert.assertNotNull(response.getEntity()); EasyMock.verify(segmentsMetadataManager, inventoryView, server); @@ -1124,9 +1112,9 @@ public void testMarkAsUnusedSegmentsInInterval() final DataSourcesResource.MarkDataSourceSegmentsPayload payload = new DataSourcesResource.MarkDataSourceSegmentsPayload(theInterval, null); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload); + DataSourcesResource dataSourcesResource = createResource(); + prepareRequestForAudit(); + Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(ImmutableMap.of("numChangedSegments", 1), response.getEntity()); EasyMock.verify(segmentsMetadataManager, inventoryView, server); @@ -1147,9 +1135,9 @@ public void testMarkAsUnusedSegmentsInIntervalNoChanges() final DataSourcesResource.MarkDataSourceSegmentsPayload payload = new DataSourcesResource.MarkDataSourceSegmentsPayload(theInterval, null); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload); + DataSourcesResource dataSourcesResource = createResource(); + prepareRequestForAudit(); + Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity()); EasyMock.verify(segmentsMetadataManager, inventoryView, server); @@ -1172,8 +1160,8 @@ public void testMarkAsUnusedSegmentsInIntervalException() new DataSourcesResource.MarkDataSourceSegmentsPayload(theInterval, null); DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload); + createResource(); + Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); Assert.assertEquals(500, response.getStatus()); Assert.assertNotNull(response.getEntity()); EasyMock.verify(segmentsMetadataManager, 
inventoryView, server); @@ -1183,9 +1171,9 @@ public void testMarkAsUnusedSegmentsInIntervalException() public void testMarkSegmentsAsUnusedNullPayload() { DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + createResource(); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", null); + Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", null, request); Assert.assertEquals(400, response.getStatus()); Assert.assertNotNull(response.getEntity()); Assert.assertEquals( @@ -1198,12 +1186,12 @@ public void testMarkSegmentsAsUnusedNullPayload() public void testMarkSegmentsAsUnusedInvalidPayload() { DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + createResource(); final DataSourcesResource.MarkDataSourceSegmentsPayload payload = new DataSourcesResource.MarkDataSourceSegmentsPayload(null, null); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload); + Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); Assert.assertEquals(400, response.getStatus()); Assert.assertNotNull(response.getEntity()); } @@ -1212,12 +1200,12 @@ public void testMarkSegmentsAsUnusedInvalidPayload() public void testMarkSegmentsAsUnusedInvalidPayloadBothArguments() { DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + createResource(); final DataSourcesResource.MarkDataSourceSegmentsPayload payload = new DataSourcesResource.MarkDataSourceSegmentsPayload(Intervals.of("2010-01-01/P1D"), ImmutableSet.of()); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload); + Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); Assert.assertEquals(400, response.getStatus()); Assert.assertNotNull(response.getEntity()); } @@ -1225,7 +1213,7 @@ public void testMarkSegmentsAsUnusedInvalidPayloadBothArguments() @Test public void testGetDatasourceLoadstatusForceMetadataRefreshNull() { - DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", null, null, null, null, null); Assert.assertEquals(400, response.getStatus()); } @@ -1246,6 +1234,7 @@ public void testGetDatasourceLoadstatusNoSegmentForInterval() null, null, null, + null, null ); Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, null, null); @@ -1305,7 +1294,7 @@ public void testGetDatasourceLoadstatusDefault() EasyMock.expect(inventoryView.getLoadInfoForAllSegments()).andReturn(completedLoadInfoMap).once(); EasyMock.replay(segmentsMetadataManager, inventoryView); - DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, null, null); Assert.assertEquals(200, response.getStatus()); Assert.assertNotNull(response.getEntity()); @@ -1321,7 +1310,7 @@ public void testGetDatasourceLoadstatusDefault() 
EasyMock.expect(inventoryView.getLoadInfoForAllSegments()).andReturn(halfLoadedInfoMap).once(); EasyMock.replay(segmentsMetadataManager, inventoryView); - dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + dataSourcesResource = createResource(); response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, null, null); Assert.assertEquals(200, response.getStatus()); Assert.assertNotNull(response.getEntity()); @@ -1384,7 +1373,7 @@ public void testGetDatasourceLoadstatusSimple() EasyMock.expect(inventoryView.getLoadInfoForAllSegments()).andReturn(completedLoadInfoMap).once(); EasyMock.replay(segmentsMetadataManager, inventoryView); - DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null); Assert.assertEquals(200, response.getStatus()); Assert.assertNotNull(response.getEntity()); @@ -1400,7 +1389,7 @@ public void testGetDatasourceLoadstatusSimple() EasyMock.expect(inventoryView.getLoadInfoForAllSegments()).andReturn(halfLoadedInfoMap).once(); EasyMock.replay(segmentsMetadataManager, inventoryView); - dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null); + dataSourcesResource = createResource(); response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null); Assert.assertEquals(200, response.getStatus()); Assert.assertNotNull(response.getEntity()); @@ -1455,7 +1444,8 @@ public void testGetDatasourceLoadstatusFull() EasyMock.replay(segmentsMetadataManager, druidCoordinator); - DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, druidCoordinator); + DataSourcesResource dataSourcesResource = + new DataSourcesResource(null, segmentsMetadataManager, null, null, null, druidCoordinator, auditManager); Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, "full", null); Assert.assertEquals(200, response.getStatus()); Assert.assertNotNull(response.getEntity()); @@ -1512,7 +1502,8 @@ public void testGetDatasourceLoadstatusFullAndComputeUsingClusterView() EasyMock.replay(segmentsMetadataManager, druidCoordinator); - DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, druidCoordinator); + DataSourcesResource dataSourcesResource = + new DataSourcesResource(null, segmentsMetadataManager, null, null, null, druidCoordinator, auditManager); Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, "full", "computeUsingClusterView"); Assert.assertEquals(200, response.getStatus()); Assert.assertNotNull(response.getEntity()); @@ -1552,4 +1543,31 @@ private DataSegment createSegment(Interval interval, String version, int partiti 0, 0 ); } + + private void prepareRequestForAudit() + { + EasyMock.expect(request.getHeader(AuditManager.X_DRUID_AUTHOR)).andReturn("author").anyTimes(); + EasyMock.expect(request.getHeader(AuditManager.X_DRUID_COMMENT)).andReturn("comment").anyTimes(); + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(null).anyTimes(); + EasyMock.expect(request.getRemoteAddr()).andReturn("127.0.0.1").anyTimes(); + 
+ EasyMock.expect(request.getMethod()).andReturn("POST").anyTimes(); + EasyMock.expect(request.getRequestURI()).andReturn("/request/uri").anyTimes(); + EasyMock.expect(request.getQueryString()).andReturn("query=string").anyTimes(); + + EasyMock.replay(request); + } + + private DataSourcesResource createResource() + { + return new DataSourcesResource( + inventoryView, + segmentsMetadataManager, + null, + null, + AuthTestUtils.TEST_AUTHORIZER_MAPPER, + null, + auditManager + ); + } } diff --git a/server/src/test/java/org/apache/druid/server/http/LookupCoordinatorResourceTest.java b/server/src/test/java/org/apache/druid/server/http/LookupCoordinatorResourceTest.java index 365eac6888ed..a56e135a9980 100644 --- a/server/src/test/java/org/apache/druid/server/http/LookupCoordinatorResourceTest.java +++ b/server/src/test/java/org/apache/druid/server/http/LookupCoordinatorResourceTest.java @@ -26,11 +26,14 @@ import com.google.common.io.ByteSource; import com.google.common.net.HostAndPort; import org.apache.druid.audit.AuditInfo; +import org.apache.druid.audit.AuditManager; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.query.lookup.LookupsState; import org.apache.druid.server.lookup.cache.LookupCoordinatorManager; import org.apache.druid.server.lookup.cache.LookupExtractorFactoryMapContainer; +import org.apache.druid.server.security.AuthConfig; +import org.apache.druid.server.security.AuthenticationResult; import org.easymock.Capture; import org.easymock.EasyMock; import org.junit.Assert; @@ -43,6 +46,7 @@ import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Set; @@ -89,6 +93,10 @@ public InputStream openStream() throws IOException private static final Map> NODES_LOOKUP_STATE = ImmutableMap.of(LOOKUP_NODE, LOOKUP_STATE); + private static final AuthenticationResult AUTH_RESULT + = new AuthenticationResult("id", "authorizer", "authBy", Collections.emptyMap()); + private final AuditInfo auditInfo + = new AuditInfo("some author", "id", "some comment", "127.0.0.1"); @Test public void testSimpleGet() @@ -289,12 +297,7 @@ public void testExceptionalGetLookup() @Test public void testSimpleDeleteTier() { - final String author = "some author"; - final String comment = "some comment"; - final String ip = "127.0.0.1"; - - final HttpServletRequest request = EasyMock.createStrictMock(HttpServletRequest.class); - EasyMock.expect(request.getRemoteAddr()).andReturn(ip).once(); + final HttpServletRequest request = createMockRequest(false); final Capture auditInfoCapture = Capture.newInstance(); final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( @@ -313,17 +316,12 @@ public void testSimpleDeleteTier() ); final Response response = lookupCoordinatorResource.deleteTier( LOOKUP_TIER, - author, - comment, request ); Assert.assertEquals(202, response.getStatus()); Assert.assertTrue(auditInfoCapture.hasCaptured()); - final AuditInfo auditInfo = auditInfoCapture.getValue(); - Assert.assertEquals(author, auditInfo.getAuthor()); - Assert.assertEquals(comment, auditInfo.getComment()); - Assert.assertEquals(ip, auditInfo.getIp()); + Assert.assertEquals(auditInfo, auditInfoCapture.getValue()); EasyMock.verify(lookupCoordinatorManager, request); } @@ -331,12 +329,7 @@ public void testSimpleDeleteTier() @Test public void testSimpleDelete() { - final String author = "some author"; - 
final String comment = "some comment"; - final String ip = "127.0.0.1"; - - final HttpServletRequest request = EasyMock.createStrictMock(HttpServletRequest.class); - EasyMock.expect(request.getRemoteAddr()).andReturn(ip).once(); + final HttpServletRequest request = createMockRequest(false); final Capture auditInfoCapture = Capture.newInstance(); final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( @@ -357,17 +350,12 @@ public void testSimpleDelete() final Response response = lookupCoordinatorResource.deleteLookup( LOOKUP_TIER, LOOKUP_NAME, - author, - comment, request ); Assert.assertEquals(202, response.getStatus()); Assert.assertTrue(auditInfoCapture.hasCaptured()); - final AuditInfo auditInfo = auditInfoCapture.getValue(); - Assert.assertEquals(author, auditInfo.getAuthor()); - Assert.assertEquals(comment, auditInfo.getComment()); - Assert.assertEquals(ip, auditInfo.getIp()); + Assert.assertEquals(auditInfo, auditInfoCapture.getValue()); EasyMock.verify(lookupCoordinatorManager, request); } @@ -376,12 +364,7 @@ public void testSimpleDelete() @Test public void testMissingDelete() { - final String author = "some author"; - final String comment = "some comment"; - final String ip = "127.0.0.1"; - - final HttpServletRequest request = EasyMock.createStrictMock(HttpServletRequest.class); - EasyMock.expect(request.getRemoteAddr()).andReturn(ip).once(); + final HttpServletRequest request = createMockRequest(false); final Capture auditInfoCapture = Capture.newInstance(); final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( @@ -402,17 +385,12 @@ public void testMissingDelete() final Response response = lookupCoordinatorResource.deleteLookup( LOOKUP_TIER, LOOKUP_NAME, - author, - comment, request ); Assert.assertEquals(404, response.getStatus()); Assert.assertTrue(auditInfoCapture.hasCaptured()); - final AuditInfo auditInfo = auditInfoCapture.getValue(); - Assert.assertEquals(author, auditInfo.getAuthor()); - Assert.assertEquals(comment, auditInfo.getComment()); - Assert.assertEquals(ip, auditInfo.getIp()); + Assert.assertEquals(auditInfo, auditInfoCapture.getValue()); EasyMock.verify(lookupCoordinatorManager, request); } @@ -421,13 +399,9 @@ public void testMissingDelete() @Test public void testExceptionalDelete() { - final String author = "some author"; - final String comment = "some comment"; - final String ip = "127.0.0.1"; final String errMsg = "some error"; - final HttpServletRequest request = EasyMock.createStrictMock(HttpServletRequest.class); - EasyMock.expect(request.getRemoteAddr()).andReturn(ip).once(); + final HttpServletRequest request = createMockRequest(false); final Capture auditInfoCapture = Capture.newInstance(); final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( @@ -448,18 +422,13 @@ public void testExceptionalDelete() final Response response = lookupCoordinatorResource.deleteLookup( LOOKUP_TIER, LOOKUP_NAME, - author, - comment, request ); Assert.assertEquals(500, response.getStatus()); Assert.assertEquals(ImmutableMap.of("error", errMsg), response.getEntity()); Assert.assertTrue(auditInfoCapture.hasCaptured()); - final AuditInfo auditInfo = auditInfoCapture.getValue(); - Assert.assertEquals(author, auditInfo.getAuthor()); - Assert.assertEquals(comment, auditInfo.getComment()); - Assert.assertEquals(ip, auditInfo.getIp()); + Assert.assertEquals(auditInfo, auditInfoCapture.getValue()); EasyMock.verify(lookupCoordinatorManager, request); } @@ -475,24 +444,18 @@ public void 
testInvalidDelete() MAPPER, MAPPER ); - Assert.assertEquals(400, lookupCoordinatorResource.deleteLookup("foo", null, null, null, null).getStatus()); - Assert.assertEquals(400, lookupCoordinatorResource.deleteLookup(null, null, null, null, null).getStatus()); - Assert.assertEquals(400, lookupCoordinatorResource.deleteLookup(null, "foo", null, null, null).getStatus()); - Assert.assertEquals(400, lookupCoordinatorResource.deleteLookup("foo", "", null, null, null).getStatus()); - Assert.assertEquals(400, lookupCoordinatorResource.deleteLookup("", "foo", null, null, null).getStatus()); + Assert.assertEquals(400, lookupCoordinatorResource.deleteLookup("foo", null, null).getStatus()); + Assert.assertEquals(400, lookupCoordinatorResource.deleteLookup(null, null, null).getStatus()); + Assert.assertEquals(400, lookupCoordinatorResource.deleteLookup(null, "foo", null).getStatus()); + Assert.assertEquals(400, lookupCoordinatorResource.deleteLookup("foo", "", null).getStatus()); + Assert.assertEquals(400, lookupCoordinatorResource.deleteLookup("", "foo", null).getStatus()); EasyMock.verify(lookupCoordinatorManager); } @Test public void testSimpleNew() throws Exception { - final String author = "some author"; - final String comment = "some comment"; - final String ip = "127.0.0.1"; - - final HttpServletRequest request = EasyMock.createStrictMock(HttpServletRequest.class); - EasyMock.expect(request.getContentType()).andReturn(MediaType.APPLICATION_JSON).once(); - EasyMock.expect(request.getRemoteAddr()).andReturn(ip).once(); + final HttpServletRequest request = createMockRequest(true); final Capture auditInfoCapture = Capture.newInstance(); final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( LookupCoordinatorManager.class); @@ -509,17 +472,12 @@ public void testSimpleNew() throws Exception ); final Response response = lookupCoordinatorResource.updateAllLookups( SINGLE_TIER_MAP_SOURCE.openStream(), - author, - comment, request ); Assert.assertEquals(202, response.getStatus()); Assert.assertTrue(auditInfoCapture.hasCaptured()); - final AuditInfo auditInfo = auditInfoCapture.getValue(); - Assert.assertEquals(author, auditInfo.getAuthor()); - Assert.assertEquals(comment, auditInfo.getComment()); - Assert.assertEquals(ip, auditInfo.getIp()); + Assert.assertEquals(auditInfo, auditInfoCapture.getValue()); EasyMock.verify(lookupCoordinatorManager, request); } @@ -527,14 +485,9 @@ public void testSimpleNew() throws Exception @Test public void testExceptionalNew() throws Exception { - final String author = "some author"; - final String comment = "some comment"; - final String ip = "127.0.0.1"; final String errMsg = "some error"; - final HttpServletRequest request = EasyMock.createStrictMock(HttpServletRequest.class); - EasyMock.expect(request.getContentType()).andReturn(MediaType.APPLICATION_JSON).once(); - EasyMock.expect(request.getRemoteAddr()).andReturn(ip).once(); + final HttpServletRequest request = createMockRequest(true); final Capture auditInfoCapture = Capture.newInstance(); final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( @@ -553,18 +506,13 @@ public void testExceptionalNew() throws Exception ); final Response response = lookupCoordinatorResource.updateAllLookups( SINGLE_TIER_MAP_SOURCE.openStream(), - author, - comment, request ); Assert.assertEquals(500, response.getStatus()); Assert.assertEquals(ImmutableMap.of("error", errMsg), response.getEntity()); Assert.assertTrue(auditInfoCapture.hasCaptured()); - final AuditInfo auditInfo = 
auditInfoCapture.getValue(); - Assert.assertEquals(author, auditInfo.getAuthor()); - Assert.assertEquals(comment, auditInfo.getComment()); - Assert.assertEquals(ip, auditInfo.getIp()); + Assert.assertEquals(auditInfo, auditInfoCapture.getValue()); EasyMock.verify(lookupCoordinatorManager, request); } @@ -572,13 +520,7 @@ public void testExceptionalNew() throws Exception @Test public void testFailedNew() throws Exception { - final String author = "some author"; - final String comment = "some comment"; - final String ip = "127.0.0.1"; - - final HttpServletRequest request = EasyMock.createStrictMock(HttpServletRequest.class); - EasyMock.expect(request.getContentType()).andReturn(MediaType.APPLICATION_JSON).once(); - EasyMock.expect(request.getRemoteAddr()).andReturn(ip).once(); + final HttpServletRequest request = createMockRequest(true); final Capture auditInfoCapture = Capture.newInstance(); final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( @@ -597,18 +539,13 @@ public void testFailedNew() throws Exception ); final Response response = lookupCoordinatorResource.updateAllLookups( SINGLE_TIER_MAP_SOURCE.openStream(), - author, - comment, request ); Assert.assertEquals(500, response.getStatus()); Assert.assertEquals(ImmutableMap.of("error", "Unknown error updating configuration"), response.getEntity()); Assert.assertTrue(auditInfoCapture.hasCaptured()); - final AuditInfo auditInfo = auditInfoCapture.getValue(); - Assert.assertEquals(author, auditInfo.getAuthor()); - Assert.assertEquals(comment, auditInfo.getComment()); - Assert.assertEquals(ip, auditInfo.getIp()); + Assert.assertEquals(auditInfo, auditInfoCapture.getValue()); EasyMock.verify(lookupCoordinatorManager, request); } @@ -616,13 +553,7 @@ public void testFailedNew() throws Exception @Test public void testSimpleNewLookup() throws Exception { - final String author = "some author"; - final String comment = "some comment"; - final String ip = "127.0.0.1"; - - final HttpServletRequest request = EasyMock.createStrictMock(HttpServletRequest.class); - EasyMock.expect(request.getContentType()).andReturn(MediaType.APPLICATION_JSON).once(); - EasyMock.expect(request.getRemoteAddr()).andReturn(ip).once(); + final HttpServletRequest request = createMockRequest(true); final Capture auditInfoCapture = Capture.newInstance(); final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( @@ -644,18 +575,13 @@ public void testSimpleNewLookup() throws Exception final Response response = lookupCoordinatorResource.createOrUpdateLookup( LOOKUP_TIER, LOOKUP_NAME, - author, - comment, EMPTY_MAP_SOURCE.openStream(), request ); Assert.assertEquals(202, response.getStatus()); Assert.assertTrue(auditInfoCapture.hasCaptured()); - final AuditInfo auditInfo = auditInfoCapture.getValue(); - Assert.assertEquals(author, auditInfo.getAuthor()); - Assert.assertEquals(comment, auditInfo.getComment()); - Assert.assertEquals(ip, auditInfo.getIp()); + Assert.assertEquals(auditInfo, auditInfoCapture.getValue()); EasyMock.verify(lookupCoordinatorManager, request); } @@ -664,13 +590,7 @@ public void testSimpleNewLookup() throws Exception @Test public void testDBErrNewLookup() throws Exception { - final String author = "some author"; - final String comment = "some comment"; - final String ip = "127.0.0.1"; - - final HttpServletRequest request = EasyMock.createStrictMock(HttpServletRequest.class); - EasyMock.expect(request.getContentType()).andReturn(MediaType.APPLICATION_JSON).once(); - 
EasyMock.expect(request.getRemoteAddr()).andReturn(ip).once(); + final HttpServletRequest request = createMockRequest(true); final Capture auditInfoCapture = Capture.newInstance(); final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( @@ -692,8 +612,6 @@ public void testDBErrNewLookup() throws Exception final Response response = lookupCoordinatorResource.createOrUpdateLookup( LOOKUP_TIER, LOOKUP_NAME, - author, - comment, EMPTY_MAP_SOURCE.openStream(), request ); @@ -701,10 +619,7 @@ public void testDBErrNewLookup() throws Exception Assert.assertEquals(500, response.getStatus()); Assert.assertEquals(ImmutableMap.of("error", "Unknown error updating configuration"), response.getEntity()); Assert.assertTrue(auditInfoCapture.hasCaptured()); - final AuditInfo auditInfo = auditInfoCapture.getValue(); - Assert.assertEquals(author, auditInfo.getAuthor()); - Assert.assertEquals(comment, auditInfo.getComment()); - Assert.assertEquals(ip, auditInfo.getIp()); + Assert.assertEquals(auditInfo, auditInfoCapture.getValue()); EasyMock.verify(lookupCoordinatorManager, request); } @@ -713,13 +628,8 @@ public void testDBErrNewLookup() throws Exception public void testExceptionalNewLookup() throws Exception { final String errMsg = "error message"; - final String author = "some author"; - final String comment = "some comment"; - final String ip = "127.0.0.1"; - final HttpServletRequest request = EasyMock.createStrictMock(HttpServletRequest.class); - EasyMock.expect(request.getContentType()).andReturn(MediaType.APPLICATION_JSON).once(); - EasyMock.expect(request.getRemoteAddr()).andReturn(ip).once(); + final HttpServletRequest request = createMockRequest(true); final Capture auditInfoCapture = Capture.newInstance(); final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( @@ -741,8 +651,6 @@ public void testExceptionalNewLookup() throws Exception final Response response = lookupCoordinatorResource.createOrUpdateLookup( LOOKUP_TIER, LOOKUP_NAME, - author, - comment, EMPTY_MAP_SOURCE.openStream(), request ); @@ -750,10 +658,7 @@ public void testExceptionalNewLookup() throws Exception Assert.assertEquals(500, response.getStatus()); Assert.assertEquals(ImmutableMap.of("error", errMsg), response.getEntity()); Assert.assertTrue(auditInfoCapture.hasCaptured()); - final AuditInfo auditInfo = auditInfoCapture.getValue(); - Assert.assertEquals(author, auditInfo.getAuthor()); - Assert.assertEquals(comment, auditInfo.getComment()); - Assert.assertEquals(ip, auditInfo.getIp()); + Assert.assertEquals(auditInfo, auditInfoCapture.getValue()); EasyMock.verify(lookupCoordinatorManager, request); } @@ -762,10 +667,6 @@ public void testExceptionalNewLookup() throws Exception @Test public void testNullValsNewLookup() throws Exception { - final String author = "some author"; - final String comment = "some comment"; - final String ip = "127.0.0.1"; - final HttpServletRequest request = EasyMock.createStrictMock(HttpServletRequest.class); final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( @@ -781,8 +682,6 @@ public void testNullValsNewLookup() throws Exception Assert.assertEquals(400, lookupCoordinatorResource.createOrUpdateLookup( null, LOOKUP_NAME, - author, - comment, EMPTY_MAP_SOURCE.openStream(), request ).getStatus()); @@ -790,8 +689,6 @@ public void testNullValsNewLookup() throws Exception Assert.assertEquals(400, lookupCoordinatorResource.createOrUpdateLookup( LOOKUP_TIER, null, - author, - comment, EMPTY_MAP_SOURCE.openStream(), request 
).getStatus()); @@ -799,8 +696,6 @@ public void testNullValsNewLookup() throws Exception Assert.assertEquals(400, lookupCoordinatorResource.createOrUpdateLookup( LOOKUP_TIER, "", - author, - comment, EMPTY_MAP_SOURCE.openStream(), request ).getStatus()); @@ -808,8 +703,6 @@ public void testNullValsNewLookup() throws Exception Assert.assertEquals(400, lookupCoordinatorResource.createOrUpdateLookup( "", LOOKUP_NAME, - author, - comment, EMPTY_MAP_SOURCE.openStream(), request ).getStatus()); @@ -1225,4 +1118,18 @@ public void testGetEmptyAllLookupSpecs() Assert.assertEquals(Status.NOT_FOUND.getStatusCode(), response.getStatus()); EasyMock.verify(lookupCoordinatorManager); } + + private HttpServletRequest createMockRequest(boolean hasJsonPayload) + { + final HttpServletRequest request = EasyMock.createStrictMock(HttpServletRequest.class); + if (hasJsonPayload) { + EasyMock.expect(request.getContentType()).andReturn(MediaType.APPLICATION_JSON).once(); + } + EasyMock.expect(request.getHeader(AuditManager.X_DRUID_AUTHOR)).andReturn("some author").once(); + EasyMock.expect(request.getHeader(AuditManager.X_DRUID_COMMENT)).andReturn("some comment").once(); + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(AUTH_RESULT).once(); + EasyMock.expect(request.getRemoteAddr()).andReturn("127.0.0.1").once(); + + return request; + } } diff --git a/server/src/test/java/org/apache/druid/server/http/RulesResourceTest.java b/server/src/test/java/org/apache/druid/server/http/RulesResourceTest.java index 5b52e3d30ee9..ab4bff0f58c2 100644 --- a/server/src/test/java/org/apache/druid/server/http/RulesResourceTest.java +++ b/server/src/test/java/org/apache/druid/server/http/RulesResourceTest.java @@ -27,6 +27,7 @@ import org.apache.druid.java.util.common.Intervals; import org.apache.druid.metadata.MetadataRuleManager; import org.easymock.EasyMock; +import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; @@ -51,26 +52,10 @@ public void setUp() @Test public void testGetDatasourceRuleHistoryWithCount() { - AuditEntry entry1 = new AuditEntry( - "testKey", - "testType", - new AuditInfo( - "testAuthor", - "testComment", - "127.0.0.1" - ), - "testPayload", + AuditEntry entry1 = createAuditEntry( DateTimes.of("2013-01-02T00:00:00Z") ); - AuditEntry entry2 = new AuditEntry( - "testKey", - "testType", - new AuditInfo( - "testAuthor", - "testComment", - "127.0.0.1" - ), - "testPayload", + AuditEntry entry2 = createAuditEntry( DateTimes.of("2013-01-01T00:00:00Z") ); EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("datasource1"), EasyMock.eq("rules"), EasyMock.eq(2))) @@ -94,29 +79,17 @@ public void testGetDatasourceRuleHistoryWithInterval() { String interval = "P2D/2013-01-02T00:00:00Z"; Interval theInterval = Intervals.of(interval); - AuditEntry entry1 = new AuditEntry( - "testKey", - "testType", - new AuditInfo( - "testAuthor", - "testComment", - "127.0.0.1" - ), - "testPayload", + AuditEntry entry1 = createAuditEntry( DateTimes.of("2013-01-02T00:00:00Z") ); - AuditEntry entry2 = new AuditEntry( - "testKey", - "testType", - new AuditInfo( - "testAuthor", - "testComment", - "127.0.0.1" - ), - "testPayload", + AuditEntry entry2 = createAuditEntry( DateTimes.of("2013-01-01T00:00:00Z") ); - EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("datasource1"), EasyMock.eq("rules"), EasyMock.eq(theInterval))) + EasyMock.expect(auditManager.fetchAuditHistory( + EasyMock.eq("datasource1"), + EasyMock.eq("rules"), + 
EasyMock.eq(theInterval) + )) .andReturn(ImmutableList.of(entry1, entry2)) .once(); EasyMock.replay(auditManager); @@ -136,8 +109,8 @@ public void testGetDatasourceRuleHistoryWithInterval() public void testGetDatasourceRuleHistoryWithWrongCount() { EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("datasource1"), EasyMock.eq("rules"), EasyMock.eq(-1))) - .andThrow(new IllegalArgumentException("Limit must be greater than zero!")) - .once(); + .andThrow(new IllegalArgumentException("Limit must be greater than zero!")) + .once(); EasyMock.replay(auditManager); RulesResource rulesResource = new RulesResource(databaseRuleManager, auditManager); @@ -154,26 +127,10 @@ public void testGetDatasourceRuleHistoryWithWrongCount() @Test public void testGetAllDatasourcesRuleHistoryWithCount() { - AuditEntry entry1 = new AuditEntry( - "testKey", - "testType", - new AuditInfo( - "testAuthor", - "testComment", - "127.0.0.1" - ), - "testPayload", + AuditEntry entry1 = createAuditEntry( DateTimes.of("2013-01-02T00:00:00Z") ); - AuditEntry entry2 = new AuditEntry( - "testKey", - "testType", - new AuditInfo( - "testAuthor", - "testComment", - "127.0.0.1" - ), - "testPayload", + AuditEntry entry2 = createAuditEntry( DateTimes.of("2013-01-01T00:00:00Z") ); EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("rules"), EasyMock.eq(2))) @@ -197,26 +154,10 @@ public void testGetAllDatasourcesRuleHistoryWithInterval() { String interval = "P2D/2013-01-02T00:00:00Z"; Interval theInterval = Intervals.of(interval); - AuditEntry entry1 = new AuditEntry( - "testKey", - "testType", - new AuditInfo( - "testAuthor", - "testComment", - "127.0.0.1" - ), - "testPayload", + AuditEntry entry1 = createAuditEntry( DateTimes.of("2013-01-02T00:00:00Z") ); - AuditEntry entry2 = new AuditEntry( - "testKey", - "testType", - new AuditInfo( - "testAuthor", - "testComment", - "127.0.0.1" - ), - "testPayload", + AuditEntry entry2 = createAuditEntry( DateTimes.of("2013-01-01T00:00:00Z") ); EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("rules"), EasyMock.eq(theInterval))) @@ -239,8 +180,8 @@ public void testGetAllDatasourcesRuleHistoryWithInterval() public void testGetAllDatasourcesRuleHistoryWithWrongCount() { EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("rules"), EasyMock.eq(-1))) - .andThrow(new IllegalArgumentException("Limit must be greater than zero!")) - .once(); + .andThrow(new IllegalArgumentException("Limit must be greater than zero!")) + .once(); EasyMock.replay(auditManager); RulesResource rulesResource = new RulesResource(databaseRuleManager, auditManager); @@ -254,4 +195,17 @@ public void testGetAllDatasourcesRuleHistoryWithWrongCount() EasyMock.verify(auditManager); } + private AuditInfo createAuditInfo() + { + return new AuditInfo("testAuthor", "testIdentity", "testComment", "127.0.0.1"); + } + + private AuditEntry createAuditEntry(DateTime auditTime) + { + return new AuditEntry( + "testKey", "testType", createAuditInfo(), null, + AuditEntry.Payload.fromString("testPayload"), auditTime + ); + } + } diff --git a/server/src/test/java/org/apache/druid/server/lookup/cache/LookupCoordinatorManagerTest.java b/server/src/test/java/org/apache/druid/server/lookup/cache/LookupCoordinatorManagerTest.java index 94699ae34868..c94d8e436683 100644 --- a/server/src/test/java/org/apache/druid/server/lookup/cache/LookupCoordinatorManagerTest.java +++ b/server/src/test/java/org/apache/druid/server/lookup/cache/LookupCoordinatorManagerTest.java @@ -104,6 +104,7 @@ public class LookupCoordinatorManagerTest 
Collections.emptySet() ); + private final AuditInfo auditInfo = new AuditInfo("author", "identify", "comment", "127.0.0.1"); private static StubServiceEmitter SERVICE_EMITTER; @BeforeClass @@ -522,8 +523,6 @@ public Map> getKnownLook } }; manager.start(); - final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); - Assert.assertThrows(ISE.class, () -> manager.updateLookups(TIERED_LOOKUP_MAP_V0, auditInfo)); } @@ -546,7 +545,6 @@ public Map> getKnownLook }; manager.start(); - final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); EasyMock.reset(configManager); EasyMock.expect( configManager.set( @@ -578,7 +576,6 @@ public Map> getKnownLook }; manager.start(); - final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); EasyMock.reset(configManager); EasyMock.expect(configManager.set( EasyMock.eq(LookupCoordinatorManager.LOOKUP_CONFIG_KEY), @@ -598,7 +595,6 @@ public void testUpdateLookupsAddsNewLookup() ImmutableMap.of("prop", "old") ); - final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, druidNodeDiscoveryProvider, @@ -660,7 +656,6 @@ public void testUpdateLookupsOnlyUpdatesToTier() "v0", ImmutableMap.of("prop", "old") ); - final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, druidNodeDiscoveryProvider, @@ -731,7 +726,6 @@ public Map> getKnownLook } }; manager.start(); - final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); EasyMock.reset(configManager); EasyMock.expect(configManager.set( EasyMock.eq(LookupCoordinatorManager.LOOKUP_CONFIG_KEY), @@ -761,7 +755,6 @@ public Map> getKnownLook } }; manager.start(); - final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); try { manager.updateLookups(TIERED_LOOKUP_MAP_V0, auditInfo); @@ -779,7 +772,6 @@ public void testUpdateLookupsAddsNewTier() ImmutableMap.of("prop", "old") ); - final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, druidNodeDiscoveryProvider, @@ -853,7 +845,6 @@ public Map> getKnownLook } }; manager.start(); - final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); EasyMock.reset(configManager); EasyMock.expect( configManager.set( @@ -899,7 +890,6 @@ public Map> getKnownLook } }; manager.start(); - final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); EasyMock.reset(configManager); EasyMock.expect( configManager.set( @@ -944,7 +934,6 @@ public Map> getKnownLook } }; manager.start(); - final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); EasyMock.reset(configManager); EasyMock.expect( configManager.set( @@ -985,7 +974,6 @@ public Map> getKnownLook } }; manager.start(); - final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); Assert.assertFalse(manager.deleteLookup(LOOKUP_TIER, "foo", auditInfo)); } @@ -1007,7 +995,6 @@ public Map> getKnownLook } }; manager.start(); - final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); Assert.assertFalse(manager.deleteLookup(LOOKUP_TIER, "foo", auditInfo)); } diff --git a/services/src/main/java/org/apache/druid/cli/CliCoordinator.java b/services/src/main/java/org/apache/druid/cli/CliCoordinator.java index d885b8748df0..00252b849830 100644 --- 
a/services/src/main/java/org/apache/druid/cli/CliCoordinator.java +++ b/services/src/main/java/org/apache/druid/cli/CliCoordinator.java @@ -36,7 +36,6 @@ import com.google.inject.name.Names; import com.google.inject.util.Providers; import org.apache.curator.framework.CuratorFramework; -import org.apache.druid.audit.AuditManager; import org.apache.druid.client.CoordinatorSegmentWatcherConfig; import org.apache.druid.client.CoordinatorServerView; import org.apache.druid.client.DirectDruidClientFactory; @@ -97,7 +96,6 @@ import org.apache.druid.segment.metadata.SegmentMetadataQuerySegmentWalker; import org.apache.druid.server.QueryScheduler; import org.apache.druid.server.QuerySchedulerProvider; -import org.apache.druid.server.audit.AuditManagerProvider; import org.apache.druid.server.coordinator.CoordinatorConfigManager; import org.apache.druid.server.coordinator.DruidCoordinator; import org.apache.druid.server.coordinator.DruidCoordinatorConfig; @@ -248,10 +246,6 @@ public void configure(Binder binder) .toProvider(MetadataRuleManagerProvider.class) .in(ManageLifecycle.class); - binder.bind(AuditManager.class) - .toProvider(AuditManagerProvider.class) - .in(ManageLifecycle.class); - binder.bind(LookupCoordinatorManager.class).in(LazySingleton.class); binder.bind(CoordinatorConfigManager.class); diff --git a/services/src/main/java/org/apache/druid/cli/CliOverlord.java b/services/src/main/java/org/apache/druid/cli/CliOverlord.java index d72d5eb24737..f48c9be01b61 100644 --- a/services/src/main/java/org/apache/druid/cli/CliOverlord.java +++ b/services/src/main/java/org/apache/druid/cli/CliOverlord.java @@ -37,7 +37,6 @@ import com.google.inject.name.Names; import com.google.inject.servlet.GuiceFilter; import com.google.inject.util.Providers; -import org.apache.druid.audit.AuditManager; import org.apache.druid.client.indexing.IndexingService; import org.apache.druid.discovery.NodeRole; import org.apache.druid.guice.IndexingServiceFirehoseModule; @@ -110,7 +109,6 @@ import org.apache.druid.segment.realtime.appenderator.DummyForInjectionAppenderatorsManager; import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider; import org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider; -import org.apache.druid.server.audit.AuditManagerProvider; import org.apache.druid.server.coordinator.CoordinatorOverlordServiceConfig; import org.apache.druid.server.http.RedirectFilter; import org.apache.druid.server.http.RedirectInfo; @@ -246,10 +244,6 @@ public void configure(Binder binder) binder.install(runnerConfigModule()); configureOverlordHelpers(binder); - binder.bind(AuditManager.class) - .toProvider(AuditManagerProvider.class) - .in(ManageLifecycle.class); - if (standalone) { binder.bind(RedirectFilter.class).in(LazySingleton.class); binder.bind(RedirectInfo.class).to(OverlordRedirectInfo.class).in(LazySingleton.class);
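
Not part of the patch: a minimal sketch of how calling code might record an audit event with the builder-based `AuditEntry` API that the updated tests above exercise (`AuditEntry.builder()`, the four-argument `AuditInfo`, and the single-argument `doAudit`). The class name, key, type, and payload values here are illustrative assumptions; only the imports and method signatures shown in the diff are taken from the patch.

```java
import org.apache.druid.audit.AuditEntry;
import org.apache.druid.audit.AuditInfo;
import org.apache.druid.audit.AuditManager;
import org.apache.druid.java.util.common.DateTimes;

public class AuditUsageSketch
{
  /**
   * Records a single audit event. The key, type and payload used here are
   * placeholders; a real caller would pass values describing the change
   * being audited.
   */
  static void recordAudit(AuditManager auditManager)
  {
    // Author, identity, comment and remote address of the change
    final AuditInfo auditInfo =
        new AuditInfo("console", "console-identity", "manual cleanup", "127.0.0.1");

    final AuditEntry entry = AuditEntry
        .builder()
        .key("wikipedia")
        .type("segment.unused")
        .auditInfo(auditInfo)
        .serializedPayload("{\"interval\":\"2010-01-01/P1D\"}")
        .auditTime(DateTimes.nowUtc())
        .build();

    // Single-argument doAudit replaces the old (key, type, auditInfo, payload, serde) form
    auditManager.doAudit(entry);
  }
}
```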