From b6ae09a0f2845d41fe004c828ff3cae737a2c343 Mon Sep 17 00:00:00 2001
From: Felix Dittrich <31076102+f11h@users.noreply.github.com>
Date: Wed, 9 Nov 2022 08:51:33 +0100
Subject: [PATCH] Fix: Delete Search Portal Entries (#269)

* [WIP] Modify Entity DB Changeset Add Service Method

* Split Scheduler Services for Cancellation Add Delete of SearchPortal Entries

* Add Unit Test

* Checkstyle

* Fix Search getRootGroupByName
---
 .../quicktest/domain/Cancellation.java        |   3 +
 .../repository/CancellationRepository.java    |   3 +
 .../service/ArchiveSchedulingService.java     | 113 -----------
 .../CancellationSchedulingService.java        | 183 ++++++++++++++++++
 .../service/CancellationService.java          |  27 ++-
 .../quicktest/service/KeycloakService.java    | 156 +++++++++------
 src/main/resources/application-cloud.yml      |   4 +
 src/main/resources/application.yml            |   4 +
 src/main/resources/db/changelog.yml           |   3 +
 .../V024_update_cancellationTable_5.yml       |  12 ++
 .../service/CancellationCsvTest.java          |   6 +-
 .../service/CancellationServiceTest.java      |  56 +++++-
 src/test/resources/application.yml            |   3 +
 13 files changed, 392 insertions(+), 181 deletions(-)
 create mode 100644 src/main/java/app/coronawarn/quicktest/service/CancellationSchedulingService.java
 create mode 100644 src/main/resources/db/changelog/V024_update_cancellationTable_5.yml

diff --git a/src/main/java/app/coronawarn/quicktest/domain/Cancellation.java b/src/main/java/app/coronawarn/quicktest/domain/Cancellation.java
index 0a503a17..ea6fa916 100644
--- a/src/main/java/app/coronawarn/quicktest/domain/Cancellation.java
+++ b/src/main/java/app/coronawarn/quicktest/domain/Cancellation.java
@@ -92,6 +92,9 @@ public class Cancellation {
     @JsonIgnore
     private String dataExportError;
 
+    @Column(name = "search_portal_deleted")
+    private ZonedDateTime searchPortalDeleted;
+
     @Transient()
     private ZonedDateTime finalDeletion;
 
diff --git a/src/main/java/app/coronawarn/quicktest/repository/CancellationRepository.java b/src/main/java/app/coronawarn/quicktest/repository/CancellationRepository.java
index 9775ecdf..fa3aa21c 100644
--- a/src/main/java/app/coronawarn/quicktest/repository/CancellationRepository.java
+++ b/src/main/java/app/coronawarn/quicktest/repository/CancellationRepository.java
@@ -32,6 +32,9 @@ public interface CancellationRepository extends JpaRepository<Cancellation, String> {
     List<Cancellation> findByMovedToLongtermArchiveIsNullAndCancellationDateBefore(
         ZonedDateTime expiryDate, Pageable pageable);
 
+    List<Cancellation> findBySearchPortalDeletedIsNullAndCancellationDateBefore(
+        ZonedDateTime expiryDate, Pageable pageable);
+
     List<Cancellation> findByMovedToLongtermArchiveNotNullAndCsvCreatedIsNull(Pageable pageable);
 
     List<Cancellation> findByCancellationDateBeforeAndDataDeletedIsNull(ZonedDateTime expiryDate);
diff --git a/src/main/java/app/coronawarn/quicktest/service/ArchiveSchedulingService.java b/src/main/java/app/coronawarn/quicktest/service/ArchiveSchedulingService.java
index 462a9c52..c176314a 100644
--- a/src/main/java/app/coronawarn/quicktest/service/ArchiveSchedulingService.java
+++ b/src/main/java/app/coronawarn/quicktest/service/ArchiveSchedulingService.java
@@ -1,25 +1,9 @@
 package app.coronawarn.quicktest.service;
 
-import app.coronawarn.quicktest.archive.domain.ArchiveCipherDtoV1;
-import app.coronawarn.quicktest.config.CsvUploadConfig;
-import app.coronawarn.quicktest.domain.Cancellation;
-import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.model.ObjectMetadata;
-import com.opencsv.CSVWriter;
-import com.opencsv.bean.StatefulBeanToCsv;
-import com.opencsv.bean.StatefulBeanToCsvBuilder;
-import 
java.io.ByteArrayInputStream; -import java.io.StringWriter; -import java.nio.charset.StandardCharsets; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.time.ZonedDateTime; -import java.util.List; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import net.javacrumbs.shedlock.spring.annotation.SchedulerLock; import org.springframework.scheduling.annotation.Scheduled; -import org.springframework.security.crypto.codec.Hex; import org.springframework.stereotype.Service; @Slf4j @@ -28,10 +12,6 @@ public class ArchiveSchedulingService { private final ArchiveService archiveService; - private final CancellationService cancellationService; - - private final CsvUploadConfig s3Config; - private final AmazonS3 s3Client; /** * Scheduler used for moving quicktests from qt archive to longterm. @@ -44,97 +24,4 @@ public void moveToArchiveJob() { archiveService.moveToArchive(); log.info("Completed Job: moveToArchiveJob"); } - - /** - * Scheduler used for moving quicktests from qt archive to longterm when a cancellation was triggered. - */ - @Scheduled(cron = "${archive.cancellationArchiveJob.cron}") - @SchedulerLock(name = "CancellationArchiveJob", lockAtLeastFor = "PT0S", - lockAtMostFor = "${archive.cancellationArchiveJob.locklimit}") - public void cancellationArchiveJob() { - log.info("Starting Job: cancellationArchiveJob"); - processCancellationArchiveBatchRecursion(cancellationService.getReadyToArchiveBatch()); - log.info("Completed Job: cancellationArchiveJob"); - } - - private void processCancellationArchiveBatchRecursion(List cancellations) { - log.info("Process Cancellation Archive Batch with size of {}", cancellations.size()); - for (Cancellation cancellation : cancellations) { - String partnerId = cancellation.getPartnerId(); - archiveService.moveToArchiveByTenantId(partnerId); - cancellationService.updateMovedToLongterm(cancellation, ZonedDateTime.now()); - } - - List nextBatch = cancellationService.getReadyToArchiveBatch(); - if (!nextBatch.isEmpty()) { - processCancellationArchiveBatchRecursion(nextBatch); - } - } - - /** - * Scheduler used for moving longterm archives to bucket as a csv. 
- */ - @Scheduled(cron = "${archive.csvUploadJob.cron}") - @SchedulerLock(name = "CsvUploadJob", lockAtLeastFor = "PT0S", - lockAtMostFor = "${archive.csvUploadJob.locklimit}") - public void csvUploadJob() { - log.info("Starting Job: csvUploadJob"); - processCsvUploadBatchRecursion(cancellationService.getReadyToUploadBatch()); - log.info("Completed Job: csvUploadJob"); - } - - private void processCsvUploadBatchRecursion(List cancellations) { - log.info("Process CSV Upload Batch with size of {}", cancellations.size()); - for (Cancellation cancellation : cancellations) { - try { - List quicktests = - archiveService.getQuicktestsFromLongtermByTenantId(cancellation.getPartnerId()); - - StringWriter stringWriter = new StringWriter(); - CSVWriter csvWriter = - new CSVWriter(stringWriter, '\t', CSVWriter.NO_QUOTE_CHARACTER, - CSVWriter.DEFAULT_ESCAPE_CHARACTER, CSVWriter.DEFAULT_LINE_END); - StatefulBeanToCsv beanToCsv = - new StatefulBeanToCsvBuilder(csvWriter) - .build(); - beanToCsv.write(quicktests); - byte[] csvBytes = stringWriter.toString().getBytes(StandardCharsets.UTF_8); - - String objectId = cancellation.getPartnerId() + ".csv"; - - ObjectMetadata metadata = new ObjectMetadata(); - metadata.setContentLength(csvBytes.length); - - s3Client.putObject( - s3Config.getBucketName(), - objectId, - new ByteArrayInputStream(csvBytes), metadata); - - log.info("File stored to S3 with id {}", objectId); - - cancellationService.updateCsvCreated(cancellation, ZonedDateTime.now(), objectId, - getHash(csvBytes), quicktests.size(), csvBytes.length); - } catch (Exception e) { - String errorMessage = e.getClass().getName() + ": " + e.getMessage(); - - log.error("Could not convert Quicktest to CSV: " + errorMessage); - cancellationService.updateDataExportError(cancellation, errorMessage); - } - } - - List nextBatch = cancellationService.getReadyToUploadBatch(); - if (!nextBatch.isEmpty()) { - processCsvUploadBatchRecursion(nextBatch); - } - } - - private String getHash(byte[] bytes) { - try { - MessageDigest sha256 = MessageDigest.getInstance("SHA-256"); - byte[] hashBytes = sha256.digest(bytes); - return String.valueOf(Hex.encode(hashBytes)); - } catch (NoSuchAlgorithmException e) { - throw new RuntimeException("Failed to load SHA-256 Message Digest"); - } - } } diff --git a/src/main/java/app/coronawarn/quicktest/service/CancellationSchedulingService.java b/src/main/java/app/coronawarn/quicktest/service/CancellationSchedulingService.java new file mode 100644 index 00000000..05f9b3bc --- /dev/null +++ b/src/main/java/app/coronawarn/quicktest/service/CancellationSchedulingService.java @@ -0,0 +1,183 @@ +/*- + * ---license-start + * Corona-Warn-App / cwa-quick-test-backend + * --- + * Copyright (C) 2021 T-Systems International GmbH and all other contributors + * --- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ---license-end + */ + +package app.coronawarn.quicktest.service; + +import app.coronawarn.quicktest.archive.domain.ArchiveCipherDtoV1; +import app.coronawarn.quicktest.config.CsvUploadConfig; +import app.coronawarn.quicktest.domain.Cancellation; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.model.ObjectMetadata; +import com.opencsv.CSVWriter; +import com.opencsv.bean.StatefulBeanToCsv; +import com.opencsv.bean.StatefulBeanToCsvBuilder; +import java.io.ByteArrayInputStream; +import java.io.StringWriter; +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.time.ZonedDateTime; +import java.util.List; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import net.javacrumbs.shedlock.spring.annotation.SchedulerLock; +import org.keycloak.representations.idm.GroupRepresentation; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.security.crypto.codec.Hex; +import org.springframework.stereotype.Service; + +@Slf4j +@RequiredArgsConstructor +@Service +public class CancellationSchedulingService { + + private final ArchiveService archiveService; + private final CancellationService cancellationService; + + private final KeycloakService keycloakService; + + private final CsvUploadConfig s3Config; + private final AmazonS3 s3Client; + + /** + * Scheduler used for moving quicktests from qt archive to longterm when a cancellation was triggered. + */ + @Scheduled(cron = "${archive.cancellationArchiveJob.cron}") + @SchedulerLock(name = "CancellationArchiveJob", lockAtLeastFor = "PT0S", + lockAtMostFor = "${archive.cancellationArchiveJob.locklimit}") + public void cancellationArchiveJob() { + log.info("Starting Job: cancellationArchiveJob"); + processCancellationArchiveBatchRecursion(cancellationService.getReadyToArchiveBatch()); + log.info("Completed Job: cancellationArchiveJob"); + } + + private void processCancellationArchiveBatchRecursion(List cancellations) { + log.info("Process Cancellation Archive Batch with size of {}", cancellations.size()); + for (Cancellation cancellation : cancellations) { + String partnerId = cancellation.getPartnerId(); + archiveService.moveToArchiveByTenantId(partnerId); + cancellationService.updateMovedToLongterm(cancellation, ZonedDateTime.now()); + } + + List nextBatch = cancellationService.getReadyToArchiveBatch(); + if (!nextBatch.isEmpty()) { + processCancellationArchiveBatchRecursion(nextBatch); + } + } + + /** + * Scheduler used for moving longterm archives to bucket as a csv. 
+ */ + @Scheduled(cron = "${archive.csvUploadJob.cron}") + @SchedulerLock(name = "CsvUploadJob", lockAtLeastFor = "PT0S", + lockAtMostFor = "${archive.csvUploadJob.locklimit}") + public void csvUploadJob() { + log.info("Starting Job: csvUploadJob"); + processCsvUploadBatchRecursion(cancellationService.getReadyToUploadBatch()); + log.info("Completed Job: csvUploadJob"); + } + + private void processCsvUploadBatchRecursion(List cancellations) { + log.info("Process CSV Upload Batch with size of {}", cancellations.size()); + for (Cancellation cancellation : cancellations) { + try { + List quicktests = + archiveService.getQuicktestsFromLongtermByTenantId(cancellation.getPartnerId()); + + StringWriter stringWriter = new StringWriter(); + CSVWriter csvWriter = + new CSVWriter(stringWriter, '\t', CSVWriter.NO_QUOTE_CHARACTER, + CSVWriter.DEFAULT_ESCAPE_CHARACTER, CSVWriter.DEFAULT_LINE_END); + StatefulBeanToCsv beanToCsv = + new StatefulBeanToCsvBuilder(csvWriter) + .build(); + beanToCsv.write(quicktests); + byte[] csvBytes = stringWriter.toString().getBytes(StandardCharsets.UTF_8); + + String objectId = cancellation.getPartnerId() + ".csv"; + + ObjectMetadata metadata = new ObjectMetadata(); + metadata.setContentLength(csvBytes.length); + + s3Client.putObject( + s3Config.getBucketName(), + objectId, + new ByteArrayInputStream(csvBytes), metadata); + + log.info("File stored to S3 with id {}", objectId); + + cancellationService.updateCsvCreated(cancellation, ZonedDateTime.now(), objectId, + getHash(csvBytes), quicktests.size(), csvBytes.length); + } catch (Exception e) { + String errorMessage = e.getClass().getName() + ": " + e.getMessage(); + + log.error("Could not convert Quicktest to CSV: " + errorMessage); + cancellationService.updateDataExportError(cancellation, errorMessage); + } + } + + List nextBatch = cancellationService.getReadyToUploadBatch(); + if (!nextBatch.isEmpty()) { + processCsvUploadBatchRecursion(nextBatch); + } + } + + /** + * Scheduler used for deleting SearchPortal entries. 
+     */
+    @Scheduled(cron = "${archive.cancellationSearchPortalDeleteJob.cron}")
+    @SchedulerLock(name = "CancellationSearchPortalDeleteJob", lockAtLeastFor = "PT0S",
+        lockAtMostFor = "${archive.cancellationSearchPortalDeleteJob.locklimit}")
+    public void cancellationSearchPortalDeleteJob() {
+        log.info("Starting Job: cancellationSearchPortalDeleteJob");
+        processCancellationDeleteSearchPortalBatch(cancellationService.getReadyToDeleteSearchPortal());
+        log.info("Completed Job: cancellationSearchPortalDeleteJob");
+    }
+
+    private void processCancellationDeleteSearchPortalBatch(List<Cancellation> cancellations) {
+        log.info("Process Cancellation DeleteSearchPortal Batch with size of {}", cancellations.size());
+        for (Cancellation cancellation : cancellations) {
+            GroupRepresentation rootGroup = keycloakService.getRootGroupByName(cancellation.getPartnerId());
+
+            if (rootGroup == null) {
+                log.error("Could not find RootGroup for Partner {}", cancellation.getPartnerId());
+            } else {
+                keycloakService.deleteSubGroupsFromMapService(rootGroup);
+            }
+
+            cancellationService.updateSearchPortalDeleted(cancellation, ZonedDateTime.now());
+        }
+
+        List<Cancellation> nextBatch = cancellationService.getReadyToDeleteSearchPortal();
+        if (!nextBatch.isEmpty()) {
+            processCancellationDeleteSearchPortalBatch(nextBatch);
+        }
+    }
+
+    private String getHash(byte[] bytes) {
+        try {
+            MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
+            byte[] hashBytes = sha256.digest(bytes);
+            return String.valueOf(Hex.encode(hashBytes));
+        } catch (NoSuchAlgorithmException e) {
+            throw new RuntimeException("Failed to load SHA-256 Message Digest");
+        }
+    }
+}
diff --git a/src/main/java/app/coronawarn/quicktest/service/CancellationService.java b/src/main/java/app/coronawarn/quicktest/service/CancellationService.java
index 3db2dfd6..4b8a2cd7 100644
--- a/src/main/java/app/coronawarn/quicktest/service/CancellationService.java
+++ b/src/main/java/app/coronawarn/quicktest/service/CancellationService.java
@@ -138,7 +138,7 @@ public void updateCsvCreated(Cancellation cancellation, ZonedDateTime csvCreated
      * @param requester Username of the user who requested the download link
      */
     public void updateDownloadLinkRequested(
-        Cancellation cancellation, ZonedDateTime downloadLinkRequested, String requester) {
+        Cancellation cancellation, ZonedDateTime downloadLinkRequested, String requester) {
         cancellation.setDownloadLinkRequested(downloadLinkRequested);
         cancellation.setDownloadLinkRequestedBy(requester);
         cancellationRepository.save(cancellation);
@@ -170,6 +170,17 @@ public void updateDataExportError(Cancellation cancellation, String errorMessage
         cancellationRepository.save(cancellation);
     }
 
+    /**
+     * Set SearchPortalDeleted Flag/Timestamp and persist entity.
+     *
+     * @param cancellation Cancellation Entity
+     * @param dataDeleted timestamp of search portal deletion
+     */
+    public void updateSearchPortalDeleted(Cancellation cancellation, ZonedDateTime dataDeleted) {
+        cancellation.setSearchPortalDeleted(dataDeleted);
+        cancellationRepository.save(cancellation);
+    }
+
     /**
      * Searches in the DB for an existing cancellation entity which download request is older than 48h and not
     * moved_to_longterm_archive.
@@ -185,6 +196,20 @@ public List<Cancellation> getReadyToArchiveBatch() {
             ldt, PageRequest.of(0, archiveProperties.getCancellationArchiveJob().getChunkSize()));
     }
 
+    /**
+     * Searches in the DB for an existing cancellation entity with searchPortalDeleted null and
+     * cancellation_date in the past.
+     * Returns only one batch of entities. Batch Size depends on configuration. 
+ * + * @return List holding all entities found. + */ + public List getReadyToDeleteSearchPortal() { + ZonedDateTime ldt = ZonedDateTime.now(); + + return cancellationRepository.findBySearchPortalDeletedIsNullAndCancellationDateBefore( + ldt, PageRequest.of(0, archiveProperties.getCancellationArchiveJob().getChunkSize())); + } + /** * Searches in the DB for an existing cancellation entity which moved_to_longterm_archive is not null but * csv_created is null. diff --git a/src/main/java/app/coronawarn/quicktest/service/KeycloakService.java b/src/main/java/app/coronawarn/quicktest/service/KeycloakService.java index ac09c086..6bd94968 100644 --- a/src/main/java/app/coronawarn/quicktest/service/KeycloakService.java +++ b/src/main/java/app/coronawarn/quicktest/service/KeycloakService.java @@ -90,14 +90,14 @@ public class KeycloakService { * @throws KeycloakServiceException if User Creation has failed. */ public String createNewUserInGroup( - String firstName, - String lastName, - String username, - String password, - boolean roleCounter, - boolean roleLab, - String rootGroupPath, - String subGroupPath) throws KeycloakServiceException { + String firstName, + String lastName, + String username, + String password, + boolean roleCounter, + boolean roleLab, + String rootGroupPath, + String subGroupPath) throws KeycloakServiceException { CredentialRepresentation credentials = new CredentialRepresentation(); credentials.setType(CredentialRepresentation.PASSWORD); @@ -126,14 +126,14 @@ public String createNewUserInGroup( throw new KeycloakServiceException(KeycloakServiceException.Reason.ALREADY_EXISTS); } else if (HttpStatus.valueOf(userCreateResponse.getStatus()).is4xxClientError()) { log.error("Failed to create new user: {} {}", - userCreateResponse.getStatus(), - userCreateResponse.readEntity(String.class)); + userCreateResponse.getStatus(), + userCreateResponse.readEntity(String.class)); throw new KeycloakServiceException(KeycloakServiceException.Reason.BAD_REQUEST); } else if (userCreateResponse.getStatus() != HttpStatus.CREATED.value()) { log.error("Failed to create new user: {} {}", - userCreateResponse.getStatus(), - userCreateResponse.readEntity(String.class)); + userCreateResponse.getStatus(), + userCreateResponse.readEntity(String.class)); throw new KeycloakServiceException(KeycloakServiceException.Reason.SERVER_ERROR); } @@ -181,7 +181,7 @@ public void updateUserRoles(String userId, boolean roleCounter, boolean roleLab) // Delete not anymore needed roles Map roles = userResource.roles().realmLevel().listAll().stream() - .collect(Collectors.toMap(RoleRepresentation::getName, r -> r)); + .collect(Collectors.toMap(RoleRepresentation::getName, r -> r)); List deleteRoles = new ArrayList<>(); if (!roleLab && roles.containsKey(ROLE_LAB.replace(ROLE_PREFIX, ""))) { deleteRoles.add(roles.get(ROLE_LAB.replace(ROLE_PREFIX, ""))); @@ -198,8 +198,8 @@ public void updateUserRoles(String userId, boolean roleCounter, boolean roleLab) // Add new roles addRealmRoles(userId, getRoleNames( - (roleCounter && !roles.containsKey(ROLE_COUNTER.replace(ROLE_PREFIX, ""))), - (roleLab && !roles.containsKey(ROLE_LAB.replace(ROLE_PREFIX, ""))) + (roleCounter && !roles.containsKey(ROLE_COUNTER.replace(ROLE_PREFIX, ""))), + (roleLab && !roles.containsKey(ROLE_LAB.replace(ROLE_PREFIX, ""))) )); } @@ -250,10 +250,10 @@ public KeycloakGroupDetails getSubGroupDetails(String groupId) { groupDetails.setEmail(mapEntry.getEmail()); groupDetails.setWebsite(mapEntry.getWebsite()); 
groupDetails.setAppointmentRequired(mapEntryService.convertAppointmentToBoolean( - mapEntry.getAppointment())); + mapEntry.getAppointment())); if (mapEntry.getOpeningHours() != null) { groupDetails.setOpeningHours( - mapEntry.getOpeningHours().length > 0 ? Arrays.asList(mapEntry.getOpeningHours()) : null); + mapEntry.getOpeningHours().length > 0 ? Arrays.asList(mapEntry.getOpeningHours()) : null); } } else { groupDetails.setSearchPortalConsent(false); @@ -281,8 +281,8 @@ public KeycloakUserResponse getUserDetails(String userId, String rootGroupId) th try { realmRoles = realm().users().get(userId).roles().realmLevel().listAll() - .stream().map(RoleRepresentation::getName) - .collect(Collectors.toList()); + .stream().map(RoleRepresentation::getName) + .collect(Collectors.toList()); } catch (ClientErrorException e) { log.error("Could not get user roles"); throw new KeycloakServiceException(KeycloakServiceException.Reason.SERVER_ERROR); @@ -309,18 +309,48 @@ public KeycloakUserResponse getUserDetails(String userId, String rootGroupId) th */ public List getExtendedUserListForRootGroup(String groupId) { return getGroupMembers(groupId).stream() - .map(member -> { - KeycloakUserResponse userResponse = new KeycloakUserResponse(); - userResponse.setId(member.getId()); - userResponse.setFirstName(member.getFirstName()); - userResponse.setLastName(member.getLastName()); - userResponse.setUsername(member.getUsername()); - userResponse.setRoleLab(null); - userResponse.setRoleCounter(null); - userResponse.setSubGroup(getSubgroupId(member.getId(), groupId)); - return userResponse; - }) - .collect(Collectors.toList()); + .map(member -> { + KeycloakUserResponse userResponse = new KeycloakUserResponse(); + userResponse.setId(member.getId()); + userResponse.setFirstName(member.getFirstName()); + userResponse.setLastName(member.getLastName()); + userResponse.setUsername(member.getUsername()); + userResponse.setRoleLab(null); + userResponse.setRoleCounter(null); + userResponse.setSubGroup(getSubgroupId(member.getId(), groupId)); + return userResponse; + }) + .collect(Collectors.toList()); + } + + /** + * Search Keycloak for RootGroup with exact name. + * + * @param groupName to search for. 
+ * @return {@link GroupRepresentation} of the found group or null + */ + public GroupRepresentation getRootGroupByName(String groupName) { + GroupRepresentation foundGroup = null; + final int pageSize = 100; + int page = 0; + + do { + List matchingGroups = realm().groups().groups(groupName, page * pageSize, pageSize); + + if (matchingGroups.isEmpty()) { + // Result of search is empty -> not matching group exists + break; + } + + // Search for groups having Path starting with "/" followed by GroupName --> Root Group + foundGroup = matchingGroups.stream() + .dropWhile(group -> !group.getPath().equals("/" + groupName)) + .findAny().orElse(null); + + page++; + } while (foundGroup == null); + + return foundGroup; } /** @@ -364,11 +394,11 @@ public void deleteGroup(String rootGroupName, String groupId) throws KeycloakSer GroupRepresentation group = realm().groups().group(groupId).toRepresentation(); List subGroupPocIds = getSubGroupIds(group).stream() - .map(this::getSubGroupDetails) - .filter(Objects::nonNull) - .map(KeycloakGroupDetails::getPocId) - .filter(Objects::nonNull) - .collect(Collectors.toList()); + .map(this::getSubGroupDetails) + .filter(Objects::nonNull) + .map(KeycloakGroupDetails::getPocId) + .filter(Objects::nonNull) + .collect(Collectors.toList()); if (quickTestService.pendingTestsForTenantAndPocsExists(rootGroupName, subGroupPocIds)) { log.info("User tried to delete a group with pending quick tests"); @@ -399,11 +429,11 @@ public void deleteGroup(String rootGroupName, String groupId) throws KeycloakSer public void deleteSubGroupsFromMapService(GroupRepresentation group) { List subGroupPocIds = getSubGroupIds(group).stream() - .map(this::getSubGroupDetails) - .filter(Objects::nonNull) - .map(KeycloakGroupDetails::getPocId) - .filter(Objects::nonNull) - .collect(Collectors.toList()); + .map(this::getSubGroupDetails) + .filter(Objects::nonNull) + .map(KeycloakGroupDetails::getPocId) + .filter(Objects::nonNull) + .collect(Collectors.toList()); for (String subGroupId : subGroupPocIds) { mapEntryService.deleteIfExists(subGroupId); @@ -422,7 +452,7 @@ private void getSubGroupIdsRecursion(List ids, GroupRepresentation group ids.add(group.getId()); group.getSubGroups() - .forEach(g -> getSubGroupIdsRecursion(ids, g)); + .forEach(g -> getSubGroupIdsRecursion(ids, g)); } /** @@ -435,7 +465,7 @@ private void getSubGroupIdsRecursion(List ids, GroupRepresentation group public void moveGroup(String groupId, String newParent) throws KeycloakServiceException { try { Response response = realm().groups().group(newParent).subGroup( - realm().groups().group(groupId).toRepresentation() + realm().groups().group(groupId).toRepresentation() ); if (response.getStatus() == HttpStatus.CONFLICT.value()) { @@ -471,8 +501,8 @@ public void moveUser(String userId, String rootGroupId, String newParent) throws // Remove user from all groups except root group realm().users().get(userId).groups().stream() - .filter(group -> !group.getId().equals(rootGroupId)) - .forEach(group -> userResource.leaveGroup(group.getId())); + .filter(group -> !group.getId().equals(rootGroupId)) + .forEach(group -> userResource.leaveGroup(group.getId())); realm().users().get(userId).joinGroup(newParent); log.info("Moved user {} into group {}", userId, newParent); @@ -495,12 +525,12 @@ public List getRootGroupsOfUser(String userId) { List rootGroups = realm().groups().groups(0, Integer.MAX_VALUE); List userGroupIds = realm().users().get(userId).groups().stream() - .map(GroupRepresentation::getId) - .collect(Collectors.toList()); + 
.map(GroupRepresentation::getId) + .collect(Collectors.toList()); return rootGroups.stream() - .filter(rg -> userGroupIds.contains(rg.getId())) - .collect(Collectors.toList()); + .filter(rg -> userGroupIds.contains(rg.getId())) + .collect(Collectors.toList()); } /** @@ -509,7 +539,7 @@ public List getRootGroupsOfUser(String userId) { * @param details The KeycloakGroupDetails object */ public void updateGroup(KeycloakGroupDetails details) - throws KeycloakServiceException { + throws KeycloakServiceException { log.info("Updating group with id {}", details.getId()); GroupResource groupResource = realm().groups().group(details.getId()); GroupRepresentation group; @@ -522,8 +552,8 @@ public void updateGroup(KeycloakGroupDetails details) group.setName(details.getName()); // do not update POC ID group.setAttributes(getGroupAttributes(details.getPocDetails(), - getFromAttributes(group.getAttributes(), POC_ID_ATTRIBUTE), details.getBsnr(), - details.getEnablePcr())); + getFromAttributes(group.getAttributes(), POC_ID_ATTRIBUTE), details.getBsnr(), + details.getEnablePcr())); try { groupResource.update(group); @@ -553,7 +583,7 @@ public void updateGroup(KeycloakGroupDetails details) * @param details The KeycloakGroupDetails object */ public void createGroup(KeycloakGroupDetails details, String parent) - throws KeycloakServiceException { + throws KeycloakServiceException { log.info("Creating new group"); GroupRepresentation newGroup = new GroupRepresentation(); newGroup.setName(details.getName()); @@ -568,7 +598,7 @@ public void createGroup(KeycloakGroupDetails details, String parent) // setting group properties with Group Details and POC ID newGroup = response.readEntity(GroupRepresentation.class); newGroup.setAttributes(getGroupAttributes(details.getPocDetails(), newGroup.getId(), details.getBsnr(), - details.getEnablePcr())); + details.getEnablePcr())); realm().groups().group(newGroup.getId()).update(newGroup); if (details.getSearchPortalConsent()) { details.setId(newGroup.getId()); @@ -594,9 +624,9 @@ public Optional getGroup(String name) { String path = name.startsWith("/") ? 
name : "/" + name; log.debug("Getting group: [{}]", path); return realm().groups().groups(name, 0, Integer.MAX_VALUE) - .stream() - .filter(group -> group.getPath().equals(path)) - .findFirst(); + .stream() + .filter(group -> group.getPath().equals(path)) + .findFirst(); } /** @@ -608,10 +638,10 @@ public Optional getGroup(String name) { */ private String getSubgroupId(String userId, String rootGroupId) { return realm().users().get(userId).groups().stream() - .filter(group -> !group.getId().equals(rootGroupId)) - .findFirst() - .map(GroupRepresentation::getId) - .orElse(null); + .filter(group -> !group.getId().equals(rootGroupId)) + .findFirst() + .map(GroupRepresentation::getId) + .orElse(null); } private List getRoleNames(boolean roleCounter, boolean roleLab) { @@ -639,8 +669,8 @@ private UserRepresentation findUserByUsername(String username) throws KeycloakSe private void addRealmRoles(String userId, List roleNames) { List roles = roleNames.stream() - .map(roleName -> realm().roles().get(roleName).toRepresentation()) - .collect(Collectors.toList()); + .map(roleName -> realm().roles().get(roleName).toRepresentation()) + .collect(Collectors.toList()); if (!roles.isEmpty()) { realm().users().get(userId).roles().realmLevel().add(roles); diff --git a/src/main/resources/application-cloud.yml b/src/main/resources/application-cloud.yml index b1d23e33..509d3ca8 100644 --- a/src/main/resources/application-cloud.yml +++ b/src/main/resources/application-cloud.yml @@ -151,6 +151,10 @@ archive: cron: ${QT_CANCELLATION_ARCHIVE_JOB_CRON:0 0/5 * * * *} locklimit: 1800000 chunkSize: ${QT_CANCELLATION_ARCHIVE_JOB_CHUNK_SIZE:100} + cancellationSearchPortalDeleteJob: + cron: ${QT_CANCELLATION_SEARCH_PORTAL_DELETE_JOB_CRON:0 0/5 * * * *} + locklimit: 1800000 + chunkSize: ${QT_CANCELLATION_SEARCH_PORTAL_JOB_CHUNK_SIZE:100} csvUploadJob: cron: ${QT_CANCELLATION_UPLOAD_JOB_CRON:0 0/5 * * * *} locklimit: 1800000 diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml index 768d9037..785ea84f 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -129,6 +129,10 @@ archive: cron: 0 * * * * * locklimit: 1800000 chunkSize: 100 + cancellationSearchPortalDeleteJob: + cron: 0 * * * * * + locklimit: 1800000 + chunkSize: 100 csvUploadJob: cron: 0 * * * * * locklimit: 1800000 diff --git a/src/main/resources/db/changelog.yml b/src/main/resources/db/changelog.yml index 1fb74851..389defd2 100644 --- a/src/main/resources/db/changelog.yml +++ b/src/main/resources/db/changelog.yml @@ -68,3 +68,6 @@ databaseChangeLog: - include: file: changelog/V023_update_cancellationTable_4.yml relativeToChangelogFile: true + - include: + file: changelog/V024_update_cancellationTable_5.yml + relativeToChangelogFile: true diff --git a/src/main/resources/db/changelog/V024_update_cancellationTable_5.yml b/src/main/resources/db/changelog/V024_update_cancellationTable_5.yml new file mode 100644 index 00000000..fdf8c305 --- /dev/null +++ b/src/main/resources/db/changelog/V024_update_cancellationTable_5.yml @@ -0,0 +1,12 @@ +databaseChangeLog: + - changeSet: + id: update-cancellation-table-5 + author: f11h + changes: + - addColumn: + tableName: cancellation + column: + name: search_portal_deleted + type: datetime + constraints: + nullable: true diff --git a/src/test/java/app/coronawarn/quicktest/service/CancellationCsvTest.java b/src/test/java/app/coronawarn/quicktest/service/CancellationCsvTest.java index 6ad48711..f72ede32 100644 --- 
a/src/test/java/app/coronawarn/quicktest/service/CancellationCsvTest.java +++ b/src/test/java/app/coronawarn/quicktest/service/CancellationCsvTest.java @@ -67,7 +67,7 @@ class CancellationCsvTest { @Autowired - private ArchiveSchedulingService archiveSchedulingService; + private CancellationSchedulingService cancellationSchedulingService; @Autowired private QuickTestArchiveRepository shortTermArchiveRepository; @@ -108,7 +108,7 @@ void testCsvExport(int n) throws IOException, NoSuchAlgorithmException, CsvExcep Assertions.assertEquals(n, shortTermArchiveRepository.findAllByTenantId(PARTNER_ID, Pageable.unpaged()).count()); Assertions.assertEquals(0, longTermArchiveRepository.findAllByTenantId(PARTNER_ID_HASH).size()); - archiveSchedulingService.cancellationArchiveJob(); + cancellationSchedulingService.cancellationArchiveJob(); Assertions.assertEquals(0, shortTermArchiveRepository.findAllByTenantId(PARTNER_ID, Pageable.unpaged()).count()); Assertions.assertEquals(n, longTermArchiveRepository.findAllByTenantId(PARTNER_ID_HASH).size()); @@ -118,7 +118,7 @@ void testCsvExport(int n) throws IOException, NoSuchAlgorithmException, CsvExcep when(s3Client.putObject(anyString(), eq(expectedFileName), inputStreamArgumentCaptor.capture(), any())) .thenReturn(new PutObjectResult()); - archiveSchedulingService.csvUploadJob(); + cancellationSchedulingService.csvUploadJob(); verify(s3Client).putObject(anyString(), eq(expectedFileName), any(), any()); diff --git a/src/test/java/app/coronawarn/quicktest/service/CancellationServiceTest.java b/src/test/java/app/coronawarn/quicktest/service/CancellationServiceTest.java index 9d9d4800..1d986db5 100644 --- a/src/test/java/app/coronawarn/quicktest/service/CancellationServiceTest.java +++ b/src/test/java/app/coronawarn/quicktest/service/CancellationServiceTest.java @@ -20,12 +20,16 @@ package app.coronawarn.quicktest.service; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.endsWith; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import app.coronawarn.quicktest.domain.Cancellation; import app.coronawarn.quicktest.domain.QuickTestArchive; @@ -41,6 +45,7 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.keycloak.representations.idm.GroupRepresentation; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.mock.mockito.MockBean; @@ -60,6 +65,12 @@ class CancellationServiceTest { @Autowired private CancellationRepository cancellationRepository; + @Autowired + private CancellationSchedulingService cancellationSchedulingService; + + @MockBean + private KeycloakService keycloakServiceMock; + @MockBean private AmazonS3 s3Client; @@ -225,6 +236,49 @@ void testGetReadyToArchive() { assertFalse(results.isEmpty()); } + @Test + void testGetReadyForSearchPortalDeletion() { + Cancellation cancellation = cancellationService.createCancellation(PARTNER_ID, ZonedDateTime.now().plusDays(1)); + 
assertTrue(cancellationService.getReadyToDeleteSearchPortal().isEmpty()); + + cancellation.setCancellationDate(ZonedDateTime.now().minusDays(1)); + cancellationRepository.save(cancellation); + + List results = cancellationService.getReadyToDeleteSearchPortal(); + assertEquals(1, results.size()); + } + + @Test + void testDeleteSearchPortalEntries() { + Cancellation cancellation1 = cancellationService + .createCancellation("PARTNER_1", ZonedDateTime.now().minusDays(1)); + GroupRepresentation groupRepresentation1 = new GroupRepresentation(); + + Cancellation cancellation2 = cancellationService + .createCancellation("PARTNER_2", ZonedDateTime.now().plusDays(1)); + GroupRepresentation groupRepresentation2 = new GroupRepresentation(); + + Cancellation cancellation3 = cancellationService + .createCancellation("PARTNER_3", ZonedDateTime.now().minusDays(1)); + GroupRepresentation groupRepresentation3 = new GroupRepresentation(); + + Cancellation cancellation4 = cancellationService + .createCancellation("PARTNER_4", ZonedDateTime.now().plusDays(1)); + GroupRepresentation groupRepresentation4 = new GroupRepresentation(); + + when(keycloakServiceMock.getRootGroupByName(eq(cancellation1.getPartnerId()))).thenReturn(groupRepresentation1); + when(keycloakServiceMock.getRootGroupByName(eq(cancellation2.getPartnerId()))).thenReturn(groupRepresentation2); + when(keycloakServiceMock.getRootGroupByName(eq(cancellation3.getPartnerId()))).thenReturn(groupRepresentation3); + when(keycloakServiceMock.getRootGroupByName(eq(cancellation4.getPartnerId()))).thenReturn(groupRepresentation4); + + cancellationSchedulingService.cancellationSearchPortalDeleteJob(); + + verify(keycloakServiceMock).deleteSubGroupsFromMapService(groupRepresentation1); + verify(keycloakServiceMock, never()).deleteSubGroupsFromMapService(groupRepresentation2); + verify(keycloakServiceMock).deleteSubGroupsFromMapService(groupRepresentation3); + verify(keycloakServiceMock, never()).deleteSubGroupsFromMapService(groupRepresentation4); + } + @Test void testGetReadyToArchiveYoungerThen48h() { Cancellation cancellation = cancellationService.createCancellation(PARTNER_ID, CANCELLATION_DATE); diff --git a/src/test/resources/application.yml b/src/test/resources/application.yml index 5489ca4d..597ed43b 100644 --- a/src/test/resources/application.yml +++ b/src/test/resources/application.yml @@ -100,6 +100,9 @@ archive: cron: "-" locklimit: 1800000 older-than-in-seconds: 5000 + cancellationSearchPortalDeleteJob: + cron: '-' + locklimit: 1800000 cancellationArchiveJob: cron: "-" locklimit: 1800000
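
Illustration of the root-group lookup added in KeycloakService.getRootGroupByName: Keycloak's paged group search matches on name and can also return parent groups whose subgroups match, so the patch accepts only an entry whose path is exactly "/" + groupName. The standalone sketch below is not part of the patch; the class and helper names are placeholders, it assumes the Keycloak admin representation classes (org.keycloak.representations.idm) are on the classpath, and it uses filter/findFirst as an equivalent of the patch's dropWhile/findAny.

import java.util.List;
import java.util.Optional;
import org.keycloak.representations.idm.GroupRepresentation;

public class RootGroupMatchSketch {

    // Same matching rule the patch applies to each page of search results:
    // a group is the root group only if its path equals "/" + groupName.
    static Optional<GroupRepresentation> findRootGroup(List<GroupRepresentation> page, String groupName) {
        return page.stream()
            .filter(group -> ("/" + groupName).equals(group.getPath()))
            .findFirst();
    }

    public static void main(String[] args) {
        GroupRepresentation root = new GroupRepresentation();
        root.setName("partnerA");
        root.setPath("/partnerA");

        GroupRepresentation nested = new GroupRepresentation();
        nested.setName("partnerA");
        nested.setPath("/someParent/partnerA");

        // Only the top-level group is accepted; the nested one is ignored.
        System.out.println(findRootGroup(List.of(nested, root), "partnerA")
            .map(GroupRepresentation::getPath)
            .orElse("not found")); // prints /partnerA
    }
}

With the paging in the patch, the loop moves to the next page whenever a page contains name matches but none with the exact root path, and gives up (returning null) as soon as a page comes back empty.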