RED-3864: Added endpoint to delete wrongly imported redactions

Author: deiflaender, 2022-04-19 12:01:34 +02:00
parent 4c1fd58d5f
commit 5bbf4db5dc
8 changed files with 87 additions and 18 deletions

View File

@ -0,0 +1,20 @@
package com.iqser.red.service.persistence.service.v1.api.model.annotations;
import java.util.Set;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class DeleteImportedRedactionsRequest {
private String dossierId;
private String fileId;
private Set<String> annotationIds;
}
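
A minimal usage sketch for this request object via the generated Lombok builder; the IDs below are placeholders, not values from this commit.

// Sketch only: placeholder IDs, not part of this commit.
DeleteImportedRedactionsRequest request = DeleteImportedRedactionsRequest.builder()
        .dossierId("dossier-123")
        .fileId("file-456")
        .annotationIds(Set.of("annotation-1", "annotation-2"))
        .build();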

View File

@ -14,6 +14,7 @@ import com.iqser.red.service.pdftron.redaction.v1.api.model.DocumentRequest;
import com.iqser.red.service.pdftron.redaction.v1.api.model.TextHighlightConversionRequest;
import com.iqser.red.service.pdftron.redaction.v1.api.model.TextHighlightRequest;
import com.iqser.red.service.pdftron.redaction.v1.api.model.TextHighlightResponse;
import com.iqser.red.service.persistence.service.v1.api.model.annotations.DeleteImportedRedactionsRequest;
@ResponseStatus(value = HttpStatus.NO_CONTENT)
public interface ReanalysisResource {
@ -62,6 +63,9 @@ public interface ReanalysisResource {
@PostMapping(value = IMPORT_REDACTIONS_PATH, consumes = MediaType.APPLICATION_JSON_VALUE)
void importRedactions(@RequestBody DocumentRequest documentRequest);
@PostMapping(value = IMPORT_REDACTIONS_PATH + "/delete", consumes = MediaType.APPLICATION_JSON_VALUE)
void deleteImportedRedactions(@RequestBody DeleteImportedRedactionsRequest deleteImportedRedactionsRequest);
@ResponseStatus(value = HttpStatus.OK)
@PostMapping(value = TEXT_HIGHLIGHT_CONVERSION_PATH, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
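
A hedged sketch of how a caller could hit the new delete endpoint over HTTP with Spring's RestTemplate; the base URL is a placeholder, and it is assumed that IMPORT_REDACTIONS_PATH is visible on the caller's classpath and that the request object is built as in the sketch above.

// Client-side sketch (assumptions: placeholder base URL; IMPORT_REDACTIONS_PATH accessible to the caller).
RestTemplate restTemplate = new RestTemplate();
restTemplate.postForEntity(
        "http://persistence-service" + IMPORT_REDACTIONS_PATH + "/delete",
        deleteRequest,          // the DeleteImportedRedactionsRequest from the earlier sketch
        Void.class);
// The interface declares @ResponseStatus(NO_CONTENT), so a successful call should come back as HTTP 204.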

View File

@ -13,6 +13,7 @@ import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Sets;
import com.iqser.red.service.pdftron.redaction.v1.api.model.DocumentRequest;
import com.iqser.red.service.pdftron.redaction.v1.api.model.PdfTronOptimizeRequest;
@ -22,18 +23,22 @@ import com.iqser.red.service.pdftron.redaction.v1.api.model.TextHighlightConvers
import com.iqser.red.service.pdftron.redaction.v1.api.model.TextHighlightOperation;
import com.iqser.red.service.pdftron.redaction.v1.api.model.TextHighlightRequest;
import com.iqser.red.service.pdftron.redaction.v1.api.model.TextHighlightResponse;
import com.iqser.red.service.peristence.v1.server.service.FileManagementStorageService;
import com.iqser.red.service.peristence.v1.server.service.FileStatusService;
import com.iqser.red.service.peristence.v1.server.service.IndexingService;
import com.iqser.red.service.persistence.management.v1.processor.client.PDFTronRedactionClient;
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
import com.iqser.red.service.persistence.management.v1.processor.exception.ConflictException;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.DossierPersistenceService;
import com.iqser.red.service.persistence.service.v1.api.model.annotations.DeleteImportedRedactionsRequest;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.FileModel;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.FileType;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.ProcessingStatus;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.WorkflowStatus;
import com.iqser.red.service.persistence.service.v1.api.resources.ReanalysisResource;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@ -45,10 +50,13 @@ public class ReanalysisController implements ReanalysisResource {
private final DossierPersistenceService dossierPersistenceService;
private final IndexingService indexingService;
private final PDFTronRedactionClient pDFTronRedactionClient;
private final FileManagementStorageService fileManagementStorageService;
private final ObjectMapper objectMapper;
@Override
public void reanalyzeDossier(@PathVariable(DOSSIER_ID_PARAM) String dossierId, @RequestParam(value = "force", required = false, defaultValue = FALSE) boolean force) {
public void reanalyzeDossier(@PathVariable(DOSSIER_ID_PARAM) String dossierId,
@RequestParam(value = "force", required = false, defaultValue = FALSE) boolean force) {
var relevantFiles = getAllFilesForDossier(dossierId, true);
reanalyseFiles(dossierId, force, relevantFiles);
@ -93,7 +101,8 @@ public class ReanalysisController implements ReanalysisResource {
fileStatusService.setStatusOcrProcessing(dossierId, fileId);
} else {
FileModel dossierFile = fileStatusService.getStatus(fileId);
if (dossierFile.getProcessingStatus().equals(ProcessingStatus.DELETED) || dossierFile.getWorkflowStatus().equals(WorkflowStatus.APPROVED)) {
if (dossierFile.getProcessingStatus().equals(ProcessingStatus.DELETED) || dossierFile.getWorkflowStatus()
.equals(WorkflowStatus.APPROVED)) {
throw new ConflictException("Cannot analyse a deleted/approved file");
}
if (dossierFile.getLastOCRTime() != null) {
@ -119,12 +128,25 @@ public class ReanalysisController implements ReanalysisResource {
throw new BadRequestException("The files differ in number of pages");
}
pDFTronRedactionClient.importRedactions(documentRequest);
fileStatusService.setStatusFullReprocess(documentRequest.getDossierId(), documentRequest.getFileId(), true);
fileStatusService.setStatusFullReprocess(documentRequest.getDossierId(), documentRequest.getFileId(), true, false);
}
@SneakyThrows
public void deleteImportedRedactions(@RequestBody DeleteImportedRedactionsRequest deleteImportedRedactionsRequest) {
var importedRedactions = fileManagementStorageService.getImportedRedactions(deleteImportedRedactionsRequest.getDossierId(), deleteImportedRedactionsRequest.getFileId());
importedRedactions.getImportedRedactions().entrySet().forEach(entry -> {
entry.getValue().removeIf(v -> deleteImportedRedactionsRequest.getAnnotationIds().contains(v.getId()));
});
fileManagementStorageService.storeObject(deleteImportedRedactionsRequest.getDossierId(), deleteImportedRedactionsRequest.getFileId(), FileType.IMPORTED_REDACTIONS, objectMapper.writeValueAsBytes(importedRedactions));
fileStatusService.setStatusFullReprocess(deleteImportedRedactionsRequest.getDossierId(), deleteImportedRedactionsRequest.getFileId(), true, false);
}
public void reindex(@RequestParam(value = DOSSIER_ID_PARAM, required = false) String dossierId,
@RequestParam(value = "dropIndex", required = false, defaultValue = FALSE) boolean dropIndex, @RequestBody Set<String> fileIds) {
@RequestParam(value = "dropIndex", required = false, defaultValue = FALSE) boolean dropIndex,
@RequestBody Set<String> fileIds) {
indexingService.reindex(dossierId, fileIds, dropIndex);
}
@ -134,11 +156,14 @@ public class ReanalysisController implements ReanalysisResource {
var textHighlightResponse = pDFTronRedactionClient.processTextHighlights(textHighlightRequest);
if (textHighlightRequest.getOperation().equals(TextHighlightOperation.REMOVE) || textHighlightRequest.getOperation().equals(TextHighlightOperation.CONVERT)) {
fileStatusService.updateFileModificationDate(textHighlightRequest.getFileId(), OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS));
if (textHighlightRequest.getOperation()
.equals(TextHighlightOperation.REMOVE) || textHighlightRequest.getOperation()
.equals(TextHighlightOperation.CONVERT)) {
fileStatusService.updateFileModificationDate(textHighlightRequest.getFileId(), OffsetDateTime.now()
.truncatedTo(ChronoUnit.MILLIS));
}
if (textHighlightRequest.getOperation().equals(TextHighlightOperation.CONVERT)) {
fileStatusService.setStatusFullReprocess(textHighlightRequest.getDossierId(), textHighlightRequest.getFileId(), true);
fileStatusService.setStatusFullReprocess(textHighlightRequest.getDossierId(), textHighlightRequest.getFileId(), true, false);
}
return textHighlightResponse;
}
@ -149,10 +174,11 @@ public class ReanalysisController implements ReanalysisResource {
boolean hasTextHighlights = pDFTronRedactionClient.convertTextHighlights(textHighlightRequest);
fileStatusService.updateHasHighlights(textHighlightRequest.getFileId(), hasTextHighlights);
fileStatusService.updateFileModificationDate(textHighlightRequest.getFileId(), OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS));
fileStatusService.updateFileModificationDate(textHighlightRequest.getFileId(), OffsetDateTime.now()
.truncatedTo(ChronoUnit.MILLIS));
if (textHighlightRequest.getOperation().equals(TextHighlightConversionOperation.CONVERT)) {
fileStatusService.setStatusFullReprocess(textHighlightRequest.getDossierId(), textHighlightRequest.getFileId(), true);
fileStatusService.setStatusFullReprocess(textHighlightRequest.getDossierId(), textHighlightRequest.getFileId(), true, false);
}
}
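
The heart of deleteImportedRedactions above is the in-memory filtering step: every imported redaction whose ID is listed in the request is removed from its group before the object is stored again and a full reprocess without structure analysis is triggered. A standalone sketch of just that filtering, using a hypothetical Redaction record because the real ImportedRedactions model is not part of this diff:

// Hypothetical types and data, for illustration only.
record Redaction(String id) {}

Map<String, List<Redaction>> importedByGroup = new HashMap<>();
importedByGroup.put("page-1", new ArrayList<>(List.of(new Redaction("a"), new Redaction("b"))));

Set<String> annotationIdsToDelete = Set.of("a");
importedByGroup.values().forEach(group -> group.removeIf(r -> annotationIdsToDelete.contains(r.id())));
// importedByGroup now maps "page-1" to a single-element list containing Redaction("b").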

View File

@ -24,7 +24,7 @@ public class ExcludeFromAnalysisService {
if (!excluded) {
// if file has been re-enabled - process it
fileStatusService.setStatusFullReprocess(dossierId, fileId, false);
fileStatusService.setStatusFullReprocess(dossierId, fileId, false, true);
}
}
@ -37,7 +37,7 @@ public class ExcludeFromAnalysisService {
if (!excludedFromAutomaticAnalysis) {
// if file has been re-enabled - process it
fileStatusService.setStatusFullReprocess(dossierId, fileId, false);
fileStatusService.setStatusFullReprocess(dossierId, fileId, false, true);
}
}

View File

@ -11,6 +11,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.peristence.v1.server.utils.StorageIdUtils;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.FileType;
import com.iqser.red.service.redaction.v1.model.ImportedRedactions;
import com.iqser.red.service.redaction.v1.model.RedactionLog;
import com.iqser.red.service.redaction.v1.model.SectionGrid;
import com.iqser.red.storage.commons.exception.StorageObjectDoesNotExist;
@ -85,6 +86,22 @@ public class FileManagementStorageService {
}
public ImportedRedactions getImportedRedactions(String dossierId, String fileId) {
InputStreamResource inputStreamResource;
try {
inputStreamResource = storageService.getObject(StorageIdUtils.getStorageId(dossierId, fileId, FileType.IMPORTED_REDACTIONS));
} catch (StorageObjectDoesNotExist e) {
throw new NotFoundException("ImportedRedactions does not exist");
}
try {
return objectMapper.readValue(inputStreamResource.getInputStream(), ImportedRedactions.class);
} catch (IOException e) {
throw new RuntimeException("Could not convert ImportedRedactions", e);
}
}
public boolean imageInfoExists(String dossierId, String fileId) {
return storageService.objectExists(StorageIdUtils.getStorageId(dossierId, fileId, FileType.IMAGE_INFO));
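
For context, the new read helper is the counterpart of the existing storeObject method that the controller uses to write the filtered object back. A sketch of the read-modify-write round trip, with the modification step elided (method names as they appear in this diff, surrounding wiring assumed):

var importedRedactions = fileManagementStorageService.getImportedRedactions(dossierId, fileId);
// ... drop the unwanted redactions from the in-memory object here ...
fileManagementStorageService.storeObject(dossierId, fileId, FileType.IMPORTED_REDACTIONS,
        objectMapper.writeValueAsBytes(importedRedactions));
// writeValueAsBytes declares JsonProcessingException; the controller handles it via @SneakyThrows.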

View File

@ -77,7 +77,7 @@ public class FileStatusProcessingUpdateService {
log.info("OCR Successful for dossier {} and file {}, Attempt to update status: {}", dossierId, fileId, retryContext.getRetryCount());
fileStatusService.updateLastOCRTime(fileId);
fileStatusService.setStatusFullReprocess(dossierId, fileId, false);
fileStatusService.setStatusFullReprocess(dossierId, fileId, false, true);
return null;
});

View File

@ -188,7 +188,7 @@ public class FileStatusService {
@Transactional
public void setStatusFullReprocess(String dossierId, String fileId, boolean priority) {
public void setStatusFullReprocess(String dossierId, String fileId, boolean priority, boolean requiresStructureAnalysis) {
FileEntity fileStatus = fileStatusPersistenceService.getStatus(fileId);
@ -199,9 +199,11 @@ public class FileStatusService {
fileStatusPersistenceService.updateProcessingStatus(fileId, ProcessingStatus.FULLREPROCESS);
log.info("Delete text and NER entities from file {} in dossier {}", fileId, dossierId);
fileManagementStorageService.deleteObject(dossierId, fileId, FileType.TEXT);
fileManagementStorageService.deleteObject(dossierId, fileId, FileType.NER_ENTITIES);
if (requiresStructureAnalysis) {
log.info("Delete text and NER entities from file {} in dossier {}", fileId, dossierId);
fileManagementStorageService.deleteObject(dossierId, fileId, FileType.TEXT);
fileManagementStorageService.deleteObject(dossierId, fileId, FileType.NER_ENTITIES);
}
addToAnalysisQueue(dossierId, fileId, priority, Sets.newHashSet());
}
@ -454,7 +456,7 @@ public class FileStatusService {
fileStatusPersistenceService.overwriteFile(fileId, uploader, filename, hasHighlights);
wipeFileData(dossierId, fileId);
setStatusFullReprocess(dossierId, fileId, false);
setStatusFullReprocess(dossierId, fileId, false, true);
}
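
The new requiresStructureAnalysis flag splits full reprocessing into two flavours, as the updated call sites in this commit show:

// Full reprocess including structure analysis: extracted text and NER entities are deleted first
// (used after OCR, after a file overwrite, and when a file is re-enabled for analysis).
fileStatusService.setStatusFullReprocess(dossierId, fileId, false, true);

// Full reprocess that keeps the extracted text and NER entities
// (used after importing or deleting imported redactions, where the page text itself is unchanged).
fileStatusService.setStatusFullReprocess(dossierId, fileId, true, false);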

View File

@ -70,7 +70,7 @@ public class AutomaticAnalysisScheduler {
if (next.isFullAnalysisRequired()) {
log.info("Queued file: {} for automatic full analysis! ", next.getFilename());
fileStatusService.setStatusFullReprocess(next.getDossierId(), next.getId(), false);
fileStatusService.setStatusFullReprocess(next.getDossierId(), next.getId(), false, true);
} else if (next.isReanalysisRequired()) {
log.info("Queued file: {} for automatic reanalysis! ", next.getFilename());
fileStatusService.setStatusReprocess(next.getDossierId(), next.getId(), false);