RED-1212: Enabled excluding pages
This commit is contained in:
parent
3a4b8833c8
commit
b166ab29ee
@ -6,6 +6,7 @@ import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.Set;
|
||||
|
||||
@Data
|
||||
@Builder
|
||||
@ -19,6 +20,7 @@ public class AnalyzeRequest {
|
||||
private boolean reanalyseOnlyIfPossible;
|
||||
private ManualRedactions manualRedactions;
|
||||
private OffsetDateTime lastProcessed;
|
||||
private Set<Integer> excludedPages;
|
||||
|
||||
}
|
||||
|
||||
|
||||
@ -44,4 +44,6 @@ public class RedactionChangeLogEntry {
|
||||
|
||||
private boolean isDossierDictionaryEntry;
|
||||
|
||||
private boolean excluded;
|
||||
|
||||
}
|
||||
|
||||
@ -49,4 +49,6 @@ public class RedactionLogEntry {
|
||||
|
||||
private boolean isDossierDictionaryEntry;
|
||||
|
||||
private boolean excluded;
|
||||
|
||||
}
|
||||
|
||||
@ -14,16 +14,19 @@ public class AnalyzeResponseService {
|
||||
|
||||
boolean hasRequests = redactionLog.getRedactionLogEntry()
|
||||
.stream()
|
||||
.filter(entry -> !entry.isExcluded())
|
||||
.anyMatch(entry -> entry.isManual() && entry.getStatus()
|
||||
.equals(com.iqser.red.service.redaction.v1.model.Status.REQUESTED));
|
||||
|
||||
boolean hasRedactions = redactionLog.getRedactionLogEntry()
|
||||
.stream()
|
||||
.filter(entry -> !entry.isExcluded())
|
||||
.anyMatch(entry -> entry.isRedacted() && !entry.isManual() || entry.isManual() && entry.getStatus()
|
||||
.equals(com.iqser.red.service.redaction.v1.model.Status.APPROVED));
|
||||
|
||||
boolean hasImages = redactionLog.getRedactionLogEntry()
|
||||
.stream()
|
||||
.filter(entry -> !entry.isExcluded())
|
||||
.anyMatch(entry -> entry.isHint() && entry.getType().equals("image"));
|
||||
|
||||
boolean hasUpdates = redactionChangeLog != null && redactionChangeLog.getRedactionLogEntry() != null && !redactionChangeLog
|
||||
|
||||
@ -1,14 +1,44 @@
|
||||
package com.iqser.red.service.redaction.v1.server.redaction.service;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.kie.api.runtime.KieContainer;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.web.bind.annotation.RequestBody;
|
||||
|
||||
import com.iqser.red.service.file.management.v1.api.model.FileType;
|
||||
import com.iqser.red.service.redaction.v1.model.*;
|
||||
import com.iqser.red.service.redaction.v1.model.AnalyzeRequest;
|
||||
import com.iqser.red.service.redaction.v1.model.AnalyzeResult;
|
||||
import com.iqser.red.service.redaction.v1.model.Comment;
|
||||
import com.iqser.red.service.redaction.v1.model.IdRemoval;
|
||||
import com.iqser.red.service.redaction.v1.model.ManualForceRedact;
|
||||
import com.iqser.red.service.redaction.v1.model.ManualRedactionEntry;
|
||||
import com.iqser.red.service.redaction.v1.model.ManualRedactions;
|
||||
import com.iqser.red.service.redaction.v1.model.Rectangle;
|
||||
import com.iqser.red.service.redaction.v1.model.RedactionLog;
|
||||
import com.iqser.red.service.redaction.v1.model.RedactionLogEntry;
|
||||
import com.iqser.red.service.redaction.v1.model.SectionArea;
|
||||
import com.iqser.red.service.redaction.v1.server.classification.model.Document;
|
||||
import com.iqser.red.service.redaction.v1.server.classification.model.SectionText;
|
||||
import com.iqser.red.service.redaction.v1.server.classification.model.Text;
|
||||
import com.iqser.red.service.redaction.v1.server.client.LegalBasisClient;
|
||||
import com.iqser.red.service.redaction.v1.server.exception.RedactionException;
|
||||
import com.iqser.red.service.redaction.v1.server.redaction.model.Dictionary;
|
||||
import com.iqser.red.service.redaction.v1.server.redaction.model.*;
|
||||
import com.iqser.red.service.redaction.v1.server.redaction.model.DictionaryIncrement;
|
||||
import com.iqser.red.service.redaction.v1.server.redaction.model.DictionaryVersion;
|
||||
import com.iqser.red.service.redaction.v1.server.redaction.model.Entity;
|
||||
import com.iqser.red.service.redaction.v1.server.redaction.model.EntityPositionSequence;
|
||||
import com.iqser.red.service.redaction.v1.server.redaction.model.Image;
|
||||
import com.iqser.red.service.redaction.v1.server.redaction.model.RedRectangle2D;
|
||||
import com.iqser.red.service.redaction.v1.server.redaction.model.Section;
|
||||
import com.iqser.red.service.redaction.v1.server.redaction.model.SectionSearchableTextPair;
|
||||
import com.iqser.red.service.redaction.v1.server.redaction.utils.EntitySearchUtils;
|
||||
import com.iqser.red.service.redaction.v1.server.segmentation.PdfSegmentationService;
|
||||
import com.iqser.red.service.redaction.v1.server.storage.RedactionStorageService;
|
||||
@ -17,14 +47,6 @@ import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import org.kie.api.runtime.KieContainer;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.web.bind.annotation.RequestBody;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ -41,6 +63,7 @@ public class ReanalyzeService {
|
||||
private final AnalyzeResponseService analyzeResponseService;
|
||||
private final LegalBasisClient legalBasisClient;
|
||||
|
||||
|
||||
public AnalyzeResult analyze(AnalyzeRequest analyzeRequest) {
|
||||
|
||||
long startTime = System.currentTimeMillis();
|
||||
@ -66,11 +89,11 @@ public class ReanalyzeService {
|
||||
log.info("Redaction analysis successful...");
|
||||
|
||||
var legalBasis = legalBasisClient.getLegalBasisMapping(analyzeRequest.getDossierTemplateId());
|
||||
var redactionLog = new RedactionLog(classifiedDoc.getRedactionLogEntities(),legalBasis,
|
||||
classifiedDoc.getDictionaryVersion().getDossierTemplateVersion(),
|
||||
classifiedDoc.getDictionaryVersion().getDossierVersion(),
|
||||
classifiedDoc.getRulesVersion(),
|
||||
legalBasisClient.getVersion(analyzeRequest.getDossierTemplateId()));
|
||||
var redactionLog = new RedactionLog(classifiedDoc.getRedactionLogEntities(), legalBasis, classifiedDoc.getDictionaryVersion()
|
||||
.getDossierTemplateVersion(), classifiedDoc.getDictionaryVersion()
|
||||
.getDossierVersion(), classifiedDoc.getRulesVersion(), legalBasisClient.getVersion(analyzeRequest.getDossierTemplateId()));
|
||||
|
||||
excludeExcludedPages(redactionLog, analyzeRequest.getExcludedPages());
|
||||
|
||||
log.info("Analyzed with rules {} and dictionary {} for dossierTemplate: {}", classifiedDoc.getRulesVersion(), classifiedDoc
|
||||
.getDictionaryVersion(), analyzeRequest.getDossierTemplateId());
|
||||
@ -165,7 +188,8 @@ public class ReanalyzeService {
|
||||
|
||||
KieContainer kieContainer = droolsExecutionService.updateRules(analyzeRequest.getDossierTemplateId());
|
||||
|
||||
Dictionary dictionary = dictionaryService.getDeepCopyDictionary(analyzeRequest.getDossierTemplateId(), analyzeRequest.getDossierId());
|
||||
Dictionary dictionary = dictionaryService.getDeepCopyDictionary(analyzeRequest.getDossierTemplateId(), analyzeRequest
|
||||
.getDossierId());
|
||||
|
||||
List<SectionSearchableTextPair> sectionSearchableTextPairs = new ArrayList<>();
|
||||
for (SectionText reanalysisSection : reanalysisSections) {
|
||||
@ -240,8 +264,7 @@ public class ReanalyzeService {
|
||||
.getDossierTemplateId()));
|
||||
}
|
||||
|
||||
redactionLog.getRedactionLogEntry()
|
||||
.removeIf(entry -> sectionsToReanalyse.contains(entry.getSectionNumber()));
|
||||
redactionLog.getRedactionLogEntry().removeIf(entry -> sectionsToReanalyse.contains(entry.getSectionNumber()));
|
||||
redactionLog.getRedactionLogEntry().addAll(newRedactionLogEntries);
|
||||
return finalizeAnalysis(analyzeRequest, startTime, redactionLog, text, dictionaryIncrement);
|
||||
|
||||
@ -255,6 +278,8 @@ public class ReanalyzeService {
|
||||
redactionLog.setDictionaryVersion(dictionaryIncrement.getDictionaryVersion().getDossierTemplateVersion());
|
||||
redactionLog.setDossierDictionaryVersion(dictionaryIncrement.getDictionaryVersion().getDossierVersion());
|
||||
|
||||
excludeExcludedPages(redactionLog, analyzeRequest.getExcludedPages());
|
||||
|
||||
var changeLog = redactionChangeLogService.createAndStoreChangeLog(analyzeRequest.getDossierId(), analyzeRequest.getFileId(), redactionLog);
|
||||
redactionStorageService.storeObject(analyzeRequest.getDossierId(), analyzeRequest.getFileId(), FileType.REDACTION_LOG, redactionLog);
|
||||
|
||||
@ -292,4 +317,18 @@ public class ReanalyzeService {
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
private void excludeExcludedPages(RedactionLog redactionLog, Set<Integer> excludedPages) {
|
||||
|
||||
redactionLog.getRedactionLogEntry().forEach(entry -> {
|
||||
entry.getPositions().forEach(pos -> {
|
||||
if (excludedPages != null && excludedPages.contains(pos.getPage())) {
|
||||
entry.setExcluded(true);
|
||||
} else {
|
||||
entry.setExcluded(false);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@ -90,6 +90,7 @@ public class RedactionChangeLogService {
|
||||
.comments(entry.getComments())
|
||||
.changeType(changeType)
|
||||
.isDossierDictionaryEntry(entry.isDossierDictionaryEntry())
|
||||
.excluded(entry.isExcluded())
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
@ -633,6 +633,7 @@ public class RedactionIntegrationTest {
|
||||
long start = System.currentTimeMillis();
|
||||
ClassPathResource pdfFileResource = new ClassPathResource("files/new/Single Study - Oral (Gavage) Mouse.pdf");
|
||||
AnalyzeRequest request = prepareStorage(pdfFileResource.getInputStream());
|
||||
request.setExcludedPages(Set.of(1));
|
||||
|
||||
AnalyzeResult result = reanalyzeService.analyze(request);
|
||||
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user