Merge branch 'RED-8168' into 'master'

RED-8168: Calculation of analysis flags should be async

Closes RED-8168

See merge request redactmanager/persistence-service!297
This commit is contained in:
Kilian Schüttler 2024-01-18 16:12:36 +01:00
commit 0d9f5276a5
19 changed files with 371 additions and 222 deletions

View File

@ -31,8 +31,6 @@ public class ViewedPagesController implements ViewedPagesResource {
private final AccessControlService accessControlService;
private final ViewedPagesPersistenceService viewedPagesPersistenceService;
private final FileStatusService fileStatusService;
private final AnalysisFlagsCalculationService analysisFlagsCalculationService;
@PreAuthorize("hasAuthority('" + MANAGE_VIEWED_PAGES + "')")
@ -40,8 +38,6 @@ public class ViewedPagesController implements ViewedPagesResource {
accessControlService.verifyUserIsReviewer(dossierId, fileId);
viewedPagesPersistenceService.insertPage(fileId, KeycloakSecurity.getUserId(), viewedPagesRequest.getPage());
var file = fileStatusService.getStatus(fileId);
analysisFlagsCalculationService.calculateFlags(file.getDossierId(), fileId);
}
@ -50,9 +46,6 @@ public class ViewedPagesController implements ViewedPagesResource {
accessControlService.verifyUserIsReviewer(dossierId, fileId);
viewedPagesPersistenceService.removePage(fileId, KeycloakSecurity.getUserId(), page);
var file = fileStatusService.getStatus(fileId);
analysisFlagsCalculationService.calculateFlags(file.getDossierId(), fileId);
}

View File

@ -64,6 +64,8 @@ public class MessagingConfiguration {
public static final String CV_ANALYSIS_RESPONSE_QUEUE = "cv_analysis_response_queue";
public static final String CV_ANALYSIS_DLQ = "cv_analysis_dead_letter_queue";
public static final String ANALYSIS_FLAG_CALCULATION_QUEUE = "analysis_flag_calculation_queue";
public static final String OCR_STATUS_UPDATE_RESPONSE_QUEUE = "ocr_status_update_response_queue";
public static final String OCR_STATUS_UPDATE_RESPONSE_DQL = "ocr_status_update_response_dql";
@ -291,6 +293,12 @@ public class MessagingConfiguration {
return QueueBuilder.durable(REPORT_RESULT_DLQ).build();
}
/**
 * Durable RabbitMQ queue receiving analysis-flag-calculation requests
 * ({@code AnalysisFlagCalculationMessage}) for asynchronous processing.
 */
@Bean
public Queue analysisFlagCalculationQueue() {
return QueueBuilder.durable(ANALYSIS_FLAG_CALCULATION_QUEUE).build();
}
@Bean
public Queue indexingQueue() {

View File

@ -168,6 +168,9 @@ public class FileEntity {
@Column
private OffsetDateTime lastManualChangeDate;
@Column
private OffsetDateTime lastFlagCalculation;
@Column
private boolean hasHighlights;

View File

@ -183,6 +183,29 @@ public class CreateJobsConfiguration {
.build();
}
/**
 * Trigger that fires the {@code AnalysisFlagCalculationSchedulerJob} every 10 seconds
 * (Quartz cron, fields: second minute hour day-of-month month day-of-week).
 *
 * @throws ParseException declared because of {@code new CronExpression(...)}; the
 *                        expression is a constant, so this cannot occur at runtime
 */
@Bean
public Trigger analysisFlagCalculationSchedulerTrigger() throws ParseException {
return TriggerBuilder.newTrigger()
.forJob(analysisFlagCalculationSchedulerJobDetail())
.withIdentity("AnalysisFlagCalculationSchedulerTrigger")
.withDescription("Triggers AnalysisFlagCalculationSchedulerJob every 10 seconds")
.withSchedule(CronScheduleBuilder.cronSchedule(new CronExpression("*/10 * * * * ?")))
.build();
}
/**
 * Job definition for {@code AnalysisFlagCalculationSchedulerJob}; stored durably
 * ({@code storeDurably()}) so the job detail is kept even without an attached trigger.
 */
@Bean
public JobDetail analysisFlagCalculationSchedulerJobDetail() {
return JobBuilder.newJob()
.ofType(AnalysisFlagCalculationSchedulerJob.class)
.storeDurably()
.withIdentity("AnalysisFlagCalculationSchedulerJob")
.withDescription("Automatically queues files for analysis flag calculation")
.build();
}
}

View File

@ -0,0 +1,16 @@
package com.iqser.red.service.persistence.management.v1.processor.model;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Queue payload identifying a single file whose analysis flags should be
 * (re)calculated asynchronously. Published to the analysis flag calculation queue.
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class AnalysisFlagCalculationMessage {

    // NOTE(review): fields were package-private; narrowed to private. Lombok @Data
    // still generates the public getters/setters, so the external contract is unchanged.
    private String dossierId;
    private String fileId;
}

View File

@ -0,0 +1,5 @@
package com.iqser.red.service.persistence.management.v1.processor.model;
/**
 * Immutable identifier pairing a file with the dossier it belongs to.
 */
public record FileIdentifier(String dossierId, String fileId) {
}

View File

@ -6,6 +6,7 @@ import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.ViewedPageEntity;
@ -14,28 +15,36 @@ import com.iqser.red.service.persistence.management.v1.processor.service.persist
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.EntityLogEntry;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.EntryState;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.EntryType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.ManualChange;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.ManualRedactionType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
import io.micrometer.core.annotation.Timed;
import io.micrometer.observation.ObservationRegistry;
import io.micrometer.observation.annotation.Observed;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class AnalysisFlagsCalculationService {
private final FileStatusPersistenceService fileStatusPersistenceService;
private final EntityLogService entityLogService;
private final ViewedPagesPersistenceService viewedPagesPersistenceService;
FileStatusPersistenceService fileStatusPersistenceService;
EntityLogService entityLogService;
ViewedPagesPersistenceService viewedPagesPersistenceService;
ObservationRegistry observationRegistry;
@Timed("redactmanager_calculateFlags")
@Observed(name = "AnalysisFlagsCalculationService", contextualName = "calculate-flags")
public void calculateFlags(String dossierId, String fileId) {
addIdsToTrace(dossierId, fileId);
long startTime = System.currentTimeMillis();
var file = fileStatusPersistenceService.getStatus(fileId);
@ -56,9 +65,6 @@ public class AnalysisFlagsCalculationService {
OffsetDateTime lastManualChangeDate = null;
for (EntityLogEntry entry : entityLog.getEntityLogEntry()) {
if (entry.isExcluded()) {
continue;
}
String type = getType(entry.getType());
@ -67,15 +73,7 @@ public class AnalysisFlagsCalculationService {
if (entry.getManualChanges() != null && !entry.getManualChanges().isEmpty()) {
for (var manualChange : entry.getManualChanges()) {
if (!entryType.equals(EntryType.HINT) && !entryType.equals(EntryType.RECOMMENDATION) && StringUtils.isNotEmpty(entry.getReason()) && (manualChange.getManualRedactionType()
.equals(ManualRedactionType.ADD_LOCALLY) || manualChange.getManualRedactionType()
.equals(ManualRedactionType.RECATEGORIZE) || manualChange.getManualRedactionType()
.equals(ManualRedactionType.REMOVE_LOCALLY) || manualChange.getManualRedactionType()
.equals(ManualRedactionType.FORCE_REDACT) || manualChange.getManualRedactionType()
.equals(ManualRedactionType.FORCE_HINT) || manualChange.getManualRedactionType()
.equals(ManualRedactionType.LEGAL_BASIS_CHANGE) || manualChange.getManualRedactionType()
.equals(ManualRedactionType.RESIZE)) && manualChange.getProcessedDate() != null && (lastRedactionModification == null || manualChange.getProcessedDate()
.isAfter(lastRedactionModification))) {
if (entryTypeIsNotRecommendationOrHintAndManualChangeIsNotRemoved(entry, manualChange, entryType, lastRedactionModification)) {
lastRedactionModification = manualChange.getProcessedDate();
}
@ -89,28 +87,29 @@ public class AnalysisFlagsCalculationService {
}
}
if (lastChange != null && (lastRedactionModification == null || lastChange.getDateTime()
.isAfter(lastRedactionModification)) && !entryType.equals(EntryType.HINT) && !entryType.equals(EntryType.RECOMMENDATION)) {
if (lastChange != null && //
(lastRedactionModification == null || lastChange.getDateTime().isAfter(lastRedactionModification)) && //
!entryType.equals(EntryType.HINT) && !entryType.equals(EntryType.RECOMMENDATION)) {
lastRedactionModification = lastChange.getDateTime();
}
if (!hasRedactions && entry.getState().equals(EntryState.APPLIED) && !entryType.equals(EntryType.RECOMMENDATION)) {
if (!hasRedactions && entry.getState().equals(EntryState.APPLIED) && //
(entryType.equals(EntryType.ENTITY) || entryType.equals(EntryType.IMAGE) || entryType.equals(EntryType.AREA))) {
hasRedactions = true;
}
if (!hasHints && entryType.equals(EntryType.HINT) && !entry.getState().equals(EntryState.REMOVED)) {
if (!hasHints && (entryType.equals(EntryType.HINT) || entryType.equals(EntryType.IMAGE_HINT)) && entry.getState().equals(EntryState.SKIPPED)) {
hasHints = true;
}
if (!hasImages && (type.equals("image") || entryType.equals(EntryType.IMAGE))) {
if (!hasImages && (entryType.equals(EntryType.IMAGE) || entryType.equals(EntryType.IMAGE_HINT))) {
hasImages = true;
}
OffsetDateTime viewedPage = entry.getPositions().isEmpty() ? null : viewedPages.get(entry.getPositions().get(0).getPageNumber());
var viewedPage = entry.getPositions().isEmpty() ? null : viewedPages.get(entry.getPositions().get(0).getPageNumber());
if (file.getWorkflowStatus() != WorkflowStatus.APPROVED && lastChange != null && lastChange.getDateTime() != null && viewedPage != null && viewedPage.isBefore(
lastChange.getDateTime())) {
if (file.getWorkflowStatus() != WorkflowStatus.APPROVED && lastChange != null && lastChange.getDateTime() != null && viewedPage != null && //
viewedPage.isBefore(lastChange.getDateTime())) {
hasUpdates = true;
}
@ -123,6 +122,7 @@ public class AnalysisFlagsCalculationService {
fileStatusPersistenceService.updateFlags(fileId, hasRedactions, hasHints, hasImages, hasSuggestions, hasComments, hasUpdates);
}
if (lastRedactionModification != null && (file.getRedactionModificationDate() == null || file.getRedactionModificationDate().isBefore(lastRedactionModification))) {
fileStatusPersistenceService.setLastRedactionModificationDateForFile(fileId, lastRedactionModification);
}
@ -130,6 +130,37 @@ public class AnalysisFlagsCalculationService {
fileStatusPersistenceService.setLastManualChangeDate(fileId, lastManualChangeDate);
}
fileStatusPersistenceService.setLastFlagCalculation(fileId, OffsetDateTime.now());
}
/**
 * Attaches dossier and file ids as high-cardinality key-values to the current
 * observation, if one is active, so traces can be correlated with a specific file.
 * No-op when no observation is in progress.
 */
private void addIdsToTrace(String dossierId, String fileId) {
if (observationRegistry.getCurrentObservation() != null) {
observationRegistry.getCurrentObservation()
.highCardinalityKeyValue("dossierId", dossierId)
.highCardinalityKeyValue("fileId", fileId);
}
}
/**
 * Decides whether this manual change should advance the "last redaction modification"
 * timestamp: the entry is neither a hint nor a recommendation, carries a non-empty
 * reason, the change is one of the redaction-modifying types, has been processed,
 * and its processed date is newer than the current {@code lastRedactionModification}
 * (or none is known yet).
 *
 * NOTE(review): the method name mentions "ManualChangeIsNotRemoved", but no REMOVED
 * state is inspected here; the name is kept only to avoid touching the caller.
 */
private static boolean entryTypeIsNotRecommendationOrHintAndManualChangeIsNotRemoved(EntityLogEntry entry,
        ManualChange manualChange,
        EntryType entryType,
        OffsetDateTime lastRedactionModification) {

    // Types that count as an actual modification of a redaction (enum switch replaces
    // the previous seven-way equals() chain; behavior is identical).
    boolean modifiesRedaction = switch (manualChange.getManualRedactionType()) {
        case ADD_LOCALLY, RECATEGORIZE, REMOVE_LOCALLY, FORCE_REDACT, FORCE_HINT, LEGAL_BASIS_CHANGE, RESIZE -> true;
        default -> false;
    };

    return !entryType.equals(EntryType.HINT)
            && !entryType.equals(EntryType.RECOMMENDATION)
            && StringUtils.isNotEmpty(entry.getReason())
            && modifiesRedaction
            && manualChange.getProcessedDate() != null
            && (lastRedactionModification == null || manualChange.getProcessedDate().isAfter(lastRedactionModification));
}

View File

@ -69,7 +69,10 @@ public class EntityLogMergeService {
log.debug("Merging EntityLog");
List<BaseAnnotation> allManualChanges = allManualChanges(manualRedactions);
List<String> manualChangesIds = allManualChanges.stream().map(BaseAnnotation::getAnnotationId).toList();
List<EntityLogEntry> matchingEntities = entityLog.getEntityLogEntry().stream().filter(entityLogEntry -> manualChangesIds.contains(entityLogEntry.getId())).collect(Collectors.toList());
List<EntityLogEntry> matchingEntities = entityLog.getEntityLogEntry()
.stream()
.filter(entityLogEntry -> manualChangesIds.contains(entityLogEntry.getId()))
.collect(Collectors.toList());
final int analysisNumber = entityLog.getAnalysisNumber();
// Sort manual changes by date, so we process them in order of when they were requested
@ -104,7 +107,8 @@ public class EntityLogMergeService {
}
if (isFalsePositive(manualRedactionEntry)) {
var matchingEntities = entityLog.getEntityLogEntry().stream()
var matchingEntities = entityLog.getEntityLogEntry()
.stream()
.filter(entityLogEntry -> equalPosition(manualRedactionEntry.getPositions().get(0), entityLogEntry.getPositions().get(0)))
.toList();
matchingEntities.forEach(matchingEntity -> mergeFalsePositive(entityLog, matchingEntity));
@ -121,11 +125,7 @@ public class EntityLogMergeService {
.build());
List<Change> changes = new ArrayList<>();
changes.add(Change.builder()
.analysisNumber(entityLog.getAnalysisNumber())
.dateTime(manualRedactionEntry.getRequestDate())
.type(ChangeType.ADDED)
.build());
changes.add(Change.builder().analysisNumber(entityLog.getAnalysisNumber()).dateTime(manualRedactionEntry.getRequestDate()).type(ChangeType.ADDED).build());
boolean isHint = isHint(manualRedactionEntry.getType(), dossier);
@ -169,11 +169,7 @@ public class EntityLogMergeService {
existingEntry.setState(EntryState.REMOVED);
List<Change> falsePositiveChanges = new ArrayList<>();
falsePositiveChanges.add(Change.builder()
.analysisNumber(entityLog.getAnalysisNumber())
.dateTime(OffsetDateTime.now())
.type(ChangeType.REMOVED)
.build());
falsePositiveChanges.add(Change.builder().analysisNumber(entityLog.getAnalysisNumber()).dateTime(OffsetDateTime.now()).type(ChangeType.REMOVED).build());
if (existingEntry.getChanges() != null && !existingEntry.getChanges().isEmpty()) {
existingEntry.getChanges().addAll(falsePositiveChanges);
} else {
@ -182,7 +178,6 @@ public class EntityLogMergeService {
}
private void mergeIdsToRemove(IdRemoval idRemoval, List<EntityLogEntry> entityLogEntries, int analysisNumber) {
var entity = entityLogEntries.stream().filter(entityLogEntry -> entityLogEntry.getId().equals(idRemoval.getAnnotationId())).findAny();
@ -199,6 +194,7 @@ public class EntityLogMergeService {
});
}
private void mergeResizeRedactions(ManualResizeRedaction manualResizeRedaction, List<EntityLogEntry> entityLogEntries, int analysisNumber) {
var entity = entityLogEntries.stream().filter(entityLogEntry -> entityLogEntry.getId().equals(manualResizeRedaction.getAnnotationId())).findAny();
@ -220,6 +216,7 @@ public class EntityLogMergeService {
});
}
private void mergeLegalBasisChanges(ManualLegalBasisChange manualLegalBasisChange, List<EntityLogEntry> entityLogEntries, int analysisNumber) {
var entity = entityLogEntries.stream().filter(entityLogEntry -> entityLogEntry.getId().equals(manualLegalBasisChange.getAnnotationId())).findAny();
@ -229,13 +226,14 @@ public class EntityLogMergeService {
entityLogEntry.setValue(manualLegalBasisChange.getValue());
addChanges(entityLogEntry.getChanges(), ChangeType.CHANGED, analysisNumber, manualLegalBasisChange.getRequestDate());
Map<String, String> propertyChanges = getPropertyChanges(manualLegalBasisChange);
entityLogEntry.getManualChanges().add(ManualChange.builder()
.manualRedactionType(ManualRedactionType.LEGAL_BASIS_CHANGE)
.requestedDate(manualLegalBasisChange.getRequestDate())
.processedDate(null)
.propertyChanges(propertyChanges)
.userId(manualLegalBasisChange.getUser())
.build());
entityLogEntry.getManualChanges()
.add(ManualChange.builder()
.manualRedactionType(ManualRedactionType.LEGAL_BASIS_CHANGE)
.requestedDate(manualLegalBasisChange.getRequestDate())
.processedDate(null)
.propertyChanges(propertyChanges)
.userId(manualLegalBasisChange.getUser())
.build());
});
}
@ -250,7 +248,7 @@ public class EntityLogMergeService {
if (!Strings.isNullOrEmpty(manualLegalBasisChange.getValue())) {
propertyChanges.put("value", manualLegalBasisChange.getValue());
}
if(!Strings.isNullOrEmpty(manualLegalBasisChange.getSection())) {
if (!Strings.isNullOrEmpty(manualLegalBasisChange.getSection())) {
propertyChanges.put("section", manualLegalBasisChange.getSection());
}
return propertyChanges;
@ -277,6 +275,7 @@ public class EntityLogMergeService {
});
}
private void mergeForceRedactions(ManualForceRedaction forceRedaction, List<EntityLogEntry> entityLogEntries, int analysisNumber) {
var entity = entityLogEntries.stream().filter(entityLogEntry -> entityLogEntry.getId().equals(forceRedaction.getAnnotationId())).findAny();
@ -296,6 +295,7 @@ public class EntityLogMergeService {
});
}
private EntryType getEntryType(boolean isHint, String type) {
if (type.equals("image") || type.equals("logo") || type.equals("signature") || type.equals("formula")) {
@ -305,6 +305,7 @@ public class EntityLogMergeService {
}
}
private ManualRedactionType calculateManualRedactionType(ManualRedactionEntry manualRedactionEntry) {
if (manualRedactionEntry.isAddToDictionary() || manualRedactionEntry.isAddToDossierDictionary()) {
@ -313,19 +314,17 @@ public class EntityLogMergeService {
return ManualRedactionType.ADD_LOCALLY;
}
private void addChanges(List<Change> changes, ChangeType changeType, int analysisNumber, OffsetDateTime offsetDateTime) {
if (!changes.isEmpty()) {
changes.add(Change.builder()
.analysisNumber(analysisNumber + 1)
.dateTime(offsetDateTime)
.type(changeType)
.build());
changes.add(Change.builder().analysisNumber(analysisNumber + 1).dateTime(offsetDateTime).type(changeType).build());
} else {
changes.add(Change.builder().analysisNumber(analysisNumber).dateTime(OffsetDateTime.now()).type(changeType).build());
}
}
private boolean isHint(String type, DossierEntity dossier) {
String typeId = toTypeId(type, dossier.getDossierTemplateId());
@ -343,19 +342,21 @@ public class EntityLogMergeService {
return typeEntity.isHint();
}
private boolean equalPosition(Rectangle position1, Position position2) {
return position1.getTopLeftX() == position2.x()
&& position1.getTopLeftY() == position2.y()
&& position1.getWidth() == position2.w()
&& position1.getHeight() == position2.h();
return position1.getTopLeftX() == position2.x() && position1.getTopLeftY() == position2.y() && position1.getWidth() == position2.w() && position1.getHeight() == position2.h();
}
private List<Position> convertPositions(List<Rectangle> rectangles) {
return rectangles.stream().map(rectangle -> new Position(rectangle.getTopLeftX(), rectangle.getTopLeftY(), rectangle.getWidth(), rectangle.getHeight(), rectangle.getPage())).collect(Collectors.toList());
return rectangles.stream()
.map(rectangle -> new Position(rectangle.getTopLeftX(), rectangle.getTopLeftY(), rectangle.getWidth(), rectangle.getHeight(), rectangle.getPage()))
.collect(Collectors.toList());
}
private List<BaseAnnotation> allManualChanges(ManualRedactions manualRedactions) {
return Stream.of(manualRedactions.getEntriesToAdd(),
@ -366,6 +367,7 @@ public class EntityLogMergeService {
manualRedactions.getLegalBasisChanges()).flatMap(Collection::stream).map(baseAnnotation -> (BaseAnnotation) baseAnnotation).toList();
}
public void sendToAnalyseQueue(String fileId, DossierEntity dossier, FileModel fileModel, ManualRedactions manualRedactions) {
var fileEntity = fileStatusPersistenceService.getStatus(fileId);

View File

@ -102,7 +102,6 @@ public class FileStatusManagementService {
}
fileStatusService.setStatusSuccessful(fileId, assignee != null ? WorkflowStatus.UNDER_REVIEW : WorkflowStatus.NEW);
fileStatusService.setAssignee(fileId, assignee);
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
indexingService.addToIndexingQueue(IndexMessageType.UPDATE, null, dossierId, fileId, 2);
}
@ -117,7 +116,6 @@ public class FileStatusManagementService {
}
fileStatusService.setStatusSuccessful(fileId, assignee != null ? WorkflowStatus.UNDER_APPROVAL : WorkflowStatus.NEW);
fileStatusService.setAssignee(fileId, approverId);
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
indexingService.addToIndexingQueue(IndexMessageType.UPDATE, null, dossierId, fileId, 2);
}
@ -171,7 +169,6 @@ public class FileStatusManagementService {
excludedPages.addAll(pages);
fileStatusService.setExcludedPages(fileId, excludedPages);
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
}
@ -181,7 +178,6 @@ public class FileStatusManagementService {
Set<Integer> excludedPages = fileStatus.getExcludedPages();
excludedPages.removeAll(pages);
fileStatusService.setExcludedPages(fileId, excludedPages);
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
}
}

View File

@ -56,7 +56,6 @@ public class FileStatusProcessingUpdateService {
}
manualRedactionService.updateProcessedDate(analyzeResult.getManualRedactions());
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
if (analyzeResult.getAddedFileAttributes() != null && !analyzeResult.getAddedFileAttributes().isEmpty()) {
fileStatusPersistenceService.addFileAttributes(dossierId, fileId, analyzeResult.getAddedFileAttributes());

View File

@ -19,6 +19,7 @@ import com.iqser.red.service.persistence.management.v1.processor.entity.dossier.
import com.iqser.red.service.persistence.management.v1.processor.entity.dossier.FileEntity;
import com.iqser.red.service.persistence.management.v1.processor.exception.InternalServerErrorException;
import com.iqser.red.service.persistence.management.v1.processor.model.CvAnalysisServiceRequest;
import com.iqser.red.service.persistence.management.v1.processor.model.FileIdentifier;
import com.iqser.red.service.persistence.management.v1.processor.model.NerServiceRequest;
import com.iqser.red.service.persistence.management.v1.processor.model.OCRStatusUpdateResponse;
import com.iqser.red.service.persistence.management.v1.processor.model.image.ImageServiceRequest;
@ -50,33 +51,36 @@ import com.knecon.fforesight.databasetenantcommons.providers.utils.MagicConverte
import com.knecon.fforesight.service.ocr.v1.api.model.DocumentRequest;
import jakarta.transaction.Transactional;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.experimental.FieldDefaults;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class FileStatusService {
private final FileStatusPersistenceService fileStatusPersistenceService;
private final DossierPersistenceService dossierPersistenceService;
private final RabbitTemplate rabbitTemplate;
private final ManualRedactionProviderService manualRedactionProviderService;
private final FileManagementStorageService fileManagementStorageService;
private final LegalBasisChangePersistenceService legalBasisChangePersistenceService;
private final RecategorizationPersistenceService recategorizationPersistenceService;
private final CommentPersistenceService commentPersistenceService;
private final ForceRedactionPersistenceService forceRedactionPersistenceService;
private final RemoveRedactionPersistenceService removeRedactionPersistenceService;
private final AddRedactionPersistenceService addRedactionPersistenceService;
private final ResizeRedactionPersistenceService resizeRedactionPersistenceService;
private final FileAttributeConfigPersistenceService fileAttributeConfigPersistenceService;
private final FileManagementServiceSettings settings;
private final ReanalysisRequiredStatusService reanalysisRequiredStatusService;
private final ViewedPagesPersistenceService viewedPagesPersistenceService;
private final FileManagementServiceSettings fileManagementServiceSettings;
private final LayoutParsingRequestFactory layoutParsingRequestFactory;
FileStatusPersistenceService fileStatusPersistenceService;
DossierPersistenceService dossierPersistenceService;
RabbitTemplate rabbitTemplate;
ManualRedactionProviderService manualRedactionProviderService;
FileManagementStorageService fileManagementStorageService;
LegalBasisChangePersistenceService legalBasisChangePersistenceService;
RecategorizationPersistenceService recategorizationPersistenceService;
CommentPersistenceService commentPersistenceService;
ForceRedactionPersistenceService forceRedactionPersistenceService;
RemoveRedactionPersistenceService removeRedactionPersistenceService;
AddRedactionPersistenceService addRedactionPersistenceService;
ResizeRedactionPersistenceService resizeRedactionPersistenceService;
FileAttributeConfigPersistenceService fileAttributeConfigPersistenceService;
FileManagementServiceSettings settings;
ReanalysisRequiredStatusService reanalysisRequiredStatusService;
ViewedPagesPersistenceService viewedPagesPersistenceService;
FileManagementServiceSettings fileManagementServiceSettings;
LayoutParsingRequestFactory layoutParsingRequestFactory;
@Transactional
@ -107,6 +111,12 @@ public class FileStatusService {
}
/**
 * @return identifiers of all files for which an analysis flag calculation is
 *         currently required, as determined by the persistence layer
 */
public Set<FileIdentifier> getFileIdentifiersWhereAnalysisFlagCalculationIsRequired() {
return fileStatusPersistenceService.getFileIdentifiersWhereAnalysisFlagCalculationIsRequired();
}
@Transactional
public List<FileModel> getStatusesAddedBefore(OffsetDateTime end) {
@ -131,6 +141,7 @@ public class FileStatusService {
return reanalysisRequiredStatusService.enhanceFileStatusWithAnalysisRequirements(converted);
}
public boolean fileExists(String fileId) {
return fileStatusPersistenceService.statusExists(fileId);
@ -639,6 +650,7 @@ public class FileStatusService {
}
@Transactional
public void overwriteFile(String dossierId, String fileId, String uploader, String filename, boolean keepManualRedactions) {

View File

@ -0,0 +1,59 @@
package com.iqser.red.service.persistence.management.v1.processor.service.job;
import static com.iqser.red.service.persistence.management.v1.processor.utils.TenantUtils.isTenantReadyForPersistence;
import java.time.OffsetDateTime;
import java.util.Set;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration;
import com.iqser.red.service.persistence.management.v1.processor.model.AnalysisFlagCalculationMessage;
import com.iqser.red.service.persistence.management.v1.processor.model.FileIdentifier;
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantProvider;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import lombok.extern.slf4j.Slf4j;
/**
 * Quartz job that, per tenant, detects files possibly needing an analysis flag
 * recalculation and schedules the work via the analysis flag calculation queue,
 * balancing the load instead of computing flags inline.
 */
@Slf4j
@Service
@RequiredArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class AnalysisFlagCalculationSchedulerJob implements Job {

    FileStatusService fileStatusService;
    RabbitTemplate rabbitTemplate;
    TenantProvider tenantProvider;

    @Override
    public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
        tenantProvider.getTenants().forEach(tenant -> {
            if (isTenantReadyForPersistence(tenant)) {
                TenantContext.setTenantId(tenant.getTenantId());
                try {
                    log.debug("Detecting Files for analysis flag calculation. Tenant: {}", tenant.getDisplayName());
                    Set<FileIdentifier> fileIdentifiers = fileStatusService.getFileIdentifiersWhereAnalysisFlagCalculationIsRequired();
                    log.debug("Found {} files which require analysis flag calculation. Tenant: {}", fileIdentifiers.size(), tenant.getDisplayName());
                    fileIdentifiers.forEach(fileIdentifier -> rabbitTemplate.convertAndSend(MessagingConfiguration.ANALYSIS_FLAG_CALCULATION_QUEUE,
                            new AnalysisFlagCalculationMessage(fileIdentifier.dossierId(), fileIdentifier.fileId())));
                } finally {
                    // Always reset the tenant context: an exception while querying or
                    // publishing must not leak this tenant's id into the next iteration.
                    TenantContext.clear();
                }
            }
        });
    }
}

View File

@ -3,6 +3,7 @@ package com.iqser.red.service.persistence.management.v1.processor.service.manual
import static com.knecon.fforesight.databasetenantcommons.providers.utils.MagicConverter.convert;
import java.nio.charset.StandardCharsets;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@ -24,6 +25,7 @@ import com.iqser.red.service.persistence.management.v1.processor.service.EntityL
import com.iqser.red.service.persistence.management.v1.processor.service.EntityLogService;
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.DossierPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.FileStatusPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.annotations.AddRedactionPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.annotations.ForceRedactionPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.annotations.LegalBasisChangePersistenceService;
@ -69,11 +71,11 @@ public class ManualRedactionService {
ResizeRedactionPersistenceService resizeRedactionPersistenceService;
FileStatusService fileStatusService;
ManualRedactionProviderService manualRedactionProviderService;
AnalysisFlagsCalculationService analysisFlagsCalculationService;
EntityLogService entityLogService;
HashFunction hashFunction = Hashing.murmur3_128();
ManualRedactionDictionaryUpdateHandler manualRedactionDictionaryUpdateHandler;
EntityLogMergeService entityLogMergeService;
FileStatusPersistenceService fileStatusPersistenceService;
@Transactional
@ -114,19 +116,17 @@ public class ManualRedactionService {
FileModel fileStatus = fileStatusService.getStatus(fileId);
if (!manualRedactionEntryEntities.isEmpty() && fileStatus.isExcludedFromAutomaticAnalysis()) {
ManualRedactions manualRedactions = ManualRedactions.builder()
.entriesToAdd(convertEntriesToAdd(manualRedactionEntryEntities))
.build();
ManualRedactions manualRedactions = ManualRedactions.builder().entriesToAdd(convertEntriesToAdd(manualRedactionEntryEntities)).build();
entityLogMergeService.sendToAnalyseQueue(fileId, dossierEntity, fileStatusService.getStatus(fileId), manualRedactions);
} else {
reprocess(dossierId, fileId);
}
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
fileStatusPersistenceService.setLastManualChangeDate(fileId, OffsetDateTime.now());
return response;
}
private Set<ManualRedactionEntry> convertEntriesToAdd(List<ManualRedactionEntryEntity> source) {
return source.stream().map(entry -> convert(entry, ManualRedactionEntry.class, new ManualRedactionMapper())).collect(Collectors.toSet());
@ -167,7 +167,7 @@ public class ManualRedactionService {
reprocess(dossierId, fileId);
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
fileStatusPersistenceService.setLastManualChangeDate(fileId, OffsetDateTime.now());
return response;
}
@ -192,7 +192,7 @@ public class ManualRedactionService {
reprocess(dossierId, fileId);
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
fileStatusPersistenceService.setLastManualChangeDate(fileId, OffsetDateTime.now());
return response;
}
@ -215,7 +215,7 @@ public class ManualRedactionService {
response.add(ManualAddResponse.builder().annotationId(legalBasisChangeRequest.getAnnotationId()).commentId(commentId).build());
}
reprocess(dossierId, fileId);
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
fileStatusPersistenceService.setLastManualChangeDate(fileId, OffsetDateTime.now());
return response;
}
@ -254,7 +254,7 @@ public class ManualRedactionService {
}
reprocess(dossierId, fileId);
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
fileStatusPersistenceService.setLastManualChangeDate(fileId, OffsetDateTime.now());
return response;
}
@ -298,15 +298,13 @@ public class ManualRedactionService {
FileModel fileStatus = fileStatusService.getStatus(fileId);
if (!manualResizeRedactionEntities.isEmpty() && fileStatus.isExcludedFromAutomaticAnalysis()) {
ManualRedactions manualRedactions = ManualRedactions.builder()
.resizeRedactions(convertResizeRedactions(manualResizeRedactionEntities))
.build();
ManualRedactions manualRedactions = ManualRedactions.builder().resizeRedactions(convertResizeRedactions(manualResizeRedactionEntities)).build();
entityLogMergeService.sendToAnalyseQueue(fileId, dossierPersistenceService.getAndValidateDossier(dossierId), fileStatusService.getStatus(fileId), manualRedactions);
} else {
reprocess(dossierId, fileId);
}
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
fileStatusPersistenceService.setLastManualChangeDate(fileId, OffsetDateTime.now());
return response;
}

View File

@ -21,11 +21,11 @@ import com.iqser.red.service.persistence.management.v1.processor.entity.annotati
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.ManualRecategorizationEntity;
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.ManualRedactionEntryEntity;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
import com.iqser.red.service.persistence.management.v1.processor.service.AnalysisFlagsCalculationService;
import com.iqser.red.service.persistence.management.v1.processor.service.EntityLogService;
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.AuditPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.DossierPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.FileStatusPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.annotations.AddRedactionPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.annotations.ForceRedactionPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.annotations.LegalBasisChangePersistenceService;
@ -60,7 +60,7 @@ public class ManualRedactionUndoService {
ManualRedactionProviderService manualRedactionProviderService;
AuditPersistenceService auditPersistenceService;
FileStatusService fileStatusService;
AnalysisFlagsCalculationService analysisFlagsCalculationService;
FileStatusPersistenceService fileStatusPersistenceService;
RecategorizationPersistenceService recategorizationPersistenceService;
DossierPersistenceService dossierPersistenceService;
AddRedactionPersistenceService addRedactionPersistenceService;
@ -91,6 +91,7 @@ public class ManualRedactionUndoService {
undoLegalBasisChange(dossierId, fileId, manualRedactionWrappers);
undoResize(dossierId, fileId, manualRedactionWrappers);
reprocess(dossierId, fileId);
fileStatusPersistenceService.setLastManualChangeDate(fileId, OffsetDateTime.now());
}
@ -132,7 +133,7 @@ public class ManualRedactionUndoService {
for (var annotationId : annotationIds) {
resizeRedactionPersistenceService.softDelete(fileId, annotationId, now);
}
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
}
@ -163,7 +164,6 @@ public class ManualRedactionUndoService {
legalBasisChangePersistenceService.softDelete(fileId, annotationId, OffsetDateTime.now());
}
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
}
@ -205,7 +205,7 @@ public class ManualRedactionUndoService {
recategorizationPersistenceService.updateStatus(fileId, annotationId, AnnotationStatus.APPROVED, Collections.emptySet(), Collections.emptySet());
recategorizationPersistenceService.softDelete(fileId, annotationId, OffsetDateTime.now());
}
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
}
@ -238,7 +238,6 @@ public class ManualRedactionUndoService {
forceRedactionPersistenceService.softDelete(fileId, annotationId, now);
}
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
}
@ -276,7 +275,6 @@ public class ManualRedactionUndoService {
removeRedactionPersistenceService.softDelete(fileId, annotationId, OffsetDateTime.now());
}
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
}
@ -313,7 +311,6 @@ public class ManualRedactionUndoService {
addRedactionPersistenceService.softDelete(fileId, annotationId, OffsetDateTime.now());
}
analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
}

View File

@ -16,6 +16,7 @@ import com.iqser.red.service.persistence.management.v1.processor.entity.dossier.
import com.iqser.red.service.persistence.management.v1.processor.entity.dossier.FileEntity;
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
import com.iqser.red.service.persistence.management.v1.processor.model.FileIdentifier;
import com.iqser.red.service.persistence.management.v1.processor.model.OCRStatusUpdateResponse;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository.FileAttributesRepository;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository.FileRepository;
@ -573,4 +574,20 @@ public class FileStatusPersistenceService {
return fileRepository.existsById(fileId);
}
/**
 * Collects the identifiers of all files for which an analysis-flag calculation is
 * currently required, as determined by the repository query (initial analysis done,
 * manual change or re-analysis since the last calculation, or assignee page views
 * since the last calculation).
 *
 * <p>The repository query selects {@code f.id, f.dossierId}, so in each returned
 * tuple index 0 is the fileId and index 1 is the dossierId.
 *
 * @return the set of (dossier, file) identifiers needing a flag recalculation
 */
public Set<FileIdentifier> getFileIdentifiersWhereAnalysisFlagCalculationIsRequired() {
return fileRepository.getFileIdentifiersWhereAnalysisFlagCalculationIsRequired()
.stream()
// NOTE(review): assumes FileIdentifier(dossierId, fileId) argument order
// (tuple.get(1) = dossierId, tuple.get(0) = fileId) — confirm against FileIdentifier.
.map(tuple -> new FileIdentifier((String) tuple.get(1), (String) tuple.get(0)))
.collect(Collectors.toSet());
}
/**
 * Persists the time of the most recent analysis-flag calculation for the given file.
 * The repository call also refreshes the file's {@code lastUpdated} timestamp with
 * the current time.
 *
 * @param fileId              id of the file whose flag-calculation date is updated
 * @param lastFlagCalculation the moment the analysis flags were (re)calculated
 */
@Transactional
public void setLastFlagCalculation(String fileId, OffsetDateTime lastFlagCalculation) {
fileRepository.setLastFlagCalculationDate(fileId, lastFlagCalculation, OffsetDateTime.now());
}
}

View File

@ -16,6 +16,7 @@ import com.iqser.red.service.persistence.management.v1.processor.service.persist
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.ProcessingStatus;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
import jakarta.persistence.Tuple;
import jakarta.transaction.Transactional;
public interface FileRepository extends JpaRepository<FileEntity, String> {
@ -30,11 +31,7 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Modifying
@Query("update FileEntity e set e.hasRedactions = :hasRedactions ," +
" e.hasHints = :hasHints, e.hasSuggestions = :hasSuggestions," +
" e.hasImages = :hasImages, e.hasUpdates = :hasUpdates, e.hasAnnotationComments = :hasComments, " +
" e.lastUpdated = :lastUpdated " +
" where e.id =:fileId")
@Query("update FileEntity e set e.hasRedactions = :hasRedactions ," + " e.hasHints = :hasHints, e.hasSuggestions = :hasSuggestions," + " e.hasImages = :hasImages, e.hasUpdates = :hasUpdates, e.hasAnnotationComments = :hasComments, " + " e.lastUpdated = :lastUpdated " + " where e.id =:fileId")
void updateFlags(@Param("fileId") String fileId,
@Param("lastUpdated") OffsetDateTime lastUpdated,
@Param("hasRedactions") boolean hasRedactions,
@ -44,13 +41,9 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Param("hasComments") boolean hasComments,
@Param("hasUpdates") boolean hasUpdates);
@Modifying
@Query("update FileEntity f set f.numberOfPages = :numberOfPages, f.processingStatus = :processingStatus, " +
"f.dictionaryVersion = :dictionaryVersion, f.rulesVersion = :rulesVersion, f.componentRulesVersion = :componentRulesVersion, f.legalBasisVersion = :legalBasisVersion, " +
"f.analysisDuration = :analysisDuration, f.dossierDictionaryVersion = :dossierDictionaryVersion, " +
"f.analysisVersion = :analysisVersion, f.numberOfAnalyses = :analysisNumber, f.lastUpdated = :lastUpdated, " +
"f.lastProcessed = :lastProcessed, f.processingErrorCounter = :processingErrorCounter " +
"where f.id = :fileId")
@Query("update FileEntity f set f.numberOfPages = :numberOfPages, f.processingStatus = :processingStatus, " + "f.dictionaryVersion = :dictionaryVersion, f.rulesVersion = :rulesVersion, f.componentRulesVersion = :componentRulesVersion, f.legalBasisVersion = :legalBasisVersion, " + "f.analysisDuration = :analysisDuration, f.dossierDictionaryVersion = :dossierDictionaryVersion, " + "f.analysisVersion = :analysisVersion, f.numberOfAnalyses = :analysisNumber, f.lastUpdated = :lastUpdated, " + "f.lastProcessed = :lastProcessed, f.processingErrorCounter = :processingErrorCounter " + "where f.id = :fileId")
void updateProcessingStatus(@Param("fileId") String fileId,
@Param("numberOfPages") int numberOfPages,
@Param("processingStatus") ProcessingStatus processingStatus,
@ -66,33 +59,31 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Param("analysisNumber") int analysisNumber,
@Param("processingErrorCounter") int processingErrorCounter);
@Modifying
@Query("update FileEntity f set f.workflowStatus = :workflowStatus, f.lastUpdated = :lastUpdated, f.approvalDate = :approvalDate," +
" f.excludedFromAutomaticAnalysis = :excludedFromAutomaticAnalysis where f.id = :fileId")
@Query("update FileEntity f set f.workflowStatus = :workflowStatus, f.lastUpdated = :lastUpdated, f.approvalDate = :approvalDate," + " f.excludedFromAutomaticAnalysis = :excludedFromAutomaticAnalysis where f.id = :fileId")
void updateWorkflowStatus(@Param("fileId") String fileId,
@Param("workflowStatus") WorkflowStatus workflowStatus,
@Param("lastUpdated") OffsetDateTime lastUpdated,
@Param("approvalDate") OffsetDateTime approvalDate,
@Param("excludedFromAutomaticAnalysis") boolean excludedFromAutomaticAnalysis);
@Modifying(clearAutomatically = true, flushAutomatically = true)
@Query("update FileEntity f set f.workflowStatus = :workflowStatus, f.lastUpdated = :lastUpdated, f.approvalDate = :approvalDate " +
" where f.id = :fileId")
@Query("update FileEntity f set f.workflowStatus = :workflowStatus, f.lastUpdated = :lastUpdated, f.approvalDate = :approvalDate " + " where f.id = :fileId")
void updateWorkflowStatus(@Param("fileId") String fileId,
@Param("workflowStatus") WorkflowStatus workflowStatus,
@Param("lastUpdated") OffsetDateTime lastUpdated,
@Param("approvalDate") OffsetDateTime approvalDate);
@Modifying(clearAutomatically = true)
@Query("update FileEntity f set f.fileSize = :fileSize where f.id = :fileId")
void updateFileSize(@Param("fileId") String fileId, @Param("fileSize") long fileSize);
@Modifying(clearAutomatically = true)
@Query("update FileEntity f set f.processingStatus = :processingStatus, f.lastUpdated = :lastUpdated," +
" f.hasHighlights = :hasHighlights, f.processingErrorCounter = :processingErrorCounter " +
" where f.id = :fileId")
@Query("update FileEntity f set f.processingStatus = :processingStatus, f.lastUpdated = :lastUpdated," + " f.hasHighlights = :hasHighlights, f.processingErrorCounter = :processingErrorCounter " + " where f.id = :fileId")
void updateProcessingStatus(@Param("fileId") String fileId,
@Param("processingStatus") ProcessingStatus processingStatus,
@Param("lastUpdated") OffsetDateTime lastUpdated,
@ -101,8 +92,7 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Modifying(clearAutomatically = true)
@Query("update FileEntity f set f.processingStatus = :processingStatus, f.lastUpdated = :lastUpdated, f.processingErrorCounter = :processingErrorCounter " +
"where f.id = :fileId")
@Query("update FileEntity f set f.processingStatus = :processingStatus, f.lastUpdated = :lastUpdated, f.processingErrorCounter = :processingErrorCounter " + "where f.id = :fileId")
void updateProcessingStatus(@Param("fileId") String fileId,
@Param("processingStatus") ProcessingStatus processingStatus,
@Param("lastUpdated") OffsetDateTime lastUpdated,
@ -110,8 +100,7 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Modifying
@Query("update FileEntity f set f.errorCause = :cause, f.errorQueue = :queue, f.errorService = :service, f.errorTimestamp = :timestamp " +
"where f.id = :fileId")
@Query("update FileEntity f set f.errorCause = :cause, f.errorQueue = :queue, f.errorService = :service, f.errorTimestamp = :timestamp " + "where f.id = :fileId")
void updateStatusErrorInfo(@Param("fileId") String fileId,
@Param("cause") String cause,
@Param("queue") String queue,
@ -120,8 +109,7 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Modifying(clearAutomatically = true, flushAutomatically = true)
@Query("update FileEntity f set f.processingStatus = :processingStatus, f.lastUpdated = :lastUpdated, f.lastProcessed = :lastProcessed, f.processingErrorCounter = :processingErrorCounter " +
"where f.id = :fileId")
@Query("update FileEntity f set f.processingStatus = :processingStatus, f.lastUpdated = :lastUpdated, f.lastProcessed = :lastProcessed, f.processingErrorCounter = :processingErrorCounter " + "where f.id = :fileId")
void updateProcessingStatus(@Param("fileId") String fileId,
@Param("processingStatus") ProcessingStatus processingStatus,
@Param("lastUpdated") OffsetDateTime lastUpdated,
@ -130,8 +118,7 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Modifying(clearAutomatically = true)
@Query("update FileEntity f set f.processingStatus = :processingStatus, f.lastUpdated = :lastUpdated," +
"f.lastIndexed = :lastIndexed, f.processingErrorCounter = :processingErrorCounter where f.id = :fileId")
@Query("update FileEntity f set f.processingStatus = :processingStatus, f.lastUpdated = :lastUpdated," + "f.lastIndexed = :lastIndexed, f.processingErrorCounter = :processingErrorCounter where f.id = :fileId")
void setUpdateStatusIndexingSuccessful(@Param("fileId") String fileId,
@Param("processingStatus") ProcessingStatus processingStatus,
@Param("lastUpdated") OffsetDateTime lastUpdated,
@ -141,33 +128,26 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Modifying
@Query("update FileEntity f set f.lastUpdated = :lastUpdated, f.ocrStartTime = :ocrStartTime where f.id = :fileId")
void updateOCRStartTime(@Param("fileId") String fileId,
@Param("lastUpdated") OffsetDateTime lastUpdated,
@Param("ocrStartTime") OffsetDateTime ocrStartTime);
void updateOCRStartTime(@Param("fileId") String fileId, @Param("lastUpdated") OffsetDateTime lastUpdated, @Param("ocrStartTime") OffsetDateTime ocrStartTime);
@Modifying
@Query("update FileEntity f set f.fileManipulationDate = :fileManipulationDate where f.id = :fileId")
void updateFileModificationDate(@Param("fileId") String fileId,
@Param("fileManipulationDate") OffsetDateTime fileManipulationDate);
void updateFileModificationDate(@Param("fileId") String fileId, @Param("fileManipulationDate") OffsetDateTime fileManipulationDate);
@Modifying
@Query("update FileEntity f set f.hasHighlights = :hasHighlights where f.id = :fileId")
void updateHasHighlights(@Param("fileId") String fileId,
@Param("hasHighlights") boolean hasHighlights);
void updateHasHighlights(@Param("fileId") String fileId, @Param("hasHighlights") boolean hasHighlights);
@Modifying
@Query("update FileEntity f set f.lastUpdated = :lastUpdated, f.hasAnnotationComments = :hasAnnotationComments where f.id = :fileId")
void updateHasComments(@Param("fileId") String fileId,
@Param("lastUpdated") OffsetDateTime lastUpdated,
@Param("hasAnnotationComments") boolean hasAnnotationComments);
void updateHasComments(@Param("fileId") String fileId, @Param("lastUpdated") OffsetDateTime lastUpdated, @Param("hasAnnotationComments") boolean hasAnnotationComments);
@Modifying
@Query("update FileEntity f set f.processingStatus = :processingStatus, f.lastUpdated = :lastUpdated, " +
"f.deleted = :softDeletedTime where f.id = :fileId")
@Query("update FileEntity f set f.processingStatus = :processingStatus, f.lastUpdated = :lastUpdated, " + "f.deleted = :softDeletedTime where f.id = :fileId")
int setSoftDelete(@Param("fileId") String fileId,
@Param("processingStatus") ProcessingStatus processingStatus,
@Param("lastUpdated") OffsetDateTime lastUpdated,
@ -175,13 +155,7 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Modifying
@Query("update FileEntity f set f.processingStatus = :processingStatus, f.lastUpdated = :lastUpdated," +
"f.hardDeletedTime = :hardDeletedTime, " +
"f.deleted = case " +
" when f.deleted is null then :deleted " +
" when f.deleted is not null then f.deleted " +
"end " +
"where f.id = :fileId")
@Query("update FileEntity f set f.processingStatus = :processingStatus, f.lastUpdated = :lastUpdated," + "f.hardDeletedTime = :hardDeletedTime, " + "f.deleted = case " + " when f.deleted is null then :deleted " + " when f.deleted is not null then f.deleted " + "end " + "where f.id = :fileId")
int setHardDelete(@Param("fileId") String fileId,
@Param("processingStatus") ProcessingStatus processingStatus,
@Param("lastUpdated") OffsetDateTime lastUpdated,
@ -190,8 +164,7 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Modifying
@Query("update FileEntity f set f.assignee = :assignee, f.lastReviewer = :lastReviewer, f.lastApprover = :lastApprover, " +
"f.lastUpdated = :lastUpdated where f.id = :fileId")
@Query("update FileEntity f set f.assignee = :assignee, f.lastReviewer = :lastReviewer, f.lastApprover = :lastApprover, " + "f.lastUpdated = :lastUpdated where f.id = :fileId")
int setAssignee(@Param("fileId") String fileId,
@Param("assignee") String assignee,
@Param("lastReviewer") String lastReviewer,
@ -201,9 +174,7 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Modifying(clearAutomatically = true)
@Query("update FileEntity f set f.excluded = :excluded, f.lastUpdated = :lastUpdated where f.id = :fileId")
int toggleExclusion(@Param("fileId") String fileId,
@Param("excluded") boolean excluded,
@Param("lastUpdated") OffsetDateTime lastUpdated);
int toggleExclusion(@Param("fileId") String fileId, @Param("excluded") boolean excluded, @Param("lastUpdated") OffsetDateTime lastUpdated);
@Modifying(clearAutomatically = true)
@ -214,18 +185,7 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Modifying(clearAutomatically = true)
@Query("update FileEntity f set f.filename = :filename, f.uploader = :uploader, f.processingStatus = :processingStatus, " +
"f.workflowStatus = :workflowStatus, f.lastUploaded = :lastUploaded, f.lastUpdated = :lastUpdated, " +
"f.fileManipulationDate = :lastUploaded, " +
"f.ocrEndTime = null, f.ocrStartTime = null, f.numberOfPagesToOCR = null, f.numberOfOCRedPages = null, " +
"f.excluded = false, f.lastProcessed = null, f.lastReviewer = null, f.lastApprover = null, " +
"f.assignee = null, f.approvalDate = null, f.numberOfAnalyses = 0, f.lastManualChangeDate = null, " +
"f.redactionModificationDate = null, " +
"f.dictionaryVersion = 0, f.dossierDictionaryVersion = 0, f.rulesVersion = 0, f.hasImages = false, " +
"f.hasHints = false, f.hasRedactions = false, f.hasSuggestions = false, f.hasUpdates = false, " +
"f.deleted = null, f.hardDeletedTime = null, f.hasHighlights = false, f.excludedFromAutomaticAnalysis = false, " +
"f.processingErrorCounter = 0, f.errorCause = null, f.errorQueue = null, f.errorService = null, " +
"f.errorTimestamp = null where f.id = :fileId")
@Query("update FileEntity f set f.filename = :filename, f.uploader = :uploader, f.processingStatus = :processingStatus, " + "f.workflowStatus = :workflowStatus, f.lastUploaded = :lastUploaded, f.lastUpdated = :lastUpdated, " + "f.fileManipulationDate = :lastUploaded, " + "f.ocrEndTime = null, f.ocrStartTime = null, f.numberOfPagesToOCR = null, f.numberOfOCRedPages = null, " + "f.excluded = false, f.lastProcessed = null, f.lastReviewer = null, f.lastApprover = null, " + "f.assignee = null, f.approvalDate = null, f.numberOfAnalyses = 0, f.lastManualChangeDate = null, " + "f.redactionModificationDate = null, " + "f.dictionaryVersion = 0, f.dossierDictionaryVersion = 0, f.rulesVersion = 0, f.hasImages = false, " + "f.hasHints = false, f.hasRedactions = false, f.hasSuggestions = false, f.hasUpdates = false, " + "f.deleted = null, f.hardDeletedTime = null, f.hasHighlights = false, f.excludedFromAutomaticAnalysis = false, " + "f.processingErrorCounter = 0, f.errorCause = null, f.errorQueue = null, f.errorService = null, " + "f.errorTimestamp = null where f.id = :fileId")
int overwriteFile(@Param("fileId") String fileId,
@Param("filename") String filename,
@Param("uploader") String uploader,
@ -236,16 +196,7 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Modifying(clearAutomatically = true)
@Query("update FileEntity f set f.filename = :filename, f.uploader = :uploader, f.processingStatus = :processingStatus, " +
"f.lastUploaded = :lastUploaded, f.lastUpdated = :lastUpdated, f.fileManipulationDate = :lastUploaded, " +
"f.lastProcessed = null," +
"f.approvalDate = null, f.numberOfAnalyses = 0, f.lastManualChangeDate = null, f.redactionModificationDate = null, " +
"f.dictionaryVersion = 0, f.dossierDictionaryVersion = 0, f.rulesVersion = 0, f.hasImages = false, " +
"f.hasHints = false, f.hasRedactions = false, f.hasSuggestions = false, f.hasUpdates = false, " +
"f.deleted = null, f.hardDeletedTime = null, f.hasHighlights = false, f.processingErrorCounter = 0, " +
"f.ocrStartTime = null, f.ocrEndTime = null, f.numberOfPagesToOCR = null, f.numberOfOCRedPages = null, " +
"f.errorCause = null, f.errorQueue = null, f.errorService = null, f.errorTimestamp = null " +
"where f.id = :fileId")
@Query("update FileEntity f set f.filename = :filename, f.uploader = :uploader, f.processingStatus = :processingStatus, " + "f.lastUploaded = :lastUploaded, f.lastUpdated = :lastUpdated, f.fileManipulationDate = :lastUploaded, " + "f.lastProcessed = null," + "f.approvalDate = null, f.numberOfAnalyses = 0, f.lastManualChangeDate = null, f.redactionModificationDate = null, " + "f.dictionaryVersion = 0, f.dossierDictionaryVersion = 0, f.rulesVersion = 0, f.hasImages = false, " + "f.hasHints = false, f.hasRedactions = false, f.hasSuggestions = false, f.hasUpdates = false, " + "f.deleted = null, f.hardDeletedTime = null, f.hasHighlights = false, f.processingErrorCounter = 0, " + "f.ocrStartTime = null, f.ocrEndTime = null, f.numberOfPagesToOCR = null, f.numberOfOCRedPages = null, " + "f.errorCause = null, f.errorQueue = null, f.errorService = null, f.errorTimestamp = null " + "where f.id = :fileId")
int overwriteFileAndKeepManualRedactions(@Param("fileId") String fileId,
@Param("filename") String filename,
@Param("uploader") String uploader,
@ -254,17 +205,11 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Param("lastUpdated") OffsetDateTime lastUpdated);
@Query("select count(f) from FileEntity f inner join DossierEntity d on d.id = f.dossierId " +
"where d.dossierTemplateId = :dossierTemplateId" +
" and ((f.deleted is not null and f.hardDeletedTime is null) or " +
" (d.softDeletedTime is not null and d.hardDeletedTime is null)) and d.archivedTime is null")
@Query("select count(f) from FileEntity f inner join DossierEntity d on d.id = f.dossierId " + "where d.dossierTemplateId = :dossierTemplateId" + " and ((f.deleted is not null and f.hardDeletedTime is null) or " + " (d.softDeletedTime is not null and d.hardDeletedTime is null)) and d.archivedTime is null")
int countSoftDeletedFilesPerDossierTemplateId(@Param("dossierTemplateId") String dossierTemplateId);
@Query("select count(f) from FileEntity f inner join DossierEntity d on d.id = f.dossierId " +
"where d.id = :dossierId" +
" and ((f.deleted is not null and f.hardDeletedTime is null) or " +
" (d.softDeletedTime is not null and d.hardDeletedTime is null))")
@Query("select count(f) from FileEntity f inner join DossierEntity d on d.id = f.dossierId " + "where d.id = :dossierId" + " and ((f.deleted is not null and f.hardDeletedTime is null) or " + " (d.softDeletedTime is not null and d.hardDeletedTime is null))")
int countSoftDeletedFilesPerDossierId(@Param("dossierId") String dossierId);
@ -286,63 +231,45 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Param("lastUpdated") OffsetDateTime lastUpdated);
@Query("select f from FileEntity f join DossierEntity d on d.id = f.dossierId " +
"where f.excluded = false and f.workflowStatus <> 'APPROVED' and f.excludedFromAutomaticAnalysis = false " +
" and ( f.processingStatus = 'PROCESSED' or f.processingStatus = 'ERROR' )" +
" and d.softDeletedTime is null and d.hardDeletedTime is null and d.archivedTime is null " +
" and f.deleted is null and f.hardDeletedTime is null and f.processingErrorCounter <= :maxRetries")
/**
 * Records when the analysis flags of a file were last calculated and refreshes
 * {@code lastUpdated}. {@code clearAutomatically} evicts possibly stale
 * {@link FileEntity} instances from the persistence context after the bulk update.
 *
 * @param fileId              id of the file to update
 * @param lastFlagCalculation timestamp of the flag calculation
 * @param lastUpdated         new last-updated timestamp for the file
 */
@Modifying(clearAutomatically = true)
@Query("update FileEntity f set f.lastFlagCalculation = :lastFlagCalculation, f.lastUpdated = :lastUpdated where f.id = :fileId")
void setLastFlagCalculationDate(@Param("fileId") String fileId,
@Param("lastFlagCalculation") OffsetDateTime lastFlagCalculation,
@Param("lastUpdated") OffsetDateTime lastUpdated);
@Query("select f from FileEntity f join DossierEntity d on d.id = f.dossierId " + "where f.excluded = false and f.workflowStatus <> 'APPROVED' and f.excludedFromAutomaticAnalysis = false " + " and ( f.processingStatus = 'PROCESSED' or f.processingStatus = 'ERROR' )" + " and d.softDeletedTime is null and d.hardDeletedTime is null and d.archivedTime is null " + " and f.deleted is null and f.hardDeletedTime is null and f.processingErrorCounter <= :maxRetries")
List<FileEntity> getAllRelevantStatusesForReanalysisScheduler(@Param("maxRetries") int maxRetries);
@Modifying
@Query("update FileEntity f set f.lastFileAttributeChange = :date, f.lastUpdated = :date where f.id = :fileId")
void updateLastAttributeChangeDate(@Param("fileId") String fileId,
@Param("date") OffsetDateTime date);
void updateLastAttributeChangeDate(@Param("fileId") String fileId, @Param("date") OffsetDateTime date);
/**
 * Returns all files within the given dossiers that are soft-deleted
 * ({@code deleted} set) but not yet hard-deleted ({@code hardDeletedTime} null).
 *
 * @param dossierIds dossiers to search in
 * @return the matching soft-deleted file entities
 */
@Query("select f from FileEntity f where f.deleted is not null and f.hardDeletedTime is null and f.dossierId in :dossierIds")
List<FileEntity> getSoftDeletedFiles(@Param("dossierIds") List<String> dossierIds);
@Query("select f.processingStatus as processingStatus, count(f) as count from FileEntity f " +
"inner join DossierEntity d on d.id = f.dossierId " +
"where f.deleted is null and f.hardDeletedTime is null " +
"and d.softDeletedTime is null and d.hardDeletedTime is null and d.archivedTime is null " +
"and d.dossierTemplateId = :dossierTemplateId " +
"group by f.processingStatus ")
@Query("select f.processingStatus as processingStatus, count(f) as count from FileEntity f " + "inner join DossierEntity d on d.id = f.dossierId " + "where f.deleted is null and f.hardDeletedTime is null " + "and d.softDeletedTime is null and d.hardDeletedTime is null and d.archivedTime is null " + "and d.dossierTemplateId = :dossierTemplateId " + "group by f.processingStatus ")
List<FileProcessingStatusProjection> countFilesByProcessingStatus(@Param("dossierTemplateId") String dossierTemplateId);
@Query("select f.workflowStatus as workflowStatus, count(f) as count from FileEntity f " +
"inner join DossierEntity d on d.id = f.dossierId " +
"where f.deleted is null and f.hardDeletedTime is null " +
"and d.softDeletedTime is null and d.hardDeletedTime is null and d.archivedTime is null " +
"and d.dossierTemplateId = :dossierTemplateId " +
"group by f.workflowStatus ")
@Query("select f.workflowStatus as workflowStatus, count(f) as count from FileEntity f " + "inner join DossierEntity d on d.id = f.dossierId " + "where f.deleted is null and f.hardDeletedTime is null " + "and d.softDeletedTime is null and d.hardDeletedTime is null and d.archivedTime is null " + "and d.dossierTemplateId = :dossierTemplateId " + "group by f.workflowStatus ")
List<FileWorkflowStatusProjection> countFilesByWorkflowStatus(@Param("dossierTemplateId") String dossierTemplateId);
@Query(value = "select COALESCE(sum(number_of_pages),0) as numberOfAnalyzedPages, " +
"COALESCE(sum(json_array_length(cast(excluded_pages AS json))),0) as numberOfExcludedPages " +
"from file join dossier on file.dossier_id = dossier.id " +
"where file.deleted is null and file.hard_deleted_time is null " +
"and dossier.archived_time is null and dossier.soft_deleted_time is null and dossier.hard_deleted_time is null" +
" and dossier.dossier_template_id = :dossierTemplateId", nativeQuery = true)
@Query(value = "select COALESCE(sum(number_of_pages),0) as numberOfAnalyzedPages, " + "COALESCE(sum(json_array_length(cast(excluded_pages AS json))),0) as numberOfExcludedPages " + "from file join dossier on file.dossier_id = dossier.id " + "where file.deleted is null and file.hard_deleted_time is null " + "and dossier.archived_time is null and dossier.soft_deleted_time is null and dossier.hard_deleted_time is null" + " and dossier.dossier_template_id = :dossierTemplateId", nativeQuery = true)
FilePageCountsProjection countPages(@Param("dossierTemplateId") String dossierTemplateId);
@Query("select count(f) from FileEntity f inner join DossierEntity d on d.id = f.dossierId where " +
"f.hardDeletedTime is null and f.deleted is null and " +
"d.dossierTemplateId = :dossierTemplateId and " +
"d.softDeletedTime is null and d.hardDeletedTime is null and d.archivedTime is null")
@Query("select count(f) from FileEntity f inner join DossierEntity d on d.id = f.dossierId where " + "f.hardDeletedTime is null and f.deleted is null and " + "d.dossierTemplateId = :dossierTemplateId and " + "d.softDeletedTime is null and d.hardDeletedTime is null and d.archivedTime is null")
int countActiveFiles(@Param("dossierTemplateId") String dossierTemplateId);
@Transactional
@Modifying(clearAutomatically = true)
@Query(value = "update FileEntity f set f.numberOfOCRedPages = :numberOfOCRedPages, " +
"f.numberOfPagesToOCR = :numberOfPagesToOCR, f.ocrEndTime = :ocrEndTime, " +
"f.lastUpdated = :lastUpdated where f.id = :fileId")
@Query(value = "update FileEntity f set f.numberOfOCRedPages = :numberOfOCRedPages, " + "f.numberOfPagesToOCR = :numberOfPagesToOCR, f.ocrEndTime = :ocrEndTime, " + "f.lastUpdated = :lastUpdated where f.id = :fileId")
void updateOCRStatus(@Param("fileId") String fileId,
@Param("numberOfPagesToOCR") int numberOfPagesToOCR,
@Param("numberOfOCRedPages") int numberOfOCRedPages,
@ -353,17 +280,28 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Transactional
@Modifying(clearAutomatically = true)
@Query(value = "update FileEntity f set f.lastLayoutProcessed = :offsetDateTime where f.id = :fileId")
void updateLayoutProcessedTime(@Param("fileId") String fileId,
@Param("offsetDateTime") OffsetDateTime offsetDateTime);
void updateLayoutProcessedTime(@Param("fileId") String fileId, @Param("offsetDateTime") OffsetDateTime offsetDateTime);
/**
 * Looks up only the filename column for the given file id.
 *
 * @param fileId id of the file
 * @return the filename, or empty if no file with that id exists
 */
@Query("select f.filename from FileEntity f where f.id = :fileId")
Optional<String> getFilenameById(@Param("fileId") String fileId);
/**
 * Clears all OCR progress columns (start/end time, page counters) for the given file.
 * {@code clearAutomatically} evicts possibly stale entities from the persistence
 * context after the bulk update.
 *
 * @param fileId id of the file whose OCR state is reset
 */
@Modifying(clearAutomatically = true)
@Query("update FileEntity f set f.ocrStartTime = NULL, f.ocrEndTime = NULL, f.numberOfPagesToOCR = NULL, f.numberOfOCRedPages = NULL where f.id = :fileId")
void resetOcrStartAndEndDate(@Param("fileId") String fileId);
/**
 * Analysis flags must be checked once after the initial analysis,
 * or whenever a manual change/analysis has happened since the last flag calculation,
 * or a page has been viewed by the assignee since the last flag calculation.
 *
 * <p>The viewed-page condition is expressed as an {@code exists} subquery instead of an
 * implicit cross join ({@code from FileEntity f, ViewedPageEntity v}): with a cross join,
 * an empty viewed-pages table yields an empty cartesian product and therefore NO results
 * at all — files awaiting their first flag calculation would never be returned. The
 * subquery also avoids the row blow-up that the cross join only masked via {@code distinct}.
 *
 * @return a List of Tuples of (fileId at index 0, dossierId at index 1), where a flag calculation is necessary
 */
@Query("select distinct f.id, f.dossierId from FileEntity f where (f.lastFlagCalculation is NULL and f.lastProcessed is not NULL) or f.lastManualChangeDate > f.lastFlagCalculation or f.lastProcessed > f.lastFlagCalculation or exists (select v from ViewedPageEntity v where v.file.id = f.id and v.id.userId = f.assignee and v.viewedTime > f.lastFlagCalculation)")
List<Tuple> getFileIdentifiersWhereAnalysisFlagCalculationIsRequired();
}

View File

@ -0,0 +1,38 @@
package com.iqser.red.service.persistence.management.v1.processor.service.queue;
import org.springframework.amqp.core.Message;
import org.springframework.amqp.rabbit.annotation.RabbitHandler;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration;
import com.iqser.red.service.persistence.management.v1.processor.model.AnalysisFlagCalculationMessage;
import com.iqser.red.service.persistence.management.v1.processor.service.AnalysisFlagsCalculationService;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.experimental.FieldDefaults;
import lombok.extern.slf4j.Slf4j;
/**
 * Listens on the analysis-flag calculation queue and triggers (re)calculation of
 * analysis flags for the file referenced by each incoming message.
 */
@Slf4j
@Service
@RequiredArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class AnalysisFlagsCalculationMessageReceiver {

	AnalysisFlagsCalculationService analysisFlagsCalculationService;

	ObjectMapper mapper;

	/**
	 * Deserializes the JSON message body and delegates to the calculation service.
	 * {@code @SneakyThrows} rethrows deserialization failures unchanged, so the
	 * broker's normal redelivery/error handling applies.
	 * NOTE(review): no dedicated dead-letter queue is visible for this queue —
	 * a permanently unparsable message would presumably be redelivered; confirm.
	 *
	 * @param message raw AMQP message whose body is a JSON {@code AnalysisFlagCalculationMessage}
	 */
	@SneakyThrows
	@RabbitHandler
	@RabbitListener(queues = MessagingConfiguration.ANALYSIS_FLAG_CALCULATION_QUEUE)
	public void receiveAnalysisFlagCalculationMessage(Message message) {
		var payload = mapper.readValue(message.getBody(), AnalysisFlagCalculationMessage.class);
		var dossierId = payload.getDossierId();
		var fileId = payload.getFileId();
		log.info("Calculating Flags for fileId {} and dossierId {}", fileId, dossierId);
		analysisFlagsCalculationService.calculateFlags(dossierId, fileId);
	}
}

View File

@ -173,3 +173,5 @@ databaseChangeLog:
file: db/changelog/tenant/115-add-saas-migration-status-table.yaml
- include:
file: db/changelog/tenant/116-fix-null-fields-in-manual-redaction-table.yaml
- include:
file: db/changelog/tenant/117-add-last-flag-calculation-date-to-file.yaml

View File

@ -0,0 +1,12 @@
# Liquibase changeset 117: adds the "last_flag_calculation" column to the "file" table,
# recording when analysis flags were last computed for each file.
databaseChangeLog:
  - changeSet:
      id: add_flag_calculation_date
      author: kilian
      changes:
        - addColumn:
            columns:
              - column:
                  name: last_flag_calculation
                  type: TIMESTAMP WITHOUT TIME ZONE
                  # NOTE(review): existing rows are backfilled with "now", so files whose
                  # analysis finished before this migration will not match a
                  # "lastFlagCalculation is null" condition — confirm this is intended.
                  defaultValueComputed: CURRENT_TIMESTAMP
            tableName: file