RED-9850: duplicate key exception in mapper

This commit is contained in:
Kilian Schüttler 2024-09-06 14:23:55 +02:00
parent c2781e7ff4
commit 9859d3d797
6 changed files with 130 additions and 61 deletions

View File

@ -8,6 +8,7 @@ import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.ViewedPageEntity;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
import com.iqser.red.service.persistence.management.v1.processor.model.websocket.FileEventType;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.FileStatusPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.ViewedPagesPersistenceService;
@ -47,12 +48,20 @@ public class AnalysisFlagsCalculationService {
long startTime = System.currentTimeMillis();
var file = fileStatusPersistenceService.getStatus(fileId);
var entityLog = entityLogService.getEntityLog(dossierId, fileId, true);
FlagCalculationData data;
try {
data = getFlagCalculationData(dossierId, fileId);
} catch (NotFoundException e) {
log.warn("EntityLog does not exist, skipping flag calculation.");
fileStatusPersistenceService.setLastFlagCalculation(fileId, OffsetDateTime.now());
fileStatusPersistenceService.updateFlags(fileId, false, false, false, false, false, false);
return;
}
var viewedPagesForCurrentAssignee = viewedPagesPersistenceService.findViewedPages(fileId, file.getAssignee());
fileStatusPersistenceService.setLastFlagCalculation(fileId, OffsetDateTime.now());
Map<Integer, OffsetDateTime> viewedPages = viewedPagesForCurrentAssignee.stream()
Map<Integer, OffsetDateTime> viewedPages = data.viewedPagesForCurrentAssignee()
.stream()
.collect(Collectors.toMap(ViewedPageEntity::getPage, ViewedPageEntity::getViewedTime));
boolean hasRedactions = false;
@ -65,7 +74,7 @@ public class AnalysisFlagsCalculationService {
OffsetDateTime lastRedactionModification = null;
OffsetDateTime lastManualChangeDate = null;
for (EntityLogEntry entry : entityLog.getEntityLogEntry()) {
for (EntityLogEntry entry : data.entityLog().getEntityLogEntry()) {
String type = getType(entry.getType());
@ -112,7 +121,7 @@ public class AnalysisFlagsCalculationService {
OffsetDateTime viewedPage = entry.getPositions().isEmpty() ? null : viewedPages.get(entry.getPositions()
.get(0).getPageNumber());
if (file.getWorkflowStatus() != WorkflowStatus.APPROVED && lastChange != null && lastChange.getDateTime() != null && viewedPage != null && //
if (data.file().getWorkflowStatus() != WorkflowStatus.APPROVED && lastChange != null && lastChange.getDateTime() != null && viewedPage != null && //
viewedPage.isBefore(lastChange.getDateTime())) {
hasUpdates = true;
}
@ -123,30 +132,50 @@ public class AnalysisFlagsCalculationService {
}
log.info("Flag Calculations for file: {} took: {}ms", fileId, System.currentTimeMillis() - startTime);
if (file.isHasRedactions() == hasRedactions
&& file.isHasHints() == hasHints
&& file.isHasImages() == hasImages
&& file.isHasSuggestions() == hasSuggestions
&& file.isHasAnnotationComments() == hasComments
&& file.isHasUpdates() == hasUpdates) {
log.debug("Flag Calculations for file: {} took: {}ms", fileId, System.currentTimeMillis() - startTime);
if (data.file().isHasRedactions() == hasRedactions
&& data.file().isHasHints() == hasHints
&& data.file().isHasImages() == hasImages
&& data.file().isHasSuggestions() == hasSuggestions
&& data.file().isHasAnnotationComments() == hasComments
&& data.file().isHasUpdates() == hasUpdates) {
log.info("Nothing Changed for file: {}", fileId);
} else {
fileStatusPersistenceService.updateFlags(fileId, hasRedactions, hasHints, hasImages, hasSuggestions, hasComments, hasUpdates);
}
if (lastRedactionModification != null && (file.getRedactionModificationDate() == null || file.getRedactionModificationDate().isBefore(lastRedactionModification))) {
if (lastRedactionModification != null && (data.file().getRedactionModificationDate() == null || data.file()
.getRedactionModificationDate()
.isBefore(lastRedactionModification))) {
fileStatusPersistenceService.setLastRedactionModificationDateForFile(fileId, lastRedactionModification);
}
if (lastManualChangeDate != null && (file.getLastManualChangeDate() == null || file.getLastManualChangeDate().isBefore(lastManualChangeDate))) {
if (lastManualChangeDate != null && (data.file().getLastManualChangeDate() == null || data.file().getLastManualChangeDate().isBefore(lastManualChangeDate))) {
fileStatusPersistenceService.setLastManualChangeDate(fileId, lastManualChangeDate);
}
fileStatusPersistenceService.setLastFlagCalculation(fileId, OffsetDateTime.now());
websocketService.sendFileEvent(dossierId, fileId, FileEventType.UPDATE);
}
/**
 * Collects the persisted inputs that one flag-calculation run needs.
 *
 * @param dossierId dossier the file belongs to
 * @param fileId    file whose analysis flags are being recalculated
 * @return a {@link FlagCalculationData} snapshot of the file status, its
 *         entity log and the pages viewed by the file's current assignee
 */
private FlagCalculationData getFlagCalculationData(String dossierId, String fileId) {
    var status = fileStatusPersistenceService.getStatus(fileId);
    // third argument true — presumably "include unprocessed entries"; NOTE(review): confirm flag semantics
    var currentEntityLog = entityLogService.getEntityLog(dossierId, fileId, true);
    var pagesViewedByAssignee = viewedPagesPersistenceService.findViewedPages(fileId, status.getAssignee());
    return new FlagCalculationData(status, currentEntityLog, pagesViewedByAssignee);
}
// Immutable carrier for everything a flag-calculation run reads, fetched once
// up front by getFlagCalculationData() so a missing EntityLog (NotFoundException)
// can be handled before any flags are touched.
// Fully-qualified component types keep this nested record from requiring
// additional imports in the enclosing class.
private record FlagCalculationData(
com.iqser.red.service.persistence.management.v1.processor.entity.dossier.FileEntity file,
com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.EntityLog entityLog,
java.util.List<ViewedPageEntity> viewedPagesForCurrentAssignee
) {
}
private void addIdsToTrace(String dossierId, String fileId) {
if (observationRegistry.getCurrentObservation() != null) {

View File

@ -74,43 +74,41 @@ public class EntityLogMergeService {
Map<String, List<BaseAnnotation>> allManualChanges = groupManualChanges(unprocessedManualRedactions);
List<EntityLogEntry> entityLogEntries = new LinkedList<>(entityLog.getEntityLogEntry());
List<EntityLogEntry> mergedEntityLogEntries = merge(unprocessedManualRedactions, entityLog.getEntityLogEntry(), dossier, analysisNumber, allManualChanges);
merge(unprocessedManualRedactions, entityLog.getEntityLogEntry(), dossier, analysisNumber, entityLogEntries, allManualChanges);
entityLog.setEntityLogEntry(entityLogEntries);
entityLog.setEntityLogEntry(mergedEntityLogEntries);
return entityLog;
}
@Observed(name = "EntityLogMergeService", contextualName = "merge-entity-log-entries")
public List<EntityLogEntry> mergeEntityLogEntries(ManualRedactions unprocessedManualRedactions, List<String> entityLogEntryIds, DossierEntity dossier, String fileId) {
public List<EntityLogEntry> mergeEntityLogEntries(ManualRedactions unprocessedManualRedactions, List<EntityLogEntry> entityLogEntries, DossierEntity dossier, String fileId) {
final int analysisNumber = entityLogMongoService.findLatestAnalysisNumber(dossier.getId(), fileId)
.orElseThrow(() -> new BadRequestException("Can't load analysis number"));
Map<String, List<BaseAnnotation>> allManualChanges = groupManualChanges(unprocessedManualRedactions);
List<EntityLogEntry> entityLogEntries = entityLogMongoService.findEntityLogEntriesByIds(dossier.getId(), fileId, entityLogEntryIds);
merge(unprocessedManualRedactions, entityLogEntries, dossier, analysisNumber, entityLogEntries, allManualChanges);
return entityLogEntries;
return merge(unprocessedManualRedactions, entityLogEntries, dossier, analysisNumber, allManualChanges);
}
private void merge(ManualRedactions unprocessedManualRedactions,
List<EntityLogEntry> entityLog,
DossierEntity dossier,
int analysisNumber,
List<EntityLogEntry> entityLogEntries,
Map<String, List<BaseAnnotation>> allManualChanges) {
private List<EntityLogEntry> merge(ManualRedactions unprocessedManualRedactions,
List<EntityLogEntry> entityLogEntries,
DossierEntity dossier,
int analysisNumber,
Map<String, List<BaseAnnotation>> allManualChanges) {
Map<String, EntityLogEntry> addedLocalManualEntries = buildUnprocessedLocalManualRedactions(unprocessedManualRedactions, entityLog, dossier, analysisNumber)//
List<EntityLogEntry> mergedEntityLogEntries = new LinkedList<>(entityLogEntries);
Map<String, EntityLogEntry> addedLocalManualEntries = buildUnprocessedLocalManualRedactions(unprocessedManualRedactions, entityLogEntries, dossier, analysisNumber)//
.collect(Collectors.toMap(EntityLogEntry::getId, Function.identity()));
entityLogEntries.addAll(addedLocalManualEntries.values());
buildPendingDictionaryChanges(unprocessedManualRedactions).forEach(entityLogEntries::add);
processEntityLogEntries(dossier, entityLogEntries, addedLocalManualEntries, analysisNumber, allManualChanges);
mergedEntityLogEntries.addAll(addedLocalManualEntries.values());
buildPendingDictionaryChanges(unprocessedManualRedactions).forEach(mergedEntityLogEntries::add);
processEntityLogEntries(dossier, mergedEntityLogEntries, addedLocalManualEntries, analysisNumber, allManualChanges);
return mergedEntityLogEntries;
}
@ -342,7 +340,7 @@ public class EntityLogMergeService {
entityLogEntry.setState(EntryState.IGNORED);
//special case, only for local add, other local changes and then remove
if (!entityLogEntry.getEngines().isEmpty() && Set.of(Engine.MANUAL).containsAll(entityLogEntry.getEngines())) {
if (entityLogEntry.getEngines().equals(Set.of(Engine.MANUAL))) {
entityLogEntry.setState(EntryState.REMOVED);
}
entityLogEntry.getEngines().add(Engine.MANUAL);

View File

@ -37,7 +37,7 @@ public class EntityLogMongoWrapperService {
if (includeUnprocessed) {
DossierEntity dossier = dossierService.getDossierById(dossierId);
ManualRedactions unprocessedManualRedactions = manualRedactionProviderService.getManualRedactions(fileId, ManualChangesQueryOptions.unprocessedOnly(), ids);
entityLogEntries = entityLogMergeService.mergeEntityLogEntries(unprocessedManualRedactions, entityLogEntries.stream().map(EntityLogEntry::getId).toList(), dossier, fileId);
entityLogEntries = entityLogMergeService.mergeEntityLogEntries(unprocessedManualRedactions, entityLogEntries, dossier, fileId);
}
return entityLogEntries;
}

View File

@ -8,19 +8,17 @@ import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.ManualRecategorizationEntity;
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
import com.iqser.red.service.persistence.management.v1.processor.service.EntityLogMongoWrapperService;
import com.iqser.red.service.persistence.service.v1.api.shared.model.RequestEntryPair;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.Engine;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.EntityLog;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.EntityLogEntry;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.EntryState;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.EntryType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.AddRedactionRequest;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.ForceRedactionRequest;
@ -95,15 +93,24 @@ public class ManualRedactionMapper {
.toList(),
includeUnprocessed);
Map<String, EntityLogEntry> iddToEntityLogEntryMap = entityLogEntries.stream()
.collect(Collectors.toMap(EntityLogEntry::getId, Function.identity()));
Map<String, List<EntityLogEntry>> idsToEntityLogEntryMap = entityLogEntries.stream()
.collect(Collectors.groupingBy(EntityLogEntry::getId));
List<RequestEntryPair<RemoveRedactionRequest>> requests = new ArrayList<>();
for (var removeRedactionRequest : removeRedactionRequests) {
EntityLogEntry entityLogEntry = iddToEntityLogEntryMap.get(removeRedactionRequest.getAnnotationId());
if (!idsToEntityLogEntryMap.containsKey(removeRedactionRequest.getAnnotationId())) {
continue;
}
List<EntityLogEntry> entityLogEntriesWithId = idsToEntityLogEntryMap.get(removeRedactionRequest.getAnnotationId());
for (EntityLogEntry entityLogEntry : entityLogEntriesWithId) {
if (invalidDictionaryRequest(removeRedactionRequest, entityLogEntry) || invalidLocalRequest(removeRedactionRequest, entityLogEntry)) {
continue;
}
if (entityLogEntry != null) {
var request = RemoveRedactionRequest.builder()
.annotationId(removeRedactionRequest.getAnnotationId())
.user(KeycloakSecurity.getUserId())
@ -127,6 +134,24 @@ public class ManualRedactionMapper {
}
/**
 * A plain (non-dictionary) removal is invalid when the targeted entry is
 * already in state REMOVED — the request would be a no-op.
 */
private static boolean invalidLocalRequest(RemoveRedactionRequestModel removeRedactionRequest, EntityLogEntry entityLogEntry) {
    if (isDictionaryRequest(removeRedactionRequest)) {
        return false;
    }
    return entityLogEntry.getState().equals(EntryState.REMOVED);
}
/**
 * True when the removal is dictionary-scoped, i.e. it also removes the entry
 * from the dictionary or from all dossiers.
 */
private static boolean isDictionaryRequest(RemoveRedactionRequestModel removeRedactionRequest) {
    if (removeRedactionRequest.isRemoveFromDictionary()) {
        return true;
    }
    return removeRedactionRequest.isRemoveFromAllDossiers();
}
/**
 * A dictionary-scoped removal is invalid while the targeted entry is still
 * PENDING — it has not been applied yet.
 */
private static boolean invalidDictionaryRequest(RemoveRedactionRequestModel removeRedactionRequest, EntityLogEntry entityLogEntry) {
    if (!isDictionaryRequest(removeRedactionRequest)) {
        return false;
    }
    return entityLogEntry.getState().equals(EntryState.PENDING);
}
public List<RequestEntryPair<ForceRedactionRequest>> toForceRedactionRequestList(String dossierId,
String fileId,
Set<ForceRedactionRequestModel> forceRedactionRequests,
@ -199,16 +224,25 @@ public class ManualRedactionMapper {
.toList(),
includeUnprocessed);
Map<String, EntityLogEntry> annotationIdToEntityLogEntryMap = entityLogEntries.stream()
.collect(Collectors.toMap(EntityLogEntry::getId, Function.identity()));
Map<String, List<EntityLogEntry>> annotationIdToEntityLogEntryMap = entityLogEntries.stream()
.collect(Collectors.groupingBy(EntityLogEntry::getId));
List<RequestEntryPair<RecategorizationRequest>> requests = new ArrayList<>();
for (RecategorizationRequestModel recategorizationRequest : recategorizationRequests) {
EntityLogEntry entityLogEntry = annotationIdToEntityLogEntryMap.get(recategorizationRequest.getAnnotationId());
List<EntityLogEntry> entityLogEntriesById = annotationIdToEntityLogEntryMap.get(recategorizationRequest.getAnnotationId());
if (entityLogEntriesById == null) {
continue;
}
for (EntityLogEntry entityLogEntry : entityLogEntriesById) {
if (invalidDictionaryRequest(recategorizationRequest, entityLogEntry) || invalidLocalRequest(recategorizationRequest, entityLogEntry)) {
continue;
}
if (entityLogEntry != null) {
String changedValue;
String changedTypeId;
@ -259,6 +293,24 @@ public class ManualRedactionMapper {
}
/**
 * A plain (non-dictionary) recategorization is invalid when the targeted
 * entry is already in state REMOVED — there is nothing left to recategorize.
 *
 * Parameter renamed from "removeRedactionRequest": this overload takes a
 * recategorization request; the old name was copied from the removal variant.
 */
private static boolean invalidLocalRequest(RecategorizationRequestModel recategorizationRequest, EntityLogEntry entityLogEntry) {
    return !isDictionaryRequest(recategorizationRequest) && entityLogEntry.getState().equals(EntryState.REMOVED);
}
/**
 * True when the recategorization is dictionary-scoped, i.e. it also adds the
 * entry to the dictionary or to all dossiers.
 *
 * Parameter renamed from "removeRedactionRequest": this overload takes a
 * recategorization request; the old name was copied from the removal variant.
 */
private static boolean isDictionaryRequest(RecategorizationRequestModel recategorizationRequest) {
    return recategorizationRequest.isAddToDictionary() || recategorizationRequest.isAddToAllDossiers();
}
/**
 * A dictionary-scoped recategorization is invalid while the targeted entry is
 * still PENDING — it has not been applied yet.
 *
 * Parameter renamed from "removeRedactionRequest": this overload takes a
 * recategorization request; the old name was copied from the removal variant.
 */
private static boolean invalidDictionaryRequest(RecategorizationRequestModel recategorizationRequest, EntityLogEntry entityLogEntry) {
    return isDictionaryRequest(recategorizationRequest) && entityLogEntry.getState().equals(EntryState.PENDING);
}
private void checkSectionLength(String changedSection) {
if (changedSection == null) {
@ -341,16 +393,6 @@ public class ManualRedactionMapper {
}
/**
 * Returns the first entity-log entry whose id equals the given annotation id.
 *
 * @throws NotFoundException when no entry with that id exists in the log
 */
private static EntityLogEntry getEntityLogEntry(EntityLog entityLog, String annotationId) {
    for (EntityLogEntry entry : entityLog.getEntityLogEntry()) {
        if (entry.getId().equals(annotationId)) {
            return entry;
        }
    }
    throw new NotFoundException("Annotation does not exist in entity log.");
}
public static DictionaryEntryType getDictionaryEntryType(EntityLogEntry entityLogEntry) {
if (entityLogEntry.getEntryType().equals(EntryType.FALSE_RECOMMENDATION)) {

View File

@ -382,7 +382,7 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
select distinct f.id, f.dossierId
from FileEntity f
left join ViewedPageEntity v on f.id = v.file.id and v.id.userId = f.assignee
where f.deleted is NULL and f.hardDeletedTime is NULL
where f.deleted is NULL and f.hardDeletedTime is NULL and f.processingStatus = 'PROCESSED'
and ((f.lastFlagCalculation is NULL and f.lastProcessed is not NULL) or f.lastManualChangeDate > f.lastFlagCalculation or f.lastProcessed > f.lastFlagCalculation or f.lastFlagCalculation < v.viewedTime)
""")
List<Tuple> getFileIdentifiersWhereAnalysisFlagCalculationIsRequired();

View File

@ -32,7 +32,7 @@ public class AnalysisFlagsCalculationMessageReceiver {
public void receiveAnalysisFlagCalculationMessage(Message message) {
var analysisFlagCalculationMessage = mapper.readValue(message.getBody(), AnalysisFlagCalculationMessage.class);
log.info("Calculating Flags for fileId {} and dossierId {}", analysisFlagCalculationMessage.getFileId(), analysisFlagCalculationMessage.getDossierId());
log.debug("Calculating Flags for fileId {} and dossierId {}", analysisFlagCalculationMessage.getFileId(), analysisFlagCalculationMessage.getDossierId());
analysisFlagsCalculationService.calculateFlags(analysisFlagCalculationMessage.getDossierId(), analysisFlagCalculationMessage.getFileId());
}