RED-7384: various fixes for migration

This commit is contained in:
Kilian Schüttler 2024-02-13 09:41:03 +01:00
parent 94a75aa100
commit 5f598ff70c
22 changed files with 5212 additions and 10153 deletions

View File

@ -12,7 +12,6 @@ import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.AnnotationStatus;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.ManualRedactions;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.entitymapped.BaseAnnotation;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.entitymapped.IdRemoval;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.entitymapped.ManualForceRedaction;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.entitymapped.ManualLegalBasisChange;
@ -43,9 +42,9 @@ public class LegacyRedactionLogMergeService {
private final DictionaryService dictionaryService;
public RedactionLog mergeManualChanges(RedactionLog redactionLog, ManualRedactions manualRedactions, String dossierTemplateId) {
public RedactionLog addManualAddEntriesAndRemoveSkippedImported(RedactionLog redactionLog, ManualRedactions manualRedactions, String dossierTemplateId) {
var skippedImportedRedactions = new HashSet<>();
Set<String> skippedImportedRedactions = new HashSet<>();
log.info("Merging Redaction log with manual redactions");
if (manualRedactions != null) {
@ -57,13 +56,14 @@ public class LegacyRedactionLogMergeService {
for (RedactionLogEntry entry : redactionLog.getRedactionLogEntry()) {
processRedactionLogEntry(manualRedactionWrappers.stream()
.filter(ManualRedactionWrapper::isApproved)
.filter(mr -> entry.getId().equals(mr.getId()))
.collect(Collectors.toList()), entry, dossierTemplateId);
if (entry.isImported() && !entry.isRedacted()) {
skippedImportedRedactions.add(entry.getId());
if (entry.isImported()) {
processRedactionLogEntry(manualRedactionWrappers.stream()
.filter(ManualRedactionWrapper::isApproved)
.filter(mr -> entry.getId().equals(mr.getId()))
.collect(Collectors.toList()), entry, dossierTemplateId);
if (!entry.isRedacted()) {
skippedImportedRedactions.add(entry.getId());
}
}
}
@ -97,35 +97,40 @@ public class LegacyRedactionLogMergeService {
List<ManualRedactionWrapper> manualRedactionWrappers = new ArrayList<>();
manualRedactions.getRecategorizations().forEach(item -> {
if (item.getSoftDeletedTime() == null) {
manualRedactionWrappers.add(new ManualRedactionWrapper(item.getAnnotationId(), item.getRequestDate(), item, item.isApproved()));
}
});
manualRedactions.getRecategorizations()
.forEach(item -> {
if (item.getSoftDeletedTime() == null) {
manualRedactionWrappers.add(new ManualRedactionWrapper(item.getAnnotationId(), item.getRequestDate(), item, item.isApproved()));
}
});
manualRedactions.getIdsToRemove().forEach(item -> {
if (item.getSoftDeletedTime() == null) {
manualRedactionWrappers.add(new ManualRedactionWrapper(item.getAnnotationId(), item.getRequestDate(), item, item.isApproved()));
}
});
manualRedactions.getIdsToRemove()
.forEach(item -> {
if (item.getSoftDeletedTime() == null) {
manualRedactionWrappers.add(new ManualRedactionWrapper(item.getAnnotationId(), item.getRequestDate(), item, item.isApproved()));
}
});
manualRedactions.getForceRedactions().forEach(item -> {
if (item.getSoftDeletedTime() == null) {
manualRedactionWrappers.add(new ManualRedactionWrapper(item.getAnnotationId(), item.getRequestDate(), item, item.isApproved()));
}
});
manualRedactions.getForceRedactions()
.forEach(item -> {
if (item.getSoftDeletedTime() == null) {
manualRedactionWrappers.add(new ManualRedactionWrapper(item.getAnnotationId(), item.getRequestDate(), item, item.isApproved()));
}
});
manualRedactions.getLegalBasisChanges().forEach(item -> {
if (item.getSoftDeletedTime() == null) {
manualRedactionWrappers.add(new ManualRedactionWrapper(item.getAnnotationId(), item.getRequestDate(), item, item.isApproved()));
}
});
manualRedactions.getLegalBasisChanges()
.forEach(item -> {
if (item.getSoftDeletedTime() == null) {
manualRedactionWrappers.add(new ManualRedactionWrapper(item.getAnnotationId(), item.getRequestDate(), item, item.isApproved()));
}
});
manualRedactions.getResizeRedactions().forEach(item -> {
if (item.getSoftDeletedTime() == null) {
manualRedactionWrappers.add(new ManualRedactionWrapper(item.getAnnotationId(), item.getRequestDate(), item, item.isApproved()));
}
});
manualRedactions.getResizeRedactions()
.forEach(item -> {
if (item.getSoftDeletedTime() == null) {
manualRedactionWrappers.add(new ManualRedactionWrapper(item.getAnnotationId(), item.getRequestDate(), item, item.isApproved()));
}
});
Collections.sort(manualRedactionWrappers);
@ -227,7 +232,7 @@ public class LegacyRedactionLogMergeService {
redactionLogEntry.getManualChanges()
.add(ManualChange.from(manualRemoval)
.withManualRedactionType(manualRemoval.isRemoveFromDictionary() ? ManualRedactionType.REMOVE_FROM_DICTIONARY : ManualRedactionType.REMOVE_LOCALLY));
.withManualRedactionType(manualRemoval.isRemoveFromDictionary() ? ManualRedactionType.REMOVE_FROM_DICTIONARY : ManualRedactionType.REMOVE_LOCALLY));
}
@ -364,7 +369,9 @@ public class LegacyRedactionLogMergeService {
return false;
}
return (!manualRedactionEntry.isAddToDictionary() && !manualRedactionEntry.isAddToDossierDictionary()) || ((manualRedactionEntry.isAddToDictionary() || manualRedactionEntry.isAddToDossierDictionary()) && manualRedactionEntry.getProcessedDate() == null);
return (!manualRedactionEntry.isAddToDictionary() && !manualRedactionEntry.isAddToDossierDictionary()) || ((manualRedactionEntry.isAddToDictionary()
|| manualRedactionEntry.isAddToDossierDictionary())
&& manualRedactionEntry.getProcessedDate() == null);
}

View File

@ -58,10 +58,10 @@ public class MigrationMessageReceiver {
if (redactionLog.getAnalysisVersion() == 0) {
redactionLog = legacyVersion0MigrationService.mergeDuplicateAnnotationIds(redactionLog);
} else if (migrationRequest.getManualRedactions() != null) {
redactionLog = legacyRedactionLogMergeService.mergeManualChanges(redactionLog, migrationRequest.getManualRedactions(), migrationRequest.getDossierTemplateId());
redactionLog = legacyRedactionLogMergeService.addManualAddEntriesAndRemoveSkippedImported(redactionLog, migrationRequest.getManualRedactions(), migrationRequest.getDossierTemplateId());
}
MigratedEntityLog migratedEntityLog = redactionLogToEntityLogMigrationService.migrate(redactionLog, document, migrationRequest.getDossierTemplateId());
MigratedEntityLog migratedEntityLog = redactionLogToEntityLogMigrationService.migrate(redactionLog, document, migrationRequest.getDossierTemplateId(), migrationRequest.getManualRedactions());
redactionStorageService.storeObject(migrationRequest.getDossierId(), migrationRequest.getFileId(), FileType.ENTITY_LOG, migratedEntityLog.getEntityLog());
redactionStorageService.storeObject(migrationRequest.getDossierId(), migrationRequest.getFileId(), FileType.MIGRATED_IDS, migratedEntityLog.getMigratedIds());

View File

@ -2,6 +2,7 @@ package com.iqser.red.service.redaction.v1.server.migration;
import java.awt.geom.Rectangle2D;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
@ -16,11 +17,15 @@ import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.EntityLog;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.EntityLogLegalBasis;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.migration.MigratedIds;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.ManualRedactions;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.entitymapped.BaseAnnotation;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.entitymapped.ManualResizeRedaction;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.ManualRedactionType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.Rectangle;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.RedactionLog;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.RedactionLogEntry;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.RedactionLogLegalBasis;
import com.iqser.red.service.redaction.v1.model.MigrationRequest;
import com.iqser.red.service.redaction.v1.server.model.PrecursorEntity;
import com.iqser.red.service.redaction.v1.server.model.MigratedEntityLog;
import com.iqser.red.service.redaction.v1.server.model.MigrationEntity;
@ -33,6 +38,7 @@ import com.iqser.red.service.redaction.v1.server.model.document.nodes.Image;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.ImageType;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.SemanticNode;
import com.iqser.red.service.redaction.v1.server.service.DictionaryService;
import com.iqser.red.service.redaction.v1.server.service.ManualChangesApplicationService;
import com.iqser.red.service.redaction.v1.server.service.document.EntityCreationService;
import com.iqser.red.service.redaction.v1.server.service.document.EntityEnrichmentService;
import com.iqser.red.service.redaction.v1.server.service.document.EntityFindingUtility;
@ -55,12 +61,16 @@ public class RedactionLogToEntityLogMigrationService {
EntityFindingUtility entityFindingUtility;
EntityEnrichmentService entityEnrichmentService;
DictionaryService dictionaryService;
ManualChangesApplicationService manualChangesApplicationService;
public MigratedEntityLog migrate(RedactionLog redactionLog, Document document, String dossierTemplateId) {
public MigratedEntityLog migrate(RedactionLog redactionLog, Document document, String dossierTemplateId, ManualRedactions manualRedactions) {
List<MigrationEntity> entitiesToMigrate = calculateMigrationEntitiesFromRedactionLog(redactionLog, document, dossierTemplateId);
MigratedIds migratedIds = entitiesToMigrate.stream().collect(new MigratedIdsCollector());
MigratedIds migratedIds = entitiesToMigrate.stream()
.collect(new MigratedIdsCollector());
applyManualChanges(entitiesToMigrate, manualRedactions);
EntityLog entityLog = new EntityLog();
entityLog.setAnalysisNumber(redactionLog.getAnalysisNumber());
@ -69,15 +79,20 @@ public class RedactionLogToEntityLogMigrationService {
entityLog.setDossierDictionaryVersion(redactionLog.getDossierDictionaryVersion());
entityLog.setLegalBasisVersion(redactionLog.getLegalBasisVersion());
entityLog.setAnalysisVersion(redactionLog.getAnalysisVersion());
entityLog.setLegalBasis(redactionLog.getLegalBasis().stream().map(RedactionLogToEntityLogMigrationService::toEntityLogLegalBasis).toList());
entityLog.setLegalBasis(redactionLog.getLegalBasis()
.stream()
.map(RedactionLogToEntityLogMigrationService::toEntityLogLegalBasis)
.toList());
Map<String, String> oldToNewIDMapping = migratedIds.buildOldToNewMapping();
entityLog.setEntityLogEntry(entitiesToMigrate.stream().map(migrationEntity -> migrationEntity.toEntityLogEntry(oldToNewIDMapping)).toList());
entityLog.setEntityLogEntry(entitiesToMigrate.stream()
.map(migrationEntity -> migrationEntity.toEntityLogEntry(oldToNewIDMapping))
.toList());
if (getNumberOfApprovedEntries(redactionLog) != entityLog.getEntityLogEntry().size()) {
String message = String.format("Not all entities have been found during the migration redactionLog has %d entries and new entityLog %d",
redactionLog.getRedactionLogEntry().size(),
entityLog.getEntityLogEntry().size());
redactionLog.getRedactionLogEntry().size(),
entityLog.getEntityLogEntry().size());
log.error(message);
throw new AssertionError(message);
}
@ -91,6 +106,41 @@ public class RedactionLogToEntityLogMigrationService {
}
private void applyManualChanges(List<MigrationEntity> entitiesToMigrate, ManualRedactions manualRedactions) {
if (manualRedactions == null) {
return;
}
Map<String, List<BaseAnnotation>> manualChangesPerAnnotationId = Stream.of(manualRedactions.getIdsToRemove(),
manualRedactions.getEntriesToAdd(),
manualRedactions.getForceRedactions(),
manualRedactions.getResizeRedactions(),
manualRedactions.getLegalBasisChanges(),
manualRedactions.getRecategorizations(),
manualRedactions.getLegalBasisChanges())
.flatMap(Collection::stream)
.collect(Collectors.groupingBy(BaseAnnotation::getAnnotationId));
entitiesToMigrate.forEach(migrationEntity -> manualChangesPerAnnotationId.getOrDefault(migrationEntity.getOldId(), Collections.emptyList())
.forEach(manualChange -> {
if (manualChange instanceof ManualResizeRedaction manualResizeRedaction && migrationEntity.getMigratedEntity() instanceof TextEntity textEntity) {
ManualResizeRedaction migratedManualResizeRedaction = ManualResizeRedaction.builder()
.positions(manualResizeRedaction.getPositions())
.annotationId(migrationEntity.getNewId())
.updateDictionary(manualResizeRedaction.getUpdateDictionary())
.addToAllDossiers(manualResizeRedaction.isAddToAllDossiers())
.textAfter(manualResizeRedaction.getTextAfter())
.textBefore(manualResizeRedaction.getTextBefore())
.build();
manualChangesApplicationService.resize(textEntity, migratedManualResizeRedaction);
} else {
migrationEntity.getMigratedEntity().getManualOverwrite().addChange(manualChange);
}
}));
}
private static long getNumberOfApprovedEntries(RedactionLog redactionLog) {
return redactionLog.getRedactionLogEntry().size();
@ -101,7 +151,9 @@ public class RedactionLogToEntityLogMigrationService {
List<MigrationEntity> images = getImageBasedMigrationEntities(redactionLog, document, dossierTemplateId);
List<MigrationEntity> textMigrationEntities = getTextBasedMigrationEntities(redactionLog, document, dossierTemplateId);
return Stream.of(textMigrationEntities.stream(), images.stream()).flatMap(Function.identity()).toList();
return Stream.of(textMigrationEntities.stream(), images.stream())
.flatMap(Function.identity())
.toList();
}
@ -113,18 +165,26 @@ public class RedactionLogToEntityLogMigrationService {
private List<MigrationEntity> getImageBasedMigrationEntities(RedactionLog redactionLog, Document document, String dossierTemplateId) {
List<Image> images = document.streamAllImages().collect(Collectors.toList());
List<Image> images = document.streamAllImages()
.collect(Collectors.toList());
List<RedactionLogEntry> redactionLogImages = redactionLog.getRedactionLogEntry().stream().filter(RedactionLogEntry::isImage).toList();
List<RedactionLogEntry> redactionLogImages = redactionLog.getRedactionLogEntry()
.stream()
.filter(RedactionLogEntry::isImage)
.toList();
List<MigrationEntity> migrationEntities = new LinkedList<>();
for (RedactionLogEntry redactionLogImage : redactionLogImages) {
List<RectangleWithPage> imagePositions = redactionLogImage.getPositions().stream().map(RectangleWithPage::fromRedactionLogRectangle).toList();
List<RectangleWithPage> imagePositions = redactionLogImage.getPositions()
.stream()
.map(RectangleWithPage::fromRedactionLogRectangle)
.toList();
assert imagePositions.size() == 1;
Optional<Image> optionalClosestImage = images.stream()
.filter(image -> image.onPage(redactionLogImage.getPositions().get(0).getPage()))
.min(Comparator.comparingDouble(image -> entityFindingUtility.calculateDistance(image.getPosition(), imagePositions.get(0).rectangle2D())))
.filter(image -> entityFindingUtility.calculateDistance(image.getPosition(), imagePositions.get(0).rectangle2D()) <= MATCH_THRESHOLD);
.filter(image -> image.onPage(redactionLogImage.getPositions()
.get(0).getPage()))
.min(Comparator.comparingDouble(image -> EntityFindingUtility.calculateDistance(image.getPosition(), imagePositions.get(0).rectangle2D())))
.filter(image -> EntityFindingUtility.calculateDistance(image.getPosition(), imagePositions.get(0).rectangle2D()) <= MATCH_THRESHOLD);
Image closestImage;
if (optionalClosestImage.isEmpty()) { // if no fitting image can be found create a new one with the previous values!
@ -135,6 +195,7 @@ public class RedactionLogToEntityLogMigrationService {
}
String ruleIdentifier;
String reason = Optional.ofNullable(redactionLogImage.getReason()).orElse("");
if (redactionLogImage.getMatchedRule().isBlank() || redactionLogImage.getMatchedRule() == null) {
ruleIdentifier = "OLDIMG.0.0";
} else {
@ -142,11 +203,11 @@ public class RedactionLogToEntityLogMigrationService {
}
if (redactionLogImage.lastChangeIsRemoved()) {
closestImage.remove(ruleIdentifier, redactionLogImage.getReason());
closestImage.remove(ruleIdentifier, reason);
} else if (redactionLogImage.isRedacted()) {
closestImage.apply(ruleIdentifier, redactionLogImage.getReason(), redactionLogImage.getLegalBasis());
closestImage.apply(ruleIdentifier, reason, redactionLogImage.getLegalBasis());
} else {
closestImage.skip(ruleIdentifier, redactionLogImage.getReason());
closestImage.skip(ruleIdentifier, reason);
}
migrationEntities.add(new MigrationEntity(null, redactionLogImage, closestImage, redactionLogImage.getId(), closestImage.getId()));
}
@ -160,13 +221,24 @@ public class RedactionLogToEntityLogMigrationService {
.documentTree(document.getDocumentTree())
.imageType(ImageType.fromString(redactionLogImage.getType()))
.transparent(redactionLogImage.isImageHasTransparency())
.page(document.getPages().stream().filter(p -> p.getNumber() == redactionLogImage.getPositions().get(0).getPage()).findFirst().orElseThrow())
.position(toRectangle2D(redactionLogImage.getPositions().get(0)))
.page(document.getPages()
.stream()
.filter(p -> p.getNumber() == redactionLogImage.getPositions()
.get(0).getPage())
.findFirst()
.orElseThrow())
.position(toRectangle2D(redactionLogImage.getPositions()
.get(0)))
.build();
List<Integer> treeId = document.getDocumentTree().createNewMainEntryAndReturnId(image);
image.setTreeId(treeId);
image.setId(IdBuilder.buildId(image.getPages(), image.getBBox().values().stream().toList(), "", ""));
image.setId(IdBuilder.buildId(image.getPages(),
image.getBBox().values()
.stream()
.toList(),
"",
""));
return image;
}
@ -186,33 +258,37 @@ public class RedactionLogToEntityLogMigrationService {
.map(entry -> MigrationEntity.fromRedactionLogEntry(entry, dictionaryService.isHint(entry.getType(), dossierTemplateId)))
.peek(migrationEntity -> {
if (migrationEntity.getPrecursorEntity().getEntityType().equals(EntityType.HINT) &&//
!migrationEntity.getRedactionLogEntry().isHint() &&//
!migrationEntity.getRedactionLogEntry().isRedacted()) {
migrationEntity.getPrecursorEntity().ignore(migrationEntity.getPrecursorEntity().getRuleIdentifier(), migrationEntity.getRedactionLogEntry().getReason());
!migrationEntity.getRedactionLogEntry().isHint() &&//
!migrationEntity.getRedactionLogEntry().isRedacted()) {
migrationEntity.getPrecursorEntity().ignore(migrationEntity.getPrecursorEntity().getRuleIdentifier(), migrationEntity.getPrecursorEntity().getReason());
} else if (migrationEntity.getRedactionLogEntry().lastChangeIsRemoved()) {
migrationEntity.getPrecursorEntity().remove(migrationEntity.getPrecursorEntity().getRuleIdentifier(), migrationEntity.getRedactionLogEntry().getReason());
migrationEntity.getPrecursorEntity().remove(migrationEntity.getPrecursorEntity().getRuleIdentifier(), migrationEntity.getPrecursorEntity().getReason());
} else if (lastManualChangeIsRemove(migrationEntity)) {
migrationEntity.getPrecursorEntity().ignore(migrationEntity.getPrecursorEntity().getRuleIdentifier(), migrationEntity.getPrecursorEntity().getReason());
} else if (migrationEntity.getPrecursorEntity().isApplied() && migrationEntity.getRedactionLogEntry().isRecommendation()) {
migrationEntity.getPrecursorEntity().skip(migrationEntity.getPrecursorEntity().getRuleIdentifier(), migrationEntity.getPrecursorEntity().getReason());
migrationEntity.getPrecursorEntity()
.skip(migrationEntity.getPrecursorEntity().getRuleIdentifier(), migrationEntity.getPrecursorEntity().getReason());
} else if (migrationEntity.getPrecursorEntity().isApplied()) {
migrationEntity.getPrecursorEntity()
.apply(migrationEntity.getPrecursorEntity().getRuleIdentifier(),
migrationEntity.getPrecursorEntity().getReason(),
migrationEntity.getPrecursorEntity().getLegalBasis());
migrationEntity.getPrecursorEntity().getReason(),
migrationEntity.getPrecursorEntity().getLegalBasis());
} else {
migrationEntity.getPrecursorEntity().skip(migrationEntity.getPrecursorEntity().getRuleIdentifier(), migrationEntity.getPrecursorEntity().getReason());
migrationEntity.getPrecursorEntity()
.skip(migrationEntity.getPrecursorEntity().getRuleIdentifier(), migrationEntity.getPrecursorEntity().getReason());
}
})
.toList();
Map<String, List<TextEntity>> tempEntitiesByValue = entityFindingUtility.findAllPossibleEntitiesAndGroupByValue(document,
entitiesToMigrate.stream().map(MigrationEntity::getPrecursorEntity).toList());
entitiesToMigrate.stream()
.map(MigrationEntity::getPrecursorEntity)
.toList());
for (MigrationEntity migrationEntity : entitiesToMigrate) {
Optional<TextEntity> optionalTextEntity = entityFindingUtility.findClosestEntityAndReturnEmptyIfNotFound(migrationEntity.getPrecursorEntity(),
tempEntitiesByValue,
MATCH_THRESHOLD);
tempEntitiesByValue,
MATCH_THRESHOLD);
if (optionalTextEntity.isEmpty()) {
migrationEntity.setMigratedEntity(migrationEntity.getPrecursorEntity());
@ -224,11 +300,14 @@ public class RedactionLogToEntityLogMigrationService {
TextEntity entity = createCorrectEntity(migrationEntity.getPrecursorEntity(), document, optionalTextEntity.get().getTextRange());
migrationEntity.setMigratedEntity(entity);
migrationEntity.setOldId(migrationEntity.getPrecursorEntity().getId());
migrationEntity.setNewId(entity.getPositionsOnPagePerPage().get(0).getId()); // Can only be on one page, since redactionLogEntries can only be on one page
migrationEntity.setNewId(entity.getId()); // Can only be on one page, since redactionLogEntries can only be on one page
}
tempEntitiesByValue.values().stream().flatMap(Collection::stream).forEach(TextEntity::removeFromGraph);
tempEntitiesByValue.values()
.stream()
.flatMap(Collection::stream)
.forEach(TextEntity::removeFromGraph);
return entitiesToMigrate;
}
@ -239,8 +318,7 @@ public class RedactionLogToEntityLogMigrationService {
return false;
}
return migrationEntity.getRedactionLogEntry()
.getManualChanges()
return migrationEntity.getRedactionLogEntry().getManualChanges()
.stream()
.reduce((a, b) -> b)
.map(m -> m.getManualRedactionType().equals(ManualRedactionType.REMOVE_LOCALLY))

View File

@ -2,8 +2,8 @@ package com.iqser.red.service.redaction.v1.server.model;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
@ -20,10 +20,9 @@ import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlo
import com.iqser.red.service.redaction.v1.server.model.document.entity.EntityType;
import com.iqser.red.service.redaction.v1.server.model.document.entity.IEntity;
import com.iqser.red.service.redaction.v1.server.model.document.entity.ManualChangeOverwrite;
import com.iqser.red.service.redaction.v1.server.model.document.entity.PositionOnPage;
import com.iqser.red.service.redaction.v1.server.model.document.entity.TextEntity;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.Image;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.ImageType;
import com.iqser.red.service.redaction.v1.server.service.ManualChangeFactory;
import lombok.AllArgsConstructor;
import lombok.Data;
@ -43,11 +42,11 @@ public final class MigrationEntity {
public static MigrationEntity fromRedactionLogEntry(RedactionLogEntry redactionLogEntry, boolean hint) {
return new MigrationEntity(createManualEntity(redactionLogEntry, hint), redactionLogEntry);
return new MigrationEntity(createPrecursorEntity(redactionLogEntry, hint), redactionLogEntry);
}
public static PrecursorEntity createManualEntity(RedactionLogEntry redactionLogEntry, boolean hint) {
public static PrecursorEntity createPrecursorEntity(RedactionLogEntry redactionLogEntry, boolean hint) {
String ruleIdentifier = buildRuleIdentifier(redactionLogEntry);
List<RectangleWithPage> rectangleWithPages = redactionLogEntry.getPositions().stream().map(RectangleWithPage::fromRedactionLogRectangle).toList();
@ -57,7 +56,7 @@ public final class MigrationEntity {
.value(redactionLogEntry.getValue())
.entityPosition(rectangleWithPages)
.ruleIdentifier(ruleIdentifier)
.reason(redactionLogEntry.getReason())
.reason(Optional.ofNullable(redactionLogEntry.getReason()).orElse(""))
.legalBasis(redactionLogEntry.getLegalBasis())
.type(redactionLogEntry.getType())
.section(redactionLogEntry.getSection())
@ -169,9 +168,10 @@ public final class MigrationEntity {
} else {
throw new UnsupportedOperationException("Unknown subclass " + migratedEntity.getClass());
}
entityLogEntry.setChanges(redactionLogEntry.getChanges().stream().map(MigrationEntity::toEntityLogChanges).toList());
entityLogEntry.setManualChanges(migrateManualChanges(redactionLogEntry.getManualChanges()));
entityLogEntry.setManualChanges(ManualChangeFactory.toManualChangeList(migratedEntity.getManualOverwrite().getManualChangeLog(), redactionLogEntry.isHint()));
entityLogEntry.setColor(redactionLogEntry.getColor());
entityLogEntry.setChanges(redactionLogEntry.getChanges().stream().map(MigrationEntity::toEntityLogChanges).toList());
entityLogEntry.setReference(migrateSetOfIds(redactionLogEntry.getReference(), oldToNewIdMapping));
entityLogEntry.setImportedRedactionIntersections(migrateSetOfIds(redactionLogEntry.getImportedRedactionIntersections(), oldToNewIdMapping));
entityLogEntry.setEngines(getMigratedEngines(redactionLogEntry));
@ -197,14 +197,13 @@ public final class MigrationEntity {
.isPresent();
}
private List<com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.ManualChange> migrateManualChanges(List<ManualChange> manualChanges) {
if (manualChanges == null) {
return Collections.emptyList();
}
return manualChanges.stream()
.map(MigrationEntity::toEntityLogManualChanges)
.toList();
return manualChanges.stream().map(MigrationEntity::toEntityLogManualChanges).toList();
}
@ -238,16 +237,16 @@ public final class MigrationEntity {
public EntityLogEntry createEntityLogEntry(Image image) {
String imageType = image.getImageType().equals(ImageType.OTHER) ? "image" : image.getImageType().toString().toLowerCase(Locale.ENGLISH);
return EntityLogEntry.builder()
List<Position> positions = getPositionsFromOverride(image).orElse(List.of(new Position(image.getPosition(), image.getPage().getNumber())));
return EntityLogEntry.builder()
.id(image.getId())
.value(image.value())
.type(imageType)
.type(image.type())
.reason(image.buildReasonWithManualChangeDescriptions())
.legalBasis(image.legalBasis())
.matchedRule(image.getMatchedRule().getRuleIdentifier().toString())
.dictionaryEntry(false)
.positions(List.of(new Position(image.getPosition(), image.getPage().getNumber())))
.positions(positions)
.containingNodeId(image.getTreeId())
.closestHeadline(image.getHeadline().getTextBlock().getSearchText())
.section(redactionLogEntry.getSection())
@ -263,13 +262,12 @@ public final class MigrationEntity {
public EntityLogEntry createEntityLogEntry(PrecursorEntity precursorEntity) {
String type = precursorEntity.getManualOverwrite().getType().orElse(precursorEntity.getType());
return EntityLogEntry.builder()
.id(precursorEntity.getId())
.reason(precursorEntity.buildReasonWithManualChangeDescriptions())
.legalBasis(precursorEntity.legalBasis())
.value(precursorEntity.value())
.type(type)
.type(precursorEntity.type())
.state(buildEntryState(precursorEntity))
.entryType(buildEntryType(precursorEntity))
.section(redactionLogEntry.getSection())
@ -296,13 +294,9 @@ public final class MigrationEntity {
public EntityLogEntry createEntityLogEntry(TextEntity entity) {
assert entity.getPositionsOnPagePerPage().size() == 1;
PositionOnPage positionOnPage = entity.getPositionsOnPagePerPage().get(0);
List<Position> rectanglesPerLine = positionOnPage.getRectanglePerLine()
.stream()
.map(rectangle2D -> new Position(rectangle2D, positionOnPage.getPage().getNumber()))
.toList();
List<Position> rectanglesPerLine = getRectanglesPerLine(entity);
return EntityLogEntry.builder()
.id(positionOnPage.getId())
.id(entity.getId())
.positions(rectanglesPerLine)
.reason(entity.buildReasonWithManualChangeDescriptions())
.legalBasis(entity.legalBasis())
@ -325,6 +319,23 @@ public final class MigrationEntity {
}
private static List<Position> getRectanglesPerLine(TextEntity entity) {
return getPositionsFromOverride(entity).orElse(entity.getPositionsOnPagePerPage()
.get(0)
.getRectanglePerLine()
.stream()
.map(rectangle2D -> new Position(rectangle2D, entity.getPositionsOnPagePerPage().get(0).getPage().getNumber()))
.toList());
}
private static Optional<List<Position>> getPositionsFromOverride(IEntity entity) {
return entity.getManualOverwrite().getPositions().map(rects -> rects.stream().map(r -> new Position(r.rectangle2D(), r.pageNumber())).toList());
}
private EntryState buildEntryState(IEntity entity) {
if (entity.applied() && entity.active()) {

View File

@ -8,6 +8,7 @@ import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.springframework.stereotype.Service;
import org.springframework.web.bind.annotation.RequestBody;
@ -68,19 +69,15 @@ public class AnalyzeService {
ComponentDroolsExecutionService componentDroolsExecutionService;
KieContainerCreationService kieContainerCreationService;
DictionarySearchService dictionarySearchService;
RedactionLogCreatorService redactionLogCreatorService;
EntityLogCreatorService entityLogCreatorService;
ComponentLogCreatorService componentLogCreatorService;
RedactionStorageService redactionStorageService;
RedactionChangeLogService redactionChangeLogService;
LegalBasisClient legalBasisClient;
RedactionServiceSettings redactionServiceSettings;
NotFoundImportedEntitiesService notFoundImportedEntitiesService;
SectionFinderService sectionFinderService;
ManualRedactionEntryService manualRedactionEntryService;
ImportedRedactionEntryService importedRedactionEntryService;
ObservedStorageService observedStorageService;
FunctionTimerValues redactmanagerAnalyzePagewiseValues;
@ -90,7 +87,6 @@ public class AnalyzeService {
public AnalyzeResult reanalyze(@RequestBody AnalyzeRequest analyzeRequest) {
long startTime = System.currentTimeMillis();
RedactionLog previousRedactionLog = redactionStorageService.getRedactionLog(analyzeRequest.getDossierId(), analyzeRequest.getFileId());
EntityLog previousEntityLog = redactionStorageService.getEntityLog(analyzeRequest.getDossierId(), analyzeRequest.getFileId());
log.info("Loaded previous entity log for file {} in dossier {}", analyzeRequest.getFileId(), analyzeRequest.getDossierId());
@ -106,8 +102,9 @@ public class AnalyzeService {
}
DictionaryIncrement dictionaryIncrement = dictionaryService.getDictionaryIncrements(analyzeRequest.getDossierTemplateId(),
new DictionaryVersion(previousEntityLog.getDictionaryVersion(), previousEntityLog.getDossierDictionaryVersion()),
analyzeRequest.getDossierId());
new DictionaryVersion(previousEntityLog.getDictionaryVersion(),
previousEntityLog.getDossierDictionaryVersion()),
analyzeRequest.getDossierId());
Set<Integer> sectionsToReanalyseIds = getSectionsToReanalyseIds(analyzeRequest, previousEntityLog, document, dictionaryIncrement, importedRedactions);
List<SemanticNode> sectionsToReAnalyse = getSectionsToReAnalyse(document, sectionsToReanalyseIds);
@ -116,20 +113,19 @@ public class AnalyzeService {
if (sectionsToReAnalyse.isEmpty()) {
EntityLogChanges entityLogChanges = entityLogCreatorService.updateVersionsAndReturnChanges(previousEntityLog,
dictionaryIncrement.getDictionaryVersion(),
analyzeRequest,
false);
dictionaryIncrement.getDictionaryVersion(),
analyzeRequest,
false);
return finalizeAnalysis(analyzeRequest,
startTime,
kieContainerCreationService.getLatestKieContainer(analyzeRequest.getDossierTemplateId(), RuleFileType.COMPONENT),
entityLogChanges,
document,
previousRedactionLog,
document.getNumberOfPages(),
dictionaryIncrement.getDictionaryVersion(),
true,
Collections.emptySet());
startTime,
kieContainerCreationService.getLatestKieContainer(analyzeRequest.getDossierTemplateId(), RuleFileType.COMPONENT),
entityLogChanges,
document,
document.getNumberOfPages(),
dictionaryIncrement.getDictionaryVersion(),
true,
Collections.emptySet());
}
KieWrapper kieWrapperEntityRules = kieContainerCreationService.getLatestKieContainer(analyzeRequest.getDossierTemplateId(), RuleFileType.ENTITY);
@ -139,10 +135,12 @@ public class AnalyzeService {
log.info("Loaded Ner Entities for file {} in dossier {}", analyzeRequest.getFileId(), analyzeRequest.getDossierId());
var notFoundManualRedactionEntries = manualRedactionEntryService.addManualRedactionEntriesAndReturnNotFoundEntries(analyzeRequest,
document,
analyzeRequest.getDossierTemplateId());
document,
analyzeRequest.getDossierTemplateId());
var notFoundImportedEntries = importedRedactionEntryService.addImportedEntriesAndReturnNotFoundEntries(analyzeRequest, importedRedactions, document);
var notFoundManualOrImportedEntries = Stream.of(notFoundManualRedactionEntries, notFoundImportedEntries).flatMap(Collection::stream).collect(Collectors.toList());
var notFoundManualOrImportedEntries = Stream.of(notFoundManualRedactionEntries, notFoundImportedEntries)
.flatMap(Collection::stream)
.collect(Collectors.toList());
Dictionary dictionary = dictionaryService.getDeepCopyDictionary(analyzeRequest.getDossierTemplateId(), analyzeRequest.getDossierId());
log.info("Updated Dictionaries for file {} in dossier {}", analyzeRequest.getFileId(), analyzeRequest.getDossierId());
@ -152,34 +150,32 @@ public class AnalyzeService {
// we could add the imported redactions similar to the manual redactions here as well for additional processing
List<FileAttribute> allFileAttributes = entityDroolsExecutionService.executeRules(kieWrapperEntityRules.container(),
document,
sectionsToReAnalyse,
dictionary,
analyzeRequest.getFileAttributes(),
analyzeRequest.getManualRedactions(),
nerEntities);
document,
sectionsToReAnalyse,
dictionary,
analyzeRequest.getFileAttributes(),
analyzeRequest.getManualRedactions(),
nerEntities);
log.info("Finished entity rule execution for file {} in dossier {}", analyzeRequest.getFileId(), analyzeRequest.getDossierId());
RedactionLog redactionLog = updatePreviousRedactionLog(analyzeRequest, document, notFoundManualOrImportedEntries, previousRedactionLog, sectionsToReanalyseIds);
EntityLogChanges entityLogChanges = entityLogCreatorService.updatePreviousEntityLog(analyzeRequest,
document,
notFoundManualOrImportedEntries,
previousEntityLog,
sectionsToReanalyseIds,
dictionary.getVersion());
document,
notFoundManualOrImportedEntries,
previousEntityLog,
sectionsToReanalyseIds,
dictionary.getVersion());
notFoundImportedEntitiesService.processEntityLog(entityLogChanges.getEntityLog(), analyzeRequest, notFoundImportedEntries);
return finalizeAnalysis(analyzeRequest,
startTime,
kieContainerCreationService.getLatestKieContainer(analyzeRequest.getDossierTemplateId(), RuleFileType.COMPONENT),
entityLogChanges,
document,
redactionLog,
document.getNumberOfPages(),
dictionaryIncrement.getDictionaryVersion(),
true,
new HashSet<>(allFileAttributes));
startTime,
kieContainerCreationService.getLatestKieContainer(analyzeRequest.getDossierTemplateId(), RuleFileType.COMPONENT),
entityLogChanges,
document,
document.getNumberOfPages(),
dictionaryIncrement.getDictionaryVersion(),
true,
new HashSet<>(allFileAttributes));
}
@ -209,59 +205,42 @@ public class AnalyzeService {
log.info("Updated Dictionaries for file {} in dossier {}", analyzeRequest.getFileId(), analyzeRequest.getDossierId());
var notFoundManualRedactionEntries = manualRedactionEntryService.addManualRedactionEntriesAndReturnNotFoundEntries(analyzeRequest,
document,
analyzeRequest.getDossierTemplateId());
document,
analyzeRequest.getDossierTemplateId());
var notFoundImportedEntries = importedRedactionEntryService.addImportedEntriesAndReturnNotFoundEntries(analyzeRequest, importedRedactions, document);
var notFoundManualOrImportedEntries = Stream.of(notFoundManualRedactionEntries, notFoundImportedEntries).flatMap(Collection::stream).collect(Collectors.toList());
var notFoundManualOrImportedEntries = Stream.of(notFoundManualRedactionEntries, notFoundImportedEntries)
.flatMap(Collection::stream)
.collect(Collectors.toList());
dictionarySearchService.addDictionaryEntities(dictionary, document);
log.info("Finished Dictionary Search for file {} in dossier {}", analyzeRequest.getFileId(), analyzeRequest.getDossierId());
// we could add the imported redactions similar to the manual redactions here as well for additional processing
List<FileAttribute> allFileAttributes = entityDroolsExecutionService.executeRules(kieWrapperEntityRules.container(),
document,
dictionary,
analyzeRequest.getFileAttributes(),
analyzeRequest.getManualRedactions(),
nerEntities);
document,
dictionary,
analyzeRequest.getFileAttributes(),
analyzeRequest.getManualRedactions(),
nerEntities);
log.info("Finished entity rule execution for file {} in dossier {}", analyzeRequest.getFileId(), analyzeRequest.getDossierId());
RedactionLog redactionLog = createRedactionLog(analyzeRequest, document, notFoundManualOrImportedEntries, dictionary, kieWrapperEntityRules);
EntityLog entityLog = entityLogCreatorService.createInitialEntityLog(analyzeRequest,
document,
notFoundManualOrImportedEntries,
dictionary.getVersion(),
kieWrapperEntityRules.rulesVersion());
document,
notFoundManualOrImportedEntries,
dictionary.getVersion(),
kieWrapperEntityRules.rulesVersion());
notFoundImportedEntitiesService.processEntityLog(entityLog, analyzeRequest, notFoundImportedEntries);
return finalizeAnalysis(analyzeRequest,
startTime,
kieWrapperComponentRules,
new EntityLogChanges(entityLog, false),
document,
redactionLog,
document.getNumberOfPages(),
dictionary.getVersion(),
false,
new HashSet<>(allFileAttributes));
}
@Deprecated(forRemoval = true)
private RedactionLog updatePreviousRedactionLog(AnalyzeRequest analyzeRequest,
Document document,
List<PrecursorEntity> notFoundEntries,
RedactionLog previousRedactionLog,
Set<Integer> sectionsToReanalyseIds) {
List<RedactionLogEntry> newRedactionLogEntries = redactionLogCreatorService.createRedactionLog(document, analyzeRequest.getDossierTemplateId(), notFoundEntries);
previousRedactionLog.getRedactionLogEntry()
.removeIf(entry -> sectionsToReanalyseIds.contains(entry.getSectionNumber()) && !entry.getType().equals(NotFoundImportedEntitiesService.IMPORTED_REDACTION_TYPE));
return previousRedactionLog;
startTime,
kieWrapperComponentRules,
new EntityLogChanges(entityLog, false),
document,
document.getNumberOfPages(),
dictionary.getVersion(),
false,
new HashSet<>(allFileAttributes));
}
@ -270,14 +249,11 @@ public class AnalyzeService {
KieWrapper kieWrapperComponentRules,
EntityLogChanges entityLogChanges,
Document document,
RedactionLog redactionLog,
int numberOfPages,
DictionaryVersion dictionaryVersion,
boolean isReanalysis,
Set<FileAttribute> addedFileAttributes) {
finalizeRedactionLog(analyzeRequest, redactionLog, dictionaryVersion);
EntityLog entityLog = entityLogChanges.getEntityLog();
redactionStorageService.storeObject(analyzeRequest.getDossierId(), analyzeRequest.getFileId(), FileType.ENTITY_LOG, entityLogChanges.getEntityLog());
@ -324,9 +300,10 @@ public class AnalyzeService {
}
List<Component> components = componentDroolsExecutionService.executeRules(kieWrapperComponentRules.container(),
entityLogChanges.getEntityLog(),
document,
addedFileAttributes.stream().toList());
entityLogChanges.getEntityLog(),
document,
addedFileAttributes.stream()
.toList());
log.info("Finished component rule execution for file {} in dossier {}", analyzeRequest.getFileId(), analyzeRequest.getDossierId());
ComponentLog componentLog = componentLogCreatorService.buildComponentLog(analyzeRequest.getAnalysisNumber(), components, kieWrapperComponentRules.rulesVersion());
@ -336,29 +313,20 @@ public class AnalyzeService {
}
private RedactionLogChanges finalizeRedactionLog(AnalyzeRequest analyzeRequest, RedactionLog redactionLog, DictionaryVersion dictionaryVersion) {
// TODO: remove redactionLog related stuff
RedactionLog previousRedactionLog = redactionStorageService.getRedactionLog(analyzeRequest.getDossierId(), analyzeRequest.getFileId());
redactionLog.setDictionaryVersion(dictionaryVersion.getDossierTemplateVersion());
redactionLog.setDossierDictionaryVersion(dictionaryVersion.getDossierVersion());
RedactionLogChanges redactionLogChange = redactionChangeLogService.computeChanges(previousRedactionLog, redactionLog, analyzeRequest.getAnalysisNumber());
redactionStorageService.storeObject(analyzeRequest.getDossierId(), analyzeRequest.getFileId(), FileType.REDACTION_LOG, redactionLogChange.getRedactionLog());
return redactionLogChange;
}
private static List<SemanticNode> getSectionsToReAnalyse(Document document, Set<Integer> sectionsToReanalyseIds) {
return document.streamChildren().filter(section -> sectionsToReanalyseIds.contains(section.getTreeId().get(0))).collect(Collectors.toList());
return document.streamChildren()
.filter(section -> sectionsToReanalyseIds.contains(section.getTreeId()
.get(0)))
.collect(Collectors.toList());
}
private Set<Integer> getSectionsToReanalyseIds(AnalyzeRequest analyzeRequest, EntityLog entityLog, Document document, DictionaryIncrement dictionaryIncrement, ImportedRedactions importedRedactions) {
private Set<Integer> getSectionsToReanalyseIds(AnalyzeRequest analyzeRequest,
EntityLog entityLog,
Document document,
DictionaryIncrement dictionaryIncrement,
ImportedRedactions importedRedactions) {
return sectionFinderService.findSectionsToReanalyse(dictionaryIncrement, entityLog, document, analyzeRequest, importedRedactions);
}
@ -380,9 +348,10 @@ public class AnalyzeService {
private static NerEntitiesModel filterNerEntitiesModelBySectionIds(Set<Integer> sectionsToReanalyseIds, NerEntitiesModel nerEntitiesModel) {
return new NerEntitiesModel(nerEntitiesModel.getData().entrySet().stream() //
.filter(entry -> sectionsToReanalyseIds.contains(entry.getKey())) //
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
return new NerEntitiesModel(nerEntitiesModel.getData().entrySet()
.stream() //
.filter(entry -> sectionsToReanalyseIds.contains(entry.getKey())) //
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
}
@ -397,35 +366,4 @@ public class AnalyzeService {
return nerEntities;
}
@Deprecated(forRemoval = true)
private RedactionLog createRedactionLog(AnalyzeRequest analyzeRequest,
Document document,
List<PrecursorEntity> notFoundManualRedactionEntries,
Dictionary dictionary,
KieWrapper wrapper) {
List<RedactionLogEntry> redactionLogEntries = redactionLogCreatorService.createRedactionLog(document,
analyzeRequest.getDossierTemplateId(),
notFoundManualRedactionEntries);
List<LegalBasis> legalBasis = legalBasisClient.getLegalBasisMapping(analyzeRequest.getDossierTemplateId());
RedactionLog redactionLog = new RedactionLog(redactionServiceSettings.getAnalysisVersion(),
analyzeRequest.getAnalysisNumber(),
redactionLogEntries,
toRedactionLogLegalBasis(legalBasis),
dictionary.getVersion().getDossierTemplateVersion(),
dictionary.getVersion().getDossierVersion(),
wrapper.rulesVersion(),
legalBasisClient.getVersion(analyzeRequest.getDossierTemplateId()));
return redactionLog;
}
public List<RedactionLogLegalBasis> toRedactionLogLegalBasis(List<LegalBasis> legalBasis) {
return legalBasis.stream().map(l -> new RedactionLogLegalBasis(l.getName(), l.getDescription(), l.getReason())).collect(Collectors.toList());
}
}

View File

@ -27,8 +27,6 @@ import lombok.extern.slf4j.Slf4j;
@FieldDefaults(makeFinal=true, level= AccessLevel.PRIVATE)
public class EntityChangeLogService {
ManualChangeFactory manualChangeFactory;
@Timed("redactmanager_computeChanges")
public boolean computeChanges(List<EntityLogEntry> previousEntityLogEntries, List<EntityLogEntry> newEntityLogEntries, ManualRedactions manualRedactions, int analysisNumber) {
@ -88,7 +86,7 @@ public class EntityChangeLogService {
.filter(IdRemoval::isRemoveFromDictionary)//
.filter(removed -> removed.getAnnotationId().equals(entry.getId()))//
.findFirst()//
.ifPresent(idRemove -> entry.getManualChanges().add(manualChangeFactory.toManualChange(idRemove, false)));
.ifPresent(idRemove -> entry.getManualChanges().add(ManualChangeFactory.toManualChange(idRemove, false)));
}
private ChangeType calculateChangeType(EntryState state, EntryState previousState) {

View File

@ -44,7 +44,6 @@ import lombok.extern.slf4j.Slf4j;
public class EntityLogCreatorService {
DictionaryService dictionaryService;
ManualChangeFactory manualChangeFactory;
RedactionServiceSettings redactionServiceSettings;
LegalBasisClient legalBasisClient;
EntityChangeLogService entityChangeLogService;
@ -125,7 +124,10 @@ public class EntityLogCreatorService {
.toList();
previousEntityLog.getEntityLogEntry().removeAll(previousEntriesFromReAnalyzedSections);
boolean hasChanges = entityChangeLogService.computeChanges(previousEntriesFromReAnalyzedSections, newEntityLogEntries, analyzeRequest.getManualRedactions(), analyzeRequest.getAnalysisNumber());
boolean hasChanges = entityChangeLogService.computeChanges(previousEntriesFromReAnalyzedSections,
newEntityLogEntries,
analyzeRequest.getManualRedactions(),
analyzeRequest.getAnalysisNumber());
previousEntityLog.getEntityLogEntry().addAll(newEntityLogEntries);
return updateVersionsAndReturnChanges(previousEntityLog, dictionaryVersion, analyzeRequest, hasChanges);
@ -143,9 +145,7 @@ public class EntityLogCreatorService {
.filter(entity -> !entity.removed())
.forEach(entityNode -> entries.addAll(toEntityLogEntries(entityNode)));
document.streamAllImages().filter(entity -> !entity.removed()).forEach(imageNode -> entries.add(createEntityLogEntry(imageNode, dossierTemplateId)));
notFoundPrecursorEntries.stream()
.filter(entity -> !entity.removed())
.forEach(precursorEntity -> entries.add(createEntityLogEntry(precursorEntity, dossierTemplateId)));
notFoundPrecursorEntries.stream().filter(entity -> !entity.removed()).forEach(precursorEntity -> entries.add(createEntityLogEntry(precursorEntity, dossierTemplateId)));
return entries;
}
@ -191,7 +191,7 @@ public class EntityLogCreatorService {
.closestHeadline(image.getHeadline().getTextBlock().getSearchText())
.section(image.getManualOverwrite().getSection().orElse(image.getParent().toString()))
.imageHasTransparency(image.isTransparent())
.manualChanges(manualChangeFactory.toManualChangeList(image.getManualOverwrite().getManualChangeLog(), isHint))
.manualChanges(ManualChangeFactory.toManualChangeList(image.getManualOverwrite().getManualChangeLog(), isHint))
.state(buildEntryState(image))
.entryType(isHint ? EntryType.IMAGE_HINT : EntryType.IMAGE)
.build();
@ -232,7 +232,7 @@ public class EntityLogCreatorService {
//(was .imported(precursorEntity.getEngines() != null && precursorEntity.getEngines().contains(Engine.IMPORTED)))
.imported(false)
.reference(Collections.emptySet())
.manualChanges(manualChangeFactory.toManualChangeList(precursorEntity.getManualOverwrite().getManualChangeLog(), isHint))
.manualChanges(ManualChangeFactory.toManualChangeList(precursorEntity.getManualOverwrite().getManualChangeLog(), isHint))
.build();
}
@ -262,7 +262,7 @@ public class EntityLogCreatorService {
//(was .imported(entity.getEngines() != null && entity.getEngines().contains(Engine.IMPORTED)))
.imported(false)
.reference(referenceIds)
.manualChanges(manualChangeFactory.toManualChangeList(entity.getManualOverwrite().getManualChangeLog(), isHint))
.manualChanges(ManualChangeFactory.toManualChangeList(entity.getManualOverwrite().getManualChangeLog(), isHint))
.state(buildEntryState(entity))
.entryType(buildEntryType(entity))
.build();

View File

@ -15,7 +15,9 @@ import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.ManualChange;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.ManualRedactionType;
@Service
import lombok.experimental.UtilityClass;
@UtilityClass
public class ManualChangeFactory {
public List<ManualChange> toManualChangeList(List<BaseAnnotation> manualChanges, boolean isHint) {

View File

@ -78,6 +78,8 @@ public class ManualChangesApplicationService {
.map(ManualChangesApplicationService::toRectangle2D)
.collect(Collectors.toList()));
entityToBeResized.getManualOverwrite().addChange(manualResizeRedaction);
SemanticNode node = entityToBeResized.getDeepestFullyContainingNode();
PrecursorEntity searchEntity = PrecursorEntity.fromManualResizeRedaction(manualResizeRedaction);
// Loop through nodes starting from the deepest fully containing node all the way to the document node
@ -125,7 +127,6 @@ public class ManualChangesApplicationService {
entityToBeResized.setDuplicateTextRanges(new ArrayList<>(closestEntity.getDuplicateTextRanges()));
entityToBeResized.setValue(closestEntity.getValue());
entityToBeResized.setPages(newIntersectingPages);
entityToBeResized.getManualOverwrite().addChange(manualResizeRedaction);
}

View File

@ -1,125 +0,0 @@
package com.iqser.red.service.redaction.v1.server.service;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.Change;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.ChangeType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.ManualRedactionType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.RedactionLog;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.RedactionLogChanges;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.RedactionLogEntry;
import com.iqser.red.service.redaction.v1.server.storage.RedactionStorageService;
import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Deprecated(forRemoval = true)
@Slf4j
@Service
@RequiredArgsConstructor
public class RedactionChangeLogService {
private final RedactionStorageService redactionStorageService;
@Timed("redactmanager_computeChanges")
public RedactionLogChanges computeChanges(RedactionLog previousRedactionLog, RedactionLog currentRedactionLog, int analysisNumber) {
long start = System.currentTimeMillis();
if (previousRedactionLog == null) {
currentRedactionLog.getRedactionLogEntry().forEach(entry -> {
entry.getChanges().add(new Change(analysisNumber, ChangeType.ADDED, OffsetDateTime.now()));
});
return new RedactionLogChanges(currentRedactionLog, false);
}
List<RedactionLogEntry> previouslyExistingEntries = previousRedactionLog.getRedactionLogEntry().stream().filter(entry -> !entry.lastChangeIsRemoved()).toList();
Map<String, RedactionLogEntry> addedEntryIds = getEntriesThatExistInCurrentButNotInPreviousRedactionLog(currentRedactionLog, previouslyExistingEntries);
Set<String> removedIds = getEntryIdsThatExistInPreviousButNotInCurrentRedactionLog(currentRedactionLog, previouslyExistingEntries);
List<RedactionLogEntry> newRedactionLogEntries = previousRedactionLog.getRedactionLogEntry();
List<RedactionLogEntry> toRemove = new ArrayList<>();
newRedactionLogEntries.forEach(entry -> {
if (removedIds.contains(entry.getId()) && addedEntryIds.containsKey(entry.getId())) {
List<Change> changes = entry.getChanges();
changes.add(new Change(analysisNumber, ChangeType.CHANGED, OffsetDateTime.now()));
var newEntry = addedEntryIds.get(entry.getId());
newEntry.setChanges(changes);
addedEntryIds.put(entry.getId(), newEntry);
toRemove.add(entry);
} else if (removedIds.contains(entry.getId())) {
entry.getChanges().add(new Change(analysisNumber, ChangeType.REMOVED, OffsetDateTime.now()));
} else if (addedEntryIds.containsKey(entry.getId())) {
List<Change> changes = entry.getChanges();
changes.add(new Change(analysisNumber, ChangeType.ADDED, OffsetDateTime.now()));
var newEntry = addedEntryIds.get(entry.getId());
newEntry.setChanges(changes);
addedEntryIds.put(entry.getId(), newEntry);
toRemove.add(entry);
}
});
newRedactionLogEntries.removeAll(toRemove);
addedEntryIds.forEach((k, v) -> {
if (v.getChanges().isEmpty()) {
v.getChanges().add(new Change(analysisNumber, ChangeType.ADDED, OffsetDateTime.now()));
}
newRedactionLogEntries.add(v);
});
currentRedactionLog.setRedactionLogEntry(newRedactionLogEntries);
log.debug("Change computation took: {}", System.currentTimeMillis() - start);
return new RedactionLogChanges(currentRedactionLog, !addedEntryIds.isEmpty() || !removedIds.isEmpty());
}
private static Set<String> getEntryIdsThatExistInPreviousButNotInCurrentRedactionLog(RedactionLog currentRedactionLog, List<RedactionLogEntry> previouslyExistingEntries) {
Set<RedactionLogEntry> removed = new HashSet<>(previouslyExistingEntries);
currentRedactionLog.getRedactionLogEntry().forEach(removed::remove);
Set<String> removedIds = removed.stream().map(RedactionLogEntry::getId).collect(Collectors.toSet());
return removedIds;
}
private static Map<String, RedactionLogEntry> getEntriesThatExistInCurrentButNotInPreviousRedactionLog(RedactionLog currentRedactionLog,
List<RedactionLogEntry> previouslyExistingEntries) {
Set<RedactionLogEntry> currentExistingEntries = currentRedactionLog.getRedactionLogEntry()
.stream()
.filter(entry -> (entry.getChanges().isEmpty() || !entry.lastChangeIsRemoved()) && !isLastManualChangeRemove(entry))
.collect(Collectors.toSet());
previouslyExistingEntries.forEach(currentExistingEntries::remove);
Map<String, RedactionLogEntry> addedIds = new HashMap<>();
currentExistingEntries.forEach(entry -> {
addedIds.put(entry.getId(), entry);
});
return addedIds;
}
private static boolean isLastManualChangeRemove(RedactionLogEntry redactionLogEntry){
if(redactionLogEntry.getManualChanges() == null || redactionLogEntry.getManualChanges().isEmpty()){
return false;
}
return redactionLogEntry.getManualChanges().get(redactionLogEntry.getManualChanges().size() -1).getManualRedactionType() == ManualRedactionType.REMOVE_LOCALLY;
}
}

View File

@ -1,260 +0,0 @@
package com.iqser.red.service.redaction.v1.server.service;
import java.awt.geom.Rectangle2D;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.AnnotationStatus;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.Engine;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.ManualChange;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.ManualRedactionType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.Point;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.Rectangle;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.RedactionLogEntry;
import com.iqser.red.service.redaction.v1.server.model.PrecursorEntity;
import com.iqser.red.service.redaction.v1.server.model.document.entity.EntityType;
import com.iqser.red.service.redaction.v1.server.model.document.entity.IEntity;
import com.iqser.red.service.redaction.v1.server.model.document.entity.ManualChangeOverwrite;
import com.iqser.red.service.redaction.v1.server.model.document.entity.PositionOnPage;
import com.iqser.red.service.redaction.v1.server.model.document.entity.TextEntity;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.Document;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.Image;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.ImageType;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Deprecated(forRemoval = true)
@Service
@Slf4j
@RequiredArgsConstructor
public class RedactionLogCreatorService {
private final DictionaryService dictionaryService;
private final ManualChangeFactory manualChangeFactory;
public List<RedactionLogEntry> createRedactionLog(Document document, String dossierTemplateId, List<PrecursorEntity> notFoundManualRedactionEntries) {
List<RedactionLogEntry> entries = new ArrayList<>();
Set<String> processIds = new HashSet<>();
document.getEntities()
.stream()
.filter(entity -> !entity.getValue().isEmpty())
.filter(RedactionLogCreatorService::notFalsePositiveOrFalseRecommendation)
.filter(IEntity::active)
.forEach(entityNode -> entries.addAll(toRedactionLogEntries(entityNode, dossierTemplateId, processIds)));
document.streamAllImages().filter(image -> !image.removed()).forEach(imageNode -> entries.add(createRedactionLogEntry(imageNode, dossierTemplateId)));
notFoundManualRedactionEntries.forEach(entityIdentifier -> entries.add(createRedactionLogEntry(entityIdentifier, dossierTemplateId)));
return entries;
}
private static boolean notFalsePositiveOrFalseRecommendation(TextEntity textEntity) {
return !(textEntity.getEntityType() == EntityType.FALSE_POSITIVE || textEntity.getEntityType() == EntityType.FALSE_RECOMMENDATION);
}
private List<RedactionLogEntry> toRedactionLogEntries(TextEntity textEntity, String dossierTemplateId, Set<String> processedIds) {
List<RedactionLogEntry> redactionLogEntities = new ArrayList<>();
for (PositionOnPage positionOnPage : textEntity.getPositionsOnPagePerPage()) {
// Duplicates should be removed. They might exist due to table extraction duplicating cells spanning multiple columns/rows.
if (processedIds.contains(positionOnPage.getId())) {
continue;
}
processedIds.add(positionOnPage.getId());
RedactionLogEntry redactionLogEntry = createRedactionLogEntry(textEntity, dossierTemplateId);
redactionLogEntry.setId(positionOnPage.getId());
List<Rectangle> rectanglesPerLine = positionOnPage.getRectanglePerLine()
.stream()
.map(rectangle2D -> toRedactionLogRectangle(rectangle2D, positionOnPage.getPage().getNumber()))
.toList();
redactionLogEntry.setPositions(rectanglesPerLine);
redactionLogEntities.add(redactionLogEntry);
}
return redactionLogEntities;
}
private RedactionLogEntry createRedactionLogEntry(TextEntity entity, String dossierTemplateId) {
Set<String> referenceIds = new HashSet<>();
entity.references().stream().filter(TextEntity::active).forEach(ref -> ref.getPositionsOnPagePerPage().forEach(pos -> referenceIds.add(pos.getId())));
int sectionNumber = entity.getDeepestFullyContainingNode().getTreeId().isEmpty() ? 0 : entity.getDeepestFullyContainingNode().getTreeId().get(0);
boolean isHint = isHint(entity.getEntityType());
return RedactionLogEntry.builder()
.color(getColor(entity.type(), dossierTemplateId, entity.applied(), isHint))
.reason(entity.buildReasonWithManualChangeDescriptions())
.legalBasis(entity.legalBasis())
.value(entity.getManualOverwrite().getValue().orElse(entity.getMatchedRule().isWriteValueWithLineBreaks() ? entity.getValueWithLineBreaks() : entity.getValue()))
.type(entity.type())
.redacted(entity.applied())
.isHint(isHint)
.isRecommendation(entity.getEntityType().equals(EntityType.RECOMMENDATION))
.isFalsePositive(entity.getEntityType().equals(EntityType.FALSE_POSITIVE) || entity.getEntityType().equals(EntityType.FALSE_RECOMMENDATION))
.section(entity.getManualOverwrite().getSection().orElse(entity.getDeepestFullyContainingNode().toString()))
.sectionNumber(sectionNumber)
.matchedRule(entity.getMatchedRule().getRuleIdentifier().toString())
.isDictionaryEntry(entity.isDictionaryEntry())
.textAfter(entity.getTextAfter())
.textBefore(entity.getTextBefore())
.startOffset(entity.getTextRange().start())
.endOffset(entity.getTextRange().end())
.isDossierDictionaryEntry(entity.isDossierDictionaryEntry())
.engines(getEngines(entity))
.reference(referenceIds)
.manualChanges(mapManualChanges(entity.getManualOverwrite(), isHint))
.build();
}
private Set<Engine> getEngines(TextEntity entity) {
return entity.getEngines() != null ? mapToEngines(entity.getEngines()) : Collections.emptySet();
}
private Set<Engine> mapToEngines(Set<com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.Engine> engines) {
return engines.stream().map(engine -> switch (engine) {
case NER -> Engine.NER;
case RULE -> Engine.RULE;
case DICTIONARY -> Engine.DICTIONARY;
default -> null;
}).filter(Objects::nonNull).collect(Collectors.toSet());
}
private boolean isHint(EntityType entityType) {
return entityType.equals(EntityType.HINT);
}
private List<ManualChange> mapManualChanges(ManualChangeOverwrite manualEntity, boolean isHint) {
return manualChangeFactory.toManualChangeList(manualEntity.getManualChangeLog(), isHint).stream().map(this::mapManualChange).toList();
}
private ManualChange mapManualChange(com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.ManualChange manualChange) {
ManualChange manualChange1 = new ManualChange();
BeanUtils.copyProperties(manualChange, manualChange1);
manualChange1.setManualRedactionType(switch (manualChange.getManualRedactionType()) {
case ADD_LOCALLY -> ManualRedactionType.ADD_LOCALLY;
case ADD_TO_DICTIONARY -> ManualRedactionType.ADD_TO_DICTIONARY;
case REMOVE_LOCALLY -> ManualRedactionType.REMOVE_LOCALLY;
case REMOVE_FROM_DICTIONARY -> ManualRedactionType.REMOVE_FROM_DICTIONARY;
case FORCE_REDACT -> ManualRedactionType.FORCE_REDACT;
case FORCE_HINT -> ManualRedactionType.FORCE_HINT;
case RECATEGORIZE -> ManualRedactionType.RECATEGORIZE;
case LEGAL_BASIS_CHANGE -> ManualRedactionType.LEGAL_BASIS_CHANGE;
case RESIZE -> ManualRedactionType.RESIZE;
case RESIZE_IN_DICTIONARY -> ManualRedactionType.RESIZE;
});
manualChange1.setProcessedDate(manualChange.getProcessedDate());
manualChange1.setRequestedDate(manualChange.getRequestedDate());
manualChange1.setPropertyChanges(manualChange.getPropertyChanges());
manualChange1.setAnnotationStatus(AnnotationStatus.APPROVED);
return manualChange1;
}
private float[] getColor(String type, String dossierTemplateId, boolean isRedaction, boolean isHint) {
if (!isRedaction && isHint) {
return dictionaryService.getNotRedactedColor(dossierTemplateId);
}
return dictionaryService.getColor(type, dossierTemplateId);
}
public RedactionLogEntry createRedactionLogEntry(Image image, String dossierTemplateId) {
String imageType = image.getImageType().equals(ImageType.OTHER) ? "image" : image.getImageType().toString().toLowerCase(Locale.ENGLISH);
boolean isHint = dictionaryService.isHint(imageType, dossierTemplateId);
return RedactionLogEntry.builder()
.id(image.getId())
.color(getColor(imageType, dossierTemplateId, image.applied(), isHint))
.isImage(true)
.value(image.value())
.type(imageType)
.redacted(image.applied())
.reason(image.buildReasonWithManualChangeDescriptions())
.legalBasis(image.legalBasis())
.matchedRule(image.getMatchedRule().getRuleIdentifier().toString())
.isHint(isHint)
.isDictionaryEntry(false)
.isRecommendation(false)
.positions(List.of(toRedactionLogRectangle(image.getPosition(), image.getPage().getNumber())))
.sectionNumber(image.getTreeId().get(0))
.section(image.getManualOverwrite().getSection().orElse(image.getParent().toString()))
.imageHasTransparency(image.isTransparent())
.manualChanges(mapManualChanges(image.getManualOverwrite(), isHint))
.build();
}
private Rectangle toRedactionLogRectangle(Rectangle2D rectangle2D, int pageNumber) {
return new Rectangle(new Point((float) rectangle2D.getMinX(), (float) rectangle2D.getMinY()), (float) rectangle2D.getWidth(), (float) rectangle2D.getHeight(), pageNumber);
}
public RedactionLogEntry createRedactionLogEntry(PrecursorEntity precursorEntity, String dossierTemplateId) {
String type = precursorEntity.getManualOverwrite().getType().orElse(precursorEntity.getType());
boolean isHint = isHint(precursorEntity.getEntityType());
return RedactionLogEntry.builder()
.id(precursorEntity.getId())
.color(getColor(type, dossierTemplateId, precursorEntity.applied(), isHint))
.reason(precursorEntity.buildReasonWithManualChangeDescriptions())
.legalBasis(precursorEntity.legalBasis())
.value(precursorEntity.value())
.type(type)
.redacted(precursorEntity.applied())
.isHint(isHint)
.isRecommendation(precursorEntity.getEntityType().equals(EntityType.RECOMMENDATION))
.isFalsePositive(precursorEntity.getEntityType().equals(EntityType.FALSE_POSITIVE) || precursorEntity.getEntityType().equals(EntityType.FALSE_RECOMMENDATION))
.section(precursorEntity.getManualOverwrite().getSection().orElse(precursorEntity.getSection()))
.sectionNumber(0)
.matchedRule(precursorEntity.getMatchedRule().getRuleIdentifier().toString())
.rectangle(precursorEntity.isRectangle())
.isDictionaryEntry(precursorEntity.isDictionaryEntry())
.isDossierDictionaryEntry(precursorEntity.isDossierDictionaryEntry())
.textAfter("")
.textBefore("")
.startOffset(-1)
.endOffset(-1)
.positions(precursorEntity.getManualOverwrite()
.getPositions()
.orElse(precursorEntity.getEntityPosition())
.stream()
.map(entityPosition -> toRedactionLogRectangle(entityPosition.rectangle2D(), entityPosition.pageNumber()))
.toList())
.engines(Collections.emptySet())
.reference(Collections.emptySet())
.manualChanges(mapManualChanges(precursorEntity.getManualOverwrite(), isHint))
.build();
}
}

View File

@ -4,9 +4,11 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.when;
import java.awt.geom.Rectangle2D;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.LinkedList;
@ -46,6 +48,8 @@ import com.iqser.red.service.redaction.v1.server.model.MigratedEntityLog;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.Document;
import com.iqser.red.service.redaction.v1.server.redaction.utils.OsUtils;
import com.iqser.red.service.redaction.v1.server.service.DictionaryService;
import com.iqser.red.service.redaction.v1.server.service.document.EntityFindingUtility;
import com.iqser.red.service.redaction.v1.server.utils.RectangleTransformations;
import com.knecon.fforesight.tenantcommons.TenantContext;
import lombok.SneakyThrows;
@ -83,15 +87,15 @@ public class MigrationIntegrationTest extends BuildDocumentIntegrationTest {
when(dictionaryClient.getVersion(TEST_DOSSIER_TEMPLATE_ID)).thenReturn(0L);
when(dictionaryClient.getAllTypesForDossier(TEST_DOSSIER_ID, true)).thenReturn(List.of(Type.builder()
.id(DOSSIER_REDACTIONS_INDICATOR + ":" + TEST_DOSSIER_TEMPLATE_ID)
.type(DOSSIER_REDACTIONS_INDICATOR)
.dossierTemplateId(TEST_DOSSIER_ID)
.hexColor("#ffe187")
.isHint(hintTypeMap.get(DOSSIER_REDACTIONS_INDICATOR))
.isCaseInsensitive(caseInSensitiveMap.get(DOSSIER_REDACTIONS_INDICATOR))
.isRecommendation(recommendationTypeMap.get(DOSSIER_REDACTIONS_INDICATOR))
.rank(rankTypeMap.get(DOSSIER_REDACTIONS_INDICATOR))
.build()));
.id(DOSSIER_REDACTIONS_INDICATOR + ":" + TEST_DOSSIER_TEMPLATE_ID)
.type(DOSSIER_REDACTIONS_INDICATOR)
.dossierTemplateId(TEST_DOSSIER_ID)
.hexColor("#ffe187")
.isHint(hintTypeMap.get(DOSSIER_REDACTIONS_INDICATOR))
.isCaseInsensitive(caseInSensitiveMap.get(DOSSIER_REDACTIONS_INDICATOR))
.isRecommendation(recommendationTypeMap.get(DOSSIER_REDACTIONS_INDICATOR))
.rank(rankTypeMap.get(DOSSIER_REDACTIONS_INDICATOR))
.build()));
mockDictionaryCalls(null);
@ -120,12 +124,13 @@ public class MigrationIntegrationTest extends BuildDocumentIntegrationTest {
@SneakyThrows
public void testMigration() {
String filesPrefix = "files/migration/178ee8cd99fe786e03fad50d51a69ad3";
String filesPrefix = "files/migration/test";
String fileName = filesPrefix + ".ORIGIN.pdf";
String imageFileName = filesPrefix + ".IMAGE_INFO.json";
String tableFileName = filesPrefix + ".TABLES.json";
String manualChangesFileName = filesPrefix + ".MANUAL_CHANGES.json";
String migratedIdsFileName = filesPrefix + ".MIGRATED_IDS.json";
Document document = buildGraph(fileName, imageFileName, tableFileName);
RedactionLog redactionLog;
@ -137,8 +142,8 @@ public class MigrationIntegrationTest extends BuildDocumentIntegrationTest {
redactionLog = mapper.readValue(in, RedactionLog.class);
}
var manualChangesResource = new ClassPathResource(manualChangesFileName);
ManualRedactions manualRedactions;
if (manualChangesResource.exists()) {
ManualRedactions manualRedactions;
try (var in = manualChangesResource.getInputStream()) {
manualRedactions = mapper.readValue(in, ManualRedactions.class);
if (manualRedactions.getEntriesToAdd() == null) {
@ -160,12 +165,15 @@ public class MigrationIntegrationTest extends BuildDocumentIntegrationTest {
manualRedactions.setResizeRedactions(Collections.emptySet());
}
}
mergedRedactionLog = legacyRedactionLogMergeService.mergeManualChanges(redactionLog, manualRedactions, TEST_DOSSIER_TEMPLATE_ID);
MigratedIds migratedIds = getMigratedIds(migratedIdsFileName);
revertMigration(manualRedactions, migratedIds);
mergedRedactionLog = legacyRedactionLogMergeService.addManualAddEntriesAndRemoveSkippedImported(redactionLog, manualRedactions, TEST_DOSSIER_TEMPLATE_ID);
} else {
manualRedactions = new ManualRedactions();
mergedRedactionLog = redactionLog;
}
MigratedEntityLog migratedEntityLog = redactionLogToEntityLogMigrationService.migrate(mergedRedactionLog, document, TEST_DOSSIER_TEMPLATE_ID);
MigratedEntityLog migratedEntityLog = redactionLogToEntityLogMigrationService.migrate(mergedRedactionLog, document, TEST_DOSSIER_TEMPLATE_ID, manualRedactions);
redactionStorageService.storeObject(TEST_DOSSIER_ID, TEST_FILE_ID, FileType.ENTITY_LOG, migratedEntityLog.getEntityLog());
assertEquals(mergedRedactionLog.getRedactionLogEntry().size(), migratedEntityLog.getEntityLog().getEntityLogEntry().size());
@ -181,7 +189,8 @@ public class MigrationIntegrationTest extends BuildDocumentIntegrationTest {
Map<String, String> migratedIds = migratedEntityLog.getMigratedIds().buildOldToNewMapping();
migratedIds.forEach((oldId, newId) -> assertEntryIsEqual(oldId, newId, mergedRedactionLog, entityLog, migratedIds));
AnnotateResponse annotateResponse = annotationService.annotate(AnnotateRequest.builder().dossierId(TEST_DOSSIER_ID).fileId(TEST_FILE_ID).build());
AnnotateResponse annotateResponse = annotationService.annotate(AnnotateRequest.builder().dossierId(TEST_DOSSIER_ID).fileId(TEST_FILE_ID)
.build());
File outputFile = Path.of(OsUtils.getTemporaryDirectory()).resolve(Path.of(fileName.replaceAll(".pdf", "_MIGRATED.pdf")).getFileName()).toFile();
try (FileOutputStream fileOutputStream = new FileOutputStream(outputFile)) {
@ -190,6 +199,53 @@ public class MigrationIntegrationTest extends BuildDocumentIntegrationTest {
}
private void revertMigration(ManualRedactions manualRedactions, MigratedIds migratedIds) {
var mapping = migratedIds.buildNewToOldMapping();
manualRedactions.getEntriesToAdd()
.stream()
.filter(e -> mapping.containsKey(e.getAnnotationId()))
.forEach(e -> e.setAnnotationId(mapping.get(e.getAnnotationId())));
manualRedactions.getRecategorizations()
.stream()
.filter(e -> mapping.containsKey(e.getAnnotationId()))
.forEach(e -> e.setAnnotationId(mapping.get(e.getAnnotationId())));
manualRedactions.getResizeRedactions()
.stream()
.filter(e -> mapping.containsKey(e.getAnnotationId()))
.forEach(e -> e.setAnnotationId(mapping.get(e.getAnnotationId())));
manualRedactions.getIdsToRemove()
.stream()
.filter(e -> mapping.containsKey(e.getAnnotationId()))
.forEach(e -> e.setAnnotationId(mapping.get(e.getAnnotationId())));
manualRedactions.getLegalBasisChanges()
.stream()
.filter(e -> mapping.containsKey(e.getAnnotationId()))
.forEach(e -> e.setAnnotationId(mapping.get(e.getAnnotationId())));
manualRedactions.getForceRedactions()
.stream()
.filter(e -> mapping.containsKey(e.getAnnotationId()))
.forEach(e -> e.setAnnotationId(mapping.get(e.getAnnotationId())));
}
private MigratedIds getMigratedIds(String migratedIdsFileName) throws IOException {
var migratedIdsResource = new ClassPathResource(migratedIdsFileName);
if (!migratedIdsResource.exists()) {
return new MigratedIds();
}
try (var in = migratedIdsResource.getInputStream()) {
return mapper.readValue(in, MigratedIds.class);
}
}
private static boolean hasManualChanges(RedactionLogEntry entry) {
return !entry.getManualChanges().isEmpty() || (entry.getComments() != null && !entry.getComments().isEmpty());
@ -198,54 +254,63 @@ public class MigrationIntegrationTest extends BuildDocumentIntegrationTest {
private void assertEntryIsEqual(String oldId, String newId, RedactionLog redactionLog, EntityLog entityLog, Map<String, String> oldToNewMapping) {
RedactionLogEntry redactionLogEntry = redactionLog.getRedactionLogEntry().stream().filter(entry -> entry.getId().equals(oldId)).findAny().orElseThrow();
EntityLogEntry entityLogEntry = entityLog.getEntityLogEntry().stream().filter(entry -> entry.getId().equals(newId)).findAny().orElseThrow();
RedactionLogEntry redactionLogEntry = redactionLog.getRedactionLogEntry()
.stream()
.filter(entry -> entry.getId().equals(oldId))
.findAny()
.orElseThrow();
EntityLogEntry entityLogEntry = entityLog.getEntityLogEntry()
.stream()
.filter(entry -> entry.getId().equals(newId))
.findAny()
.orElseThrow();
if (!redactionLogEntry.isImage()) {
assertEquals(redactionLogEntry.getValue().toLowerCase(Locale.ENGLISH), entityLogEntry.getValue().toLowerCase(Locale.ENGLISH));
}
assertEquals(redactionLogEntry.getChanges().size(), entityLogEntry.getChanges().size());
assertEquals(redactionLogEntry.getManualChanges().size(), entityLogEntry.getManualChanges().size());
assertTrue(redactionLogEntry.getManualChanges().size() <= entityLogEntry.getManualChanges().size());
assertEquals(redactionLogEntry.getPositions().size(), entityLogEntry.getPositions().size());
// assertTrue(positionsAlmostEqual(redactionLogEntry.getPositions(), entityLogEntry.getPositions()));
assertEquals(redactionLogEntry.getColor(), entityLogEntry.getColor());
assertTrue(positionsAlmostEqual(redactionLogEntry.getPositions(), entityLogEntry.getPositions()));
// assertEquals(redactionLogEntry.getColor(), entityLogEntry.getColor());
assertEqualsNullSafe(redactionLogEntry.getLegalBasis(), entityLogEntry.getLegalBasis());
assertEqualsNullSafe(redactionLogEntry.getReason(), entityLogEntry.getReason());
// assertEqualsNullSafe(redactionLogEntry.getReason(), entityLogEntry.getReason());
assertReferencesEqual(redactionLogEntry.getReference(), entityLogEntry.getReference(), oldToNewMapping);
assertEquals(redactionLogEntry.isDictionaryEntry(), entityLogEntry.isDictionaryEntry());
assertEquals(redactionLogEntry.isDossierDictionaryEntry(), entityLogEntry.isDossierDictionaryEntry());
if (redactionLogEntry.getEngines() == null) {
assertTrue(entityLogEntry.getEngines().isEmpty());
} else {
assertEquals(redactionLogEntry.getEngines().stream().map(Enum::name).collect(Collectors.toSet()),
entityLogEntry.getEngines().stream().map(Enum::name).collect(Collectors.toSet()));
assertEquals(redactionLogEntry.getEngines()
.stream()
.map(Enum::name)
.collect(Collectors.toSet()),
entityLogEntry.getEngines()
.stream()
.map(Enum::name)
.collect(Collectors.toSet()));
}
}
private boolean positionsAlmostEqual(List<Rectangle> positions1, List<Position> positions2) {
double tolerance = 3;
double tolerance = 10;
double averageDistance = 0;
for (int i = 0; i < positions1.size(); i++) {
Rectangle p1 = positions1.get(0);
Position p2 = positions2.get(0);
if (p1.getPage() != p2.getPageNumber()) {
return false;
}
if (Math.abs(p1.getHeight() - p2.h()) > tolerance) {
return false;
}
if (Math.abs(p1.getWidth() - p2.w()) > tolerance) {
return false;
}
if (Math.abs(p1.getTopLeft().getX() - p2.x()) > tolerance) {
return false;
}
if (Math.abs(p1.getTopLeft().getY() - p2.y()) > tolerance) {
Rectangle r1 = positions1.get(i);
Rectangle2D p1 = new Rectangle2D.Double(r1.getTopLeft().getX(), r1.getTopLeft().getY(), r1.getWidth(), r1.getHeight());
Position p2 = positions2.get(i);
double distance = EntityFindingUtility.calculateDistance(p1, p2.toRectangle2D());
if (r1.getPage() != p2.getPageNumber()) {
return false;
}
averageDistance += distance;
}
return true;
averageDistance /= positions1.size();
return averageDistance <= tolerance;
}
@ -271,7 +336,9 @@ public class MigrationIntegrationTest extends BuildDocumentIntegrationTest {
return;
}
assertEquals(reference.stream().map(oldToNewMapping::get).collect(Collectors.toSet()), reference1);
assertEquals(reference.stream()
.map(oldToNewMapping::get)
.collect(Collectors.toSet()), reference1);
}

View File

@ -63,8 +63,8 @@ public class AnnotationService {
public AnnotateResponse annotate(AnnotateRequest annotateRequest) {
var storedObjectFile = redactionStorageService.getStoredObjectFile(RedactionStorageService.StorageIdUtils.getStorageId(annotateRequest.getDossierId(),
annotateRequest.getFileId(),
FileType.VIEWER_DOCUMENT));
annotateRequest.getFileId(),
FileType.VIEWER_DOCUMENT));
var entityLog = redactionStorageService.getEntityLog(annotateRequest.getDossierId(), annotateRequest.getFileId());
@ -79,7 +79,8 @@ public class AnnotationService {
try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {
pdDocument.save(byteArrayOutputStream);
return AnnotateResponse.builder().document(byteArrayOutputStream.toByteArray()).build();
return AnnotateResponse.builder().document(byteArrayOutputStream.toByteArray())
.build();
}
}
}
@ -137,7 +138,10 @@ public class AnnotationService {
List<PDAnnotation> annotations = new ArrayList<>();
List<Position> rectangles = redactionLogEntry.getPositions().stream().filter(pos -> pos.getPageNumber() == page).collect(Collectors.toList());
List<Position> rectangles = redactionLogEntry.getPositions()
.stream()
.filter(pos -> pos.getPageNumber() == page)
.collect(Collectors.toList());
if (rectangles.isEmpty()) {
return annotations;
@ -154,22 +158,25 @@ public class AnnotationService {
float[] color;
if (redactionLogEntry.getEntryType().equals(EntryType.RECOMMENDATION)) {
color = new float[]{0, 0.8f, 0};
} else if ((redactionLogEntry.getEntryType().equals(EntryType.ENTITY) || redactionLogEntry.getEntryType().equals(EntryType.IMAGE)) &&//
redactionLogEntry.getState().equals(EntryState.APPLIED)) {
} else if ((redactionLogEntry.getEntryType().equals(EntryType.ENTITY) || redactionLogEntry.getEntryType().equals(EntryType.IMAGE)) //
&& redactionLogEntry.getState().equals(EntryState.APPLIED)) {
color = new float[]{0.5764706f, 0.59607846f, 0.627451f};
} else if ((redactionLogEntry.getEntryType().equals(EntryType.ENTITY) || redactionLogEntry.getEntryType().equals(EntryType.IMAGE)) &&//
redactionLogEntry.getState().equals(EntryState.SKIPPED)) {
} else if ((redactionLogEntry.getEntryType().equals(EntryType.ENTITY) || redactionLogEntry.getEntryType().equals(EntryType.IMAGE)) //
&& (redactionLogEntry.getState().equals(EntryState.SKIPPED) || redactionLogEntry.getState().equals(EntryState.IGNORED))) {
color = new float[]{0.76862746f, 0.59607846f, 0.98039216f};
} else if (redactionLogEntry.getEntryType().equals(EntryType.HINT) && redactionLogEntry.getState().equals(EntryState.SKIPPED) && redactionLogEntry.getType().equals("published_information")) {
} else if (redactionLogEntry.getEntryType().equals(EntryType.HINT) && redactionLogEntry.getState().equals(EntryState.SKIPPED) && redactionLogEntry.getType()
.equals("published_information")) {
color = new float[]{0.52156866f, 0.92156863f, 1.0f};
} else if (redactionLogEntry.getEntryType().equals(EntryType.HINT) && redactionLogEntry.getState().equals(EntryState.SKIPPED)) {
color = new float[]{0.98039216f, 0.59607846f, 0.96862745f};
} else if (redactionLogEntry.getEntryType().equals(EntryType.HINT) && redactionLogEntry.getState().equals(EntryState.IGNORED)) {
color = new float[]{0.76862746f, 0.59607846f, 0.98039216f};
} else if (redactionLogEntry.getState().equals(EntryState.APPLIED)){
color = Optional.ofNullable(redactionLogEntry.getColor()).orElse(new float[]{0.5764706f, 0.59607846f, 0.627451f});
} else if (redactionLogEntry.getState().equals(EntryState.APPLIED)) {
color = Optional.ofNullable(redactionLogEntry.getColor())
.orElse(new float[]{0.5764706f, 0.59607846f, 0.627451f});
} else {
color = Optional.ofNullable(redactionLogEntry.getColor()).orElse(new float[]{0.76862746f, 0.59607846f, 0.98039216f});
color = Optional.ofNullable(redactionLogEntry.getColor())
.orElse(new float[]{0.76862746f, 0.59607846f, 0.98039216f});
}
annotation.setColor(new PDColor(color, PDDeviceRGB.INSTANCE));
annotation.setNoRotate(false);
@ -194,7 +201,10 @@ public class AnnotationService {
public static List<Double> toQuadPoints(List<Position> rectangles) {
return rectangles.stream().map(Position::toRectangle2D).flatMap(AnnotationService::toQuadPoints).toList();
return rectangles.stream()
.map(Position::toRectangle2D)
.flatMap(AnnotationService::toQuadPoints)
.toList();
}
@ -218,12 +228,20 @@ public class AnnotationService {
private String createAnnotationContent(EntityLogEntry redactionLogEntry) {
return redactionLogEntry.getType() +//
" \nRule " + redactionLogEntry.getMatchedRule() +//
" matched\n\n" + redactionLogEntry.getReason() +//
"\n\nLegal basis:" + redactionLogEntry.getLegalBasis() +//
"\n\nIn section: \"" + redactionLogEntry.getSection() + "\"" +//
"\n\nChanges: " + redactionLogEntry.getChanges().stream().map(Change::getType).map(Enum::name).collect(Collectors.joining("\n")) +//
"\n\nManualChanges: " + redactionLogEntry.getManualChanges().stream().map(ManualChange::getManualRedactionType).map(Enum::name).collect(Collectors.joining("\n"));
" \nRule " + redactionLogEntry.getMatchedRule() +//
" matched\n\n" + redactionLogEntry.getReason() +//
"\n\nLegal basis:" + redactionLogEntry.getLegalBasis() +//
"\n\nIn section: \"" + redactionLogEntry.getSection() + "\"" +//
"\n\nChanges: " + redactionLogEntry.getChanges()
.stream()
.map(Change::getType)
.map(Enum::name)
.collect(Collectors.joining("\n")) +//
"\n\nManualChanges: " + redactionLogEntry.getManualChanges()
.stream()
.map(ManualChange::getManualRedactionType)
.map(Enum::name)
.collect(Collectors.joining("\n"));
}
}

View File

@ -1,491 +0,0 @@
{
"idsToRemove": [
{
"annotationId": "6808af23a9652917b73c1939b481f3e4",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:11:06.468874Z",
"processedDate": "2024-01-26T10:11:06.555Z",
"softDeletedTime": null,
"removeFromDictionary": false,
"approved": true
},
{
"annotationId": "5316fff0ec9ae3f2773378f3cc833079",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:09:16.092673Z",
"processedDate": "2024-01-26T10:09:16.172Z",
"softDeletedTime": null,
"removeFromDictionary": false,
"approved": true
},
{
"annotationId": "01fcfb3581723a4d164b4e5ed9e7db90",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:09:05.824116Z",
"processedDate": "2024-01-26T10:09:05.885Z",
"softDeletedTime": null,
"removeFromDictionary": false,
"approved": true
},
{
"annotationId": "ad5c82acae51a8af70c9141e1ae4efde",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:09:01.121129Z",
"processedDate": "2024-01-26T10:09:01.178Z",
"softDeletedTime": null,
"removeFromDictionary": false,
"approved": true
},
{
"annotationId": "1ec000364ca48d676af7e33be13685e4",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:10:49.38646Z",
"processedDate": "2024-01-26T10:10:49.429Z",
"softDeletedTime": null,
"removeFromDictionary": false,
"approved": true
}
],
"entriesToAdd": [
{
"annotationId": "664d4f36dfb9549d1ba11d70c63274e1",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:08:20.832Z",
"processedDate": "2024-01-26T10:08:20.832Z",
"softDeletedTime": null,
"type": "manual",
"value": "CBI.0.1",
"reason": "personal data (names and addresses) of individuals involved in testing on vertebrate studies or in obtaining toxicological information",
"legalBasis": "Article 39(e)(2) of Regulation (EC) No 178/2002",
"section": null,
"rectangle": false,
"addToDictionary": false,
"addToDossierDictionary": false,
"positions": [
{
"topLeftX": 216.38,
"topLeftY": 597.27,
"width": 31.09,
"height": 12.83,
"page": 1
}
],
"textBefore": null,
"textAfter": null,
"sourceId": null,
"approved": true
},
{
"annotationId": "e3d199546dd63db9746adbe0f71a9526",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:12:06.458Z",
"processedDate": "2024-01-26T10:12:07.031Z",
"softDeletedTime": null,
"type": "PII",
"value": "the six desire",
"reason": "Dictionary Request",
"legalBasis": null,
"section": null,
"rectangle": false,
"addToDictionary": true,
"addToDossierDictionary": false,
"positions": [
{
"topLeftX": 112.86,
"topLeftY": 590.97,
"width": 52.68,
"height": 12.84,
"page": 19
}
],
"textBefore": null,
"textAfter": null,
"sourceId": null,
"approved": true
},
{
"annotationId": "b1a86e5984e7e9a7434f1bbdfc983228",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:07:50.175Z",
"processedDate": "2024-01-26T10:07:50.175Z",
"softDeletedTime": null,
"type": "manual",
"value": "my non-readable content",
"reason": "(Regulations (EU) 2016/679 and (EU) 2018/1725 shall apply to the processing of personal data carried out pursuant to this Regulation. Any personal data made public pursuant to Article 38 of this Regulation and this Article shall only be used to ensure the transparency of the risk assessment under this Regulation and shall not be further processed in a manner that is incompatible with these purposes, in accordance with point (b) of Article 5(1) of Regulation (EU) 2016/679 and point (b) of Article 4(1) of Regulation (EU) 2018/1725, as the case may be)",
"legalBasis": "Article 39(e)(3) of Regulation (EC) No 178/2002",
"section": "my Paragraph",
"rectangle": true,
"addToDictionary": false,
"addToDossierDictionary": false,
"positions": [
{
"topLeftX": 305.67,
"topLeftY": 591.4,
"width": 189.85,
"height": 167.17,
"page": 1
}
],
"textBefore": null,
"textAfter": null,
"sourceId": null,
"approved": true
},
{
"annotationId": "8572ab8899313b10f02a3cc9d97d7240",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:07:58.713Z",
"processedDate": "2024-01-26T10:07:58.713Z",
"softDeletedTime": null,
"type": "manual",
"value": "Rule",
"reason": "personal data (names and addresses) of individuals involved in testing on vertebrate studies or in obtaining toxicological information",
"legalBasis": "Article 39(e)(2) of Regulation (EC) No 178/2002",
"section": null,
"rectangle": false,
"addToDictionary": false,
"addToDossierDictionary": false,
"positions": [
{
"topLeftX": 59.8,
"topLeftY": 687.552,
"width": 31.968,
"height": 20.96,
"page": 1
}
],
"textBefore": null,
"textAfter": null,
"sourceId": null,
"approved": true
},
{
"annotationId": "dbace52ac2ad8aaf7ff207001e256619",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:08:08.766Z",
"processedDate": "2024-01-26T10:08:08.766Z",
"softDeletedTime": null,
"type": "manual",
"value": "CBI.0.0/1: Redact CBI Authors",
"reason": "personal data (names and addresses) of individuals involved in testing on vertebrate studies or in obtaining toxicological information",
"legalBasis": "Article 39(e)(2) of Regulation (EC) No 178/2002",
"section": null,
"rectangle": false,
"addToDictionary": false,
"addToDossierDictionary": false,
"positions": [
{
"topLeftX": 56.8,
"topLeftY": 626.5984,
"width": 200.6712,
"height": 19.8669,
"page": 1
}
],
"textBefore": null,
"textAfter": null,
"sourceId": null,
"approved": true
},
{
"annotationId": "e8e40aa8eae970d476c3f58b14334e4f",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:11:57.565Z",
"processedDate": "2024-01-26T10:11:58.184Z",
"softDeletedTime": null,
"type": "CBI_author",
"value": "Pasture he invited mr company shyness. But when shot real her. Chamber her\nobserve visited removal six sending himself boy. At exquisite existence if an oh\ndependent excellent. Are gay head need down draw. Misery wonder enable\nmutual get set oppose the uneasy. End why melancholy estimating her had\nindulgence middletons. Say ferrars demands besides her address. Blind going\nyou merit few fancy their.",
"reason": "Dictionary Request",
"legalBasis": null,
"section": null,
"rectangle": false,
"addToDictionary": true,
"addToDossierDictionary": false,
"positions": [
{
"topLeftX": 56.8,
"topLeftY": 671.87,
"width": 309.71,
"height": 12.84,
"page": 19
},
{
"topLeftX": 56.8,
"topLeftY": 660.27,
"width": 318.24,
"height": 12.84,
"page": 19
},
{
"topLeftX": 56.8,
"topLeftY": 648.77,
"width": 299.68,
"height": 12.84,
"page": 19
},
{
"topLeftX": 56.8,
"topLeftY": 637.17,
"width": 297.4,
"height": 12.84,
"page": 19
},
{
"topLeftX": 56.8,
"topLeftY": 625.67,
"width": 309.46,
"height": 12.84,
"page": 19
},
{
"topLeftX": 56.8,
"topLeftY": 614.07,
"width": 103.53,
"height": 12.84,
"page": 19
}
],
"textBefore": null,
"textAfter": null,
"sourceId": null,
"approved": true
},
{
"annotationId": "da0b8d99861b9720b2155e470b18f21d",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:13:51.881Z",
"processedDate": "2024-01-26T10:13:52.463Z",
"softDeletedTime": null,
"type": "CBI_author",
"value": "Lain",
"reason": "Dictionary Request",
"legalBasis": null,
"section": null,
"rectangle": false,
"addToDictionary": true,
"addToDossierDictionary": false,
"positions": [
{
"topLeftX": 217.76,
"topLeftY": 475.47,
"width": 18.29,
"height": 12.84,
"page": 19
}
],
"textBefore": null,
"textAfter": null,
"sourceId": null,
"approved": true
},
{
"annotationId": "7634eed4ce8269fdecd009b07fc70144",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:09:27.249Z",
"processedDate": "2024-01-26T10:09:27.864Z",
"softDeletedTime": null,
"type": "PII",
"value": "Xinyi Y. Tao Possible incidents",
"reason": "False Positive",
"legalBasis": null,
"section": null,
"rectangle": false,
"addToDictionary": true,
"addToDossierDictionary": false,
"positions": [
{
"topLeftX": 56.8,
"topLeftY": 568.84204,
"width": 60.575993,
"height": -12.641998,
"page": 6
}
],
"textBefore": null,
"textAfter": null,
"sourceId": "3fcff2c472c4cce852815de9cb17a50d",
"approved": true
}
],
"forceRedactions": [
{
"annotationId": "63a758ca88b0e0a6da546b1a8ece7e39",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:10:31.922754Z",
"processedDate": "2024-01-26T10:10:32.38Z",
"softDeletedTime": null,
"legalBasis": "Article 39(e)(2) of Regulation (EC) No 178/2002",
"approved": true
},
{
"annotationId": "44f684045879e7a6decd34baaae0930f",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:11:31.762669Z",
"processedDate": "2024-01-26T10:11:32.194Z",
"softDeletedTime": null,
"legalBasis": "Article 39(e)(2) of Regulation (EC) No 178/2002",
"approved": true
}
],
"imageRecategorization": [
{
"annotationId": "647a450f5feba4859297886a4263b653",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:11:25.055708Z",
"processedDate": "2024-01-26T10:11:25.539Z",
"softDeletedTime": null,
"type": "formula",
"approved": true
}
],
"legalBasisChanges": [],
"resizeRedactions": [
{
"annotationId": "30cb40dc6f495193f1b47d0d557fd682",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:09:38.629636Z",
"processedDate": "2024-01-26T10:09:38.629Z",
"softDeletedTime": null,
"value": "for this project are library@outlook.com",
"positions": [
{
"topLeftX": 248.632,
"topLeftY": 328.464,
"width": 194.58,
"height": 15.408,
"page": 6
}
],
"textBefore": null,
"textAfter": null,
"updateDictionary": null,
"approved": true
},
{
"annotationId": "90129d370d60b07d8cb060400338255e",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:09:24.387046Z",
"processedDate": "2024-01-26T10:09:24.387Z",
"softDeletedTime": null,
"value": "Alfred",
"positions": [
{
"topLeftX": 80.788,
"topLeftY": 568.364,
"width": 31.284,
"height": 15.408,
"page": 6
}
],
"textBefore": null,
"textAfter": null,
"updateDictionary": false,
"approved": true
},
{
"annotationId": "3ed0487b61fdb556c8982450aa39a8db",
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"status": "APPROVED",
"requestDate": "2024-01-26T10:11:03.039254Z",
"processedDate": "2024-01-26T10:11:03.039Z",
"softDeletedTime": null,
"value": "Image",
"positions": [
{
"topLeftX": 287,
"topLeftY": 613,
"width": 288.45,
"height": 85.64,
"page": 14
}
],
"textBefore": null,
"textAfter": null,
"updateDictionary": null,
"approved": true
}
],
"comments": {
"664d4f36dfb9549d1ba11d70c63274e1": [
{
"id": 36,
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"annotationId": "664d4f36dfb9549d1ba11d70c63274e1",
"date": "2024-01-26T10:08:26.837Z",
"text": "a1",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"softDeletedTime": null,
"fileStatus": null
},
{
"id": 38,
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"annotationId": "664d4f36dfb9549d1ba11d70c63274e1",
"date": "2024-01-26T10:08:28.936Z",
"text": "a2",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"softDeletedTime": null,
"fileStatus": null
}
],
"96faa75e974e6634fe534116952650ee": [
{
"id": 40,
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"annotationId": "96faa75e974e6634fe534116952650ee",
"date": "2024-01-26T10:08:45.461Z",
"text": "b1",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"softDeletedTime": null,
"fileStatus": null
},
{
"id": 42,
"fileId": "2018ceba2d83a7de510c66c9f636cfbf",
"annotationId": "96faa75e974e6634fe534116952650ee",
"date": "2024-01-26T10:08:48.416Z",
"text": "b2",
"user": "4f5958e9-6444-4e52-97e4-9a995e54dc6d",
"softDeletedTime": null,
"fileStatus": null
}
]
}
}

View File

@ -0,0 +1,513 @@
{
"idsToRemove": [
{
"removeFromDictionary": false,
"removeFromAllDossiers": false,
"annotationId": "657b681db1be8118ac305b40758effb8",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:03:54.458271Z",
"processedDate": "2024-02-09T14:03:54.501Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"removeFromDictionary": false,
"removeFromAllDossiers": false,
"annotationId": "234bfcab1a40c5346191a0da99e99e7c",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:03:34.525304Z",
"processedDate": "2024-02-09T14:03:34.558Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"removeFromDictionary": false,
"removeFromAllDossiers": false,
"annotationId": "c9c3283baf73e327a8ddbdd4d603aad5",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:01:01.401478Z",
"processedDate": "2024-02-09T14:01:01.432Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"removeFromDictionary": false,
"removeFromAllDossiers": false,
"annotationId": "aed17872b01a30e019978a99fb1b3c69",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:01:12.658542Z",
"processedDate": "2024-02-09T14:01:12.7Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"removeFromDictionary": false,
"removeFromAllDossiers": false,
"annotationId": "5c0ee1942517fa0f8b7862eeb720a7fe",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:01:04.622727Z",
"processedDate": "2024-02-09T14:01:04.659Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"removeFromDictionary": false,
"removeFromAllDossiers": false,
"annotationId": "c4587ec065c8e81ad53c34417b8ef866",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:03:40.359758Z",
"processedDate": "2024-02-09T14:03:40.4Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
}
],
"entriesToAdd": [
{
"type": "PII",
"value": "the six desire",
"reason": "Dictionary Request",
"legalBasis": null,
"section": null,
"rectangle": false,
"addToDictionary": true,
"addToDossierDictionary": false,
"positions": [
{
"topLeft": {
"x": 112.86,
"y": 590.97
},
"width": 52.68,
"height": 12.84,
"page": 19,
"topLeftX": 112.86,
"topLeftY": 590.97
}
],
"textBefore": null,
"textAfter": null,
"sourceId": null,
"dictionaryEntryType": null,
"annotationId": "19578e7d4275e9e68853a6b2988f0681",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:04:26.939Z",
"processedDate": "2024-02-09T14:04:28.118Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"type": "manual",
"value": "Rule",
"reason": "personal data (names and addresses) of individuals involved in testing on vertebrate studies or in obtaining toxicological information",
"legalBasis": "Article 39(e)(2) of Regulation (EC) No 178/2002",
"section": null,
"rectangle": false,
"addToDictionary": false,
"addToDossierDictionary": false,
"positions": [
{
"topLeft": {
"x": 59.8,
"y": 687.552
},
"width": 31.968,
"height": 20.96,
"page": 1,
"topLeftX": 59.8,
"topLeftY": 687.552
}
],
"textBefore": null,
"textAfter": null,
"sourceId": null,
"dictionaryEntryType": null,
"annotationId": "cb27dd265a1c09418b55317c68be6c12",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:00:09.427Z",
"processedDate": "2024-02-09T14:00:09.427Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"type": "manual",
"value": "Rule CBI.0.1",
"reason": "personal data (names and addresses) of individuals involved in testing on vertebrate studies or in obtaining toxicological information",
"legalBasis": "Article 39(e)(2) of Regulation (EC) No 178/2002",
"section": null,
"rectangle": false,
"addToDictionary": false,
"addToDossierDictionary": false,
"positions": [
{
"topLeft": {
"x": 195.59,
"y": 597.27
},
"width": 51.88,
"height": 12.83,
"page": 1,
"topLeftX": 195.59,
"topLeftY": 597.27
}
],
"textBefore": null,
"textAfter": null,
"sourceId": null,
"dictionaryEntryType": null,
"annotationId": "7ff0836d041a05a5e7165c0712d85b61",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:00:27.216Z",
"processedDate": "2024-02-09T14:00:27.216Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"type": "CBI_author",
"value": "Pasture he invited mr company shyness. But when shot real her. Chamber her\nobserve visited removal six sending himself boy. At exquisite existence if an oh\ndependent excellent. Are gay head need down draw. Misery wonder enable\nmutual get set oppose the uneasy. End why melancholy estimating her had\nindulgence middletons. Say ferrars demands besides her address. Blind going\nyou merit few fancy their.",
"reason": "Dictionary Request",
"legalBasis": null,
"section": null,
"rectangle": false,
"addToDictionary": true,
"addToDossierDictionary": false,
"positions": [
{
"topLeft": {
"x": 56.8,
"y": 671.87
},
"width": 309.71,
"height": 12.84,
"page": 19,
"topLeftX": 56.8,
"topLeftY": 671.87
},
{
"topLeft": {
"x": 56.8,
"y": 660.27
},
"width": 318.24,
"height": 12.84,
"page": 19,
"topLeftX": 56.8,
"topLeftY": 660.27
},
{
"topLeft": {
"x": 56.8,
"y": 648.77
},
"width": 299.68,
"height": 12.84,
"page": 19,
"topLeftX": 56.8,
"topLeftY": 648.77
},
{
"topLeft": {
"x": 56.8,
"y": 637.17
},
"width": 297.4,
"height": 12.84,
"page": 19,
"topLeftX": 56.8,
"topLeftY": 637.17
},
{
"topLeft": {
"x": 56.8,
"y": 625.67
},
"width": 309.46,
"height": 12.84,
"page": 19,
"topLeftX": 56.8,
"topLeftY": 625.67
},
{
"topLeft": {
"x": 56.8,
"y": 614.07
},
"width": 103.53,
"height": 12.84,
"page": 19,
"topLeftX": 56.8,
"topLeftY": 614.07
}
],
"textBefore": null,
"textAfter": null,
"sourceId": null,
"dictionaryEntryType": null,
"annotationId": "15e0bbae087bf78e872363512de9c0b6",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:04:18.561Z",
"processedDate": "2024-02-09T14:04:19.459Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"type": "manual",
"value": "CBI.0.0/1: Redact CBI Authors",
"reason": "personal data (names and addresses) of individuals involved in testing on vertebrate studies or in obtaining toxicological information",
"legalBasis": "Article 39(e)(2) of Regulation (EC) No 178/2002",
"section": null,
"rectangle": false,
"addToDictionary": false,
"addToDossierDictionary": false,
"positions": [
{
"topLeft": {
"x": 56.8,
"y": 626.5984
},
"width": 200.6712,
"height": 19.8669,
"page": 1,
"topLeftX": 56.8,
"topLeftY": 626.5984
}
],
"textBefore": null,
"textAfter": null,
"sourceId": null,
"dictionaryEntryType": null,
"annotationId": "2e0b1dd10a032ce27437b5b404a4d561",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:00:16.794Z",
"processedDate": "2024-02-09T14:00:16.795Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"type": "PII",
"value": "Xinyi Y. Tao Possible incidents",
"reason": "False Positive",
"legalBasis": null,
"section": null,
"rectangle": false,
"addToDictionary": true,
"addToDossierDictionary": false,
"positions": [
{
"topLeft": {
"x": 56.8,
"y": 568.84204
},
"width": 60.575993,
"height": -12.641998,
"page": 6,
"topLeftX": 56.8,
"topLeftY": 568.84204
}
],
"textBefore": null,
"textAfter": null,
"sourceId": "3fcff2c472c4cce852815de9cb17a50d",
"dictionaryEntryType": "FALSE_POSITIVE",
"annotationId": "1abcd854c21c305d357677346d1fc3e2",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:01:28.622Z",
"processedDate": "2024-02-09T14:01:29.449Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"type": "manual",
"value": "non-readable content",
"reason": "personal data (names and addresses) of individuals involved in testing on vertebrate studies or in obtaining toxicological information",
"legalBasis": "Article 39(e)(2) of Regulation (EC) No 178/2002",
"section": null,
"rectangle": true,
"addToDictionary": false,
"addToDossierDictionary": false,
"positions": [
{
"topLeft": {
"x": 346.08,
"y": 610.87
},
"width": 128.31,
"height": 109.97,
"page": 1,
"topLeftX": 346.08,
"topLeftY": 610.87
}
],
"textBefore": null,
"textAfter": null,
"sourceId": null,
"dictionaryEntryType": null,
"annotationId": "562ef1c464f3739c79fa9cedb9649658",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:00:00.762Z",
"processedDate": "2024-02-09T14:00:00.762Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
}
],
"forceRedactions": [
{
"legalBasis": "Article 39(e)(2) of Regulation (EC) No 178/2002",
"annotationId": "9b94e089cbc76a4eee06fa358d268015",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:02:31.479111Z",
"processedDate": "2024-02-09T14:02:32.092Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"legalBasis": "Article 39(e)(2) of Regulation (EC) No 178/2002",
"annotationId": "81dea577c7e615375e2a7bd011ad1a6b",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:03:59.774394Z",
"processedDate": "2024-02-09T14:04:00.373Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"legalBasis": "Article 39(e)(2) of Regulation (EC) No 178/2002",
"annotationId": "0c0164dfea2be886475482930b403de5",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:02:14.953642Z",
"processedDate": "2024-02-09T14:02:15.58Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
}
],
"recategorizations": [
{
"type": "formula",
"annotationId": "c6df2cf8cca12cad76fa89f5ee0b3214",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:04:06.892584Z",
"processedDate": "2024-02-09T14:04:07.574Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
}
],
"legalBasisChanges": [],
"resizeRedactions": [
{
"value": "for this project are library@outlook.com",
"positions": [
{
"topLeft": {
"x": 248.632,
"y": 328.464
},
"width": 194.58,
"height": 15.408,
"page": 6,
"topLeftX": 248.632,
"topLeftY": 328.464
}
],
"textBefore": null,
"textAfter": null,
"updateDictionary": false,
"addToAllDossiers": false,
"annotationId": "f2ef0bb904c4e055ce1963d635b41789",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:01:49.857167Z",
"processedDate": "2024-02-09T14:01:49.857Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"value": "Alfred",
"positions": [
{
"topLeft": {
"x": 80.788,
"y": 568.364
},
"width": 31.284,
"height": 15.408,
"page": 6,
"topLeftX": 80.788,
"topLeftY": 568.364
}
],
"textBefore": null,
"textAfter": null,
"updateDictionary": false,
"addToAllDossiers": false,
"annotationId": "d251f8afaa84d0f0c939df9ca0eb1b88",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:01:21.749389Z",
"processedDate": "2024-02-09T14:01:21.749Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
},
{
"value": "Image",
"positions": [
{
"topLeft": {
"x": 287,
"y": 613
},
"width": 278.86,
"height": 128.24,
"page": 14,
"topLeftX": 287,
"topLeftY": 613
}
],
"textBefore": null,
"textAfter": null,
"updateDictionary": false,
"addToAllDossiers": false,
"annotationId": "1d7c699be6cfa57e36d26add7f1fd0c5",
"fileId": "56595118999b5bf98e47c1c918ada128",
"user": "2cb10c0f-e6e9-42de-a4ec-efb3fac5ef61",
"requestDate": "2024-02-09T14:03:51.725296Z",
"processedDate": "2024-02-09T14:03:51.725Z",
"softDeletedTime": null,
"approved": true,
"status": "APPROVED"
}
]
}

View File

@ -0,0 +1,68 @@
{
"mappings": [
{
"oldId": "63a758ca88b0e0a6da546b1a8ece7e39",
"newId": "0c0164dfea2be886475482930b403de5"
},
{
"oldId": "01fcfb3581723a4d164b4e5ed9e7db90",
"newId": "5c0ee1942517fa0f8b7862eeb720a7fe"
},
{
"oldId": "ad5c82acae51a8af70c9141e1ae4efde",
"newId": "c9c3283baf73e327a8ddbdd4d603aad5"
},
{
"oldId": "5316fff0ec9ae3f2773378f3cc833079",
"newId": "aed17872b01a30e019978a99fb1b3c69"
},
{
"oldId": "af762b495f33a941dc94155ed7db9fa7",
"newId": "9b94e089cbc76a4eee06fa358d268015"
},
{
"oldId": "c2cc46b2fd7b3a68639ad12f9ae4c70d",
"newId": "c4587ec065c8e81ad53c34417b8ef866"
},
{
"oldId": "1ec000364ca48d676af7e33be13685e4",
"newId": "234bfcab1a40c5346191a0da99e99e7c"
},
{
"oldId": "90129d370d60b07d8cb060400338255e",
"newId": "d251f8afaa84d0f0c939df9ca0eb1b88"
},
{
"oldId": "30cb40dc6f495193f1b47d0d557fd682",
"newId": "f2ef0bb904c4e055ce1963d635b41789"
},
{
"oldId": "a9eb84b047ecc5f11d1fe835a4c814ad",
"newId": "2e0b1dd10a032ce27437b5b404a4d561"
},
{
"oldId": "fe41fc0f9f5f1e90a2df6eda2f1ba88f",
"newId": "cb27dd265a1c09418b55317c68be6c12"
},
{
"oldId": "52e0c82360238d1a2d4e7023550f5c3c",
"newId": "7ff0836d041a05a5e7165c0712d85b61"
},
{
"oldId": "6808af23a9652917b73c1939b481f3e4",
"newId": "657b681db1be8118ac305b40758effb8"
},
{
"oldId": "3ed0487b61fdb556c8982450aa39a8db",
"newId": "1d7c699be6cfa57e36d26add7f1fd0c5"
},
{
"oldId": "44f684045879e7a6decd34baaae0930f",
"newId": "81dea577c7e615375e2a7bd011ad1a6b"
},
{
"oldId": "647a450f5feba4859297886a4263b653",
"newId": "c6df2cf8cca12cad76fa89f5ee0b3214"
}
]
}