RED-9859: introduce KieSessionUpdater

parent 3ff541fee6
commit d4cdf36c52
@@ -180,6 +180,13 @@ public class PrecursorEntity implements IEntity {
    }


    @Override
    public void update() {

        // not in KieSession, do nothing
    }


    /**
     * @return true when this entity is of EntityType ENTITY or HINT
     */
@@ -35,7 +35,7 @@ public class DocumentTree {

    public DocumentTree(Document document) {

        root = Entry.builder().treeId(Collections.emptyList()).children(new LinkedList<>()).node(document).build();
        root = Entry.builder().treeId(Collections.emptyList()).node(document).build();
    }


@@ -134,7 +134,7 @@ public class DocumentTree {
    public List<SemanticNode> findIntersectingChildNodes(List<Integer> treeId, TextRange textRange) {

        List<Entry> childEntries = getEntryById(treeId).getChildren();
        List<SemanticNode> intersectingChildEntries = new LinkedList<>();
        List<SemanticNode> intersectingChildEntries = new ArrayList<>();
        int startIdx = findFirstIdxOfContainingChildBinarySearch(childEntries, textRange.start());
        if (startIdx < 0) {
            return intersectingChildEntries;
@@ -289,7 +289,8 @@ public class DocumentTree {
            return root;
        }
        Entry entry = root;
        for (int id : treeId) {
        for (int i = 0, treeIdSize = treeId.size(); i < treeIdSize; i++) {
            int id = treeId.get(i);
            entry = entry.children.get(id);
        }
        return entry;
@@ -0,0 +1,56 @@
package com.iqser.red.service.redaction.v1.server.model.document;

import java.util.Set;

import org.kie.api.runtime.KieSession;

import com.iqser.red.service.redaction.v1.server.model.document.entity.TextEntity;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.Image;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.SemanticNode;

import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;

@RequiredArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class KieSessionUpdater {

    Set<SemanticNode> nodesInKieSession;
    KieSession kieSession;


    public void insert(TextEntity textEntity) {

        kieSession.insert(textEntity);
        updateIntersectingNodes(textEntity);
    }


    public void update(TextEntity textEntity) {

        kieSession.update(kieSession.getFactHandle(textEntity), textEntity);
        updateIntersectingNodes(textEntity);
    }


    public void update(Image image) {

        kieSession.update(kieSession.getFactHandle(image), image);
        SemanticNode parent = image;
        while (parent.hasParent()) {
            parent = parent.getParent();
            kieSession.update(kieSession.getFactHandle(parent), parent);
        }
    }


    public void updateIntersectingNodes(TextEntity textEntity) {

        textEntity.getIntersectingNodes()
                .stream()
                .filter(nodesInKieSession::contains)
                .forEach(o -> kieSession.update(kieSession.getFactHandle(o), o));
    }

}
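Usage sketch (illustrative, not part of the diff): this mirrors how the EntityDroolsExecutionService hunks further down construct the updater and attach it to the document for the duration of a Drools run; kieSession, nodesInKieSession and document are assumed to already exist in the calling code.

    // Attach the updater for the duration of the rule run, detach it afterwards.
    KieSessionUpdater kieSessionUpdater = new KieSessionUpdater(nodesInKieSession, kieSession);
    document.setKieSessionUpdater(kieSessionUpdater);
    try {
        // fire rules; IEntity.update() and updateIntersectingNodes() delegate to the updater
    } finally {
        document.setKieSessionUpdater(null);
    }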
@@ -52,6 +52,12 @@ public interface IEntity {
    String type();


    /**
     * Marks this entity and all its intersecting nodes as updated
     */
    void update();


    /**
     * An Entity is valid, when it active and not a false recommendation, a false positive or a dictionary removal.
     *
@@ -273,13 +279,13 @@ public interface IEntity {
        if (legalBasis.isBlank() || legalBasis.isEmpty()) {
            throw new IllegalArgumentException("legal basis cannot be empty when redacting an entity");
        }
        getMatchedRuleList().add(MatchedRule.builder()
                .ruleIdentifier(RuleIdentifier.fromString(ruleIdentifier))
                .reason(reason)
                .legalBasis(legalBasis)
                .applied(true)
                .writeValueWithLineBreaks(true)
                .build());
        addMatchedRule(MatchedRule.builder()
                .ruleIdentifier(RuleIdentifier.fromString(ruleIdentifier))
                .reason(reason)
                .legalBasis(legalBasis)
                .applied(true)
                .writeValueWithLineBreaks(true)
                .build());
    }


@@ -297,13 +303,13 @@ public interface IEntity {
        if (legalBasis.isBlank() || legalBasis.isEmpty()) {
            throw new IllegalArgumentException("legal basis cannot be empty when redacting an entity");
        }
        getMatchedRuleList().add(MatchedRule.builder()
                .ruleIdentifier(RuleIdentifier.fromString(ruleIdentifier))
                .reason(reason)
                .legalBasis(legalBasis)
                .applied(true)
                .references(new HashSet<>(references))
                .build());
        addMatchedRule(MatchedRule.builder()
                .ruleIdentifier(RuleIdentifier.fromString(ruleIdentifier))
                .reason(reason)
                .legalBasis(legalBasis)
                .applied(true)
                .references(new HashSet<>(references))
                .build());
    }


@@ -316,7 +322,7 @@ public interface IEntity {
     */
    default void skipWithReferences(@NonNull String ruleIdentifier, String reason, Collection<TextEntity> references) {

        getMatchedRuleList().add(MatchedRule.builder().ruleIdentifier(RuleIdentifier.fromString(ruleIdentifier)).reason(reason).references(new HashSet<>(references)).build());
        addMatchedRule(MatchedRule.builder().ruleIdentifier(RuleIdentifier.fromString(ruleIdentifier)).reason(reason).references(new HashSet<>(references)).build());
    }


@@ -327,7 +333,12 @@ public interface IEntity {
     */
    default void addMatchedRule(MatchedRule matchedRule) {

        MatchedRule previous = getMatchedRule();
        getMatchedRuleList().add(matchedRule);
        if (getMatchedRule().equals(previous)) {
            return;
        }
        update();
    }


@@ -341,7 +352,12 @@ public interface IEntity {
        if (getMatchedRuleList().equals(matchedRules)) {
            return;
        }
        MatchedRule previous = getMatchedRule();
        getMatchedRuleList().addAll(matchedRules);
        if (getMatchedRule().equals(previous)) {
            return;
        }
        update();
    }

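Illustrative note (not part of the diff): with the IEntity hunks above, the redact/skip helpers now funnel through addMatchedRule, which calls update() only when the effective matched rule actually changes. A minimal sketch, assuming getMatchedRule() derives the currently effective rule from the rule list (entity, ruleA and ruleB are hypothetical):

    entity.addMatchedRule(ruleA); // effective rule changes -> update() pushes the change to the KieSession
    entity.addMatchedRule(ruleB); // if getMatchedRule() still resolves to ruleA -> no update() call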
@@ -8,11 +8,14 @@ import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.PriorityQueue;
import java.util.Set;

import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.Engine;
import com.iqser.red.service.redaction.v1.server.model.document.KieSessionUpdater;
import com.iqser.red.service.redaction.v1.server.model.document.TextRange;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.Document;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.Page;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.SemanticNode;
import com.iqser.red.service.redaction.v1.server.utils.IdBuilder;
@@ -22,6 +25,7 @@ import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NonNull;
import lombok.experimental.FieldDefaults;

/**
@@ -311,4 +315,31 @@ public class TextEntity implements IEntity {
                .orElse(getMatchedRule().isWriteValueWithLineBreaks() ? getValueWithLineBreaks() : value);
    }


    public void update() {

        getKieSessionUpdater().ifPresent(updater -> updater.update(this));
    }


    private @NonNull Optional<KieSessionUpdater> getKieSessionUpdater() {

        if (intersectingNodes.isEmpty()) {
            return Optional.empty();
        }
        if (intersectingNodes.get(0) instanceof Document document) {
            if (document.getKieSessionUpdater() == null) {
                return Optional.empty();
            }
            return Optional.of(document.getKieSessionUpdater());
        }
        return Optional.empty();
    }


    public void updateIntersectingNodes() {

        getKieSessionUpdater().ifPresent(updater -> updater.updateIntersectingNodes(this));
    }

}
@@ -9,8 +9,8 @@ import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import com.iqser.red.service.redaction.v1.server.model.document.ConsecutiveTextBlockCollector;
import com.iqser.red.service.redaction.v1.server.model.document.DocumentTree;
import com.iqser.red.service.redaction.v1.server.model.document.KieSessionUpdater;
import com.iqser.red.service.redaction.v1.server.model.document.textblock.TextBlock;

import lombok.AccessLevel;
@@ -35,6 +35,7 @@ public class Document extends AbstractSemanticNode {

    Set<Page> pages;
    Integer numberOfPages;
    KieSessionUpdater kieSessionUpdater;

    @Builder.Default
    static final SectionIdentifier sectionIdentifier = SectionIdentifier.document();
@@ -3,22 +3,18 @@ package com.iqser.red.service.redaction.v1.server.model.document.nodes;
import java.awt.geom.Rectangle2D;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.PriorityQueue;
import java.util.Set;

import com.iqser.red.service.redaction.v1.server.model.document.DocumentTree;
import com.iqser.red.service.redaction.v1.server.model.document.KieSessionUpdater;
import com.iqser.red.service.redaction.v1.server.model.document.TextRange;
import com.iqser.red.service.redaction.v1.server.model.document.entity.IEntity;
import com.iqser.red.service.redaction.v1.server.model.document.entity.ManualChangeOverwrite;
import com.iqser.red.service.redaction.v1.server.model.document.entity.MatchedRule;
import com.iqser.red.service.redaction.v1.server.model.document.entity.TextEntity;
import com.iqser.red.service.redaction.v1.server.model.document.textblock.TextBlock;
import com.iqser.red.service.redaction.v1.server.model.document.textblock.TextBlockCollector;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.LayoutEngine;

import lombok.AccessLevel;
import lombok.AllArgsConstructor;
@@ -26,12 +22,12 @@ import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.experimental.FieldDefaults;
import lombok.experimental.SuperBuilder;

/**
 *
 Represents an image within the document.
 * Represents an image within the document.
 */
@Data
@SuperBuilder
@@ -96,7 +92,15 @@ public class Image extends AbstractSemanticNode implements IEntity {
    @Override
    public String type() {

        return getManualOverwrite().getType().orElse(imageType.toString().toLowerCase(Locale.ENGLISH));
        return getManualOverwrite().getType()
                .orElse(imageType.toString().toLowerCase(Locale.ENGLISH));
    }


    @Override
    public void update() {

        getKieSessionUpdater().ifPresent(updater -> updater.update(this));
    }


@@ -154,4 +158,19 @@ public class Image extends AbstractSemanticNode implements IEntity {
        return (area / calculatedIntersection) > containmentThreshold;
    }


    private @NonNull Optional<KieSessionUpdater> getKieSessionUpdater() {

        if (getDocumentTree() == null) {
            return Optional.empty();
        }
        if (getDocumentTree().getRoot().getNode() instanceof Document document) {
            if (document.getKieSessionUpdater() == null) {
                return Optional.empty();
            }
            return Optional.of(document.getKieSessionUpdater());
        }
        return Optional.empty();
    }

}
@@ -28,10 +28,12 @@ import com.iqser.red.service.redaction.v1.server.model.NerEntities;
import com.iqser.red.service.redaction.v1.server.model.dictionary.SearchImplementation;
import com.iqser.red.service.redaction.v1.server.model.document.ConsecutiveBoundaryCollector;
import com.iqser.red.service.redaction.v1.server.model.document.DocumentTree;
import com.iqser.red.service.redaction.v1.server.model.document.KieSessionUpdater;
import com.iqser.red.service.redaction.v1.server.model.document.TextRange;
import com.iqser.red.service.redaction.v1.server.model.document.entity.EntityType;
import com.iqser.red.service.redaction.v1.server.model.document.entity.ManualChangeOverwrite;
import com.iqser.red.service.redaction.v1.server.model.document.entity.TextEntity;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.Document;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.NodeType;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.SemanticNode;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.Table;
@@ -49,14 +51,12 @@ import lombok.extern.slf4j.Slf4j;
public class EntityCreationService {

    private final EntityEnrichmentService entityEnrichmentService;
    private final KieSession kieSession;
    private final Set<SemanticNode> nodesInKieSession; // empty set means all nodes are in kieSession


    public EntityCreationService(EntityEnrichmentService entityEnrichmentService) {

        this.entityEnrichmentService = entityEnrichmentService;
        this.kieSession = null;
        this.nodesInKieSession = Collections.emptySet();
    }

@@ -64,7 +64,6 @@ public class EntityCreationService {
    public EntityCreationService(EntityEnrichmentService entityEnrichmentService, KieSession kieSession) {

        this.entityEnrichmentService = entityEnrichmentService;
        this.kieSession = kieSession;
        this.nodesInKieSession = Collections.emptySet();
    }

@@ -1016,7 +1015,7 @@ public class EntityCreationService {
            return Optional.empty();
        }
        entity.addEngines(engines);
        insertToKieSession(entity);
        insertToKieSession(entity, node);
        return Optional.of(entity);
    }

@@ -1091,7 +1090,7 @@ public class EntityCreationService {
        entityEnrichmentService.enrichEntity(mergedEntity, node.getTextBlock());

        addEntityToGraph(mergedEntity, node);
        insertToKieSession(mergedEntity);
        insertToKieSession(mergedEntity, node);

        entitiesToMerge.stream()
                .filter(e -> !e.equals(mergedEntity))
@@ -1158,10 +1157,14 @@
     *
     * @param textEntity The merged text entity to insert.
     */
    public void insertToKieSession(TextEntity textEntity) {
    public void insertToKieSession(TextEntity textEntity, SemanticNode node) {

        if (kieSession != null) {
            kieSession.insert(textEntity);
        if (node.getDocumentTree().getRoot().getNode() instanceof Document document) {
            KieSessionUpdater updater = document.getKieSessionUpdater();
            if (updater == null) {
                return;
            }
            updater.insert(textEntity);
        }
    }

@@ -27,6 +27,7 @@ import com.iqser.red.service.redaction.v1.server.logger.RulesLogger;
import com.iqser.red.service.redaction.v1.server.logger.TrackingAgendaEventListener;
import com.iqser.red.service.redaction.v1.server.model.NerEntities;
import com.iqser.red.service.redaction.v1.server.model.dictionary.Dictionary;
import com.iqser.red.service.redaction.v1.server.model.document.KieSessionUpdater;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.Document;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.SemanticNode;
import com.iqser.red.service.redaction.v1.server.service.ManualChangesApplicationService;
@@ -96,7 +97,10 @@ public class EntityDroolsExecutionService {
        Set<SemanticNode> nodesInKieSession = sectionsToAnalyze.size() == document.streamAllSubNodes()
                .count() ? Collections.emptySet() : buildSet(sectionsToAnalyze, document);

        EntityCreationService entityCreationService = new EntityCreationService(entityEnrichmentService, kieSession, nodesInKieSession);
        KieSessionUpdater kieSessionUpdater = new KieSessionUpdater(nodesInKieSession, kieSession);
        document.setKieSessionUpdater(kieSessionUpdater);

        EntityCreationService entityCreationService = new EntityCreationService(entityEnrichmentService, nodesInKieSession);
        RulesLogger logger = new RulesLogger(webSocketService, context);
        if (settings.isDroolsDebug()) {
            logger.enableAgendaTracking();
@@ -164,6 +168,8 @@ public class EntityDroolsExecutionService {
            throw new RuntimeException(e);
        } catch (TimeoutException e) {
            throw new DroolsTimeoutException(e, false, RuleFileType.ENTITY);
        } finally {
            document.setKieSessionUpdater(null);
        }

        List<FileAttribute> resultingFileAttributes = getFileAttributes(kieSession);
@@ -7,23 +7,16 @@ import static com.knecon.fforesight.tenantcommons.model.TenantResponse.builder;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.GZIPInputStream;
@@ -44,7 +37,6 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Sets;
import com.iqser.red.commons.jackson.ObjectMapperFactory;
import com.iqser.red.service.dictionarymerge.commons.DictionaryEntryModel;
import com.iqser.red.service.persistence.service.v1.api.shared.model.AnalyzeRequest;
import com.iqser.red.service.persistence.service.v1.api.shared.model.RuleFileType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.ManualRedactions;
@@ -53,15 +45,14 @@ import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemp
import com.iqser.red.service.redaction.v1.server.client.DictionaryClient;
import com.iqser.red.service.redaction.v1.server.client.LegalBasisClient;
import com.iqser.red.service.redaction.v1.server.client.RulesClient;
import com.iqser.red.service.redaction.v1.server.model.dictionary.Dictionary;
import com.iqser.red.service.redaction.v1.server.model.dictionary.DictionaryIncrement;
import com.iqser.red.service.redaction.v1.server.model.dictionary.DictionaryModel;
import com.iqser.red.service.redaction.v1.server.model.dictionary.DictionaryVersion;
import com.iqser.red.service.redaction.v1.server.service.AnalyzeService;
import com.iqser.red.service.redaction.v1.server.service.DictionaryService;
import com.iqser.red.service.redaction.v1.server.service.websocket.RedisSyncedWebSocketService;
import com.iqser.red.service.redaction.v1.server.storage.RedactionStorageService;
import com.iqser.red.service.redaction.v1.server.testcontainers.MongoDBTestContainer;
import com.iqser.red.service.redaction.v1.server.utils.TestDossierTemplate;
import com.iqser.red.storage.commons.service.StorageService;
import com.knecon.fforesight.keycloakcommons.security.TenantAuthenticationManagerResolver;
import com.knecon.fforesight.mongo.database.commons.liquibase.TenantMongoLiquibaseExecutor;
@@ -195,22 +186,23 @@ import lombok.extern.slf4j.Slf4j;

        testDossierTemplate = new TestDossierTemplate(dossierTemplateToUse);
        when(dictionaryService.updateDictionary(any(), any())).thenReturn(new DictionaryVersion(0, 0));
        when(dictionaryService.getDeepCopyDictionary(any(), any())).thenReturn(testDossierTemplate.testDictionary);
        when(dictionaryService.getDeepCopyDictionary(any(), any())).thenReturn(testDossierTemplate.getTestDictionary());
        when(dictionaryService.getDictionaryIncrements(any(), any(), any())).thenReturn(new DictionaryIncrement(Collections.emptySet(), new DictionaryVersion(0, 0)));
        when(dictionaryService.isHint(any(String.class), any())).thenAnswer(invocation -> {
            String type = invocation.getArgument(0);
            return testDossierTemplate.testDictionary.isHint(type);
            return testDossierTemplate.getTestDictionary().isHint(type);
        });
        when(dictionaryService.getColor(any(String.class), any())).thenAnswer(invocation -> {
            String type = invocation.getArgument(0);
            return testDossierTemplate.testDictionary.getType(type).getColor();
            return testDossierTemplate.getTestDictionary().getType(type).getColor();
        });
        when(dictionaryService.getNotRedactedColor(any())).thenReturn(new float[]{0.2f, 0.2f, 0.2f});

        when(rulesClient.getVersion(testDossierTemplate.id, RuleFileType.ENTITY)).thenReturn(System.currentTimeMillis());
        when(rulesClient.getRules(testDossierTemplate.id, RuleFileType.ENTITY)).thenReturn(JSONPrimitive.of(testDossierTemplate.rules));
        when(rulesClient.getVersion(testDossierTemplate.id, RuleFileType.COMPONENT)).thenReturn(testDossierTemplate.componentRules != null ? System.currentTimeMillis() : -1);
        when(rulesClient.getRules(testDossierTemplate.id, RuleFileType.COMPONENT)).thenReturn(JSONPrimitive.of(testDossierTemplate.componentRules));
        when(rulesClient.getVersion(testDossierTemplate.getId(), RuleFileType.ENTITY)).thenReturn(System.currentTimeMillis());
        when(rulesClient.getRules(testDossierTemplate.getId(), RuleFileType.ENTITY)).thenReturn(JSONPrimitive.of(testDossierTemplate.getRules()));
        when(rulesClient.getVersion(testDossierTemplate.getId(), RuleFileType.COMPONENT)).thenReturn(testDossierTemplate.getComponentRules()
                != null ? System.currentTimeMillis() : -1);
        when(rulesClient.getRules(testDossierTemplate.getId(), RuleFileType.COMPONENT)).thenReturn(JSONPrimitive.of(testDossierTemplate.getComponentRules()));
    }


@@ -251,7 +243,7 @@ import lombok.extern.slf4j.Slf4j;
        AnalyzeRequest request = new AnalyzeRequest();
        request.setDossierId(UUID.randomUUID().toString());
        request.setFileId(UUID.randomUUID().toString());
        request.setDossierTemplateId(testDossierTemplate.id);
        request.setDossierTemplateId(testDossierTemplate.getId());
        request.setAnalysisNumber(-1);

        Path manualRedactionFile = folder.resolve(fileName + ".MANUAL_REDACTIONS.json");
@@ -339,106 +331,6 @@ import lombok.extern.slf4j.Slf4j;
    }


    public class TestDossierTemplate {

        String id;
        Dictionary testDictionary;
        AtomicInteger dictEntryIdCounter = new AtomicInteger(0);
        String rules;
        String componentRules;


        @SneakyThrows
        TestDossierTemplate(Path dossierTemplateToUse) {

            Map<String, Object> dossierTemplate = mapper.readValue(dossierTemplateToUse.resolve("dossierTemplate.json").toFile(), HashMap.class);
            this.id = (String) dossierTemplate.get("dossierTemplateId");

            List<DictionaryModel> dictionaries = Files.walk(dossierTemplateToUse, FileVisitOption.FOLLOW_LINKS)
                    .filter(path -> path.getFileName().toString().equals("dossierType.json"))
                    .map(this::loadDictionaryModel)
                    .toList();

            File ruleFile = dossierTemplateToUse.resolve("rules.drl").toFile();
            rules = new String(Files.readAllBytes(ruleFile.toPath()));

            File componentRuleFile = dossierTemplateToUse.resolve("componentRules.drl").toFile();
            if (componentRuleFile.exists()) {
                componentRules = new String(Files.readAllBytes(componentRuleFile.toPath()));
            }

            testDictionary = new Dictionary(dictionaries, new DictionaryVersion(0, 0));
        }


        @SneakyThrows
        private DictionaryModel loadDictionaryModel(Path path) {

            Map<String, Object> model = mapper.readValue(path.toFile(), HashMap.class);
            Set<DictionaryEntryModel> entries = new HashSet<>();
            Set<DictionaryEntryModel> falsePositives = new HashSet<>();
            Set<DictionaryEntryModel> falseRecommendations = new HashSet<>();

            String type = (String) model.get("type");
            Integer rank = (Integer) model.get("rank");
            float[] color = hexToFloatArr((String) model.get("hexColor"));
            Boolean caseInsensitive = (Boolean) model.get("caseInsensitive");
            Boolean hint = (Boolean) model.get("hint");
            Boolean hasDictionary = (Boolean) model.get("hasDictionary");

            boolean isDossierDictionary;
            if (model.containsKey("dossierDictionaryOnly")) {
                isDossierDictionary = true;
            } else {
                isDossierDictionary = ((String) model.get("id")).split(":").length == 3;
            }

            if (hasDictionary) {
                try (var in = new FileInputStream(path.getParent().resolve("entries.txt").toFile())) {
                    entries.addAll(parseDictionaryEntryModelFromFile(new String(in.readAllBytes()), dictEntryIdCounter, (String) model.get("typeId")));
                }
                try (var in = new FileInputStream(path.getParent().resolve("falsePositives.txt").toFile())) {
                    falsePositives.addAll(parseDictionaryEntryModelFromFile(new String(in.readAllBytes()), dictEntryIdCounter, (String) model.get("typeId")));
                }
                try (var in = new FileInputStream(path.getParent().resolve("falseRecommendations.txt").toFile())) {
                    falseRecommendations.addAll(parseDictionaryEntryModelFromFile(new String(in.readAllBytes()), dictEntryIdCounter, (String) model.get("typeId")));
                }
            }

            return new DictionaryModel(type, rank, color, caseInsensitive, hint, entries, falsePositives, falseRecommendations, isDossierDictionary);
        }


        private Set<DictionaryEntryModel> parseDictionaryEntryModelFromFile(String s, AtomicInteger dictEntryIdCounter, String typeId) {

            String[] values = s.split("\n");
            return Arrays.stream(values)
                    .map(value -> new DictionaryEntryModel(dictEntryIdCounter.getAndIncrement(), value, 0L, false, typeId))
                    .collect(Collectors.toUnmodifiableSet());
        }


        private float[] hexToFloatArr(String hexColor) {

            // Remove # symbol if present
            String cleanHexColor = hexColor.replace("#", "");

            // Parse hex string into RGB components
            int r = Integer.parseInt(cleanHexColor.substring(0, 2), 16);
            int g = Integer.parseInt(cleanHexColor.substring(2, 4), 16);
            int b = Integer.parseInt(cleanHexColor.substring(4, 6), 16);

            // Normalize RGB values to floats between 0 and 1
            float[] rgbFloat = new float[3];
            rgbFloat[0] = r / 255.0f;
            rgbFloat[1] = g / 255.0f;
            rgbFloat[2] = b / 255.0f;

            return rgbFloat;
        }

    }

    private record FileToUpload(Path path, FileType fileType) {

    }