Compare commits

..

21 Commits

Author SHA1 Message Date
yhampe
5b1a3bf56a RED-9472
fixing a 500 error when calling the upload endpoint
2024-12-12 08:43:46 +01:00
yhampe
83b163a93b RED-9472
adjusted permissions/action roles
2024-12-04 09:05:37 +01:00
yhampe
0e6455da10 RED-9472
adjusted permissions/action roles
2024-12-03 10:29:46 +01:00
yhampe
650a325450 RED-9393 user stats controller
added authority check
2024-11-25 09:31:36 +01:00
yhampe
daf393d003 Merge remote-tracking branch 'origin/feature/RED-9472' into feature/RED-9472 2024-11-25 09:25:48 +01:00
Yannik Hampe
730ac8653d Update com.iqser.red.service.java-conventions.gradle.kts 2024-11-22 10:24:39 +01:00
Yannik Hampe
b3c4e670a8 Merge branch 'master' into 'feature/RED-9472'
# Conflicts:
#   buildSrc/src/main/kotlin/com.iqser.red.service.java-conventions.gradle.kts
2024-11-22 10:12:38 +01:00
yhampe
d26a0189fe RED-9472:
resolving merge conflicts
2024-11-22 10:11:46 +01:00
yhampe
11b799f8f4 RED-9472: separation of system rules
now calling respective endpoints in rules controller

RED-9472: separation of system rules

now calling respective endpoints in rules controller

RED-9472: separation of system rules

removed rulesbuilder to fix failing pipeline and break circular dependency

RED-9472: separation of system rules

added removal of system rules on download

added partial merge of user-updated rules on upload

RED-9472: separation of system rules

updated redaction version

RED-9472: separation of system rules

added endpoints for full rule files (including system rules)

added roles

RED-9472: separation of system rules

changed paths of the full rule files

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests

added validation response to rules response

RED-9472: separation of system rules

added a new model for rule upload

RED-9472: separation of system rules
disabled tests for getting tag

RED-9472: separation of system rules

RED-9472: separation of system rules

changed tests for new response of rulebuilderclient
2024-11-22 10:11:12 +01:00
yhampe
a02e48c17a RED-9472:
resolving merge conflicts
2024-11-22 10:10:42 +01:00
yhampe
1991cfb8e5 RED-9472: separation of system rules
now calling respective endpoints in rules controller

RED-9472: separation of system rules

now calling respective endpoints in rules controller

RED-9472: separation of system rules

removed rulesbuilder to fix failing pipeline and break circular dependency

RED-9472: separation of system rules

added removal of system rules on download

added partial merge of user-updated rules on upload

RED-9472: separation of system rules

updated redaction version

RED-9472: separation of system rules

added endpoints for full rule files (including system rules)

added roles

RED-9472: separation of system rules

changed paths of the full rule files

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests

added validation response to rules response

RED-9472: separation of system rules

added a new model for rule upload

RED-9472: separation of system rules
disabled tests for getting tag

RED-9472: separation of system rules

RED-9472: separation of system rules

changed tests for new response of rulebuilderclient
2024-11-22 10:10:42 +01:00
yhampe
7153681eb6 RED-9472: separation of system rules
now calling respective endpoints in rules controller

RED-9472: separation of system rules

now calling respective endpoints in rules controller

RED-9472: separation of system rules

removed rulesbuilder to fix failing pipeline and break circular dependency

RED-9472: separation of system rules

added removal of system rules on download

added partial merge of user-updated rules on upload

RED-9472: separation of system rules

updated redaction version

RED-9472: separation of system rules

added endpoints for full rule files (including system rules)

added roles

RED-9472: separation of system rules

changed paths of the full rule files

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests

added validation response to rules response

RED-9472: separation of system rules

added a new model for rule upload

RED-9472: separation of system rules
disabled tests for getting tag

RED-9472: separation of system rules

RED-9472: separation of system rules

changed tests for new response of rulebuilderclient
2024-11-22 10:10:37 +01:00
yhampe
d8274d8dd7 Merge remote-tracking branch 'origin/feature/RED-9472' into feature/RED-9472
# Conflicts:
#	persistence-service-v1/persistence-service-external-api-v1/src/main/java/com/iqser/red/service/persistence/service/v1/api/external/resource/RulesResource.java
2024-11-19 08:28:35 +01:00
yhampe
482cad2a46 RED-9472:
resolving merge conflicts
2024-11-19 08:28:05 +01:00
yhampe
f31d9170f8 RED-9472: separation of system rules
now calling respective endpoints in rules controller

RED-9472: separation of system rules

now calling respective endpoints in rules controller

RED-9472: separation of system rules

removed rulesbuilder to fix failing pipeline and break circular dependency

RED-9472: separation of system rules

added removal of system rules on download

added partial merge of user-updated rules on upload

RED-9472: separation of system rules

updated redaction version

RED-9472: separation of system rules

added endpoints for full rule files (including system rules)

added roles

RED-9472: separation of system rules

changed paths of the full rule files

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests

added validation response to rules response

RED-9472: separation of system rules

added a new model for rule upload

RED-9472: separation of system rules
disabled tests for getting tag

RED-9472: separation of system rules

RED-9472: separation of system rules

changed tests for new response of rulebuilderclient
2024-11-19 08:28:02 +01:00
yhampe
e5dd3e7abc RED-9472: separation of system rules
now calling respective endpoints in rules controller

RED-9472: separation of system rules

now calling respective endpoints in rules controller

RED-9472: separation of system rules

removed rulesbuilder to fix failing pipeline and break circular dependency

RED-9472: separation of system rules

added removal of system rules on download

added partial merge of user-updated rules on upload

RED-9472: separation of system rules

updated redaction version

RED-9472: separation of system rules

added endpoints for full rule files (including system rules)

added roles

RED-9472: separation of system rules

changed paths of the full rule files

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests

added validation response to rules response

RED-9472: separation of system rules

added a new model for rule upload

RED-9472: separation of system rules
disabled tests for getting tag

RED-9472: separation of system rules

RED-9472: separation of system rules

changed tests for new response of rulebuilderclient
2024-11-19 08:26:02 +01:00
yhampe
65d72a60ac RED-9472:
resolving merge conflicts
2024-11-18 11:05:31 +01:00
yhampe
94f288fc0f RED-9472:
resolving merge conflicts
2024-11-18 09:45:26 +01:00
yhampe
7e8e081553 Merge remote-tracking branch 'origin/feature/RED-9472' into feature/RED-9472
# Conflicts:
#	buildSrc/src/main/kotlin/com.iqser.red.service.java-conventions.gradle.kts
#	persistence-service-v1/persistence-service-external-api-impl-v1/src/main/java/com/iqser/red/persistence/service/v1/external/api/impl/controller/RulesController.java
#	persistence-service-v1/persistence-service-external-api-v1/src/main/java/com/iqser/red/service/persistence/service/v1/api/external/resource/RulesResource.java
#	persistence-service-v1/persistence-service-server-v1/src/test/java/com/iqser/red/service/peristence/v1/server/integration/tests/FileTest.java
#	persistence-service-v1/persistence-service-server-v1/src/test/java/com/iqser/red/service/peristence/v1/server/integration/utils/AbstractPersistenceServerServiceTest.java
2024-11-18 09:38:11 +01:00
yhampe
d2380d47b2 RED-9472: separation of system rules
now calling respective endpoints in rules controller

RED-9472: separation of system rules

now calling respective endpoints in rules controller

RED-9472: separation of system rules

removed rulesbuilder to fix failing pipeline and break circular dependency

RED-9472: separation of system rules

added removal of system rules on download

added partial merge of user-updated rules on upload

RED-9472: separation of system rules

updated redaction version

RED-9472: separation of system rules

added endpoints for full rule files (including system rules)

added roles

RED-9472: separation of system rules

changed paths of the full rule files

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests

added validation response to rules response

RED-9472: separation of system rules

added a new model for rule upload

RED-9472: separation of system rules
disabled tests for getting tag

RED-9472: separation of system rules

RED-9472: separation of system rules

changed tests for new response of rulebuilderclient
2024-11-18 09:37:00 +01:00
yhampe
0c1670461b RED-9472: separation of system rules
now calling respective endpoints in rules controller

RED-9472: separation of system rules

now calling respective endpoints in rules controller

RED-9472: separation of system rules

removed rulesbuilder to fix failing pipeline and break circular dependency

RED-9472: separation of system rules

added removal of system rules on download

added partial merge of user-updated rules on upload

RED-9472: separation of system rules

updated redaction version

RED-9472: separation of system rules

added endpoints for full rule files (including system rules)

added roles

RED-9472: separation of system rules

changed paths of the full rule files

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests by mocking rulebuilder service

RED-9472: separation of system rules

fixed failing tests

added validation response to rules response

RED-9472: separation of system rules

added a new model for rule upload

RED-9472: separation of system rules
disabled tests for getting tag

RED-9472: separation of system rules

RED-9472: separation of system rules

changed tests for new response of rulebuilderclient
2024-10-23 09:20:21 +02:00
127 changed files with 1738 additions and 2010 deletions

View File

@ -6,11 +6,10 @@ plugins {
jacoco
}
val redactionServiceVersion by rootProject.extra { "4.290.0" }
val redactionServiceVersion by rootProject.extra { "4.434.0-RED9472.1" }
val pdftronRedactionServiceVersion by rootProject.extra { "4.90.0-RED10115.0" }
val redactionReportServiceVersion by rootProject.extra { "4.81.0" }
val searchServiceVersion by rootProject.extra { "2.90.0" }
val documentVersion by rootProject.extra { "4.433.0" }
repositories {
mavenLocal()

View File

@ -357,9 +357,9 @@ public class DictionaryController implements DictionaryResource {
public void changeFlags(@PathVariable(TYPE_PARAMETER_NAME) String type,
@PathVariable(DOSSIER_TEMPLATE_PARAMETER_NAME) String dossierTemplateId,
@PathVariable(value = DOSSIER_ID_PARAMETER_NAME) String dossierId,
@RequestParam(value = "addToDictionaryAction") boolean addToDictionaryAction) {
@RequestParam(value = "addToDictionary") boolean addToDictionary) {
dictionaryService.changeAddToDictionary(type, dossierTemplateId, dossierId, addToDictionaryAction);
dictionaryService.changeAddToDictionary(type, dossierTemplateId, dossierId, addToDictionary);
}

View File

@ -17,7 +17,6 @@ import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.DossierPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.utils.DossierAttributeConfigMapper;
import com.knecon.fforesight.keycloakcommons.security.KeycloakSecurity;
import com.iqser.red.service.persistence.management.v1.processor.entity.dossier.DossierAttributeConfigEntity;
import com.iqser.red.service.persistence.management.v1.processor.service.AccessControlService;
@ -53,7 +52,7 @@ public class DossierAttributesController implements DossierAttributesResource {
var result = MagicConverter.convert(dossierAttributeConfigPersistenceService.setDossierAttributesConfig(dossierTemplateId,
MagicConverter.convert(dossierAttributesConfig.getDossierAttributeConfigs(),
DossierAttributeConfigEntity.class)),
DossierAttributeConfig.class, new DossierAttributeConfigMapper());
DossierAttributeConfig.class);
auditPersistenceService.insertRecord(AuditRequest.builder()
.userId(KeycloakSecurity.getUserId())
.objectId(dossierTemplateId)
@ -73,7 +72,7 @@ public class DossierAttributesController implements DossierAttributesResource {
var result = MagicConverter.convert(dossierAttributeConfigPersistenceService.addOrUpdateDossierAttribute(dossierTemplateId,
MagicConverter.convert(dossierAttribute,
DossierAttributeConfigEntity.class)),
DossierAttributeConfig.class, new DossierAttributeConfigMapper());
DossierAttributeConfig.class);
auditPersistenceService.insertRecord(AuditRequest.builder()
.userId(KeycloakSecurity.getUserId())
.objectId(dossierTemplateId)
@ -119,7 +118,7 @@ public class DossierAttributesController implements DossierAttributesResource {
public DossierAttributesConfig getDossierAttributesConfig(@PathVariable(DOSSIER_TEMPLATE_ID) String dossierTemplateId) {
var result = dossierAttributeConfigPersistenceService.getDossierAttributes(dossierTemplateId);
return new DossierAttributesConfig(MagicConverter.convert(result, DossierAttributeConfig.class, new DossierAttributeConfigMapper()));
return new DossierAttributesConfig(MagicConverter.convert(result, DossierAttributeConfig.class));
}

View File

@ -28,7 +28,6 @@ import com.iqser.red.service.persistence.management.v1.processor.service.AccessC
import com.iqser.red.service.persistence.management.v1.processor.service.DossierManagementService;
import com.iqser.red.service.persistence.management.v1.processor.service.DownloadService;
import com.iqser.red.service.persistence.management.v1.processor.service.FileManagementStorageService;
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusManagementService;
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.AuditPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.utils.StringEncodingUtils;
@ -71,7 +70,6 @@ public class DownloadController implements DownloadResource {
private final OneTimeTokenService oneTimeTokenDownloadService;
private final AccessControlService accessControlService;
private final FileManagementStorageService fileManagementStorageService;
private final FileStatusManagementService fileStatusManagementService;
private final String REPORT_INFO = "/REPORT_INFO.json";
@ -138,7 +136,7 @@ public class DownloadController implements DownloadResource {
if (StringUtils.isBlank(dossierId)) {
throw new BadRequestException("Empty dossier id");
}
dossierService.getDossierById(dossierId, true, false);
dossierService.getDossierById(dossierId, true, true);
accessControlService.verifyUserIsDossierOwnerOrApprover(dossierId);
}
@ -148,14 +146,11 @@ public class DownloadController implements DownloadResource {
List<FileModel> validFiles = fileStatusService.getDossierStatus(request.getDossierId());
var fileIds = request.getFileIds();
if (fileIds != null && !fileIds.isEmpty()) { // validate the ids provided
if(fileIds.size() == 1) {
fileStatusManagementService.getFileStatus(fileIds.get(0), false);
}
validFiles = validFiles.stream()
.filter(f -> fileIds.contains(f.getId()))
.collect(Collectors.toList());
if (validFiles.isEmpty()) {
throw new NotFoundException("No provided file id was found");
throw new NotFoundException("No file id provided is found");
}
} // otherwise consider the files from dossier

View File

@ -15,7 +15,6 @@ import org.springframework.web.bind.annotation.RestController;
import com.iqser.red.service.persistence.management.v1.processor.model.websocket.FileEventType;
import com.iqser.red.service.persistence.management.v1.processor.service.websocket.WebsocketService;
import com.iqser.red.service.persistence.management.v1.processor.utils.FileAttributeConfigMapper;
import com.knecon.fforesight.keycloakcommons.security.KeycloakSecurity;
import com.iqser.red.service.persistence.management.v1.processor.entity.configuration.FileAttributesGeneralConfigurationEntity;
import com.iqser.red.service.persistence.management.v1.processor.entity.dossier.FileAttributeConfigEntity;
@ -62,9 +61,9 @@ public class FileAttributesController implements FileAttributesResource {
}
fileAttributeConfigPersistenceService.setFileAttributesGeneralConfig(dossierTemplateId,
MagicConverter.convert(fileAttributesConfig, FileAttributesGeneralConfigurationEntity.class));
fileAttributeConfigPersistenceService.setFileAttributesConfig(dossierTemplateId,
MagicConverter.convert(fileAttributesConfig.getFileAttributeConfigs(), FileAttributeConfigEntity.class));
var result = fileAttributeConfigPersistenceService.setFileAttributesConfig(dossierTemplateId,
MagicConverter.convert(fileAttributesConfig.getFileAttributeConfigs(),
FileAttributeConfigEntity.class));
auditPersistenceService.audit(AuditRequest.builder()
.userId(KeycloakSecurity.getUserId())
.objectId(dossierTemplateId)
@ -75,9 +74,7 @@ public class FileAttributesController implements FileAttributesResource {
.filenameMappingColumnHeaderName(fileAttributesConfig.getFilenameMappingColumnHeaderName())
.delimiter(fileAttributesConfig.getDelimiter())
.encoding(fileAttributesConfig.getEncoding())
.fileAttributeConfigs(MagicConverter.convert(fileAttributeConfigPersistenceService.getFileAttributes(dossierTemplateId),
FileAttributeConfig.class,
new FileAttributeConfigMapper()))
.fileAttributeConfigs(MagicConverter.convert(result, FileAttributeConfig.class))
.build();
}
@ -99,7 +96,7 @@ public class FileAttributesController implements FileAttributesResource {
dossierTemplateId))
.build());
return MagicConverter.convert(result, FileAttributeConfig.class, new FileAttributeConfigMapper());
return MagicConverter.convert(result, FileAttributeConfig.class);
}
@ -148,7 +145,7 @@ public class FileAttributesController implements FileAttributesResource {
.filenameMappingColumnHeaderName(generalConfig.getFilenameMappingColumnHeaderName())
.delimiter(generalConfig.getDelimiter())
.encoding(generalConfig.getEncoding())
.fileAttributeConfigs(MagicConverter.convert(fileAttributeConfigs, FileAttributeConfig.class, new FileAttributeConfigMapper()))
.fileAttributeConfigs(MagicConverter.convert(fileAttributeConfigs, FileAttributeConfig.class))
.build();
}

View File

@ -232,7 +232,7 @@ public class FileManagementController implements FileManagementResource {
public void restoreFiles(@PathVariable(DOSSIER_ID) String dossierId, @RequestBody Set<String> fileIds) {
accessControlService.checkAccessPermissionsToDossier(dossierId);
accessControlService.verifyUserIsDossierOwnerOrApproverOrAssignedReviewer(dossierId, fileIds);
verifyUserIsDossierOwnerOrApproverOrAssignedReviewer(dossierId, fileIds);
fileService.undeleteFiles(dossierId, fileIds);
auditPersistenceService.audit(AuditRequest.builder()
.userId(KeycloakSecurity.getUserId())
@ -289,4 +289,20 @@ public class FileManagementController implements FileManagementResource {
}
}
private void verifyUserIsDossierOwnerOrApproverOrAssignedReviewer(String dossierId, Set<String> fileIds) {
try {
accessControlService.verifyUserIsDossierOwnerOrApprover(dossierId);
} catch (AccessDeniedException e1) {
try {
for (String fileId : fileIds) {
accessControlService.verifyUserIsReviewer(dossierId, fileId);
}
} catch (NotAllowedException e2) {
throw new NotAllowedException("User must be dossier owner, approver or assigned reviewer of the file.");
}
}
}
}

View File

@ -0,0 +1,111 @@
package com.iqser.red.persistence.service.v1.external.api.impl.controller;
import com.iqser.red.service.persistence.management.v1.processor.entity.migration.SaasMigrationStatusEntity;
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
import com.iqser.red.service.persistence.management.v1.processor.migration.SaasMigrationService;
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.SaasMigrationStatusPersistenceService;
import com.iqser.red.service.persistence.service.v1.api.external.resource.MigrationStatusResource;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.SaasMigrationStatus;
import com.iqser.red.service.persistence.service.v1.api.shared.model.saas.migration.MigrationStatusResponse;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;
import static com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.SaasMigrationStatus.*;
@RestController
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
@RequiredArgsConstructor
public class MigrationStatusController implements MigrationStatusResource {
SaasMigrationService saasMigrationService;
SaasMigrationStatusPersistenceService saasMigrationStatusPersistenceService;
FileStatusService fileStatusService;
public MigrationStatusResponse migrationStatus() {
int numberOfFilesToMigrate = saasMigrationStatusPersistenceService.countAll();
Map<SaasMigrationStatus, Integer> filesInStatus = new HashMap<>();
filesInStatus.put(MIGRATION_REQUIRED, saasMigrationStatusPersistenceService.countByStatus(MIGRATION_REQUIRED));
filesInStatus.put(DOCUMENT_FILES_MIGRATED, saasMigrationStatusPersistenceService.countByStatus(DOCUMENT_FILES_MIGRATED));
filesInStatus.put(REDACTION_LOGS_MIGRATED, saasMigrationStatusPersistenceService.countByStatus(REDACTION_LOGS_MIGRATED));
filesInStatus.put(ANNOTATION_IDS_MIGRATED, saasMigrationStatusPersistenceService.countByStatus(ANNOTATION_IDS_MIGRATED));
filesInStatus.put(FINISHED, saasMigrationStatusPersistenceService.countByStatus(FINISHED));
filesInStatus.put(ERROR, saasMigrationStatusPersistenceService.countByStatus(ERROR));
var filesInErrorState = saasMigrationStatusPersistenceService.findAllByStatus(ERROR);
var errorCauses = filesInErrorState.stream()
.collect(Collectors.toMap(errorFile -> errorFile.getDossierId() + "/" + errorFile.getFileId(), SaasMigrationStatusEntity::getErrorCause));
return MigrationStatusResponse.builder().numberOfFilesToMigrate(numberOfFilesToMigrate).filesInStatus(filesInStatus).errorCauses(errorCauses).build();
}
@Override
public ResponseEntity<?> startMigrationForFile(String dossierId, String fileId) {
if (!fileStatusService.fileExists(fileId)) {
throw new NotFoundException(String.format("File with id %s does not exist", fileId));
}
saasMigrationService.startMigrationForFile(dossierId, fileId);
return ResponseEntity.ok().build();
}
@Override
public ResponseEntity<?> revertMigrationForFile(String dossierId, String fileId) {
if (!fileStatusService.fileExists(fileId)) {
throw new NotFoundException(String.format("File with id %s does not exist", fileId));
}
if (!saasMigrationStatusPersistenceService.findById(fileId).getStatus().equals(FINISHED)) {
throw new BadRequestException(String.format("File with id %s is not migrated yet, can't revert.", fileId));
}
saasMigrationService.revertMigrationForFile(dossierId, fileId);
return ResponseEntity.ok().build();
}
@Override
public ResponseEntity<?> requeueErrorFiles() {
MigrationStatusResponse migrationStatus = migrationStatus();
if (!migrationIsFinished(migrationStatus)) {
throw new BadRequestException("There are still files processing, please wait until migration has finished to retry!");
}
saasMigrationService.requeueErrorFiles();
return ResponseEntity.ok().build();
}
private static boolean migrationIsFinished(MigrationStatusResponse migrationStatus) {
return migrationStatus.getFilesInStatus().entrySet()
.stream()
.filter(e -> e.getValue() > 0)
.allMatch(e -> e.getKey().equals(FINISHED) || e.getKey().equals(ERROR));
}
}
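
Note: the requeueErrorFiles guard in the new MigrationStatusController only proceeds once no files are left in an intermediate state. A minimal, self-contained sketch of that finished check, with a stand-in Status enum and a plain Map instead of the real SaasMigrationStatus and persistence service (all names here are illustrative assumptions, not the project's API):

import java.util.Map;

public class MigrationFinishedCheck {

    // Stand-in for the SaasMigrationStatus values used by the controller above.
    enum Status { MIGRATION_REQUIRED, DOCUMENT_FILES_MIGRATED, REDACTION_LOGS_MIGRATED, ANNOTATION_IDS_MIGRATED, FINISHED, ERROR }

    // Mirrors the idea of migrationIsFinished(): the migration counts as done
    // when every status that still holds files is either FINISHED or ERROR.
    static boolean migrationIsFinished(Map<Status, Integer> filesInStatus) {
        return filesInStatus.entrySet()
                .stream()
                .filter(e -> e.getValue() > 0)
                .allMatch(e -> e.getKey() == Status.FINISHED || e.getKey() == Status.ERROR);
    }

    public static void main(String[] args) {
        // Nothing pending, one failure -> finished, so error files may be requeued.
        System.out.println(migrationIsFinished(Map.of(Status.FINISHED, 3, Status.ERROR, 1, Status.MIGRATION_REQUIRED, 0))); // true
        // Two files still waiting for migration -> not finished yet.
        System.out.println(migrationIsFinished(Map.of(Status.FINISHED, 3, Status.MIGRATION_REQUIRED, 2))); // false
    }
}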

View File

@ -118,12 +118,11 @@ public class ReanalysisController implements ReanalysisResource {
@PreAuthorize("hasAuthority('" + REANALYZE_FILE + "')")
public void ocrFile(@PathVariable(DOSSIER_ID) String dossierId,
@PathVariable(FILE_ID) String fileId,
@RequestParam(value = FORCE_PARAM, required = false, defaultValue = FALSE) boolean force,
@RequestParam(value = ALL_PAGES, required = false, defaultValue = FALSE) boolean allPages) {
@RequestParam(value = FORCE_PARAM, required = false, defaultValue = FALSE) boolean force) {
accessControlService.checkDossierExistenceAndAccessPermissionsToDossier(dossierId);
validateOCR(dossierId, fileId);
reanalysisService.ocrFile(dossierId, fileId, force, allPages);
reanalysisService.ocrFile(dossierId, fileId, force);
auditPersistenceService.audit(AuditRequest.builder()
.userId(KeycloakSecurity.getUserId())
.objectId(dossierId)
@ -141,7 +140,7 @@ public class ReanalysisController implements ReanalysisResource {
accessControlService.checkDossierExistenceAndAccessPermissionsToDossier(dossierId);
fileIds.forEach(fileId -> validateOCR(dossierId, fileId));
reanalysisService.ocrFiles(dossierId, fileIds, false);
reanalysisService.ocrFiles(dossierId, fileIds);
auditPersistenceService.audit(AuditRequest.builder()
.userId(KeycloakSecurity.getUserId())
.objectId(dossierId)

View File

@ -1,7 +1,9 @@
package com.iqser.red.persistence.service.v1.external.api.impl.controller;
import static com.iqser.red.service.persistence.management.v1.processor.roles.ActionRoles.READ_RULES;
import static com.iqser.red.service.persistence.management.v1.processor.roles.ActionRoles.READ_SYSTEM_RULES;
import static com.iqser.red.service.persistence.management.v1.processor.roles.ActionRoles.WRITE_RULES;
import static com.iqser.red.service.persistence.management.v1.processor.roles.ActionRoles.WRITE_SYSTEM_RULES;
import java.io.ByteArrayInputStream;
import java.io.IOException;
@ -21,6 +23,7 @@ import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import com.iqser.red.service.persistence.management.v1.processor.client.redactionservice.RuleBuilderClient;
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
import com.iqser.red.service.persistence.management.v1.processor.exception.FileUploadException;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
@ -35,8 +38,11 @@ import com.iqser.red.service.persistence.service.v1.api.shared.model.RuleFileTyp
import com.iqser.red.service.persistence.service.v1.api.shared.model.audit.AuditRequest;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.rules.DroolsValidationResponse;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.rules.RulesResponse;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.rules.RulesUpdateRequest;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.rules.RulesUploadRequest;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.rules.RulesUploadRequestModel;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.rules.RulesUploadResponse;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.rules.SystemRulesSeperationRequest;
import com.knecon.fforesight.keycloakcommons.security.KeycloakSecurity;
import feign.FeignException;
@ -50,6 +56,7 @@ public class RulesController implements RulesResource {
private static final String DOWNLOAD_FILE_NAME = "rules.drl";
private final RuleBuilderClient ruleBuilderClient;
private final RulesPersistenceService rulesPersistenceService;
private final RulesValidationService rulesValidationService;
private final AuditPersistenceService auditPersistenceService;
@ -62,21 +69,37 @@ public class RulesController implements RulesResource {
RulesUploadRequest rulesUploadRequest = RulesUploadRequest.fromModel(rules);
DroolsValidationResponse droolsValidationResponse = new DroolsValidationResponse();
String mergedRulesString = "";
try {
var droolsValidation = rulesValidationService.validateRules(rulesUploadRequest.getRuleFileType(), rulesUploadRequest.getRules());
droolsValidationResponse = RulesValidationMapper.createFromDroolsValidation(droolsValidation);
if (!droolsValidation.isCompiled()) {
var ruleEntityOptional = rulesPersistenceService.getRules(rulesUploadRequest.getDossierTemplateId(), rulesUploadRequest.getRuleFileType());
if (ruleEntityOptional.isPresent()) {
RulesUpdateRequest rulesUpdateRequest = new RulesUpdateRequest(ruleEntityOptional.get().getValue(), rules.getRules());
try {
ResponseEntity<RulesUploadResponse> mergedRules = ruleBuilderClient.mergeUserUpdateRules(rulesUpdateRequest);
return new ResponseEntity<>(droolsValidationResponse, !rules.isDryRun() ? HttpStatus.UNPROCESSABLE_ENTITY : HttpStatus.OK);
}
} catch (FeignException e) {
if (e.status() == HttpStatus.BAD_REQUEST.value()) {
throw new BadRequestException("The provided rule string is not a valid drools rule file!");
if (mergedRules == null || mergedRules.getBody() == null) {
throw new BadRequestException("Failed to merge rules - received null response from service");
}
if (mergedRules.getStatusCode().equals(HttpStatus.UNPROCESSABLE_ENTITY)) {
DroolsValidationResponse mergedRulesValidationResponse = mergedRules.getBody().getDroolsValidationResponse();
return new ResponseEntity<>(mergedRulesValidationResponse, !rules.isDryRun() ? HttpStatus.UNPROCESSABLE_ENTITY : HttpStatus.OK);
}
mergedRulesString = mergedRules.getBody().getRules();
if (mergedRulesString == null) {
throw new BadRequestException("Failed to merge rules - merged result is null");
}
} catch (FeignException e) {
if (e.status() == HttpStatus.BAD_REQUEST.value()) {
throw new BadRequestException("The provided rule file is not a valid drools rule file!");
}
throw new BadRequestException("Failed to merge rules: " + e.getMessage());
}
}
if (!rules.isDryRun()) {
rulesPersistenceService.setRules(rulesUploadRequest.getRules(), rulesUploadRequest.getDossierTemplateId(), rulesUploadRequest.getRuleFileType());
rulesPersistenceService.setRules(mergedRulesString, rulesUploadRequest.getDossierTemplateId(), rulesUploadRequest.getRuleFileType());
fileStatusPersistenceService.resetErrorCounter(rules.getDossierTemplateId());
}
@ -99,6 +122,47 @@ public class RulesController implements RulesResource {
}
@Override
@PreAuthorize("hasAuthority('" + WRITE_SYSTEM_RULES + "')")
public ResponseEntity<DroolsValidationResponse> uploadFullFile(@PathVariable(DOSSIER_TEMPLATE_PARAMETER_NAME) String dossierTemplateId,
@PathVariable(RULE_FILE_TYPE_PARAMETER_NAME) RuleFileType ruleFileType,
@RequestParam(value = DRY_RUN_PARAMETER) boolean dryRun,
@RequestPart(name = "file") MultipartFile file) {
try {
String rules = new String(file.getBytes(), StandardCharsets.UTF_8);
DroolsValidationResponse droolsValidationResponse = new DroolsValidationResponse();
try {
var droolsValidation = rulesValidationService.validateRules(ruleFileType, rules);
droolsValidationResponse = RulesValidationMapper.createFromDroolsValidation(droolsValidation);
if (!droolsValidation.isCompiled()) {
return new ResponseEntity<>(droolsValidationResponse, !dryRun ? HttpStatus.UNPROCESSABLE_ENTITY : HttpStatus.OK);
}
} catch (FeignException e) {
if (e.status() == HttpStatus.BAD_REQUEST.value()) {
throw new BadRequestException("The provided rule string is not a valid drools rule file!");
}
}
if (!dryRun) {
rulesPersistenceService.setRules(rules, dossierTemplateId, ruleFileType);
}
auditPersistenceService.audit(AuditRequest.builder()
.userId(KeycloakSecurity.getUserId())
.objectId(dossierTemplateId)
.category(AuditCategory.DOSSIER_TEMPLATE.name())
.message(String.format("%s Rules have been updated", ruleFileType))
.build());
return new ResponseEntity<>(droolsValidationResponse, HttpStatus.OK);
} catch (IOException e) {
throw new FileUploadException("Could not upload file.", e);
}
}
@Override
@PreAuthorize("hasAuthority('" + READ_RULES + "')")
public RulesResponse download(@PathVariable(DOSSIER_TEMPLATE_PARAMETER_NAME) String dossierTemplateId, @PathVariable(RULE_FILE_TYPE_PARAMETER_NAME) RuleFileType ruleFileType) {
@ -107,6 +171,11 @@ public class RulesController implements RulesResource {
if (ruleEntityOptional.isEmpty()) {
throw new NotFoundException(String.format("No rule file of type %s found for dossierTemplateId %s", ruleFileType, dossierTemplateId));
}
log.info("calling rules management to remove system rules of file");
SystemRulesSeperationRequest systemRulesSeperationRequest = new SystemRulesSeperationRequest(ruleEntityOptional.get().getValue());
log.info(systemRulesSeperationRequest.getRules());
String cleanedRules = ruleBuilderClient.getRuleFileWithoutSystemRules(systemRulesSeperationRequest).getRules();
ruleEntityOptional.get().setValue(cleanedRules);
return new RulesResponse(ruleEntityOptional.get().getValue(), dossierTemplateId, ruleEntityOptional.get().isTimeoutDetected());
}
@ -137,6 +206,28 @@ public class RulesController implements RulesResource {
}
@Override
@PreAuthorize("hasAuthority('" + READ_SYSTEM_RULES + "')")
public ResponseEntity<?> downloadFullFile(@PathVariable(DOSSIER_TEMPLATE_PARAMETER_NAME) String dossierTemplateId,
@PathVariable(RULE_FILE_TYPE_PARAMETER_NAME) RuleFileType ruleFileType) {
var ruleEntityOptional = rulesPersistenceService.getRules(dossierTemplateId, ruleFileType);
if (ruleEntityOptional.isEmpty()) {
throw new NotFoundException(String.format("No rule file of type %s found for dossierTemplateId %s", ruleFileType, dossierTemplateId));
}
RulesResponse rulesResponse = new RulesResponse(ruleEntityOptional.get().getValue(), dossierTemplateId, ruleEntityOptional.get().isTimeoutDetected());
byte[] data = rulesResponse.getRules().getBytes(StandardCharsets.UTF_8);
HttpHeaders httpHeaders = new HttpHeaders();
httpHeaders.setContentType(MediaType.TEXT_PLAIN);
httpHeaders.add("Content-Disposition", "attachment; filename*=utf-8''" + ruleFileType.name() + "_" + DOWNLOAD_FILE_NAME);
InputStream is = new ByteArrayInputStream(data);
return new ResponseEntity<>(new InputStreamResource(is), httpHeaders, HttpStatus.OK);
}
@Override
@PreAuthorize("hasAuthority('" + READ_RULES + "')")
public ResponseEntity<?> downloadFile(@PathVariable(DOSSIER_TEMPLATE_PARAMETER_NAME) String dossierTemplateId) {
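
Note: the upload hunk above interleaves old and new lines, so the intended flow is easy to lose: validate the uploaded rules, merge the user's partial update into the stored full rule file (so system rules survive), and persist the merged result unless it is a dry run. The sketch below captures only that flow; RuleMerger and RuleStore are hypothetical stand-ins for RuleBuilderClient and RulesPersistenceService, and every name and signature is an assumption rather than the real API.

import java.util.Optional;

public class RulesUploadFlowSketch {

    // Hypothetical stand-in for the rule builder's merge endpoint.
    interface RuleMerger {
        String mergeUserUpdatedRules(String existingFullRules, String userRules);
    }

    // Hypothetical stand-in for the rules persistence service.
    interface RuleStore {
        Optional<String> getRules(String dossierTemplateId);
        void setRules(String dossierTemplateId, String rules);
    }

    private final RuleMerger merger;
    private final RuleStore store;

    RulesUploadFlowSketch(RuleMerger merger, RuleStore store) {
        this.merger = merger;
        this.store = store;
    }

    // Returns the rules that would be persisted; only writes when dryRun is false.
    String upload(String dossierTemplateId, String userRules, boolean dryRun) {
        // If a full rule file already exists, merge the user's update into it so that
        // system rules (never shown to the user on download) are preserved.
        String toPersist = store.getRules(dossierTemplateId)
                .map(existing -> merger.mergeUserUpdatedRules(existing, userRules))
                .orElse(userRules);
        if (!dryRun) {
            store.setRules(dossierTemplateId, toPersist);
        }
        return toPersist;
    }

    public static void main(String[] args) {
        // Toy in-memory implementations, just to exercise the flow.
        RuleStore store = new RuleStore() {
            String saved = "// system rules\n// old user rules";
            public Optional<String> getRules(String id) { return Optional.ofNullable(saved); }
            public void setRules(String id, String rules) { saved = rules; }
        };
        RuleMerger merger = (existing, user) -> "// system rules\n" + user;
        System.out.println(new RulesUploadFlowSketch(merger, store).upload("template-1", "// new user rules", false));
    }
}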

View File

@ -28,7 +28,6 @@ import org.springframework.web.bind.annotation.RestController;
import com.iqser.red.service.persistence.management.v1.processor.acl.custom.dossier.DossierACLService;
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
import com.iqser.red.service.persistence.management.v1.processor.exception.ConflictException;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotAllowedException;
import com.iqser.red.service.persistence.management.v1.processor.roles.ApplicationRoles;
import com.iqser.red.service.persistence.management.v1.processor.service.AccessControlService;
@ -102,13 +101,8 @@ public class StatusController implements StatusResource {
var accessibleDossierIds = filterByPermissionsService.onlyViewableDossierIds(new ArrayList<>(filesByDossier.getValue().keySet()));
var response = new HashMap<String, List<FileStatus>>();
for (var dossierId : accessibleDossierIds) {
var allFoundFiles = fileStatusManagementService.findAllDossierIdAndIds(dossierId,
filesByDossier.getValue()
.get(dossierId));
response.put(dossierId,
allFoundFiles.stream()
.map(FileStatusMapper::toFileStatus)
.collect(Collectors.toList()));
var allFoundFiles = fileStatusManagementService.findAllDossierIdAndIds(dossierId, filesByDossier.getValue().get(dossierId));
response.put(dossierId, allFoundFiles.stream().map(FileStatusMapper::toFileStatus).collect(Collectors.toList()));
}
return new JSONPrimitive<>(response);
@ -357,10 +351,6 @@ public class StatusController implements StatusResource {
.build());
var dossier = dossierACLService.enhanceDossierWithACLData(dossierManagementService.getDossierById(dossierId, false, false));
if (dossier.getOwnerId() == null) {
throw new ConflictException("Dossier has no owner!");
}
if (!dossier.getOwnerId().equals(KeycloakSecurity.getUserId())) {
var fileStatus = fileStatusManagementService.getFileStatus(fileId);

View File

@ -23,11 +23,12 @@ import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import com.iqser.red.service.persistence.management.v1.processor.service.FileFormatValidationService;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotAllowedException;
import com.knecon.fforesight.keycloakcommons.security.KeycloakSecurity;
import com.iqser.red.service.pdftron.redaction.v1.api.model.ByteContentDocument;
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotAllowedException;
import com.iqser.red.service.persistence.management.v1.processor.service.AccessControlService;
import com.iqser.red.service.persistence.management.v1.processor.service.FileFormatValidationService;
import com.iqser.red.service.persistence.management.v1.processor.service.ReanalysisService;
import com.iqser.red.service.persistence.management.v1.processor.service.UploadService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.AuditPersistenceService;
@ -36,7 +37,6 @@ import com.iqser.red.service.persistence.service.v1.api.external.resource.Upload
import com.iqser.red.service.persistence.service.v1.api.shared.model.AuditCategory;
import com.iqser.red.service.persistence.service.v1.api.shared.model.FileUploadResult;
import com.iqser.red.service.persistence.service.v1.api.shared.model.audit.AuditRequest;
import com.knecon.fforesight.keycloakcommons.security.KeycloakSecurity;
import com.knecon.fforesight.tenantcommons.TenantContext;
import feign.FeignException;
@ -53,9 +53,9 @@ import lombok.extern.slf4j.Slf4j;
@SuppressWarnings("PMD")
public class UploadController implements UploadResource {
private static final int THRESHOLD_ENTRIES = 10000; // Maximum number of files allowed
private static final int THRESHOLD_SIZE = 1000000000; // 1 GB total unzipped data
private static final double THRESHOLD_RATIO = 10; // Max allowed compression ratio
private static final int THRESHOLD_ENTRIES = 10000;
private static final int THRESHOLD_SIZE = 1000000000; // 1 GB
private static final double THRESHOLD_RATIO = 10;
private final UploadService uploadService;
private final ReanalysisService reanalysisService;
@ -72,25 +72,31 @@ public class UploadController implements UploadResource {
@Parameter(name = DISABLE_AUTOMATIC_ANALYSIS_PARAM, description = "Disables automatic redaction for the uploaded file, imports only imported redactions") @RequestParam(value = DISABLE_AUTOMATIC_ANALYSIS_PARAM, required = false, defaultValue = "false") boolean disableAutomaticAnalysis) {
accessControlService.checkAccessPermissionsToDossier(dossierId);
String originalFilename = file.getOriginalFilename();
if (originalFilename == null) {
if (file.getOriginalFilename() == null) {
throw new BadRequestException("Could not upload file, no filename provided.");
}
String extension = getExtension(originalFilename);
var extension = getExtension(file.getOriginalFilename());
try {
return switch (extension) {
case "zip" -> handleZip(dossierId, file.getBytes(), keepManualRedactions, disableAutomaticAnalysis);
case "csv" -> uploadService.importCsv(dossierId, file.getBytes());
default -> {
validateExtensionOrThrow(extension);
yield uploadService.processSingleFile(dossierId, originalFilename, file.getBytes(), keepManualRedactions, disableAutomaticAnalysis);
}
};
switch (extension) {
case "zip":
return handleZip(dossierId, file.getBytes(), keepManualRedactions, disableAutomaticAnalysis);
case "csv":
return uploadService.importCsv(dossierId, file.getBytes());
default:
if (!fileFormatValidationService.getAllFileFormats().contains(extension)) {
throw new BadRequestException("Invalid file uploaded");
}
if (!fileFormatValidationService.getValidFileFormatsForTenant(TenantContext.getTenantId()).contains(extension)) {
throw new NotAllowedException("Insufficient permissions");
}
return uploadService.processSingleFile(dossierId, file.getOriginalFilename(), file.getBytes(), keepManualRedactions, disableAutomaticAnalysis);
}
} catch (IOException e) {
throw new BadRequestException("Failed to process file: " + e.getMessage(), e);
throw new BadRequestException(e.getMessage(), e);
}
}
@ -105,6 +111,7 @@ public class UploadController implements UploadResource {
accessControlService.verifyUserIsReviewerOrApprover(dossierId, fileId);
try {
reanalysisService.importRedactions(ByteContentDocument.builder().dossierId(dossierId).fileId(fileId).document(file.getBytes()).pages(pageInclusionRequest).build());
auditPersistenceService.audit(AuditRequest.builder()
@ -115,116 +122,84 @@ public class UploadController implements UploadResource {
.details(Map.of("dossierId", dossierId))
.build());
} catch (IOException e) {
throw new BadRequestException("Failed to import redactions: " + e.getMessage(), e);
throw new BadRequestException(e.getMessage(), e);
} catch (FeignException e) {
throw processFeignException(e);
}
}
private void validateExtensionOrThrow(String extension) {
private String getExtension(String fileName) {
if (!fileFormatValidationService.getAllFileFormats().contains(extension)) {
throw new BadRequestException("Invalid file uploaded (unrecognized extension).");
}
if (!fileFormatValidationService.getValidFileFormatsForTenant(TenantContext.getTenantId()).contains(extension)) {
throw new NotAllowedException("Insufficient permissions for this file type.");
}
return fileName.substring(fileName.lastIndexOf(".") + 1).toLowerCase(Locale.ROOT);
}
/**
* 1. Write the uploaded content to a temp ZIP file
* 2. Check the number of entries and reject if too big or if symlinks found
* 3. Unzip and process each file, while checking size and ratio.
*/
private FileUploadResult handleZip(String dossierId, byte[] fileContent, boolean keepManualRedactions, boolean disableAutomaticAnalysis) throws IOException {
File tempZip = FileUtils.createTempFile(UUID.randomUUID().toString(), ".zip");
try (FileOutputStream fos = new FileOutputStream(tempZip)) {
IOUtils.write(fileContent, fos);
File tempFile = FileUtils.createTempFile(UUID.randomUUID().toString(), ".zip");
try (var fileOutputStream = new FileOutputStream(tempFile)) {
IOUtils.write(fileContent, fileOutputStream);
}
validateZipEntries(tempZip);
try {
ZipData zipData = processZipContents(tempZip, dossierId, keepManualRedactions, disableAutomaticAnalysis);
checkForSymlinks(tempFile);
var zipData = unzip(tempFile, dossierId, keepManualRedactions, disableAutomaticAnalysis);
if (zipData.csvBytes != null) {
try {
FileUploadResult csvResult = uploadService.importCsv(dossierId, zipData.csvBytes);
zipData.fileUploadResult.getProcessedAttributes().addAll(csvResult.getProcessedAttributes());
zipData.fileUploadResult.getProcessedFileIds().addAll(csvResult.getProcessedFileIds());
var importResult = uploadService.importCsv(dossierId, zipData.csvBytes);
zipData.fileUploadResult.getProcessedAttributes().addAll(importResult.getProcessedAttributes());
zipData.fileUploadResult.getProcessedFileIds().addAll(importResult.getProcessedFileIds());
} catch (Exception e) {
log.debug("CSV file inside ZIP failed to import", e);
log.debug("CSV file inside ZIP failed", e);
// TODO return un-processed files to client
}
} else if (zipData.fileUploadResult.getFileIds().isEmpty()) {
if (zipData.containedUnpermittedFiles) {
throw new NotAllowedException("Zip file contains unpermitted files.");
throw new NotAllowedException("Zip file contains unpermitted files");
} else {
throw new BadRequestException("Only unsupported files in the ZIP.");
throw new BadRequestException("Only unsupported files in zip file");
}
}
return zipData.fileUploadResult;
} finally {
if (!tempZip.delete()) {
log.warn("Could not delete temporary ZIP file: {}", tempZip);
boolean isDeleted = tempFile.delete();
if (!isDeleted) {
log.warn("tempFile could not be deleted");
}
}
}
private void validateZipEntries(File tempZip) throws IOException {
try (FileInputStream fis = new FileInputStream(tempZip); ZipFile zipFile = new ZipFile(fis.getChannel())) {
int count = 0;
var entries = zipFile.getEntries();
while (entries.hasMoreElements()) {
ZipArchiveEntry ze = entries.nextElement();
private void checkForSymlinks(File tempFile) throws IOException {
try (var fis = new FileInputStream(tempFile); var zipFile = new ZipFile(fis.getChannel())) {
for (var entryEnum = zipFile.getEntries(); entryEnum.hasMoreElements(); ) {
var ze = entryEnum.nextElement();
if (ze.isUnixSymlink()) {
throw new BadRequestException("ZIP-files with symlinks are not allowed.");
}
if (!ze.isDirectory() && !ze.getName().startsWith(".")) {
count++;
if (count > THRESHOLD_ENTRIES) {
throw new BadRequestException("ZIP-Bomb detected: too many entries.");
}
throw new BadRequestException("ZIP-files with symlinks are not allowed");
}
}
}
}
private ZipData processZipContents(File tempZip, String dossierId, boolean keepManualRedactions, boolean disableAutomaticAnalysis) throws IOException {
private ZipData unzip(File tempFile, String dossierId, boolean keepManualRedactions, boolean disableAutomaticAnalysis) throws IOException {
ZipData zipData = new ZipData();
var zipData = new ZipData();
try (FileInputStream fis = new FileInputStream(tempZip); ZipFile zipFile = new ZipFile(fis.getChannel())) {
try (var fis = new FileInputStream(tempFile); var zipFile = new ZipFile(fis.getChannel())) {
var entries = zipFile.getEntries();
while (entries.hasMoreElements()) {
ZipArchiveEntry entry = entries.nextElement();
for (var entryEnum = zipFile.getEntries(); entryEnum.hasMoreElements(); ) {
var ze = entryEnum.nextElement();
zipData.totalEntryArchive++;
if (entry.isDirectory() || entry.getName().startsWith(".")) {
continue;
}
byte[] entryBytes = readEntryWithRatioCheck(entry, zipFile);
zipData.totalSizeArchive += entryBytes.length;
if (zipData.totalSizeArchive > THRESHOLD_SIZE) {
throw new BadRequestException("ZIP-Bomb detected (exceeds total size limit).");
}
String extension = getExtension(entry.getName());
if ("csv".equalsIgnoreCase(extension)) {
zipData.csvBytes = entryBytes;
} else {
handleRegularFile(dossierId, entryBytes, extension, extractFileName(entry.getName()), zipData, keepManualRedactions, disableAutomaticAnalysis);
if (!ze.isDirectory()) {
processFileZipEntry(ze, zipFile, dossierId, keepManualRedactions, zipData, disableAutomaticAnalysis);
}
}
}
@ -232,70 +207,73 @@ public class UploadController implements UploadResource {
}
private byte[] readEntryWithRatioCheck(ZipArchiveEntry entry, ZipFile zipFile) throws IOException {
private void processFileZipEntry(ZipArchiveEntry ze, ZipFile zipFile, String dossierId, boolean keepManualRedactions, ZipData zipData, boolean disableAutomaticAnalysis) throws IOException {
long compressedSize = entry.getCompressedSize() > 0 ? entry.getCompressedSize() : 1;
try (var is = zipFile.getInputStream(entry); var bos = new ByteArrayOutputStream()) {
var extension = getExtension(ze.getName());
byte[] buffer = new byte[4096];
int bytesRead;
int totalUncompressed = 0;
final String fileName;
if (ze.getName().lastIndexOf("/") >= 0) {
fileName = ze.getName().substring(ze.getName().lastIndexOf("/") + 1);
} else {
fileName = ze.getName();
}
while ((bytesRead = is.read(buffer)) != -1) {
bos.write(buffer, 0, bytesRead);
totalUncompressed += bytesRead;
if (fileName.startsWith(".")) {
return;
}
double ratio = (double) totalUncompressed / compressedSize;
if (ratio > THRESHOLD_RATIO) {
throw new BadRequestException("ZIP-Bomb detected (compression ratio too high).");
var entryAsBytes = readCurrentZipEntry(ze, zipFile);
zipData.totalSizeArchive += entryAsBytes.length;
// 1. the uncompressed data size is too much for the application resource capacity
// 2. too many entries in the archive can lead to inode exhaustion of the file-system
if (zipData.totalSizeArchive > THRESHOLD_SIZE || zipData.totalEntryArchive > THRESHOLD_ENTRIES) {
throw new BadRequestException("ZIP-Bomb detected.");
}
if ("csv".equals(extension)) {
zipData.csvBytes = entryAsBytes;
} else if (fileFormatValidationService.getAllFileFormats().contains(extension)) {
if (!fileFormatValidationService.getValidFileFormatsForTenant(TenantContext.getTenantId()).contains(extension)) {
zipData.containedUnpermittedFiles = true;
return;
}
zipData.containedUnpermittedFiles = false;
try {
var result = uploadService.processSingleFile(dossierId, fileName, entryAsBytes, keepManualRedactions, disableAutomaticAnalysis);
zipData.fileUploadResult.getFileIds().addAll(result.getFileIds());
} catch (Exception e) {
log.debug("PDF File inside ZIP failed", e);
// TODO return un-processed files to client
}
}
}
private byte[] readCurrentZipEntry(ZipArchiveEntry ze, ZipFile zipFile) throws IOException {
var bos = new ByteArrayOutputStream();
try (var entryStream = zipFile.getInputStream(ze)) {
var buffer = new byte[2048];
var nBytes = 0;
int totalSizeEntry = 0;
while ((nBytes = entryStream.read(buffer)) > 0) {
bos.write(buffer, 0, nBytes);
totalSizeEntry += nBytes;
double compressionRatio = (float) totalSizeEntry / ze.getCompressedSize();
if (compressionRatio > THRESHOLD_RATIO) {
// ratio between compressed and uncompressed data is highly suspicious, looks like a Zip Bomb Attack
throw new BadRequestException("ZIP-Bomb detected.");
}
}
return bos.toByteArray();
}
}
private void handleRegularFile(String dossierId,
byte[] fileBytes,
String extension,
String fileName,
ZipData zipData,
boolean keepManualRedactions,
boolean disableAutomaticAnalysis) {
if (!fileFormatValidationService.getAllFileFormats().contains(extension)) {
zipData.containedUnpermittedFiles = false;
return;
}
if (!fileFormatValidationService.getValidFileFormatsForTenant(TenantContext.getTenantId()).contains(extension)) {
zipData.containedUnpermittedFiles = true;
return;
}
try {
FileUploadResult result = uploadService.processSingleFile(dossierId, fileName, fileBytes, keepManualRedactions, disableAutomaticAnalysis);
zipData.fileUploadResult.getFileIds().addAll(result.getFileIds());
} catch (Exception e) {
log.debug("Failed to process file '{}' in ZIP: {}", fileName, e.getMessage(), e);
}
}
private String extractFileName(String path) {
int idx = path.lastIndexOf('/');
return (idx >= 0) ? path.substring(idx + 1) : path;
}
private String getExtension(String fileName) {
int idx = fileName.lastIndexOf('.');
if (idx < 0) {
return "";
}
return fileName.substring(idx + 1).toLowerCase(Locale.ROOT);
return bos.toByteArray();
}
@ -304,6 +282,7 @@ public class UploadController implements UploadResource {
byte[] csvBytes;
int totalSizeArchive;
int totalEntryArchive;
FileUploadResult fileUploadResult = new FileUploadResult();
boolean containedUnpermittedFiles;
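
Note: the restructured ZIP handling above keeps three safeguards against ZIP bombs: a cap on the number of entries, a cap on the total uncompressed size, and a per-entry compression-ratio check, plus a symlink rejection that Commons Compress exposes. Below is a minimal, self-contained sketch of the size checks using only java.util.zip, with thresholds mirroring the constants in the controller; it illustrates the technique rather than the project's implementation, and it omits the symlink check because java.util.zip does not expose symlink flags.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

public class ZipBombGuard {

    private static final int THRESHOLD_ENTRIES = 10_000;       // max number of entries
    private static final long THRESHOLD_SIZE = 1_000_000_000L; // 1 GB total uncompressed
    private static final double THRESHOLD_RATIO = 10;          // max uncompressed/compressed ratio per entry

    public static void validate(byte[] zipBytes) throws IOException {
        int entries = 0;
        long totalUncompressed = 0;
        try (ZipInputStream zis = new ZipInputStream(new ByteArrayInputStream(zipBytes))) {
            ZipEntry entry;
            byte[] buffer = new byte[4096];
            while ((entry = zis.getNextEntry()) != null) {
                if (entry.isDirectory()) {
                    continue;
                }
                if (++entries > THRESHOLD_ENTRIES) {
                    throw new IOException("ZIP-Bomb detected: too many entries.");
                }
                long entryUncompressed = 0;
                int read;
                while ((read = zis.read(buffer)) != -1) {
                    entryUncompressed += read;
                    totalUncompressed += read;
                    if (totalUncompressed > THRESHOLD_SIZE) {
                        throw new IOException("ZIP-Bomb detected: total size limit exceeded.");
                    }
                    // getCompressedSize() may be -1 while streaming, so only check the
                    // ratio when the compressed size is known from the entry header.
                    long compressed = entry.getCompressedSize();
                    if (compressed > 0 && (double) entryUncompressed / compressed > THRESHOLD_RATIO) {
                        throw new IOException("ZIP-Bomb detected: compression ratio too high.");
                    }
                }
            }
        }
    }
}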

View File

@ -1,17 +1,13 @@
package com.iqser.red.persistence.service.v1.external.api.impl.controller;
import static com.iqser.red.service.persistence.management.v1.processor.roles.ActionRoles.READ_USER_STATS;
import java.util.ArrayList;
import java.util.List;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.RestController;
import com.iqser.red.service.persistence.management.v1.processor.acl.custom.dossier.DossierACLService;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
import com.iqser.red.service.persistence.management.v1.processor.service.DossierService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.FileStatusPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.users.UserService;
@ -33,17 +29,15 @@ public class UserStatsController implements UserStatsResource {
@Override
@PreAuthorize("hasAuthority('" + READ_USER_STATS + "')")
public ResponseEntity<UserStats> getUserStats(String userId) {
if (userService.getUserById(userId).isEmpty()) {
throw new NotFoundException(String.format("The user with id %s is not found.", userId));
return new ResponseEntity(null, HttpStatus.NOT_FOUND);
}
List<String> dossierMemberships = new ArrayList<>();
List<String> dossierOwnerships = new ArrayList<>();
dossierService.getAllDossiers()
.stream()
.filter(dossierEntity -> dossierEntity.getHardDeletedTime() == null)
.forEach(d -> {
if (dossierACLService.getMembers(d.getId()).contains(userId)) {
dossierMemberships.add(d.getId());

View File

@ -7,24 +7,19 @@ import static com.iqser.red.service.persistence.service.v2.api.external.resource
import java.util.ArrayList;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import com.iqser.red.persistence.service.v1.external.api.impl.controller.DossierController;
import com.iqser.red.persistence.service.v1.external.api.impl.controller.StatusController;
import com.iqser.red.persistence.service.v2.external.api.impl.mapper.ComponentMapper;
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotAllowedException;
import com.iqser.red.service.persistence.management.v1.processor.roles.ApplicationRoles;
import com.iqser.red.service.persistence.management.v1.processor.service.AccessControlService;
import com.iqser.red.service.persistence.management.v1.processor.service.ComponentLogService;
import com.iqser.red.service.persistence.management.v1.processor.service.CurrentApplicationTypeProvider;
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
@ -34,7 +29,6 @@ import com.iqser.red.service.persistence.management.v1.processor.service.users.m
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.componentlog.ComponentLogEntry;
import com.iqser.red.service.persistence.service.v1.api.shared.model.component.RevertOverrideRequest;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
import com.iqser.red.service.persistence.service.v2.api.external.model.BulkComponentsRequest;
import com.iqser.red.service.persistence.service.v2.api.external.model.Component;
import com.iqser.red.service.persistence.service.v2.api.external.model.ComponentOverrideList;
import com.iqser.red.service.persistence.service.v2.api.external.model.FileComponents;
@ -43,7 +37,6 @@ import com.iqser.red.service.persistence.service.v2.api.external.resource.Compon
import com.knecon.fforesight.keycloakcommons.security.KeycloakSecurity;
import com.knecon.fforesight.tenantcommons.TenantProvider;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
@ -54,21 +47,15 @@ import lombok.experimental.FieldDefaults;
@Tag(name = "4. Component endpoints", description = "Provides operations related to components")
public class ComponentControllerV2 implements ComponentResource {
private final AccessControlService accessControlService;
private final ComponentLogService componentLogService;
private final UserService userService;
private final StatusController statusController;
private final FileStatusService fileStatusService;
private final DossierController dossierController;
private final DossierTemplatePersistenceService dossierTemplatePersistenceService;
private final CurrentApplicationTypeProvider currentApplicationTypeProvider;
private final ComponentMapper componentMapper = ComponentMapper.INSTANCE;
@Value("${documine.components.filesLimit:100}")
private int documineComponentsFilesLimit = 100;
@Override
public FileComponents getComponents(@PathVariable(DOSSIER_TEMPLATE_ID_PARAM) String dossierTemplateId,
@PathVariable(DOSSIER_ID_PARAM) String dossierId,
@ -105,37 +92,12 @@ public class ComponentControllerV2 implements ComponentResource {
checkApplicationType();
dossierTemplatePersistenceService.checkDossierTemplateExistsOrElseThrow404(dossierTemplateId);
var dossierFiles = statusController.getDossierStatus(dossierId);
if(dossierFiles.size() > documineComponentsFilesLimit) {
throw new BadRequestException(String.format("The dossier you requested components for contains %s files this is above the limit of %s files for this endpoint, please use the POST %s", dossierFiles.size(), documineComponentsFilesLimit, FILE_PATH + BULK_COMPONENTS_PATH));
}
return new FileComponentsList(dossierFiles.stream()
.map(file -> getComponents(dossierTemplateId, dossierId, file.getFileId(), includeDetails))
.toList());
}
@Override
public FileComponentsList getComponentsForFiles(@PathVariable(DOSSIER_TEMPLATE_ID_PARAM) String dossierTemplateId,
@PathVariable(DOSSIER_ID_PARAM) String dossierId,
@RequestParam(name = INCLUDE_DETAILS_PARAM, defaultValue = "false", required = false) boolean includeDetails,
@RequestBody BulkComponentsRequest bulkComponentsRequest){
if(bulkComponentsRequest.getFileIds().size() > documineComponentsFilesLimit) {
throw new BadRequestException(String.format("You requested components for %s files this is above the limit of %s files for this endpoint, lower the fileIds in the request", bulkComponentsRequest.getFileIds().size(), documineComponentsFilesLimit));
}
checkApplicationType();
dossierTemplatePersistenceService.checkDossierTemplateExistsOrElseThrow404(dossierTemplateId);
dossierController.getDossier(dossierId, false, false);
return new FileComponentsList(bulkComponentsRequest.getFileIds().stream()
.map(fileId -> getComponents(dossierTemplateId, dossierId, fileId, includeDetails))
.toList());
}
@Override
@PreAuthorize("hasAuthority('" + GET_RSS + "')")
public void addOverride(@PathVariable(DOSSIER_TEMPLATE_ID_PARAM) String dossierTemplateId,
@ -144,7 +106,6 @@ public class ComponentControllerV2 implements ComponentResource {
@RequestBody Component override) {
checkApplicationType();
accessControlService.verifyUserIsReviewer(dossierId, fileId);
dossierTemplatePersistenceService.checkDossierTemplateExistsOrElseThrow404(dossierTemplateId);
componentLogService.overrideComponent(dossierId, fileId, componentMapper.toComponentLogEntry(override));
@ -185,7 +146,6 @@ public class ComponentControllerV2 implements ComponentResource {
@RequestBody RevertOverrideRequest revertOverrideRequest) {
checkApplicationType();
accessControlService.verifyUserIsReviewer(dossierId, fileId);
dossierTemplatePersistenceService.checkDossierTemplateExistsOrElseThrow404(dossierTemplateId);
componentLogService.revertOverrides(dossierId, fileId, revertOverrideRequest);
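Since the bulk components endpoint is removed in this change, callers presumably fall back to one request per file. A rough client-side sketch using Spring WebFlux's WebClient; the base URL and URI template below are illustrative only and do not reproduce the service's actual path constants.

import java.util.List;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Flux;

public class ComponentsClientSketch {

    private final WebClient webClient = WebClient.create("https://persistence-service.example"); // hypothetical base URL

    // Fetches components file by file with bounded concurrency; the path is illustrative.
    public List<String> fetchComponents(String dossierTemplateId, String dossierId, List<String> fileIds) {
        return Flux.fromIterable(fileIds)
                .flatMap(fileId -> webClient.get()
                        .uri("/api/v2/{templateId}/{dossierId}/file/{fileId}/components?includeDetails=false",
                                dossierTemplateId, dossierId, fileId)
                        .retrieve()
                        .bodyToMono(String.class), 4) // at most 4 requests in flight
                .collectList()
                .block();
    }
}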

View File

@ -247,7 +247,6 @@ public class DossierTemplateControllerV2 implements DossierTemplateResource {
.filterable(fileAttributeConfig.isFilterable())
.displayedInFileList(fileAttributeConfig.isDisplayedInFileList())
.build())
.includeInCsvExport(fileAttributeConfig.isIncludeInCsvExport())
.build())
.toList();
@ -450,15 +449,11 @@ public class DossierTemplateControllerV2 implements DossierTemplateResource {
return new DossierAttributeDefinitionList(dossierAttributeConfigPersistenceService.getDossierAttributes(dossierTemplateId)
.stream()
.map(config -> DossierAttributeDefinition.builder()
.id(config.getId())
.name(config.getLabel())
.type(config.getType())
.reportingPlaceholder(config.getPlaceholder())
.displaySettings(DossierAttributeDefinition.DossierDisplaySettings.builder()
.editable(config.isEditable())
.filterable(config.isFilterable())
.displayedInDossierList(config.isDisplayedInDossierList())
.build())
.build())
.toList());
}

View File

@ -156,6 +156,8 @@ public class FileControllerV2 implements FileResource {
@PathVariable(FILE_ID_PARAM) String fileId,
@RequestBody DownloadRequest downloadRequest) {
fileStatusManagementService.getFileStatus(fileId, false);
return prepareBulkDownload(dossierTemplateId,
dossierId,
BulkDownloadRequest.builder()

View File

@ -223,7 +223,7 @@ public interface DictionaryResource {
void changeFlags(@PathVariable(TYPE_PARAMETER_NAME) String type,
@PathVariable(DOSSIER_TEMPLATE_PARAMETER_NAME) String dossierTemplateId,
@PathVariable(DOSSIER_ID_PARAMETER_NAME) String dossierId,
@RequestParam(value = "addToDictionaryAction") boolean addToDictionaryAction);
@RequestParam(value = "addToDictionary") boolean addToDictionary);
@PostMapping(value = DICTIONARY_REST_PATH + DIFFERENCE + DOSSIER_TEMPLATE_PATH_VARIABLE, consumes = MediaType.APPLICATION_JSON_VALUE)

View File

@ -44,11 +44,7 @@ public interface FileAttributesResource {
String FILE_ID = "fileId";
String FILE_ID_PATH_VARIABLE = "/{" + FILE_ID + "}";
String UTF_ENCODING = "UTF-8";
String ASCII_ENCODING = "ASCII";
String ISO_ENCODING = "ISO-8859-1";
Set<String> encodingList = Sets.newHashSet(ISO_ENCODING, ASCII_ENCODING, UTF_ENCODING);
Set<String> encodingList = Sets.newHashSet("ISO", "ASCII", "UTF-8");
@ResponseBody

View File

@ -0,0 +1,56 @@
package com.iqser.red.service.persistence.service.v1.api.external.resource;
import com.iqser.red.service.persistence.service.v1.api.shared.model.saas.migration.MigrationStatusResponse;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
public interface MigrationStatusResource {
String MIGRATION_STATUS_REST_PATH = ExternalApi.BASE_PATH + "/migration-status";
String START_MIGRATION_REST_PATH = ExternalApi.BASE_PATH + "/start_migration";
String REVERT_MIGRATION_REST_PATH = ExternalApi.BASE_PATH + "/revert_migration";
String RETRY_MIGRATION_REST_PATH = ExternalApi.BASE_PATH + "/retry_migration";
String FILE_ID = "fileId";
String FILE_ID_PATH_VARIABLE = "/{" + FILE_ID + "}";
String DOSSIER_ID = "dossierId";
String DOSSIER_ID_PATH_VARIABLE = "/{" + DOSSIER_ID + "}";
@ResponseBody
@PostMapping(value = MIGRATION_STATUS_REST_PATH, produces = MediaType.APPLICATION_JSON_VALUE)
@Operation(summary = "Show the status of the migration", description = "None")
@ApiResponses(value = {@ApiResponse(responseCode = "200", description = "Success.")})
MigrationStatusResponse migrationStatus();
@ResponseBody
@PostMapping(value = START_MIGRATION_REST_PATH + FILE_ID_PATH_VARIABLE + DOSSIER_ID_PATH_VARIABLE)
@Operation(summary = "Start SAAS migration for specific file", description = "None")
@ApiResponses(value = {@ApiResponse(responseCode = "200", description = "Success.")})
ResponseEntity<?> startMigrationForFile(@RequestParam(value = DOSSIER_ID) String dossierId, @RequestParam(value = FILE_ID) String fileId);
@ResponseBody
@PostMapping(value = REVERT_MIGRATION_REST_PATH + FILE_ID_PATH_VARIABLE + DOSSIER_ID_PATH_VARIABLE)
@Operation(summary = "Start SAAS migration for specific file", description = "None")
@ApiResponses(value = {@ApiResponse(responseCode = "200", description = "Success.")})
ResponseEntity<?> revertMigrationForFile(@RequestParam(value = DOSSIER_ID) String dossierId, @RequestParam(value = FILE_ID) String fileId);
@ResponseBody
@PostMapping(value = RETRY_MIGRATION_REST_PATH)
@Operation(summary = "Restart SAAS migration for all files in error state", description = "None")
@ApiResponses(value = {@ApiResponse(responseCode = "200", description = "Success.")})
ResponseEntity<?> requeueErrorFiles();
}
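If another service needs to call these endpoints, the same Feign pattern used elsewhere in this change set (a client interface extending the resource interface) would look roughly like this; the client name and URL property are assumptions, not part of the change.

import org.springframework.cloud.openfeign.FeignClient;
import com.iqser.red.service.persistence.service.v1.api.external.resource.MigrationStatusResource;

// Hypothetical Feign client mirroring the RuleBuilderClient pattern further down in this compare view.
@FeignClient(name = "MigrationStatusClient", url = "${persistence-service.url}")
public interface MigrationStatusClient extends MigrationStatusResource {
}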

View File

@ -38,7 +38,6 @@ public interface ReanalysisResource {
String EXCLUDED_STATUS_PARAM = "excluded";
String FORCE_PARAM = "force";
String ALL_PAGES = "allPages";
@PostMapping(value = REANALYSIS_REST_PATH + DOSSIER_ID_PATH_VARIABLE)
@ -74,8 +73,7 @@ public interface ReanalysisResource {
@ApiResponses(value = {@ApiResponse(responseCode = "204", description = "OK"), @ApiResponse(responseCode = "409", description = "Conflict"), @ApiResponse(responseCode = "404", description = "Not found"), @ApiResponse(responseCode = "403", description = "Forbidden"), @ApiResponse(responseCode = "400", description = "Cannot OCR approved file")})
void ocrFile(@PathVariable(DOSSIER_ID) String dossierId,
@PathVariable(FILE_ID) String fileId,
@RequestParam(value = FORCE_PARAM, required = false, defaultValue = FALSE) boolean force,
@RequestParam(value = ALL_PAGES, required = false, defaultValue = FALSE) boolean allPages);
@RequestParam(value = FORCE_PARAM, required = false, defaultValue = FALSE) boolean force);
@Operation(summary = "Ocr and reanalyze multiple files for a dossier", description = "None")

View File

@ -28,7 +28,9 @@ public interface RulesResource {
String RULES_PATH = ExternalApi.BASE_PATH + "/rules";
String UPLOAD_PATH = "/upload";
String UPLOAD_FULL_PATH = "/uploadFull";
String DOWNLOAD_PATH = "/download";
String DOWNLOAD_FULL_PATH = "/downloadFull";
String RESET_PATH = "/reset";
String DOSSIER_TEMPLATE_PARAMETER_NAME = "dossierTemplateId";
@ -92,6 +94,16 @@ public interface RulesResource {
@Schema(type = "string", format = "binary", name = "file") @RequestPart(name = "file") MultipartFile file);
@ResponseStatus(value = HttpStatus.NO_CONTENT)
@PostMapping(value = RULES_PATH + DOSSIER_TEMPLATE_PATH_VARIABLE + RULE_FILE_TYPE_PATH_VARIABLE + UPLOAD_FULL_PATH, consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
@Operation(summary = "Takes object containing string or rules as argument, which will be used by the redaction service.")
@ApiResponses(value = {@ApiResponse(responseCode = "200", description = "Rules upload successful or rules validation done"), @ApiResponse(responseCode = "400", description = "Uploaded rules could not be verified."), @ApiResponse(responseCode = "422", description = "Uploaded rules could not be compiled.")})
ResponseEntity<DroolsValidationResponse> uploadFullFile(@PathVariable(DOSSIER_TEMPLATE_PARAMETER_NAME) String dossierTemplateId,
@PathVariable(RULE_FILE_TYPE_PARAMETER_NAME) RuleFileType ruleFileType,
@RequestParam(value = DRY_RUN_PARAMETER) boolean dryRun,
@Schema(type = "string", format = "binary", name = "file") @RequestPart(name = "file") MultipartFile file);
@ResponseBody
@ResponseStatus(value = HttpStatus.OK)
@Operation(summary = "Returns file containing the currently used Drools rules.")
@ -113,4 +125,13 @@ public interface RulesResource {
@ApiResponses(value = {@ApiResponse(responseCode = "204", description = "No content")})
@PutMapping(value = RULES_PATH + DOSSIER_TEMPLATE_PATH_VARIABLE + RULE_FILE_TYPE_PATH_VARIABLE + RESET_PATH, produces = MediaType.APPLICATION_JSON_VALUE)
void unlockRules(@PathVariable(DOSSIER_TEMPLATE_PARAMETER_NAME) String dossierTemplateId, @PathVariable(RULE_FILE_TYPE_PARAMETER_NAME) RuleFileType ruleFileType);
@ResponseBody
@ResponseStatus(value = HttpStatus.OK)
@Operation(summary = "Returns file containing the currently used Drools rules.")
@ApiResponses(value = {@ApiResponse(responseCode = "200", description = "OK")})
@GetMapping(value = RULES_PATH + DOSSIER_TEMPLATE_PATH_VARIABLE + RULE_FILE_TYPE_PATH_VARIABLE + DOWNLOAD_FULL_PATH)
ResponseEntity<?> downloadFullFile(@PathVariable(DOSSIER_TEMPLATE_PARAMETER_NAME) String dossierTemplateId,
@PathVariable(RULE_FILE_TYPE_PARAMETER_NAME) RuleFileType ruleFileType);
}
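A rough sketch of how a caller might retrieve the full rules file via the new downloadFull endpoint, using java.net.http. The host and the path prefix (everything contributed by ExternalApi.BASE_PATH) are placeholders; only the trailing segments mirror the constants above.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class RulesDownloadSketch {

    public static byte[] downloadFullRules(String baseUrl, String dossierTemplateId, String ruleFileType, String token)
            throws Exception {
        // Mirrors RULES_PATH + DOSSIER_TEMPLATE_PATH_VARIABLE + RULE_FILE_TYPE_PATH_VARIABLE + DOWNLOAD_FULL_PATH;
        // the concrete prefix depends on ExternalApi.BASE_PATH and is assumed here.
        URI uri = URI.create(baseUrl + "/rules/" + dossierTemplateId + "/" + ruleFileType + "/downloadFull");
        HttpRequest request = HttpRequest.newBuilder(uri)
                .header("Authorization", "Bearer " + token)
                .GET()
                .build();
        HttpResponse<byte[]> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofByteArray());
        return response.body();
    }
}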

View File

@ -1,18 +0,0 @@
package com.iqser.red.service.persistence.service.v2.api.external.model;
import java.util.ArrayList;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class BulkComponentsRequest {
private List<String> fileIds = new ArrayList<>();
}

View File

@ -17,18 +17,5 @@ public class DossierAttributeDefinition {
private String name;
private DossierAttributeType type;
private String reportingPlaceholder;
private DossierDisplaySettings displaySettings;
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public static class DossierDisplaySettings {
private boolean editable;
private boolean filterable;
private boolean displayedInDossierList;
}
}
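After this change the dossier attribute definition no longer carries display settings. A minimal usage sketch of the remaining builder fields, with illustrative values; the package locations and the DossierAttributeType.TEXT constant are assumptions based on the "TEXT" examples in the API spec.

import com.iqser.red.service.persistence.service.v2.api.external.model.DossierAttributeDefinition;
import com.iqser.red.service.persistence.service.v2.api.external.model.DossierAttributeType; // assumed package

public class DossierAttributeDefinitionSketch {

    public static DossierAttributeDefinition example() {
        return DossierAttributeDefinition.builder()
                .id("23e45678-e90b-12d3-a456-765114174321") // illustrative id
                .name("Document Summary")
                .type(DossierAttributeType.TEXT)            // assumed enum constant
                .reportingPlaceholder("{{dossier.attribute.DocumentSummary}}")
                .build();
    }
}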

View File

@ -20,7 +20,6 @@ public class FileAttributeDefinition {
private String mappedCsvColumnHeader;
private String reportingPlaceholder;
private DisplaySettings displaySettings;
private boolean includeInCsvExport;
@Data
@NoArgsConstructor

View File

@ -20,7 +20,6 @@ import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import com.iqser.red.service.persistence.service.v1.api.shared.model.component.RevertOverrideRequest;
import com.iqser.red.service.persistence.service.v2.api.external.model.BulkComponentsRequest;
import com.iqser.red.service.persistence.service.v2.api.external.model.Component;
import com.iqser.red.service.persistence.service.v2.api.external.model.ComponentOverrideList;
import com.iqser.red.service.persistence.service.v2.api.external.model.FileComponents;
@ -75,16 +74,6 @@ public interface ComponentResource {
@Parameter(name = INCLUDE_DETAILS_PARAM, description = INCLUDE_DETAILS_DESCRIPTION) @RequestParam(name = INCLUDE_DETAILS_PARAM, defaultValue = "false", required = false) boolean includeDetails);
@PostMapping(value = FILE_PATH
+ BULK_COMPONENTS_PATH, produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}, consumes = MediaType.APPLICATION_JSON_VALUE)
@Operation(summary = "Returns the components for all files of a dossier", description = "None")
@ApiResponses(value = {@ApiResponse(responseCode = "200", description = "OK")})
FileComponentsList getComponentsForFiles(@Parameter(name = DOSSIER_TEMPLATE_ID_PARAM, description = "The identifier of the dossier template that is used for the dossier.", required = true) @PathVariable(DOSSIER_TEMPLATE_ID_PARAM) String dossierTemplateId,
@Parameter(name = DOSSIER_ID_PARAM, description = "The identifier of the dossier that contains the file.", required = true) @PathVariable(DOSSIER_ID_PARAM) String dossierId,
@Parameter(name = INCLUDE_DETAILS_PARAM, description = INCLUDE_DETAILS_DESCRIPTION) @RequestParam(name = INCLUDE_DETAILS_PARAM, defaultValue = "false", required = false) boolean includeDetails,
@RequestBody BulkComponentsRequest bulkComponentsRequest);
@ResponseBody
@ResponseStatus(value = HttpStatus.NO_CONTENT)
@PostMapping(value = FILE_PATH + FILE_ID_PATH_VARIABLE + OVERRIDES_PATH, consumes = MediaType.APPLICATION_JSON_VALUE)

View File

@ -1626,50 +1626,6 @@ paths:
$ref: '#/components/responses/429'
"500":
$ref: '#/components/responses/500'
post:
operationId: getComponentsForFiles
tags:
- 4. Components
summary: Returns the FileComponents for requested files
description: |
This endpoint fetches components for the requested files by its ids. Like individual file components,
these represent various aspects, metadata or content of the files. Entity and component rules define these components based on the file's
content. They can give a *structured view* on a document's text.
To include detailed component information, set the `includeDetails` query parameter to `true`.
parameters:
- $ref: '#/components/parameters/dossierTemplateId'
- $ref: '#/components/parameters/dossierId'
- $ref: '#/components/parameters/includeComponentDetails'
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/BulkComponentsRequest'
required: true
responses:
"200":
content:
application/json:
schema:
$ref: '#/components/schemas/FileComponentsList'
application/xml:
schema:
$ref: '#/components/schemas/FileComponentsList'
description: |
Successfully fetched components for all files in the dossier.
"400":
$ref: '#/components/responses/400'
"401":
$ref: '#/components/responses/401'
"403":
$ref: '#/components/responses/403'
"404":
$ref: '#/components/responses/404-dossier'
"429":
$ref: '#/components/responses/429'
"500":
$ref: '#/components/responses/500'
/api/downloads:
get:
operationId: getDownloadStatusList
@ -2817,16 +2773,6 @@ components:
entityRuleId: DEF.13.37
type: another_entity_type
page: 456
BulkComponentsRequest:
type: object
description: Request payload to get components for multiple files.
properties:
fileIds:
type: array
description: A list with unique identifiers of the files for which components should be retrieved.
items:
type: string
description: The unique identifier of a file.
DossierStatusDefinition:
type: object
description: |
@ -2918,8 +2864,6 @@ components:
placeholder follows a specific format convention:
`{{dossier.attribute.<name>}}` while the name is transformed into 'PascalCase' and does not contain
whitespaces. The placeholder is unique in a dossier template.
displaySettings:
$ref: '#/components/schemas/DossierAttributeDisplaySettings'
required:
- name
- type
@ -2928,35 +2872,6 @@ components:
name: "Document Summary"
type: "TEXT"
reportingPlaceholder: "{{dossier.attribute.DocumentSummary}}"
displaySettings:
editable: true
filterable: false
displayedInDossierList: false
DossierAttributeDisplaySettings:
type: object
description: |
Display setting for the user interface. These settings control how the UI handles and presents the dossier attributes.
properties:
editable:
type: boolean
description: |
If `true`, the user interfaces allow manual editing of the value. Otherwise only importing and setting by rules would be possible.
filterable:
type: boolean
description: |
If `true`, the user interfaces add filter options to the dossier list.
displayedInDossierList:
type: boolean
description: |
if `true`, the user interfaces show the values in the dossier list.
required:
- editable
- filterable
- displayedInDossierList
example:
editable: true
filterable: true
displayedInDossierList: false
FileAttributeDefinition:
type: object
description: |
@ -3079,18 +2994,10 @@ components:
name: "Dossier Summary"
type: "TEXT"
reportingPlaceholder: "{{dossier.attribute.DossierSummary}}"
displaySettings:
editable: true
filterable: false
displayedInFileList: false
- id: "23e45678-e90b-12d3-a456-765114174321"
name: "Comment"
type: "TEXT"
reportingPlaceholder: "{{dossier.attribute.Comment}}"
displaySettings:
editable: true
filterable: false
displayedInFileList: false
FileAttributeDefinitionList:
type: object
description: A list of file attribute definitions.

View File

@ -1492,8 +1492,6 @@ components:
placeholder follows a specific format convention:
`{{dossier.attribute.<name>}}` while the name is transformed into 'PascalCase' and does not contain
whitespaces. The placeholder is unique in a dossier template.
displaySettings:
$ref: '#/components/schemas/DossierAttributeDisplaySettings'
required:
- name
- type
@ -1502,35 +1500,6 @@ components:
name: "Document Summary"
type: "TEXT"
reportingPlaceholder: "{{dossier.attribute.DocumentSummary}}"
displaySettings:
editable: true
filterable: false
displayedInDossierList: false
DossierAttributeDisplaySettings:
type: object
description: |
Display setting for the user interface. These settings control how the UI handles and presents the dossier attributes.
properties:
editable:
type: boolean
description: |
If `true`, the user interfaces allow manual editing of the value. Otherwise only importing and setting by rules would be possible.
filterable:
type: boolean
description: |
If `true`, the user interfaces add filter options to the dossier list.
displayedInDossierList:
type: boolean
description: |
if `true`, the user interfaces show the values in the dossier list.
required:
- editable
- filterable
- displayedInDossierList
example:
editable: true
filterable: true
displayedInDossierList: false
FileAttributeDefinition:
type: object
description: |
@ -1653,18 +1622,10 @@ components:
name: "Dossier Summary"
type: "TEXT"
reportingPlaceholder: "{{dossier.attribute.DossierSummary}}"
displaySettings:
editable: true
filterable: false
displayedInFileList: false
- id: "23e45678-e90b-12d3-a456-765114174321"
name: "Comment"
type: "TEXT"
reportingPlaceholder: "{{dossier.attribute.Comment}}"
displaySettings:
editable: true
filterable: false
displayedInFileList: false
FileAttributeDefinitionList:
type: object
description: A list of file attribute definitions.

View File

@ -10,7 +10,6 @@ import org.springframework.web.bind.annotation.RestController;
import com.iqser.red.service.persistence.management.v1.processor.entity.dossier.DossierAttributeConfigEntity;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.DossierAttributeConfigPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.utils.DossierAttributeConfigMapper;
import com.iqser.red.service.persistence.service.v1.api.internal.resources.DossierAttributesConfigResource;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.DossierAttributeConfig;
@ -28,7 +27,7 @@ public class DossierAttributesConfigInternalController implements DossierAttribu
@Override
public List<DossierAttributeConfig> getDossierAttributes(@PathVariable(DOSSIER_TEMPLATE_ID) String dossierTemplateId) {
return convert(dossierAttributeConfigPersistenceService.getDossierAttributes(dossierTemplateId), DossierAttributeConfig.class, new DossierAttributeConfigMapper());
return convert(dossierAttributeConfigPersistenceService.getDossierAttributes(dossierTemplateId), DossierAttributeConfig.class);
}
}

View File

@ -8,7 +8,6 @@ import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.FileAttributeConfigPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.utils.FileAttributeConfigMapper;
import com.iqser.red.service.persistence.service.v1.api.internal.resources.FileAttributesConfigResource;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileAttributeConfig;
@ -26,7 +25,7 @@ public class FileAttributesConfigInternalController implements FileAttributesCon
@Override
public List<FileAttributeConfig> getFileAttributeConfigs(@PathVariable(DOSSIER_TEMPLATE_ID) String dossierTemplateId) {
return convert(fileAttributeConfigPersistenceService.getFileAttributes(dossierTemplateId), FileAttributeConfig.class, new FileAttributeConfigMapper());
return convert(fileAttributeConfigPersistenceService.getFileAttributes(dossierTemplateId), FileAttributeConfig.class);
}
}

View File

@ -57,7 +57,7 @@ public class AdminInterfaceController {
fileStatusService.validateFileIsNotDeletedAndNotApproved(fileId);
fileStatusService.setStatusOcrQueued(dossierId, fileId, false);
fileStatusService.setStatusOcrQueued(dossierId, fileId);
}
@ -91,7 +91,7 @@ public class AdminInterfaceController {
if (!dryRun) {
fileStatusService.validateFileIsNotDeletedAndNotApproved(file.getId());
fileStatusService.setStatusOcrQueued(file.getDossierId(), file.getId(), false);
fileStatusService.setStatusOcrQueued(file.getDossierId(), file.getId());
}
}

View File

@ -14,7 +14,6 @@ configurations {
}
}
dependencies {
api(project(":persistence-service-shared-api-v1"))
api(project(":persistence-service-shared-mongo-v1"))
api(project(":persistence-service-external-api-v1"))
@ -37,12 +36,12 @@ dependencies {
}
api("com.knecon.fforesight:azure-ocr-service-api:0.13.0")
implementation("com.knecon.fforesight:llm-service-api:1.20.0-RED10072.2")
api("com.knecon.fforesight:jobs-commons:0.13.0")
api("com.knecon.fforesight:jobs-commons:0.10.0")
api("com.iqser.red.commons:storage-commons:2.50.0")
api("com.knecon.fforesight:tenant-commons:0.31.0-RED10196.0") {
exclude(group = "com.iqser.red.commons", module = "storage-commons")
}
api("com.knecon.fforesight:database-tenant-commons:0.31.0") {
api("com.knecon.fforesight:database-tenant-commons:0.28.0-RED10196.0") {
exclude(group = "com.knecon.fforesight", module = "tenant-commons")
}
api("com.knecon.fforesight:keycloak-commons:0.30.0") {
@ -71,6 +70,7 @@ dependencies {
api("commons-validator:commons-validator:1.7")
api("com.opencsv:opencsv:5.9")
implementation("com.google.protobuf:protobuf-java:4.27.1")
implementation("org.mapstruct:mapstruct:1.6.2")
annotationProcessor("org.mapstruct:mapstruct-processor:1.6.2")

View File

@ -20,6 +20,7 @@ import org.springframework.retry.support.RetryTemplate;
import com.iqser.red.service.persistence.management.v1.processor.client.pdftronredactionservice.PDFTronClient;
import com.iqser.red.service.persistence.management.v1.processor.client.redactionreportservice.StatusReportClient;
import com.iqser.red.service.persistence.management.v1.processor.client.redactionservice.RedactionClient;
import com.iqser.red.service.persistence.management.v1.processor.client.redactionservice.RuleBuilderClient;
import com.iqser.red.service.persistence.management.v1.processor.client.searchservice.SearchClient;
import com.iqser.red.service.persistence.management.v1.processor.client.tenantusermanagementservice.UsersClient;
import com.iqser.red.service.persistence.management.v1.processor.settings.FileManagementServiceSettings;
@ -30,7 +31,7 @@ import lombok.extern.slf4j.Slf4j;
@Slf4j
@Configuration
@ComponentScan
@EnableFeignClients(basePackageClasses = {PDFTronClient.class, StatusReportClient.class, SearchClient.class, RedactionClient.class, UsersClient.class})
@EnableFeignClients(basePackageClasses = {PDFTronClient.class, StatusReportClient.class, SearchClient.class, RedactionClient.class, UsersClient.class, RuleBuilderClient.class})
@ImportAutoConfiguration(SharedMongoAutoConfiguration.class)
public class PersistenceServiceProcessorConfiguration {

View File

@ -0,0 +1,10 @@
package com.iqser.red.service.persistence.management.v1.processor.client.redactionservice;
import org.springframework.cloud.openfeign.FeignClient;
import com.iqser.red.service.redaction.v1.resources.RuleBuilderResource;
@FeignClient(name = "RuleBuilderClient", url = "${redaction-service.url}")
public interface RuleBuilderClient extends RuleBuilderResource {
}

View File

@ -53,8 +53,6 @@ import com.iqser.red.service.persistence.management.v1.processor.service.persist
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.LegalBasisMappingPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.ReportTemplatePersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.RulesPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.utils.DossierAttributeConfigMapper;
import com.iqser.red.service.persistence.management.v1.processor.utils.FileAttributeConfigMapper;
import com.iqser.red.service.persistence.management.v1.processor.utils.FileSystemBackedArchiver;
import com.iqser.red.service.persistence.management.v1.processor.utils.StorageIdUtils;
import com.iqser.red.service.persistence.service.v1.api.shared.model.RuleFileType;
@ -185,16 +183,13 @@ public class DossierTemplateExportService {
fileSystemBackedArchiver.addEntries(new FileSystemBackedArchiver.ArchiveModel(folder,
getFilename(ExportFilename.DOSSIER_ATTRIBUTES_CONFIG, JSON_EXT),
objectMapper.writeValueAsBytes(convert(dossierAttributesConfig,
DossierAttributeConfig.class,
new DossierAttributeConfigMapper()))));
DossierAttributeConfig.class))));
// add file attribute configs
List<FileAttributeConfigEntity> fileAttributeConfigList = fileAttributeConfigPersistenceService.getFileAttributes(dossierTemplate.getId());
fileSystemBackedArchiver.addEntries(new FileSystemBackedArchiver.ArchiveModel(folder,
getFilename(ExportFilename.FILE_ATTRIBUTE_CONFIG, JSON_EXT),
objectMapper.writeValueAsBytes(convert(fileAttributeConfigList,
FileAttributeConfig.class,
new FileAttributeConfigMapper()))));
objectMapper.writeValueAsBytes(convert(fileAttributeConfigList, FileAttributeConfig.class))));
// add legal basis mapping
List<LegalBasisEntity> legalBasisMappingList = legalBasisMappingPersistenceService.getLegalBasisMapping(dossierTemplate.getId());

View File

@ -1,39 +0,0 @@
package com.iqser.red.service.persistence.management.v1.processor.entity.configuration;
import java.time.OffsetDateTime;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.IdClass;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.FieldDefaults;
@Data
@Entity
@Builder
@AllArgsConstructor
@NoArgsConstructor
@IdClass(AppVersionEntityKey.class)
@Table(name = "app_version_history")
@FieldDefaults(level = AccessLevel.PRIVATE)
public class AppVersionEntity {
@Id
@Column
@NotNull String appVersion;
@Id
@Column
@NotNull String layoutParserVersion;
@Column
@NotNull OffsetDateTime date;
}

View File

@ -1,27 +0,0 @@
package com.iqser.red.service.persistence.management.v1.processor.entity.configuration;
import java.io.Serializable;
import jakarta.persistence.Column;
import jakarta.validation.constraints.NotNull;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.FieldDefaults;
@Data
@NoArgsConstructor
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
public class AppVersionEntityKey implements Serializable {
@Column
@NotNull String appVersion;
@Column
@NotNull String layoutParserVersion;
}

View File

@ -30,10 +30,6 @@ public class DossierAttributeConfigEntity {
@Column
private boolean editable;
@Column
private boolean filterable;
@Column
private boolean displayedInDossierList;
@Column
private String placeholder;
@Column

View File

@ -41,8 +41,6 @@ public class FileAttributeConfigEntity {
@Column
private String placeholder;
@Column
private boolean includeInCsvExport;
@Column
@Enumerated(EnumType.STRING)
@Builder.Default
private FileAttributeType type = FileAttributeType.TEXT;

View File

@ -9,7 +9,6 @@ import java.util.Set;
import org.hibernate.annotations.Fetch;
import org.hibernate.annotations.FetchMode;
import com.iqser.red.service.persistence.management.v1.processor.entity.download.DownloadStatusEntity;
import com.iqser.red.service.persistence.management.v1.processor.utils.JSONIntegerSetConverter;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.ErrorCode;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.ProcessingStatus;
@ -23,10 +22,8 @@ import jakarta.persistence.Entity;
import jakarta.persistence.EnumType;
import jakarta.persistence.Enumerated;
import jakarta.persistence.FetchType;
import jakarta.persistence.ForeignKey;
import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.OneToMany;
import jakarta.persistence.Table;
import lombok.AllArgsConstructor;
@ -77,9 +74,6 @@ public class FileEntity {
@Column
private OffsetDateTime lastLayoutProcessed;
@Column
private String layoutParserVersion;
@Column
private OffsetDateTime lastIndexed;
@ -224,11 +218,6 @@ public class FileEntity {
@Column
private boolean protobufMigrationDone;
@ManyToOne(fetch = FetchType.EAGER)
@JoinColumn(name = "last_download", referencedColumnName = "storage_id", foreignKey = @ForeignKey(name = "fk_file_last_download"))
private DownloadStatusEntity lastDownload;
public OffsetDateTime getLastOCRTime() {
return this.ocrStartTime;

View File

@ -43,7 +43,6 @@ import lombok.experimental.FieldDefaults;
public class DownloadStatusEntity {
@Id
@Column(name = "storage_id")
String storageId;
@Column
String uuid;

View File

@ -0,0 +1,35 @@
package com.iqser.red.service.persistence.management.v1.processor.entity.migration;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.SaasMigrationStatus;
import jakarta.persistence.*;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
@Entity
@Table(name = "saas_migration_status")
public class SaasMigrationStatusEntity {
@Id
private String fileId;
@Column
private String dossierId;
@Column
@Enumerated(EnumType.STRING)
private SaasMigrationStatus status;
@Column
private Integer processingErrorCounter;
@Column
private String errorCause;
}
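A persistence service around this entity presumably wraps a Spring Data repository along these lines; the repository name and derived query method are assumptions inferred from the calls visible further down (e.g. findAllByStatus), not code from the change set.

import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import com.iqser.red.service.persistence.management.v1.processor.entity.migration.SaasMigrationStatusEntity;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.SaasMigrationStatus;

// Hypothetical repository; the String id type matches the entity's fileId primary key.
public interface SaasMigrationStatusRepositorySketch extends JpaRepository<SaasMigrationStatusEntity, String> {

    List<SaasMigrationStatusEntity> findAllByStatus(SaasMigrationStatus status);
}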

View File

@ -1,61 +0,0 @@
package com.iqser.red.service.persistence.management.v1.processor.lifecycle;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.AppVersionPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.utils.TenantUtils;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantProvider;
import jakarta.validation.ConstraintViolationException;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Component
@RequiredArgsConstructor
public class AppVersionTracker {
@Value("${BACKEND_APP_VERSION:}")
private String appVersion;
@Value("${LAYOUT_PARSER_VERSION:}")
private String layoutParserVersion;
private final AppVersionPersistenceService appVersionPersistenceService;
private final TenantProvider tenantProvider;
@EventListener(ApplicationReadyEvent.class)
public void trackAppVersion() {
tenantProvider.getTenants()
.forEach(tenant -> {
if (!TenantUtils.isTenantReadyForPersistence(tenant)) {
return;
}
if (appVersion.isBlank() || layoutParserVersion.isBlank()) {
log.info("No app version or layout parser version was provided. Version tracking skipped. ");
return;
}
TenantContext.setTenantId(tenant.getTenantId());
try {
if (appVersionPersistenceService.insertIfNotExists(appVersion, layoutParserVersion)) {
log.info("Started with new app version {} / layout parser version {}.", appVersion, layoutParserVersion);
}
} catch (ConstraintViolationException cve) {
log.error("Validation failed for app version {} / layout parser version {}.", appVersion, layoutParserVersion, cve);
} catch (Exception e) {
log.error("Failed to track app version {} / layout parser version {}.", appVersion, layoutParserVersion, e);
}
});
}
}

View File

@ -0,0 +1,170 @@
package com.iqser.red.service.persistence.management.v1.processor.migration;
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.*;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository.CommentRepository;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository.FileRepository;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository.annotationentity.*;
import com.knecon.fforesight.databasetenantcommons.providers.utils.MagicConverter;
import jakarta.transaction.Transactional;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
@Slf4j
@Service
@Transactional
@RequiredArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class SaasAnnotationIdMigrationService {
ManualRedactionRepository manualRedactionRepository;
RemoveRedactionRepository removeRedactionRepository;
ForceRedactionRepository forceRedactionRepository;
ResizeRedactionRepository resizeRedactionRepository;
RecategorizationRepository recategorizationRepository;
LegalBasisChangeRepository legalBasisChangeRepository;
CommentRepository commentRepository;
FileRepository fileRepository;
public int updateManualAddRedaction(AnnotationEntityId oldAnnotationEntityId, AnnotationEntityId newAnnotationEntityId) {
if (oldAnnotationEntityId.equals(newAnnotationEntityId)) {
return 0;
}
var oldEntry = manualRedactionRepository.findById(oldAnnotationEntityId);
if (oldEntry.isPresent()) {
var newEntry = MagicConverter.convert(oldEntry.get(), ManualRedactionEntryEntity.class);
newEntry.setPositions(MagicConverter.convert(oldEntry.get().getPositions(), RectangleEntity.class));
newEntry.setFileStatus(fileRepository.findById(oldAnnotationEntityId.getFileId())
.get());
newEntry.setId(newAnnotationEntityId);
manualRedactionRepository.deleteById(oldAnnotationEntityId);
manualRedactionRepository.save(newEntry);
return 1;
}
return 0;
}
public int updateRemoveRedaction(AnnotationEntityId oldAnnotationEntityId, AnnotationEntityId newAnnotationEntityId) {
if (oldAnnotationEntityId.equals(newAnnotationEntityId)) {
return 0;
}
var oldEntry = removeRedactionRepository.findById(oldAnnotationEntityId);
if (oldEntry.isPresent()) {
var newEntry = MagicConverter.convert(oldEntry.get(), IdRemovalEntity.class);
newEntry.setFileStatus(fileRepository.findById(oldAnnotationEntityId.getFileId())
.get());
newEntry.setId(newAnnotationEntityId);
removeRedactionRepository.deleteById(oldAnnotationEntityId);
removeRedactionRepository.save(newEntry);
return 1;
}
return 0;
}
public int updateForceRedaction(AnnotationEntityId oldAnnotationEntityId, AnnotationEntityId newAnnotationEntityId) {
if (oldAnnotationEntityId.equals(newAnnotationEntityId)) {
return 0;
}
var oldEntry = forceRedactionRepository.findById(oldAnnotationEntityId);
if (oldEntry.isPresent()) {
var newEntry = MagicConverter.convert(oldEntry.get(), ManualForceRedactionEntity.class);
newEntry.setFileStatus(fileRepository.findById(oldAnnotationEntityId.getFileId())
.get());
newEntry.setId(newAnnotationEntityId);
forceRedactionRepository.deleteById(oldAnnotationEntityId);
forceRedactionRepository.save(newEntry);
return 1;
}
return 0;
}
public int updateResizeRedaction(AnnotationEntityId oldAnnotationEntityId, AnnotationEntityId newAnnotationEntityId) {
if (oldAnnotationEntityId.equals(newAnnotationEntityId)) {
return 0;
}
var oldEntry = resizeRedactionRepository.findById(oldAnnotationEntityId);
if (oldEntry.isPresent()) {
var newEntry = MagicConverter.convert(oldEntry.get(), ManualResizeRedactionEntity.class);
newEntry.setId(newAnnotationEntityId);
newEntry.setPositions(MagicConverter.convert(oldEntry.get().getPositions(), RectangleEntity.class));
newEntry.setFileStatus(fileRepository.findById(oldAnnotationEntityId.getFileId())
.get());
resizeRedactionRepository.deleteById(oldAnnotationEntityId);
resizeRedactionRepository.save(newEntry);
return 1;
}
return 0;
}
public int updateRecategorizationRedaction(AnnotationEntityId oldAnnotationEntityId, AnnotationEntityId newAnnotationEntityId) {
if (oldAnnotationEntityId.equals(newAnnotationEntityId)) {
return 0;
}
var oldEntry = recategorizationRepository.findById(oldAnnotationEntityId);
if (oldEntry.isPresent()) {
var newEntry = MagicConverter.convert(oldEntry.get(), ManualRecategorizationEntity.class);
newEntry.setId(newAnnotationEntityId);
newEntry.setFileStatus(fileRepository.findById(oldAnnotationEntityId.getFileId())
.get());
recategorizationRepository.deleteById(oldAnnotationEntityId);
recategorizationRepository.save(newEntry);
return 1;
}
return 0;
}
public int updateLegalBasisChangeRedaction(AnnotationEntityId oldAnnotationEntityId, AnnotationEntityId newAnnotationEntityId) {
if (oldAnnotationEntityId.equals(newAnnotationEntityId)) {
return 0;
}
var oldEntry = legalBasisChangeRepository.findById(oldAnnotationEntityId);
if (oldEntry.isPresent()) {
var newEntry = MagicConverter.convert(oldEntry.get(), ManualLegalBasisChangeEntity.class);
newEntry.setId(newAnnotationEntityId);
newEntry.setFileStatus(fileRepository.findById(oldAnnotationEntityId.getFileId())
.get());
legalBasisChangeRepository.deleteById(oldAnnotationEntityId);
legalBasisChangeRepository.save(newEntry);
return 1;
}
return 0;
}
public int updateCommentIds(String fileId, String key, String value) {
if (key.equals(value)) {
return 0;
}
return commentRepository.saasMigrationUpdateAnnotationIds(fileId, key, value);
}
}
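The six update methods above repeat the same copy, reassign id, delete, save dance, because JPA does not allow changing a primary key in place. A hedged sketch of how that pattern could be factored into a generic helper, assuming the repositories extend JpaRepository keyed by AnnotationEntityId; the helper and its functional parameters are illustrative only, not part of the change.

import java.util.function.BiConsumer;
import java.util.function.Function;
import org.springframework.data.jpa.repository.JpaRepository;
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.AnnotationEntityId;

final class AnnotationIdRewriteSketch {

    // Copies the entity (including its associations), assigns the new composite id,
    // then deletes the old row and saves the copy. Returns the number of rewritten rows.
    static <E> int rewriteId(JpaRepository<E, AnnotationEntityId> repository,
                             AnnotationEntityId oldId,
                             AnnotationEntityId newId,
                             Function<E, E> copy,
                             BiConsumer<E, AnnotationEntityId> setId) {
        if (oldId.equals(newId)) {
            return 0;
        }
        return repository.findById(oldId)
                .map(oldEntry -> {
                    E newEntry = copy.apply(oldEntry);
                    setId.accept(newEntry, newId);
                    repository.deleteById(oldId);
                    repository.save(newEntry);
                    return 1;
                })
                .orElse(0);
    }

    private AnnotationIdRewriteSketch() {
    }
}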

View File

@ -0,0 +1,83 @@
package com.iqser.red.service.persistence.management.v1.processor.migration;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import org.springframework.stereotype.Service;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.AnnotationEntityId;
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.ManualRedactionEntryEntity;
import com.iqser.red.service.persistence.management.v1.processor.model.ManualChangesQueryOptions;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.annotations.AddRedactionPersistenceService;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.type.DictionaryEntryType;
import lombok.RequiredArgsConstructor;
@Service
@RequiredArgsConstructor
public class SaasMigrationManualChangesUpdateService {
private final AddRedactionPersistenceService addRedactionPersistenceService;
private final HashFunction hashFunction = Hashing.murmur3_128();
public void convertUnprocessedAddToDictionariesToLocalChanges(String fileId) {
var unprocessedManualAdds = addRedactionPersistenceService.findEntriesByFileIdAndOptions(fileId, ManualChangesQueryOptions.unprocessedOnly());
for (var unprocessedManualAdd : unprocessedManualAdds) {
if (!unprocessedManualAdd.getDictionaryEntryType().equals(DictionaryEntryType.ENTRY)) {
continue;
}
if (unprocessedManualAdd.isAddToDictionary() || unprocessedManualAdd.isAddToAllDossiers()) {
// Copy the pending dict change to a new entry with a different id; we can't reuse the same id, as it's the primary key of the table.
// The copy has no functionality; it's only there so that the UI can show a pending change.
ManualRedactionEntryEntity pendingDictAdd = new ManualRedactionEntryEntity(buildSecondaryId(unprocessedManualAdd.getId(), fileId),
unprocessedManualAdd.getUser(),
unprocessedManualAdd.getTypeId(),
unprocessedManualAdd.getValue(),
unprocessedManualAdd.getReason(),
unprocessedManualAdd.getLegalBasis(),
unprocessedManualAdd.getSection(),
unprocessedManualAdd.isRectangle(),
unprocessedManualAdd.isAddToDictionary(),
unprocessedManualAdd.isAddToAllDossiers(),
unprocessedManualAdd.isAddToDossierDictionary(),
DictionaryEntryType.ENTRY,
unprocessedManualAdd.getRequestDate(),
null,
null,
new ArrayList<>(unprocessedManualAdd.getPositions()),
unprocessedManualAdd.getFileStatus(),
unprocessedManualAdd.getTextBefore(),
unprocessedManualAdd.getTextAfter(),
unprocessedManualAdd.getSourceId(),
new HashSet<>(unprocessedManualAdd.getTypeIdsOfModifiedDictionaries()));
addRedactionPersistenceService.update(pendingDictAdd);
// change existing dict add to unprocessed manual add. ID must match with prior entry, such that other unprocessed manual changes may be applied to it.
unprocessedManualAdd.setAddToDictionary(false);
unprocessedManualAdd.setAddToAllDossiers(false);
unprocessedManualAdd.setLegalBasis("");
unprocessedManualAdd.setTypeIdsOfModifiedDictionaries(Collections.emptySet());
unprocessedManualAdd.setDictionaryEntryType(null);
addRedactionPersistenceService.update(unprocessedManualAdd);
}
}
}
private AnnotationEntityId buildSecondaryId(AnnotationEntityId annotationEntityId, String fileId) {
return new AnnotationEntityId(hashFunction.hashString(annotationEntityId.getAnnotationId(), StandardCharsets.UTF_8).toString(), fileId);
}
}
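buildSecondaryId derives a deterministic replacement id by hashing the original annotation id with Guava's murmur3_128. A quick usage-style illustration of that call; the annotation id value is made up.

import java.nio.charset.StandardCharsets;
import com.google.common.hash.Hashing;

public class SecondaryIdSketch {

    public static void main(String[] args) {
        String annotationId = "annotation-123"; // illustrative value
        // Same Guava call as buildSecondaryId: deterministic, so re-running the migration yields the same derived id.
        String derived = Hashing.murmur3_128()
                .hashString(annotationId, StandardCharsets.UTF_8)
                .toString();
        System.out.println(derived);
    }
}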

View File

@ -0,0 +1,396 @@
package com.iqser.red.service.persistence.management.v1.processor.migration;
import static com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration.MIGRATION_REQUEST_QUEUE;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.AnnotationEntityId;
import com.iqser.red.service.persistence.management.v1.processor.exception.InternalServerErrorException;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
import com.iqser.red.service.persistence.management.v1.processor.model.ManualChangesQueryOptions;
import com.iqser.red.service.persistence.management.v1.processor.service.CommentService;
import com.iqser.red.service.persistence.management.v1.processor.service.DossierService;
import com.iqser.red.service.persistence.management.v1.processor.service.IndexingService;
import com.iqser.red.service.persistence.management.v1.processor.service.job.AutomaticAnalysisJob;
import com.iqser.red.service.persistence.management.v1.processor.service.layoutparsing.LayoutParsingRequestFactory;
import com.iqser.red.service.persistence.management.v1.processor.service.manualredactions.ManualRedactionProviderService;
import com.iqser.red.service.persistence.management.v1.processor.service.manualredactions.ManualRedactionService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.FileStatusPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.SaasMigrationStatusPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.settings.FileManagementServiceSettings;
import com.iqser.red.service.persistence.management.v1.processor.utils.StorageIdUtils;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.migration.MigratedIds;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.entitymapped.ManualRedactionEntry;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.SaasMigrationStatus;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
import com.iqser.red.service.redaction.v1.model.MigrationRequest;
import com.iqser.red.storage.commons.exception.StorageException;
import com.iqser.red.storage.commons.exception.StorageObjectDoesNotExist;
import com.iqser.red.storage.commons.service.StorageService;
import com.knecon.fforesight.databasetenantcommons.providers.TenantSyncService;
import com.knecon.fforesight.service.layoutparser.internal.api.queue.LayoutParsingQueueNames;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantProvider;
import com.knecon.fforesight.tenantcommons.model.TenantSyncEvent;
import com.knecon.fforesight.tenantcommons.model.UpdateDetailsRequest;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class SaasMigrationService implements TenantSyncService {
AutomaticAnalysisJob automaticAnalysisJob;
FileStatusPersistenceService fileStatusPersistenceService;
SaasMigrationStatusPersistenceService saasMigrationStatusPersistenceService;
DossierService dossierService;
ManualRedactionProviderService manualRedactionProviderService;
TenantProvider tenantProvider;
IndexingService indexingService;
LayoutParsingRequestFactory layoutParsingRequestFactory;
RabbitTemplate rabbitTemplate;
FileManagementServiceSettings settings;
StorageService storageService;
SaasAnnotationIdMigrationService saasAnnotationIdMigrationService;
UncompressedFilesMigrationService uncompressedFilesMigrationService;
ManualRedactionService manualRedactionService;
CommentService commentService;
RankDeDuplicationService rankDeDuplicationService;
SaasMigrationManualChangesUpdateService saasMigrationManualChangesUpdateService;
@Override
public synchronized void syncTenant(TenantSyncEvent tenantSyncEvent) {
startMigrationForTenant(tenantSyncEvent.getTenantId());
}
// Persistence-Service needs to be scaled to 1.
public void startMigrationForTenant(String tenantId) {
// TODO migrate rules.
automaticAnalysisJob.stopForTenant(tenantId);
log.info("Starting uncompressed files migration ...");
uncompressedFilesMigrationService.migrateUncompressedFiles(tenantId);
log.info("Finished uncompressed files migration ...");
rankDeDuplicationService.deduplicate();
int numberOfFiles = 0;
var files = saasMigrationStatusPersistenceService.findAll();
for (var file : files) {
var dossier = dossierService.getDossierById(file.getDossierId());
if (dossier.getHardDeletedTime() != null) {
if (fileStatusPersistenceService.getStatus(file.getFileId()).getHardDeletedTime() != null) {
saasMigrationStatusPersistenceService.updateStatus(file.getFileId(), SaasMigrationStatus.FINISHED);
continue;
} else {
fileStatusPersistenceService.hardDelete(file.getFileId(), dossier.getHardDeletedTime());
saasMigrationStatusPersistenceService.updateStatus(file.getFileId(), SaasMigrationStatus.FINISHED);
continue;
}
}
if (fileStatusPersistenceService.getStatus(file.getFileId()).getHardDeletedTime() != null) {
saasMigrationStatusPersistenceService.updateStatus(file.getFileId(), SaasMigrationStatus.FINISHED);
continue;
}
if (!file.getStatus().equals(SaasMigrationStatus.MIGRATION_REQUIRED)) {
log.info("Skipping {} for tenant {} since migration status is {}", file.getFileId(), TenantContext.getTenantId(), file.getStatus());
continue;
}
// delete NER_ENTITIES since offsets depend on old document structure.
storageService.deleteObject(TenantContext.getTenantId(), StorageIdUtils.getStorageId(file.getDossierId(), file.getFileId(), FileType.NER_ENTITIES));
var layoutParsingRequest = layoutParsingRequestFactory.build(dossier.getDossierTemplate().getId(), file.getDossierId(), file.getFileId(), false);
rabbitTemplate.convertAndSend(LayoutParsingQueueNames.LAYOUT_PARSING_REQUEST_EXCHANGE, TenantContext.getTenantId(), layoutParsingRequest);
numberOfFiles++;
}
log.info("Added {} documents for tenant {} to Layout-Parsing queue for saas migration", numberOfFiles, TenantContext.getTenantId());
if (numberOfFiles == 0) {
finalizeMigration();
}
}
public void startMigrationForFile(String dossierId, String fileId) {
var dossier = dossierService.getDossierById(dossierId);
if (dossier.getHardDeletedTime() != null) {
if (fileStatusPersistenceService.getStatus(fileId).getHardDeletedTime() != null) {
saasMigrationStatusPersistenceService.updateStatus(fileId, SaasMigrationStatus.FINISHED);
return;
} else {
fileStatusPersistenceService.hardDelete(fileId, dossier.getHardDeletedTime());
saasMigrationStatusPersistenceService.updateStatus(fileId, SaasMigrationStatus.FINISHED);
return;
}
}
if (fileStatusPersistenceService.getStatus(fileId).getHardDeletedTime() != null) {
saasMigrationStatusPersistenceService.updateStatus(fileId, SaasMigrationStatus.FINISHED);
return;
}
log.info("Starting Migration for dossierId {} and fileId {}", dossierId, fileId);
saasMigrationStatusPersistenceService.createMigrationRequiredStatus(dossierId, fileId);
var layoutParsingRequest = layoutParsingRequestFactory.build(dossier.getDossierTemplate().getId(), dossierId, fileId, false);
rabbitTemplate.convertAndSend(LayoutParsingQueueNames.LAYOUT_PARSING_REQUEST_EXCHANGE, TenantContext.getTenantId(), layoutParsingRequest);
}
public void handleLayoutParsingFinished(String dossierId, String fileId) {
if (!layoutParsingFilesExist(dossierId, fileId)) {
saasMigrationStatusPersistenceService.updateErrorStatus(fileId, "Layout parsing files not written!");
return;
}
log.info("Layout Parsing finished for saas migration for tenant {} dossier {} and file {}", TenantContext.getTenantId(), dossierId, fileId);
saasMigrationStatusPersistenceService.updateStatus(fileId, SaasMigrationStatus.DOCUMENT_FILES_MIGRATED);
if (fileStatusPersistenceService.getStatus(fileId).getWorkflowStatus().equals(WorkflowStatus.APPROVED)) {
saasMigrationManualChangesUpdateService.convertUnprocessedAddToDictionariesToLocalChanges(fileId);
}
try {
indexingService.reindex(dossierId, Set.of(fileId), false);
String dossierTemplateId = dossierService.getDossierById(dossierId).getDossierTemplateId();
rabbitTemplate.convertAndSend(MIGRATION_REQUEST_QUEUE,
MigrationRequest.builder()
.dossierTemplateId(dossierTemplateId)
.dossierId(dossierId)
.fileId(fileId)
.fileIsApproved(fileStatusPersistenceService.getStatus(fileId).getWorkflowStatus().equals(WorkflowStatus.APPROVED))
.manualRedactions(manualRedactionProviderService.getManualRedactions(fileId, ManualChangesQueryOptions.allWithoutDeleted()))
.entitiesWithComments(commentService.getCommentCounts(fileId).keySet())
.build());
} catch (Exception e) {
log.error("Queuing of entityLog migration failed with {}", e.getMessage());
saasMigrationStatusPersistenceService.updateErrorStatus(fileId, String.format("Queuing of entityLog migration failed with %s", e.getMessage()));
}
}
private boolean layoutParsingFilesExist(String dossierId, String fileId) {
return storageService.objectExists(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.DOCUMENT_STRUCTURE)) //
&& storageService.objectExists(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.DOCUMENT_TEXT)) //
&& storageService.objectExists(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.DOCUMENT_PAGES)) //
&& storageService.objectExists(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.DOCUMENT_POSITION));
}
public void handleEntityLogMigrationFinished(String dossierId, String fileId) {
if (!entityLogMigrationFilesExist(dossierId, fileId)) {
saasMigrationStatusPersistenceService.updateErrorStatus(fileId, "Migration Files not written!");
return;
}
saasMigrationStatusPersistenceService.updateStatus(fileId, SaasMigrationStatus.REDACTION_LOGS_MIGRATED);
log.info("EntityLog migration finished for saas migration for tenant {} dossier {} and file {}", TenantContext.getTenantId(), dossierId, fileId);
migrateAnnotationIdsAndAddManualAddRedactionsAndDeleteSectionGrid(dossierId, fileId);
}
private boolean entityLogMigrationFilesExist(String dossierId, String fileId) {
return storageService.objectExists(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.ENTITY_LOG)) && storageService.objectExists(
TenantContext.getTenantId(),
StorageIdUtils.getStorageId(dossierId, fileId, FileType.MIGRATED_IDS));
}
public void handleError(String dossierId, String fileId, String errorCause, String retryExchange) {
var migrationEntry = saasMigrationStatusPersistenceService.findById(fileId);
Integer numErrors = migrationEntry.getProcessingErrorCounter();
if (numErrors != null && numErrors <= settings.getMaxErrorRetries()) {
saasMigrationStatusPersistenceService.updateErrorCounter(fileId, numErrors + 1, errorCause);
rabbitTemplate.convertAndSend(retryExchange, TenantContext.getTenantId(), MigrationRequest.builder().dossierId(dossierId).fileId(fileId).build());
log.error("Retrying error during saas migration for tenant {} dossier {} and file {}, cause {}", TenantContext.getTenantId(), dossierId, fileId, errorCause);
} else {
saasMigrationStatusPersistenceService.updateErrorStatus(fileId, errorCause);
log.error("Error during saas migration for tenant {} dossier {} and file {}, cause {}", TenantContext.getTenantId(), dossierId, fileId, errorCause);
}
}
public void requeueErrorFiles() {
automaticAnalysisJob.stopForTenant(TenantContext.getTenantId());
saasMigrationStatusPersistenceService.findAllByStatus(SaasMigrationStatus.ERROR)
.forEach(migrationStatus -> startMigrationForFile(migrationStatus.getDossierId(), migrationStatus.getFileId()));
}
private void migrateAnnotationIdsAndAddManualAddRedactionsAndDeleteSectionGrid(String dossierId, String fileId) {
MigratedIds migratedIds = getMigratedIds(dossierId, fileId);
Map<String, String> oldToNewMapping = migratedIds.buildOldToNewMapping();
updateAnnotationIds(dossierId, fileId, oldToNewMapping);
List<String> forceRedactionIdsToDelete = migratedIds.getForceRedactionIdsToDelete();
softDeleteForceRedactions(fileId, forceRedactionIdsToDelete);
log.info("Soft-deleted force redactions.");
List<ManualRedactionEntry> manualRedactionEntriesToAdd = migratedIds.getManualRedactionEntriesToAdd();
int count = addManualRedactionEntries(manualRedactionEntriesToAdd);
log.info("Added {} additional manual entries.", count);
deleteSectionGridAndNerEntitiesFiles(dossierId, fileId);
saasMigrationStatusPersistenceService.updateStatus(fileId, SaasMigrationStatus.FINISHED);
log.info("AnnotationIds migration finished for saas migration for tenant {} dossier {} and file {}", TenantContext.getTenantId(), dossierId, fileId);
finalizeMigration(); // AutomaticAnalysisJob should be re-enabled by re-starting the persistence service pod after a rule change
}
private void deleteSectionGridAndNerEntitiesFiles(String dossierId, String fileId) {
try {
storageService.deleteObject(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.SECTION_GRID));
} catch (StorageObjectDoesNotExist e) {
log.info("No sectiongrid found for {}, {}, ignoring....", dossierId, fileId);
}
try {
storageService.deleteObject(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.NER_ENTITIES));
} catch (StorageObjectDoesNotExist e) {
log.info("No ner entities file found for {}, {}, ignoring....", dossierId, fileId);
}
}
private void softDeleteForceRedactions(String fileId, List<String> forceRedactionIdsToDelete) {
manualRedactionService.softDeleteForceRedactions(fileId, forceRedactionIdsToDelete);
}
private int addManualRedactionEntries(List<ManualRedactionEntry> manualRedactionEntriesToAdd) {
manualRedactionEntriesToAdd.forEach(add -> {
if (add.getSection() != null && add.getSection().length() > 254) {
add.setSection(add.getSection().substring(0, 254));
}
});
return manualRedactionService.addManualRedactionEntries(manualRedactionEntriesToAdd, true);
}
public void revertMigrationForFile(String dossierId, String fileId) {
log.info("Reverting Migration for dossierId {} and fileId {}", dossierId, fileId);
MigratedIds migratedIds = getMigratedIds(dossierId, fileId);
Map<String, String> newToOldMapping = migratedIds.buildNewToOldMapping();
updateAnnotationIds(dossierId, fileId, newToOldMapping);
deleteManualRedactionEntries(migratedIds.getManualRedactionEntriesToAdd());
undeleteForceRedactions(fileId, migratedIds.getForceRedactionIdsToDelete());
saasMigrationStatusPersistenceService.createMigrationRequiredStatus(dossierId, fileId);
}
private void undeleteForceRedactions(String fileId, List<String> forceRedactionIdsToDelete) {
manualRedactionService.undeleteForceRedactions(fileId, forceRedactionIdsToDelete);
}
private void deleteManualRedactionEntries(List<ManualRedactionEntry> manualRedactionEntriesToAdd) {
manualRedactionService.deleteManualRedactionEntries(manualRedactionEntriesToAdd);
}
private void updateAnnotationIds(String dossierId, String fileId, Map<String, String> idMapping) {
try {
updateAnnotationIds(fileId, idMapping);
} catch (Exception e) {
String message = String.format("Error during annotation id migration for tenant %s dossier %s and file %s, cause %s",
TenantContext.getTenantId(),
dossierId,
fileId,
e.getMessage());
saasMigrationStatusPersistenceService.updateErrorStatus(fileId, message);
log.error(message);
throw e;
}
}
private void finalizeMigration() {
if (saasMigrationStatusPersistenceService.countByStatus(SaasMigrationStatus.FINISHED) == saasMigrationStatusPersistenceService.countAll()) {
// automaticAnalysisJob.startForTenant(TenantContext.getTenantId()); // AutomaticAnalysisJob should be re-enabled by re-starting the persistence service pod after a rule change
tenantProvider.updateDetails(TenantContext.getTenantId(), UpdateDetailsRequest.builder().key("persistence-service-ready").value(true).build());
log.info("Saas migration finished for tenantId {}, re-enabled scheduler", TenantContext.getTenantId());
}
}
public void updateAnnotationIds(String fileId, Map<String, String> idMapping) {
AtomicInteger numUpdates = new AtomicInteger(0);
AtomicInteger numCommentUpdates = new AtomicInteger(0);
idMapping.forEach((key, value) -> {
AnnotationEntityId oldAnnotationEntityId = buildAnnotationId(fileId, key);
AnnotationEntityId newAnnotationEntityId = buildAnnotationId(fileId, value);
numUpdates.getAndAdd(saasAnnotationIdMigrationService.updateManualAddRedaction(oldAnnotationEntityId, newAnnotationEntityId));
numUpdates.getAndAdd(saasAnnotationIdMigrationService.updateRemoveRedaction(oldAnnotationEntityId, newAnnotationEntityId));
numUpdates.getAndAdd(saasAnnotationIdMigrationService.updateForceRedaction(oldAnnotationEntityId, newAnnotationEntityId));
numUpdates.getAndAdd(saasAnnotationIdMigrationService.updateResizeRedaction(oldAnnotationEntityId, newAnnotationEntityId));
numUpdates.getAndAdd(saasAnnotationIdMigrationService.updateRecategorizationRedaction(oldAnnotationEntityId, newAnnotationEntityId));
numUpdates.getAndAdd(saasAnnotationIdMigrationService.updateLegalBasisChangeRedaction(oldAnnotationEntityId, newAnnotationEntityId));
numCommentUpdates.getAndAdd(saasAnnotationIdMigrationService.updateCommentIds(fileId, key, value));
});
log.info("Migrated {} annotationIds and {} comments for file {}", numUpdates.get(), numCommentUpdates, fileId);
}
private AnnotationEntityId buildAnnotationId(String fileId, String annotationId) {
return new AnnotationEntityId(annotationId, fileId);
}
private MigratedIds getMigratedIds(String dossierId, String fileId) {
try {
return storageService.readJSONObject(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.MIGRATED_IDS), MigratedIds.class);
} catch (StorageObjectDoesNotExist e) {
throw new NotFoundException(String.format("MigratedIds does not exist for Dossier ID \"%s\" and File ID \"%s\"!", dossierId, fileId));
} catch (StorageException e) {
throw new InternalServerErrorException(e.getMessage());
}
}
}
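
A minimal, self-contained sketch of the bounded-retry decision implemented in handleError above. MAX_ERROR_RETRIES is an assumed stand-in for settings.getMaxErrorRetries(); the enum and class are purely illustrative.

public class RetryDecisionSketch {

    private static final int MAX_ERROR_RETRIES = 3; // assumed stand-in for settings.getMaxErrorRetries()

    enum Outcome { REQUEUE, MARK_ERROR }

    // Mirrors the guard in handleError: a non-null counter that has not yet exceeded the configured
    // maximum leads to a requeue (with the counter incremented); anything else marks the file as failed.
    static Outcome decide(Integer processingErrorCounter) {
        if (processingErrorCounter != null && processingErrorCounter <= MAX_ERROR_RETRIES) {
            return Outcome.REQUEUE;
        }
        return Outcome.MARK_ERROR;
    }

    public static void main(String[] args) {
        System.out.println(decide(0));    // REQUEUE
        System.out.println(decide(3));    // REQUEUE (a counter equal to the maximum still retries)
        System.out.println(decide(4));    // MARK_ERROR
        System.out.println(decide(null)); // MARK_ERROR (no counter recorded yet)
    }
}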

View File

@ -1,35 +0,0 @@
package com.iqser.red.service.persistence.management.v1.processor.migration.migrations;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.management.v1.processor.migration.Migration;
import com.iqser.red.service.persistence.management.v1.processor.migration.RankDeDuplicationService;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
public class V32RankDeduplicationMigration extends Migration {
private static final String NAME = "Adding to the migration the rank de-duplication";
private static final long VERSION = 32;
private final RankDeDuplicationService rankDeDuplicationService;
public V32RankDeduplicationMigration(RankDeDuplicationService rankDeDuplicationService) {
super(NAME, VERSION);
this.rankDeDuplicationService = rankDeDuplicationService;
}
@Override
protected void migrate() {
log.info("Migration: Checking for duplicate ranks");
rankDeDuplicationService.deduplicate();
}
}
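
For context on the removed class: a hedged sketch of the versioned-migration pattern it followed. The base class and runner below are hypothetical stand-ins for the project's Migration framework, not its actual API.

import java.util.Comparator;
import java.util.List;

// Hypothetical stand-in for the project's Migration base class: each migration carries a
// human-readable name and a version, and implements a single migrate() step.
abstract class MigrationSketch {

    private final String name;
    private final long version;

    protected MigrationSketch(String name, long version) {
        this.name = name;
        this.version = version;
    }

    String name() { return name; }

    long version() { return version; }

    protected abstract void migrate();

    // Runs every migration whose version lies above the last applied one, in ascending order.
    static void runPending(List<MigrationSketch> migrations, long lastAppliedVersion) {
        migrations.stream()
                .filter(m -> m.version() > lastAppliedVersion)
                .sorted(Comparator.comparingLong(MigrationSketch::version))
                .forEach(m -> {
                    System.out.println("Applying migration " + m.version() + ": " + m.name());
                    m.migrate();
                });
    }
}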

View File

@ -49,7 +49,9 @@ public final class ActionRoles {
// Rules
public static final String READ_RULES = "red-read-rules";
public static final String READ_SYSTEM_RULES = "red-read-system-rules";
public static final String WRITE_RULES = "red-write-rules";
public static final String WRITE_SYSTEM_RULES = "red-write-system-rules";
// Data formats
public static final String READ_DATA_FORMATS = "red-read-data-formats";

View File

@ -25,7 +25,7 @@ public final class ApplicationRoles {
UPDATE_LICENSE,
GET_TENANTS,
CREATE_TENANT,
READ_USERS, READ_ALL_USERS, READ_USER_STATS,
READ_USERS, READ_ALL_USERS, READ_SYSTEM_RULES, WRITE_SYSTEM_RULES,
WRITE_USERS,
READ_SMTP_CONFIGURATION,
WRITE_SMTP_CONFIGURATION,
@ -61,7 +61,8 @@ public final class ApplicationRoles {
PROCESS_MANUAL_REDACTION_REQUEST,
READ_COLORS,
READ_DICTIONARY_TYPES,
READ_DIGITAL_SIGNATURE, READ_DOSSIER,
READ_DIGITAL_SIGNATURE,
READ_DOSSIER,
READ_DOSSIER_ATTRIBUTES,
READ_DOSSIER_ATTRIBUTES_CONFIG,
READ_DOSSIER_TEMPLATES,
@ -73,11 +74,10 @@ public final class ApplicationRoles {
READ_MANUAL_REDACTIONS,
READ_NOTIFICATIONS,
READ_REDACTION_LOG,
READ_RULES,
READ_DATA_FORMATS,
READ_USERS,
READ_VERSIONS,
READ_WATERMARK,
READ_WATERMARK, READ_RULES,
REANALYZE_DOSSIER,
REANALYZE_FILE,
REQUEST_MANUAL_REDACTION,
@ -116,9 +116,9 @@ public final class ApplicationRoles {
READ_DOSSIER_TEMPLATES,
READ_FILE_ATTRIBUTES_CONFIG,
READ_LEGAL_BASIS,
READ_LICENSE_REPORT, READ_NOTIFICATIONS, READ_USER_STATS,
READ_RULES,
READ_DATA_FORMATS,
READ_LICENSE_REPORT,
READ_NOTIFICATIONS,
READ_DATA_FORMATS, READ_RULES,
READ_SMTP_CONFIGURATION,
READ_VERSIONS,
READ_WATERMARK,
@ -131,9 +131,7 @@ public final class ApplicationRoles {
WRITE_DOSSIER_ATTRIBUTES_CONFIG,
WRITE_DOSSIER_TEMPLATES,
WRITE_FILE_ATTRIBUTES_CONFIG,
WRITE_GENERAL_CONFIGURATION,
WRITE_LEGAL_BASIS,
WRITE_RULES,
WRITE_GENERAL_CONFIGURATION, WRITE_LEGAL_BASIS, WRITE_RULES, WRITE_SYSTEM_RULES,
WRITE_DATA_FORMATS,
WRITE_SMTP_CONFIGURATION,
WRITE_WATERMARK,
@ -150,8 +148,9 @@ public final class ApplicationRoles {
READ_APP_CONFIG,
READ_GENERAL_CONFIGURATION,
READ_GENERAL_CONFIGURATION,
GET_SIMILAR_IMAGES, READ_NOTIFICATIONS,
READ_USERS, READ_USER_STATS,
GET_SIMILAR_IMAGES,
READ_NOTIFICATIONS,
READ_USERS,
UPDATE_MY_PROFILE,
UPDATE_NOTIFICATIONS,
WRITE_USERS,

View File

@ -1,9 +1,6 @@
package com.iqser.red.service.persistence.management.v1.processor.service;
import java.util.Set;
import org.springframework.http.HttpStatus;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.access.prepost.PostAuthorize;
import org.springframework.security.acls.AclPermissionEvaluator;
import org.springframework.security.core.context.SecurityContextHolder;
@ -206,19 +203,4 @@ public class AccessControlService {
}
}
public void verifyUserIsDossierOwnerOrApproverOrAssignedReviewer(String dossierId, Set<String> fileIds) {
try {
verifyUserIsDossierOwnerOrApprover(dossierId);
} catch (AccessDeniedException e1) {
try {
for (String fileId : fileIds) {
verifyUserIsReviewer(dossierId, fileId);
}
} catch (NotAllowedException e2) {
throw new NotAllowedException("User must be dossier owner, approver or assigned reviewer of the file.");
}
}
}
}
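
A minimal sketch of the fallback chain the removed verifyUserIsDossierOwnerOrApproverOrAssignedReviewer implemented, with the individual checks reduced to hypothetical predicates instead of the exception-based calls used in the service.

import java.util.Set;

public class AccessFallbackSketch {

    // Hypothetical predicates standing in for verifyUserIsDossierOwnerOrApprover and verifyUserIsReviewer.
    interface Checks {
        boolean isOwnerOrApprover(String dossierId);
        boolean isReviewer(String dossierId, String fileId);
    }

    // The broad dossier-level check is tried first; only if it fails must the narrower
    // per-file reviewer check pass for every requested file.
    static void verify(Checks checks, String dossierId, Set<String> fileIds) {
        if (checks.isOwnerOrApprover(dossierId)) {
            return;
        }
        for (String fileId : fileIds) {
            if (!checks.isReviewer(dossierId, fileId)) {
                throw new SecurityException("User must be dossier owner, approver or assigned reviewer of the file.");
            }
        }
    }
}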

View File

@ -24,7 +24,6 @@ import com.iqser.red.service.persistence.management.v1.processor.mapper.Componen
import com.iqser.red.service.persistence.management.v1.processor.model.ComponentMapping;
import com.iqser.red.service.persistence.management.v1.processor.model.ComponentMappingDownloadModel;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.DossierTemplatePersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.utils.StringEncodingUtils;
import com.iqser.red.service.persistence.service.v1.api.shared.model.component.ComponentMappingMetadata;
import com.opencsv.CSVParserBuilder;
import com.opencsv.CSVReader;
@ -108,7 +107,7 @@ public class ComponentMappingService {
String fileName,
char quoteChar) {
Charset charset = StringEncodingUtils.resolveCharset(encoding);
Charset charset = resolveCharset(encoding);
CsvStats stats = sortCSVFile(delimiter, mappingFile, charset, quoteChar);
@ -127,6 +126,20 @@ public class ComponentMappingService {
}
private static Charset resolveCharset(String encoding) {
try {
return Charset.forName(encoding);
} catch (IllegalCharsetNameException e) {
throw new BadRequestException("Invalid character encoding: " + encoding);
} catch (UnsupportedCharsetException e) {
throw new BadRequestException("Unsupported character encoding: " + encoding);
} catch (IllegalArgumentException e) {
throw new BadRequestException("Encoding can't be null.");
}
}
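
A self-contained sketch of the exception mapping resolveCharset performs, using IllegalArgumentException as a hypothetical stand-in for the service's BadRequestException. Charset.forName throws IllegalCharsetNameException for syntactically invalid names and UnsupportedCharsetException for valid but unavailable ones; a null name surfaces directly as an IllegalArgumentException from forName itself, which the service maps to its "Encoding can't be null." message.

import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.UnsupportedCharsetException;

public class CharsetResolutionSketch {

    // Translates the two charset-specific exceptions into a single client-facing error,
    // mirroring the shape of resolveCharset above (simplified: the null case is not wrapped here).
    static Charset resolve(String encoding) {
        try {
            return Charset.forName(encoding);
        } catch (IllegalCharsetNameException e) {
            throw new IllegalArgumentException("Invalid character encoding: " + encoding, e);
        } catch (UnsupportedCharsetException e) {
            throw new IllegalArgumentException("Unsupported character encoding: " + encoding, e);
        }
    }

    public static void main(String[] args) {
        System.out.println(resolve("UTF-8"));      // UTF-8
        System.out.println(resolve("ISO-8859-1")); // ISO-8859-1
        try {
            resolve("not a charset!");             // illegal name -> wrapped IllegalArgumentException
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}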
private static CsvStats sortCSVFile(char delimiter, File mappingFile, Charset charset, char quoteChar) throws BadRequestException, IOException {
Path tempFile = Files.createTempFile("mapping", ".tmp");

View File

@ -494,9 +494,9 @@ public class DictionaryService {
}
@PreAuthorize("hasAuthority('" + ADD_UPDATE_DICTIONARY_TYPE + "')")
public void changeAddToDictionary(String type, String dossierTemplateId, String dossierId, boolean addToDictionaryAction) {
public void changeAddToDictionary(String type, String dossierTemplateId, String dossierId, boolean addToDictionary) {
dictionaryPersistenceService.updateAddToDictionary(toTypeId(type, dossierTemplateId, dossierId), addToDictionaryAction);
dictionaryPersistenceService.updateAddToDictionary(toTypeId(type, dossierTemplateId, dossierId), addToDictionary);
}
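
The authority check above follows the usual Spring Security pattern of splicing a role constant into a hasAuthority SpEL expression. A hypothetical sketch of how the new red-read-system-rules / red-write-system-rules action roles from ActionRoles could gate service methods the same way; the class, method names, and bodies are illustrative only.

import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Service;

@Service
public class SystemRulesServiceSketch {

    // Hypothetical local copies of the action-role constants added in ActionRoles above.
    public static final String READ_SYSTEM_RULES = "red-read-system-rules";
    public static final String WRITE_SYSTEM_RULES = "red-write-system-rules";

    // The constant is concatenated at compile time, so the annotation value becomes
    // "hasAuthority('red-read-system-rules')".
    @PreAuthorize("hasAuthority('" + READ_SYSTEM_RULES + "')")
    public String readSystemRules(String dossierTemplateId) {
        return "system rules for " + dossierTemplateId; // placeholder body
    }

    @PreAuthorize("hasAuthority('" + WRITE_SYSTEM_RULES + "')")
    public void writeSystemRules(String dossierTemplateId, String rules) {
        // placeholder body; the real service would persist the uploaded rules
    }
}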

View File

@ -156,7 +156,7 @@ public class DossierTemplateCloneService {
private void cloneComponents(String dossierTemplate, String clonedDossierTemplateId) {
List<ComponentDefinitionEntity> componentDefinitionEntities = componentDefinitionPersistenceService.findByDossierTemplateIdAndNotSoftDeleted(dossierTemplate);
List<ComponentDefinitionEntity> componentDefinitionEntities = componentDefinitionPersistenceService.findComponentsByDossierTemplateId(dossierTemplate);
for (ComponentDefinitionEntity componentDefinitionEntity : componentDefinitionEntities) {
ComponentDefinitionAddRequest componentDefinitionAddRequest = ComponentDefinitionAddRequest.builder()

View File

@ -1,7 +1,5 @@
package com.iqser.red.service.persistence.management.v1.processor.service;
import static com.iqser.red.service.persistence.service.v1.api.external.resource.FileAttributesResource.ASCII_ENCODING;
import static com.iqser.red.service.persistence.service.v1.api.external.resource.FileAttributesResource.ISO_ENCODING;
import static com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileAttributeTypeFormats.FILE_ATTRIBUTE_TYPE_DATE_FORMAT;
import static com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileAttributeTypeFormats.FILE_ATTRIBUTE_TYPE_NUMBER_REGEX;
@ -34,7 +32,6 @@ import com.iqser.red.service.persistence.management.v1.processor.exception.Confl
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.DossierPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.FileAttributeConfigPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.FileStatusPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.utils.StringEncodingUtils;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.ImportCsvRequest;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.ImportCsvResponse;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileAttributeType;
@ -62,6 +59,10 @@ public class FileAttributesManagementService {
private final DossierPersistenceService dossierPersistenceService;
private final IndexingService indexingService;
public static String UTF_ENCODING = "UTF-8";
public static String ASCII_ENCODING = "ASCII";
public static String ISO_ENCODING = "ISO";
@Transactional
public ImportCsvResponse importCsv(String dossierId, ImportCsvRequest importCsvRequest) {
@ -143,7 +144,7 @@ public class FileAttributesManagementService {
throw new IllegalArgumentException("Delimiter must be a single character.");
}
char delimiterChar = delimiter.charAt(0);
Charset charset = StringEncodingUtils.resolveCharset(encoding);
Charset charset = Charset.forName(encoding);
try (CSVReader csvReader = new CSVReaderBuilder(new InputStreamReader(new ByteArrayInputStream(csvFileBytes), charset)).withCSVParser(new CSVParserBuilder().withSeparator(
delimiterChar).build()).build()) {
@ -213,7 +214,7 @@ public class FileAttributesManagementService {
if (ASCII_ENCODING.equalsIgnoreCase(encoding) || StandardCharsets.US_ASCII.name().equalsIgnoreCase(encoding)) {
return StandardCharsets.US_ASCII;
}
// accept only name "ISO_8859_1" of the charset
// accept both "ISO" (non-unique name) and the actual name "US-ASCII" of the charset
if (ISO_ENCODING.equalsIgnoreCase(encoding) || StandardCharsets.ISO_8859_1.name().equalsIgnoreCase(encoding)) {
return StandardCharsets.ISO_8859_1;
}

View File

@ -1,5 +1,6 @@
package com.iqser.red.service.persistence.management.v1.processor.service;
import static com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentStructureProto.DocumentStructure;
import java.io.BufferedInputStream;
import java.io.File;
@ -17,15 +18,15 @@ import com.iqser.red.service.persistence.management.v1.processor.utils.StorageId
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.componentlog.ComponentLog;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.EntityLog;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.imported.ImportedLegalBases;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.imported.ImportedRedactions;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.imported.ImportedRedactionsPerPage;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.section.SectionGrid;
import com.iqser.red.service.persistence.service.v1.api.shared.mongo.service.ComponentLogMongoService;
import com.iqser.red.service.persistence.service.v1.api.shared.mongo.service.EntityLogMongoService;
import com.iqser.red.service.redaction.v1.server.data.DocumentStructureProto;
import com.iqser.red.service.redaction.v1.server.data.DocumentStructureWrapper;
import com.iqser.red.storage.commons.exception.StorageObjectDoesNotExist;
import com.iqser.red.storage.commons.service.StorageService;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentStructureWrapper;
import com.knecon.fforesight.tenantcommons.TenantContext;
import lombok.RequiredArgsConstructor;
@ -320,7 +321,7 @@ public class FileManagementStorageService {
return new DocumentStructureWrapper(storageService.readProtoObject(TenantContext.getTenantId(),
StorageIdUtils.getStorageId(dossierId, fileId, FileType.DOCUMENT_STRUCTURE),
DocumentStructureProto.DocumentStructure.parser()));
DocumentStructure.parser()));
}
}

View File

@ -125,7 +125,7 @@ public class FileStatusManagementService {
if (userId != null) {
assignee = userId;
}
fileStatusService.setStatusSuccessful(fileId, assignee != null ? WorkflowStatus.UNDER_REVIEW : WorkflowStatus.NEW, fileStatus.getWorkflowStatus());
fileStatusService.setStatusSuccessful(fileId, assignee != null ? WorkflowStatus.UNDER_REVIEW : WorkflowStatus.NEW);
fileStatusService.setAssignee(fileId, assignee);
indexingService.addToIndexingQueue(IndexMessageType.UPDATE, null, dossierId, fileId, 2);
}
@ -139,7 +139,7 @@ public class FileStatusManagementService {
assignee = approverId;
}
fileStatusService.setStatusSuccessful(fileId, assignee != null ? WorkflowStatus.UNDER_APPROVAL : WorkflowStatus.NEW, fileStatus.getWorkflowStatus());
fileStatusService.setStatusSuccessful(fileId, assignee != null ? WorkflowStatus.UNDER_APPROVAL : WorkflowStatus.NEW);
fileStatusService.setAssignee(fileId, approverId);
indexingService.addToIndexingQueue(IndexMessageType.UPDATE, null, dossierId, fileId, 2);
}
@ -169,7 +169,7 @@ public class FileStatusManagementService {
throw new BadRequestException("Allowed transition not possible from: " + fileStatus.getWorkflowStatus() + " to status NEW");
}
fileStatusService.setAssignee(fileId, null);
fileStatusService.setStatusSuccessful(fileId, WorkflowStatus.NEW, fileStatus.getWorkflowStatus());
fileStatusService.setStatusSuccessful(fileId, WorkflowStatus.NEW);
}

View File

@ -50,7 +50,6 @@ public class FileStatusMapper {
.legalBasisVersion(status.getLegalBasisVersion())
.lastProcessed(status.getLastProcessed())
.lastLayoutProcessed(status.getLastLayoutProcessed())
.layoutParserVersion(status.getLayoutParserVersion())
.approvalDate(status.getApprovalDate())
.lastUploaded(status.getLastUploaded())
.analysisDuration(status.getAnalysisDuration())
@ -70,7 +69,6 @@ public class FileStatusMapper {
.fileSize(status.getFileSize())
.fileErrorInfo(status.getFileErrorInfo())
.componentMappingVersions(status.getComponentMappingVersions())
.lastDownload(status.getLastDownloadDate())
.build();
}

View File

@ -122,7 +122,7 @@ public class FileStatusProcessingUpdateService {
} else {
fileStatusService.setStatusOcrProcessing(fileId,
fileEntity.getProcessingStatus().equals(ProcessingStatus.OCR_PROCESSING) ? fileEntity.getProcessingErrorCounter() + 1 : 0);
fileStatusService.addToOcrQueue(dossierId, fileId, 2, false);
fileStatusService.addToOcrQueue(dossierId, fileId, 2);
}
}

View File

@ -316,7 +316,7 @@ public class FileStatusService {
}
log.info("Add file: {} from dossier {} to OCR queue", fileId, dossierId);
setStatusOcrQueued(dossierId, fileId, false);
setStatusOcrQueued(dossierId, fileId);
sendReadOnlyAnalysisEvent(dossierId, fileId, fileEntity);
return;
}
@ -352,6 +352,7 @@ public class FileStatusService {
return;
}
boolean forceAnalysis = false;
if (settings.isLlmNerServiceEnabled()) {
boolean objectExists = fileManagementStorageService.objectExists(dossierId, fileId, FileType.LLM_NER_ENTITIES);
@ -385,7 +386,7 @@ public class FileStatusService {
boolean reanalyse = fileModel.isReanalysisRequired() || analysisType.equals(AnalysisType.MANUAL_REDACTION_REANALYZE);
MessageType messageType = calculateMessageType(reanalyse, fileModel.getProcessingStatus(), fileModel);
if (analysisType == AnalysisType.FORCE_ANALYSE || forceAnalysis) {
if(analysisType == AnalysisType.FORCE_ANALYSE || forceAnalysis) {
messageType = MessageType.ANALYSE;
}
@ -566,7 +567,7 @@ public class FileStatusService {
}
public void setStatusOcrQueued(String dossierId, String fileId, boolean allPages) {
public void setStatusOcrQueued(String dossierId, String fileId) {
FileEntity fileStatus = fileStatusPersistenceService.getStatus(fileId);
@ -578,7 +579,7 @@ public class FileStatusService {
updateOCRStartTime(fileId);
fileStatusPersistenceService.updateProcessingStatus(fileId, ProcessingStatus.OCR_PROCESSING_QUEUED);
websocketService.sendAnalysisEvent(dossierId, fileId, AnalyseStatus.OCR_PROCESSING, fileStatus.getNumberOfAnalyses() + 1);
addToOcrQueue(dossierId, fileId, 2, allPages);
addToOcrQueue(dossierId, fileId, 2);
}
@ -759,16 +760,13 @@ public class FileStatusService {
}
public void addToOcrQueue(String dossierId, String fileId, int priority, boolean allPages) {
public void addToOcrQueue(String dossierId, String fileId, int priority) {
var removeWatermark = dossierTemplatePersistenceService.getDossierTemplate(dossierPersistenceService.getDossierTemplateId(dossierId)).isRemoveWatermark();
Set<AzureOcrFeature> features = new HashSet<>();
if (removeWatermark) {
features.add(AzureOcrFeature.REMOVE_WATERMARKS);
}
if (allPages) {
features.add(AzureOcrFeature.ALL_PAGES);
}
if (currentApplicationTypeProvider.isDocuMine()) {
features.add(AzureOcrFeature.ROTATION_CORRECTION);
features.add(AzureOcrFeature.FONT_STYLE_DETECTION);
@ -818,13 +816,9 @@ public class FileStatusService {
}
public void setStatusSuccessful(String fileId, WorkflowStatus newWorkflowStatus, WorkflowStatus oldWorkflowStatus) {
public void setStatusSuccessful(String fileId, WorkflowStatus workflowStatus) {
fileStatusPersistenceService.updateWorkflowStatus(fileId, newWorkflowStatus, false);
if (oldWorkflowStatus == WorkflowStatus.APPROVED && newWorkflowStatus != WorkflowStatus.APPROVED) {
fileStatusPersistenceService.clearLastDownload(fileId);
}
fileStatusPersistenceService.updateWorkflowStatus(fileId, workflowStatus, false);
}
@ -979,7 +973,7 @@ public class FileStatusService {
if (runOcr) {
fileStatusPersistenceService.resetOcrStartAndEndDate(fileId);
setStatusOcrQueued(dossierId, fileId, false);
setStatusOcrQueued(dossierId, fileId);
return;
}
@ -1066,7 +1060,6 @@ public class FileStatusService {
addToAnalysisQueue(dossierId, fileId, priority, Sets.newHashSet(), AnalysisType.DEFAULT);
}
@Transactional
public void setStatusForceAnalyse(String dossierId, String fileId, boolean priority) {
@ -1082,9 +1075,9 @@ public class FileStatusService {
}
public void updateLayoutParserVersionAndProcessedTime(String fileId, String version) {
public void updateLayoutProcessedTime(String fileId) {
fileStatusPersistenceService.updateLayoutParserVersionAndProcessedTime(fileId, version);
fileStatusPersistenceService.updateLayoutProcessedTime(fileId);
}
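
A small sketch of the feature-set construction in the refactored addToOcrQueue above, where the ALL_PAGES flag has been dropped, watermark removal follows the dossier template, and the DocuMine-specific features are added as a pair. The enum is a hypothetical mirror of AzureOcrFeature, which lives in the OCR client library.

import java.util.EnumSet;
import java.util.Set;

public class OcrFeatureSketch {

    // Hypothetical mirror of AzureOcrFeature.
    enum Feature { REMOVE_WATERMARKS, ROTATION_CORRECTION, FONT_STYLE_DETECTION }

    static Set<Feature> buildFeatures(boolean removeWatermark, boolean isDocuMine) {
        Set<Feature> features = EnumSet.noneOf(Feature.class);
        if (removeWatermark) {
            features.add(Feature.REMOVE_WATERMARKS);
        }
        if (isDocuMine) {
            features.add(Feature.ROTATION_CORRECTION);
            features.add(Feature.FONT_STYLE_DETECTION);
        }
        return features;
    }

    public static void main(String[] args) {
        System.out.println(buildFeatures(true, false)); // [REMOVE_WATERMARKS]
        System.out.println(buildFeatures(false, true)); // [ROTATION_CORRECTION, FONT_STYLE_DETECTION]
    }
}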

View File

@ -12,8 +12,8 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.service.v1.api.shared.model.utils.Scope;
import com.iqser.red.service.persistence.service.v1.api.shared.mongo.document.ImageDocument;
import com.iqser.red.service.persistence.service.v1.api.shared.mongo.service.ImageMongoService;
import com.iqser.red.service.redaction.v1.server.data.DocumentStructureWrapper;
import com.iqser.red.service.redaction.v1.server.data.NodeTypeProto;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentStructureWrapper;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeTypeProto.NodeType;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
@ -35,7 +35,7 @@ public class ImageSimilarityService {
DocumentStructureWrapper documentStructure = fileManagementStorageService.getDocumentStructure(dossierId, fileId);
List<ImageDocument> imageDocuments = new ArrayList<>();
documentStructure.streamAllEntries()
.filter(entry -> entry.getType().equals(NodeTypeProto.NodeType.IMAGE))
.filter(entry -> entry.getType().equals(NodeType.IMAGE))
.forEach(i -> {
Map<String, String> properties = i.getPropertiesMap();
ImageDocument imageDocument = new ImageDocument();

View File

@ -25,7 +25,6 @@ import com.iqser.red.service.persistence.management.v1.processor.service.persist
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.DossierPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.LegalBasisMappingPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.RulesPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.settings.FileManagementServiceSettings;
import com.iqser.red.service.persistence.service.v1.api.shared.model.RuleFileType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.component.ComponentMappingMetadata;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.ErrorCode;
@ -51,7 +50,6 @@ public class ReanalysisRequiredStatusService {
DossierPersistenceService dossierPersistenceService;
LegalBasisMappingPersistenceService legalBasisMappingPersistenceService;
ComponentMappingService componentMappingService;
FileManagementServiceSettings settings;
public FileModel enhanceFileStatusWithAnalysisRequirements(FileModel fileModel) {
@ -67,12 +65,7 @@ public class ReanalysisRequiredStatusService {
Map<String, DossierEntity> dossierMap = new HashMap<>();
Map<String, Map<String, Integer>> componentMappingVersionMap = new HashMap<>();
fileModels.forEach(entry -> {
var analysisRequiredResult = computeAnalysisRequired(entry,
ignoreProcessingStates,
dossierTemplateVersionMap,
dossierVersionMap,
dossierMap,
componentMappingVersionMap);
var analysisRequiredResult = computeAnalysisRequired(entry, ignoreProcessingStates, dossierTemplateVersionMap, dossierVersionMap, dossierMap, componentMappingVersionMap);
entry.setReanalysisRequired(analysisRequiredResult.isReanalysisRequired());
entry.setFullAnalysisRequired(analysisRequiredResult.isFullAnalysisRequired());
entry.setComponentReanalysisRequired(analysisRequiredResult.isComponentReanalysisRequired());
@ -130,7 +123,7 @@ public class ReanalysisRequiredStatusService {
if (fileStatus.getLastFileAttributeChange() != null && fileStatus.getLastProcessed().isBefore(fileStatus.getLastFileAttributeChange())) {
return new AnalysisRequiredResult(true, false);
} else {
return requiresReanalysisBasedOnVersionDifference(fileStatus, dossier, dossierTemplateVersionMap, dossierVersionMap, componentMappingVersionMap);
return requiresReanalysisBasedOnVersionDifference(fileStatus, dossier, dossierTemplateVersionMap, dossierVersionMap,componentMappingVersionMap);
}
default:
return new AnalysisRequiredResult(false, false);
@ -141,18 +134,14 @@ public class ReanalysisRequiredStatusService {
}
private AnalysisRequiredResult computeAnalysisRequiredForErrorState(DossierEntity dossier,
FileModel fileStatus,
Map<String, Map<VersionType, Long>> dossierTemplateVersionMap) {
private AnalysisRequiredResult computeAnalysisRequiredForErrorState(DossierEntity dossier,FileModel fileStatus, Map<String, Map<VersionType, Long>> dossierTemplateVersionMap){
Map<VersionType, Long> dossierTemplateVersions = dossierTemplateVersionMap.computeIfAbsent(dossier.getDossierTemplateId(),
k -> buildVersionData(dossier.getDossierTemplateId()));
var rulesVersionMatches = fileStatus.getRulesVersion() == dossierTemplateVersions.getOrDefault(RULES, -1L);
var componentRulesVersionMatches = fileStatus.getComponentRulesVersion() == dossierTemplateVersions.getOrDefault(COMPONENT_RULES, -1L);
var isRuleExecutionTimeout = fileStatus.getFileErrorInfo() != null && fileStatus.getFileErrorInfo().getErrorCode() != null && fileStatus.getFileErrorInfo()
.getErrorCode()
.equals(ErrorCode.RULES_EXECUTION_TIMEOUT);
var isRuleExecutionTimeout = fileStatus.getFileErrorInfo() != null && fileStatus.getFileErrorInfo().getErrorCode() != null && fileStatus.getFileErrorInfo().getErrorCode().equals(
ErrorCode.RULES_EXECUTION_TIMEOUT);
var fullAnalysisRequired = (!rulesVersionMatches || !componentRulesVersionMatches) && !isRuleExecutionTimeout;
@ -164,7 +153,7 @@ public class ReanalysisRequiredStatusService {
DossierEntity dossier,
Map<String, Map<VersionType, Long>> dossierTemplateVersionMap,
Map<String, Long> dossierVersionMap,
Map<String, Map<String, Integer>> componentMappingVersionMap) {
Map<String, Map<String,Integer>> componentMappingVersionMap) {
// get relevant versions
Map<VersionType, Long> dossierTemplateVersions = dossierTemplateVersionMap.computeIfAbsent(dossier.getDossierTemplateId(),
@ -173,8 +162,7 @@ public class ReanalysisRequiredStatusService {
Map<String, Integer> componentMappingVersions = componentMappingVersionMap.computeIfAbsent(dossier.getDossierTemplateId(),
k -> componentMappingService.getMetaDataByDossierTemplateId(dossier.getDossierTemplateId())
.stream()
.collect(Collectors.toMap(ComponentMappingMetadata::getName,
ComponentMappingMetadata::getVersion)));
.collect(Collectors.toMap(ComponentMappingMetadata::getName, ComponentMappingMetadata::getVersion)));
Long dossierDictionaryVersion = dossierVersionMap.computeIfAbsent(fileStatus.getDossierId(), k -> getDossierVersionData(fileStatus.getDossierId()));
@ -189,23 +177,22 @@ public class ReanalysisRequiredStatusService {
var dossierDictionaryVersionMatches = Math.max(fileStatus.getDossierDictionaryVersion(), 0) == dossierDictionaryVersion;
var reanalysisRequired = !dictionaryVersionMatches || !dossierDictionaryVersionMatches || !mappingVersionAllMatch;
var fullAnalysisRequired = !rulesVersionMatches || !componentRulesVersionMatches || !legalBasisVersionMatches || (settings.isLlmNerServiceEnabled() && !aiVersionMatches);
var fullAnalysisRequired = !rulesVersionMatches || !componentRulesVersionMatches || !legalBasisVersionMatches || !aiVersionMatches;
var componentReanalysisRequired = !dateFormatsVersionMatches;
if (reanalysisRequired || fullAnalysisRequired || componentReanalysisRequired) {
log.debug(buildMessage(fileStatus,
rulesVersionMatches,
dossierTemplateVersions,
componentRulesVersionMatches,
dateFormatsVersionMatches,
dictionaryVersionMatches,
legalBasisVersionMatches,
dossierDictionaryVersionMatches,
dossierDictionaryVersion,
mappingVersionAllMatch,
componentMappingVersions,
aiVersionMatches));
rulesVersionMatches,
dossierTemplateVersions,
componentRulesVersionMatches,
dateFormatsVersionMatches,
dictionaryVersionMatches,
legalBasisVersionMatches,
dossierDictionaryVersionMatches,
dossierDictionaryVersion,
mappingVersionAllMatch,
componentMappingVersions));
}
return new AnalysisRequiredResult(reanalysisRequired, fullAnalysisRequired, componentReanalysisRequired);
@ -215,15 +202,14 @@ public class ReanalysisRequiredStatusService {
private String buildMessage(FileModel fileStatus,
boolean rulesVersionMatches,
Map<VersionType, Long> dossierTemplateVersions,
boolean versionMatches,
boolean componentRulesVersionMatches,
boolean dateFormatsVersionMatches,
boolean dictionaryVersionMatches,
boolean legalBasisVersionMatches,
boolean dossierDictionaryVersionMatches,
Long dossierDictionaryVersion,
boolean mappingVersionAllMatch,
Map<String, Integer> componentMappingVersions,
boolean aiVersionMatches) {
Map<String, Integer> componentMappingVersions) {
StringBuilder messageBuilder = new StringBuilder();
messageBuilder.append("For file: ").append(fileStatus.getId()).append("-").append(fileStatus.getFilename()).append(" analysis is required because -> ");
@ -278,21 +264,6 @@ public class ReanalysisRequiredStatusService {
needComma = true;
}
if (!dateFormatsVersionMatches) {
if (needComma) {
messageBuilder.append(", ");
}
messageBuilder.append("dateFormatsVersions: ").append(fileStatus.getDateFormatsVersion()).append("/").append(dossierTemplateVersions.getOrDefault(DATE_FORMATS, -1L));
needComma = true;
}
if (settings.isLlmNerServiceEnabled() && !aiVersionMatches) {
if (needComma) {
messageBuilder.append(", ");
}
messageBuilder.append("aiVersions: ").append(fileStatus.getAiCreationVersion()).append("/").append(dossierTemplateVersions.getOrDefault(AI_CREATION, 0L));
}
return messageBuilder.toString();
}
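
To summarise the decision encoded above, a hedged condensation of the version checks in requiresReanalysisBasedOnVersionDifference: dictionary and component-mapping drift only requires a re-analysis, rule/legal-basis/AI drift forces a full analysis, and a date-format change only re-runs the component analysis. Parameter and type names are illustrative.

public class AnalysisRequiredSketch {

    record Result(boolean reanalysisRequired, boolean fullAnalysisRequired, boolean componentReanalysisRequired) {}

    static Result decide(boolean rulesMatch, boolean componentRulesMatch, boolean legalBasisMatch, boolean aiMatch,
                         boolean dictionaryMatch, boolean dossierDictionaryMatch, boolean mappingsMatch,
                         boolean dateFormatsMatch) {
        boolean reanalysis = !dictionaryMatch || !dossierDictionaryMatch || !mappingsMatch;
        boolean fullAnalysis = !rulesMatch || !componentRulesMatch || !legalBasisMatch || !aiMatch;
        boolean componentReanalysis = !dateFormatsMatch;
        return new Result(reanalysis, fullAnalysis, componentReanalysis);
    }

    public static void main(String[] args) {
        // Only the dictionary version drifted: plain re-analysis, no full analysis.
        System.out.println(decide(true, true, true, true, false, true, true, true));
    }
}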

View File

@ -10,6 +10,7 @@ import com.iqser.red.service.persistence.management.v1.processor.exception.Confl
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.DossierPersistenceService;
import com.iqser.red.service.persistence.service.v1.api.shared.model.ReanalysisSettings;
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.imported.ImportedRedactions;
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.DeleteImportedRedactionsRequest;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
@ -177,11 +178,11 @@ public class ReanalysisService {
relevantFiles.stream()
.filter(fileStatus -> fileStatus.getOcrStartTime() == null)
.filter(fileStatus -> fileStatus.getProcessingStatus().equals(ProcessingStatus.PROCESSED))
.forEach(fileStatus -> fileStatusService.setStatusOcrQueued(dossierId, fileStatus.getId(), false));
.forEach(fileStatus -> fileStatusService.setStatusOcrQueued(dossierId, fileStatus.getId()));
}
public void ocrFile(String dossierId, String fileId, boolean force, boolean allPages) {
public void ocrFile(String dossierId, String fileId, boolean force) {
dossierPersistenceService.getAndValidateDossier(dossierId);
FileModel dossierFile = fileStatusService.getStatus(fileId);
@ -201,18 +202,18 @@ public class ReanalysisService {
}
if (force) {
fileStatusService.setStatusOcrQueued(dossierId, fileId, allPages);
fileStatusService.setStatusOcrQueued(dossierId, fileId);
} else {
if (dossierFile.getOcrStartTime() != null) {
throw new ConflictException("File already has been OCR processed");
}
ocrFiles(dossierId, Sets.newHashSet(fileId), allPages);
ocrFiles(dossierId, Sets.newHashSet(fileId));
}
}
public void ocrFiles(String dossierId, Set<String> fileIds, boolean allPages) {
public void ocrFiles(String dossierId, Set<String> fileIds) {
var relevantFiles = getRelevantFiles(dossierId, fileIds);
@ -224,7 +225,7 @@ public class ReanalysisService {
relevantFiles.stream()
.filter(fileStatus -> fileStatus.getOcrStartTime() == null)
.forEach(fileStatus -> fileStatusService.setStatusOcrQueued(dossierId, fileStatus.getId(), allPages));
.forEach(fileStatus -> fileStatusService.setStatusOcrQueued(dossierId, fileStatus.getId()));
}

View File

@ -19,6 +19,7 @@ public class RulesValidationService {
public DroolsValidation validateRules(RuleFileType ruleFileType, String rules) {
return redactionServiceClient.testRules(new RuleValidationModel(ruleFileType.name(), rules));
}
}

View File

@ -14,6 +14,7 @@ import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration;
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.SaasMigrationStatusPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.settings.FileManagementServiceSettings;
import com.iqser.red.service.persistence.management.v1.processor.utils.TenantUtils;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
@ -37,6 +38,7 @@ public class AutomaticAnalysisJob implements Job {
private final FileStatusService fileStatusService;
private final TenantProvider tenantProvider;
private final ObservationRegistry observationRegistry;
private final SaasMigrationStatusPersistenceService saasMigrationStatusPersistenceService;
@Setter
private boolean schedulingStopped;
@ -67,6 +69,11 @@ public class AutomaticAnalysisJob implements Job {
TenantContext.setTenantId(tenant.getTenantId());
if (!saasMigrationStatusPersistenceService.migrationFinishedForTenant()) {
log.info("[Tenant:{}] Skipping scheduling as there are files that require migration.", tenant.getTenantId());
return;
}
String queueName = MessagingConfiguration.REDACTION_REQUEST_QUEUE_PREFIX + "_" + tenant.getTenantId();
var redactionQueueInfo = amqpAdmin.getQueueInfo(queueName);
if (redactionQueueInfo != null) {

View File

@ -21,23 +21,24 @@ import com.iqser.red.service.persistence.management.v1.processor.service.persist
import com.iqser.red.service.persistence.management.v1.processor.utils.StorageIdUtils;
import com.iqser.red.service.persistence.management.v1.processor.utils.TenantUtils;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
import com.iqser.red.service.redaction.v1.server.data.DocumentPageProto;
import com.iqser.red.service.redaction.v1.server.data.DocumentPositionDataProto;
import com.iqser.red.service.redaction.v1.server.data.DocumentStructureProto;
import com.iqser.red.service.redaction.v1.server.data.DocumentTextDataProto;
import com.iqser.red.service.redaction.v1.server.data.EntryDataProto;
import com.iqser.red.service.redaction.v1.server.data.LayoutEngineProto;
import com.iqser.red.service.redaction.v1.server.data.NodeTypeProto;
import com.iqser.red.service.redaction.v1.server.data.old.DocumentPage;
import com.iqser.red.service.redaction.v1.server.data.old.DocumentPositionData;
import com.iqser.red.service.redaction.v1.server.data.old.DocumentStructure;
import com.iqser.red.service.redaction.v1.server.data.old.DocumentTextData;
import com.iqser.red.service.redaction.v1.server.data.old.LayoutEngine;
import com.iqser.red.storage.commons.exception.StorageObjectDoesNotExist;
import com.iqser.red.storage.commons.service.StorageService;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentPage;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentPageProto;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentPositionData;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentPositionDataProto;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentStructure;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentStructureProto;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentTextData;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentTextDataProto;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.EntryDataProto;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.LayoutEngine;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.LayoutEngineProto;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeTypeProto;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantProvider;
import jakarta.transaction.Transactional;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;

View File

@ -1,57 +0,0 @@
package com.iqser.red.service.persistence.management.v1.processor.service.persistence;
import java.time.OffsetDateTime;
import java.util.Optional;
import org.springframework.stereotype.Service;
import org.springframework.validation.annotation.Validated;
import com.iqser.red.service.persistence.management.v1.processor.entity.configuration.AppVersionEntity;
import com.iqser.red.service.persistence.management.v1.processor.entity.configuration.AppVersionEntityKey;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository.AppVersionRepository;
import jakarta.transaction.Transactional;
import jakarta.validation.ConstraintViolationException;
import jakarta.validation.constraints.NotBlank;
import lombok.RequiredArgsConstructor;
@Service
@Validated
@RequiredArgsConstructor
public class AppVersionPersistenceService {
private final AppVersionRepository appVersionRepository;
/**
* Inserts a new AppVersionEntity if it does not already exist in the repository.
*
* @param appVersion the version of the application, must not be blank
* @param layoutParserVersion the version of the layout parser, must not be blank
* @return {@code true} if the entity was inserted; {@code false} if it already exists
* @throws ConstraintViolationException if the input parameters fail validation
*/
@Transactional
public boolean insertIfNotExists(@NotBlank String appVersion, @NotBlank String layoutParserVersion) throws ConstraintViolationException {
AppVersionEntityKey key = new AppVersionEntityKey(appVersion, layoutParserVersion);
if (!appVersionRepository.existsById(key)) {
AppVersionEntity newAppVersion = new AppVersionEntity(appVersion, layoutParserVersion, OffsetDateTime.now());
appVersionRepository.save(newAppVersion);
return true;
}
return false;
}
/**
* Retrieves the latest AppVersionEntity based on the date field.
*
* @return an Optional containing the latest AppVersionEntity if present, otherwise empty
*/
public Optional<AppVersionEntity> getLatestAppVersion() {
return appVersionRepository.findTopByOrderByDateDesc();
}
}
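
For reference, the insert-if-not-exists contract documented above reduces to the following pattern; the in-memory map is a hypothetical stand-in for AppVersionRepository and the records are illustrative only.

import java.time.OffsetDateTime;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class AppVersionInsertSketch {

    record Key(String appVersion, String layoutParserVersion) {}
    record Entry(Key key, OffsetDateTime date) {}

    // Hypothetical stand-in for the repository.
    private final Map<Key, Entry> store = new ConcurrentHashMap<>();

    // Returns true only when the (appVersion, layoutParserVersion) pair was not seen before,
    // mirroring the contract of insertIfNotExists in the removed service.
    boolean insertIfNotExists(String appVersion, String layoutParserVersion) {
        Key key = new Key(appVersion, layoutParserVersion);
        return store.putIfAbsent(key, new Entry(key, OffsetDateTime.now())) == null;
    }

    public static void main(String[] args) {
        AppVersionInsertSketch sketch = new AppVersionInsertSketch();
        System.out.println(sketch.insertIfNotExists("1.2.0", "0.9.1")); // true  (first insert)
        System.out.println(sketch.insertIfNotExists("1.2.0", "0.9.1")); // false (already exists)
    }
}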

View File

@ -12,7 +12,6 @@ import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.management.v1.processor.entity.configuration.TypeEntity;
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
import com.iqser.red.service.persistence.management.v1.processor.exception.ConflictException;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository.DossierRepository;
@ -152,7 +151,6 @@ public class DictionaryPersistenceService {
type.setAiDescription(typeValueRequest.getAiDescription());
type.setLabel(typeValueRequest.getLabel());
type.setAddToDictionaryAction(typeValueRequest.isAddToDictionaryAction());
type.setRank(typeValueRequest.getRank());
} else {
BeanUtils.copyProperties(typeValueRequest,
type,
@ -358,14 +356,12 @@ public class DictionaryPersistenceService {
@Transactional
public void updateAddToDictionary(String typeId, boolean addToDictionaryAction) {
public void updateAddToDictionary(String typeId, boolean addToDictionary) {
var typeEntity = getType(typeId);
if (typeEntity.isDossierDictionaryOnly()) {
typeEntity.setAddToDictionaryAction(addToDictionaryAction);
typeEntity.setAddToDictionaryAction(addToDictionary);
typeRepository.saveAndFlush(typeEntity);
} else {
throw new BadRequestException("The addToDictionaryAction flag can only be adjusted for dossierDictionaryOnly-types.");
}
}

View File

@ -60,7 +60,7 @@ public class DossierAttributeConfigPersistenceService {
dossierAttributeConfig.setDossierTemplate(dossierTemplate);
if (dossierAttributeConfig.getId() == null) {
dossierAttributeConfig.setId(UUID.randomUUID().toString());
checkAndSetPlaceholder(dossierAttributeConfig);
setPlaceholder(dossierAttributeConfig);
uniqueLabelAndPlaceholder(dossierAttributeConfig);
return dossierAttributeConfigRepository.save(dossierAttributeConfig);
} else {
@ -72,15 +72,12 @@ public class DossierAttributeConfigPersistenceService {
config.setLabel(dossierAttributeConfig.getLabel());
config.setType(dossierAttributeConfig.getType());
config.setEditable(dossierAttributeConfig.isEditable());
config.setDisplayedInDossierList(dossierAttributeConfig.isDisplayedInDossierList());
config.setFilterable(dossierAttributeConfig.isFilterable());
checkAndSetPlaceholder(dossierAttributeConfig);
config.setPlaceholder(dossierAttributeConfig.getPlaceholder());
setPlaceholder(config);
uniqueLabelAndPlaceholder(dossierAttributeConfig);
return dossierAttributeConfigRepository.save(config);
} else {
dossierAttributeConfig.setId(UUID.randomUUID().toString());
checkAndSetPlaceholder(dossierAttributeConfig);
setPlaceholder(dossierAttributeConfig);
uniqueLabelAndPlaceholder(dossierAttributeConfig);
return dossierAttributeConfigRepository.save(dossierAttributeConfig);
}
@ -95,25 +92,12 @@ public class DossierAttributeConfigPersistenceService {
}
private void checkAndSetPlaceholder(DossierAttributeConfigEntity dossierAttributeConfig) {
private void setPlaceholder(DossierAttributeConfigEntity dossierAttributeConfig) {
if (StringUtils.isEmpty(dossierAttributeConfig.getPlaceholder())) {
String placeholder = "{{dossier.attribute." + StringUtils.remove(WordUtils.capitalizeFully(dossierAttributeConfig.getLabel(), ' '), " ") + "}}";
dossierAttributeConfig.setPlaceholder(placeholder);
} else {
uniquePlaceholder(dossierAttributeConfig);
}
String placeholder = "{{dossier.attribute." + StringUtils.remove(WordUtils.capitalizeFully(dossierAttributeConfig.getLabel(), ' '), " ") + "}}";
dossierAttributeConfig.setPlaceholder(placeholder);
}
private void uniquePlaceholder(DossierAttributeConfigEntity dossierAttributesConfig) {
getDossierAttributes(dossierAttributesConfig.getDossierTemplate().getId()).stream()
.filter(d -> !d.getId().equals(dossierAttributesConfig.getId()))
.forEach(other -> {
if (other.getPlaceholder().equalsIgnoreCase(dossierAttributesConfig.getPlaceholder())) {
throw new ConflictException("Placeholder already exists. " + dossierAttributesConfig.getPlaceholder());
}
});
}
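
A small, self-contained sketch of the placeholder derivation used above; plain String operations stand in for the commons-lang WordUtils.capitalizeFully and StringUtils.remove calls, and the example label is illustrative.

public class PlaceholderSketch {

    // Capitalize each space-separated word, drop the spaces, and wrap the result in the template braces.
    static String dossierAttributePlaceholder(String label) {
        StringBuilder compact = new StringBuilder();
        for (String word : label.trim().split("\\s+")) {
            if (!word.isEmpty()) {
                compact.append(Character.toUpperCase(word.charAt(0)))
                       .append(word.substring(1).toLowerCase());
            }
        }
        return "{{dossier.attribute." + compact + "}}";
    }

    public static void main(String[] args) {
        // "case officer" -> {{dossier.attribute.CaseOfficer}}
        System.out.println(dossierAttributePlaceholder("case officer"));
    }
}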
private void uniqueLabelAndPlaceholder(DossierAttributeConfigEntity dossierAttributesConfig) {

View File

@ -34,47 +34,19 @@ public class DownloadStatusPersistenceService {
// use this to create a status for export dossier template.
public void createStatus(String userId, String storageId, String filename, String mimeType) {
this.saveDownloadStatus(userId, storageId, null, filename, mimeType, null, null, null, null);
this.createStatus(userId, storageId, null, filename, mimeType, null, null, null, null);
}
@Transactional
public DownloadStatusEntity createStatus(String userId,
String storageId,
DossierEntity dossier,
String filename,
String mimeType,
List<String> fileIds,
Set<DownloadFileType> downloadFileTypes,
List<String> reportTemplateIds,
String redactionPreviewColor) {
DownloadStatusEntity savedDownloadStatus = saveDownloadStatus(userId,
storageId,
dossier,
filename,
mimeType,
fileIds,
downloadFileTypes,
reportTemplateIds,
redactionPreviewColor);
if (fileIds != null && !fileIds.isEmpty()) {
fileRepository.updateLastDownloadForApprovedFiles(fileIds, savedDownloadStatus);
}
return savedDownloadStatus;
}
private DownloadStatusEntity saveDownloadStatus(String userId,
String storageId,
DossierEntity dossier,
String filename,
String mimeType,
List<String> fileIds,
Set<DownloadFileType> downloadFileTypes,
List<String> reportTemplateIds,
String redactionPreviewColor) {
String storageId,
DossierEntity dossier,
String filename,
String mimeType,
List<String> fileIds,
Set<DownloadFileType> downloadFileTypes,
List<String> reportTemplateIds,
String redactionPreviewColor) {
DownloadStatusEntity downloadStatus = new DownloadStatusEntity();
downloadStatus.setUserId(userId);
@ -90,7 +62,7 @@ public class DownloadStatusPersistenceService {
downloadStatus.setRedactionPreviewColor(redactionPreviewColor);
downloadStatus.setStatus(DownloadStatusValue.QUEUED);
return downloadStatusRepository.saveAndFlush(downloadStatus);
return downloadStatusRepository.save(downloadStatus);
}
@ -162,8 +134,7 @@ public class DownloadStatusPersistenceService {
@Transactional
public DownloadStatusEntity getStatusesByUuid(String uuid) {
return downloadStatusRepository.findByUuid(uuid)
.orElseThrow(() -> new NotFoundException(String.format("DownloadStatus not found for uuid: %s", uuid)));
return downloadStatusRepository.findByUuid(uuid).orElseThrow(() -> new NotFoundException(String.format("DownloadStatus not found for uuid: %s", uuid)));
}

View File

@ -1,8 +1,8 @@
package com.iqser.red.service.persistence.management.v1.processor.service.persistence;
import static com.iqser.red.service.persistence.service.v1.api.external.resource.FileAttributesResource.ASCII_ENCODING;
import static com.iqser.red.service.persistence.service.v1.api.external.resource.FileAttributesResource.ISO_ENCODING;
import static com.iqser.red.service.persistence.service.v1.api.external.resource.FileAttributesResource.UTF_ENCODING;
import static com.iqser.red.service.persistence.management.v1.processor.service.FileAttributesManagementService.ASCII_ENCODING;
import static com.iqser.red.service.persistence.management.v1.processor.service.FileAttributesManagementService.ISO_ENCODING;
import static com.iqser.red.service.persistence.management.v1.processor.service.FileAttributesManagementService.UTF_ENCODING;
import java.util.List;
import java.util.Objects;
@ -81,7 +81,7 @@ public class FileAttributeConfigPersistenceService {
@Transactional
public void setFileAttributesConfig(String dossierTemplateId, List<FileAttributeConfigEntity> fileAttributesConfig) {
public List<FileAttributeConfigEntity> setFileAttributesConfig(String dossierTemplateId, List<FileAttributeConfigEntity> fileAttributesConfig) {
Set<String> toSetIds = fileAttributesConfig.stream()
.map(FileAttributeConfigEntity::getId)
@ -98,6 +98,8 @@ public class FileAttributeConfigPersistenceService {
fileAttributesRepository.deleteByFileAttributeConfigId(ctr.getId());
fileAttributeConfigRepository.deleteById(ctr.getId());
});
return getFileAttributes(dossierTemplateId);
}
@ -115,7 +117,7 @@ public class FileAttributeConfigPersistenceService {
}
if (fileAttributeConfig.getId() == null) {
fileAttributeConfig.setId(UUID.randomUUID().toString());
checkAndSetPlaceholder(fileAttributeConfig);
setPlaceholder(fileAttributeConfig);
uniqueLabelAndPlaceholder(fileAttributeConfig);
return fileAttributeConfigRepository.save(fileAttributeConfig);
} else {
@ -131,14 +133,12 @@ public class FileAttributeConfigPersistenceService {
config.setDisplayedInFileList(fileAttributeConfig.isDisplayedInFileList());
config.setPrimaryAttribute(fileAttributeConfig.isPrimaryAttribute());
config.setFilterable(fileAttributeConfig.isFilterable());
config.setIncludeInCsvExport(fileAttributeConfig.isIncludeInCsvExport());
checkAndSetPlaceholder(fileAttributeConfig);
config.setPlaceholder(fileAttributeConfig.getPlaceholder());
setPlaceholder(config);
uniqueLabelAndPlaceholder(fileAttributeConfig);
return fileAttributeConfigRepository.save(config);
} else {
fileAttributeConfig.setId(UUID.randomUUID().toString());
checkAndSetPlaceholder(fileAttributeConfig);
setPlaceholder(fileAttributeConfig);
uniqueLabelAndPlaceholder(fileAttributeConfig);
return fileAttributeConfigRepository.save(fileAttributeConfig);
}
@ -153,25 +153,12 @@ public class FileAttributeConfigPersistenceService {
}
private void checkAndSetPlaceholder(FileAttributeConfigEntity fileAttributeConfig) {
private void setPlaceholder(FileAttributeConfigEntity fileAttributeConfig) {
if (StringUtils.isEmpty(fileAttributeConfig.getPlaceholder())) {
String placeholder = "{{file.attribute." + StringUtils.remove(WordUtils.capitalizeFully(fileAttributeConfig.getLabel(), ' '), " ") + "}}";
fileAttributeConfig.setPlaceholder(placeholder);
} else {
uniquePlaceholder(fileAttributeConfig);
}
String placeholder = "{{file.attribute." + StringUtils.remove(WordUtils.capitalizeFully(fileAttributeConfig.getLabel(), ' '), " ") + "}}";
fileAttributeConfig.setPlaceholder(placeholder);
}
private void uniquePlaceholder(FileAttributeConfigEntity fileAttributeConfig) {
getFileAttributes(fileAttributeConfig.getDossierTemplate().getId()).stream()
.filter(d -> !d.getId().equals(fileAttributeConfig.getId()))
.forEach(other -> {
if (other.getPlaceholder().equalsIgnoreCase(fileAttributeConfig.getPlaceholder())) {
throw new ConflictException("Placeholder already exists.");
}
});
}
private void uniqueLabelAndPlaceholder(FileAttributeConfigEntity fileAttributeConfig) {

View File

@ -3,6 +3,7 @@ package com.iqser.red.service.persistence.management.v1.processor.service.persis
import java.time.OffsetDateTime;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
@ -602,9 +603,7 @@ public class FileStatusPersistenceService {
public int getNumberOfAssignedFiles(String userId) {
List<FileEntity> files = fileRepository.findFilesByAssignee(userId);
return files.stream()
.filter(fileEntity -> fileEntity.getHardDeletedTime() == null)
.collect(Collectors.toList()).size();
return files.size();
}
@ -692,9 +691,9 @@ public class FileStatusPersistenceService {
}
public void updateLayoutParserVersionAndProcessedTime(String fileId, String version) {
public void updateLayoutProcessedTime(String fileId) {
fileRepository.updateLayoutProcessedTime(fileId, version, OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS));
fileRepository.updateLayoutProcessedTime(fileId, OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS));
}
@ -739,14 +738,11 @@ public class FileStatusPersistenceService {
fileRepository.setAiCreationVersion(fileId, aiCreationVersion);
}
public List<FileEntity> findAllByIds(Set<String> fileIds) {
return fileRepository.findAllById(fileIds);
}
public List<FileEntity> findAllDossierIdAndIds(String dossierId, Set<String> fileIds) {
return fileRepository.findAllDossierIdAndIds(dossierId, fileIds);
@ -758,11 +754,4 @@ public class FileStatusPersistenceService {
return fileRepository.findAllByDossierId(dossierId, includeDeleted);
}
@Transactional
public void clearLastDownload(String fileId) {
fileRepository.updateLastDownloadForFile(fileId, null);
}
}
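With the in-memory hard-deleted filter dropped from getNumberOfAssignedFiles, hard-deleted files are only excluded if findFilesByAssignee already filters them. A hypothetical alternative that pushes the filter into a count query — the derived method below does not exist in the FileRepository shown in this diff; it is only a sketch:

// Hypothetical Spring Data derived query, declared on the repository:
// long countByAssigneeAndHardDeletedTimeIsNull(String assignee);

public int getNumberOfAssignedFiles(String userId) {
    // counts in the database instead of materialising all assigned files
    return (int) fileRepository.countByAssigneeAndHardDeletedTimeIsNull(userId);
}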

View File

@ -6,7 +6,6 @@ import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.management.v1.processor.entity.configuration.RuleSetEntity;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository.RuleSetRepository;
import com.iqser.red.service.persistence.service.v1.api.shared.model.RuleFileType;
@ -106,7 +105,6 @@ public class RulesPersistenceService {
private final RuleSetRepository ruleSetRepository;
@Transactional
public Optional<RuleSetEntity> getRules(String dossierTemplateId, RuleFileType ruleFileType) {

View File

@ -0,0 +1,96 @@
package com.iqser.red.service.persistence.management.v1.processor.service.persistence;
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.AnnotationEntityId;
import com.iqser.red.service.persistence.management.v1.processor.entity.migration.SaasMigrationStatusEntity;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository.SaasMigrationStatusRepository;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.SaasMigrationStatus;
import jakarta.transaction.Transactional;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
@RequiredArgsConstructor
public class SaasMigrationStatusPersistenceService {
private final SaasMigrationStatusRepository saasMigrationStatusRepository;
public List<SaasMigrationStatusEntity> findAllByStatus(SaasMigrationStatus status) {
return saasMigrationStatusRepository.findAllByStatus(status);
}
public SaasMigrationStatusEntity findById(String fileId) {
var migrationStatusOptional = saasMigrationStatusRepository.findById(fileId);
if (migrationStatusOptional.isPresent()) {
return migrationStatusOptional.get();
}
throw new NotFoundException("No migration entry found for fileId" + fileId);
}
public boolean isMigrating(String fileId) {
var migrationStatusOptional = saasMigrationStatusRepository.findById(fileId);
return migrationStatusOptional.isPresent() && migrationStatusOptional.get().getStatus() != SaasMigrationStatus.FINISHED;
}
public boolean migrationFinishedForTenant() {
return saasMigrationStatusRepository.findAllWhereStatusNotFinishedAndNotError() == 0;
}
@Transactional
public void createMigrationRequiredStatus(String dossierId, String fileId) {
saasMigrationStatusRepository.save(SaasMigrationStatusEntity.builder().fileId(fileId).dossierId(dossierId).status(SaasMigrationStatus.MIGRATION_REQUIRED).build());
}
@Transactional
public void updateStatus(String fileId, SaasMigrationStatus status) {
saasMigrationStatusRepository.updateStatus(fileId, status);
}
@Transactional
public void updateErrorStatus(String fileId, String errorCause) {
saasMigrationStatusRepository.updateErrorStatus(fileId, SaasMigrationStatus.ERROR, errorCause);
}
@Transactional
public void updateErrorCounter(String fileId, Integer processingErrorCounter, String errorCause) {
saasMigrationStatusRepository.updateErrorCounter(fileId, processingErrorCounter, errorCause);
}
public int countByStatus(SaasMigrationStatus status) {
return saasMigrationStatusRepository.countByStatus(status);
}
public int countAll() {
return saasMigrationStatusRepository.countAll();
}
public List<SaasMigrationStatusEntity> findAll() {
return saasMigrationStatusRepository.findAll();
}
}
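As a quick illustration of the lifecycle these methods imply (identifiers are placeholders; the service would normally be injected by Spring):

void exampleLifecycle(SaasMigrationStatusPersistenceService service, String dossierId, String fileId) {
    service.createMigrationRequiredStatus(dossierId, fileId);        // entry starts at MIGRATION_REQUIRED
    boolean inFlight = service.isMigrating(fileId);                  // true while the status is not FINISHED
    service.updateStatus(fileId, SaasMigrationStatus.FINISHED);
    boolean tenantDone = service.migrationFinishedForTenant();       // true once no entry is in a non-FINISHED, non-ERROR state
}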

View File

@ -1,14 +0,0 @@
package com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository;
import java.util.Optional;
import org.springframework.data.jpa.repository.JpaRepository;
import com.iqser.red.service.persistence.management.v1.processor.entity.configuration.AppVersionEntity;
import com.iqser.red.service.persistence.management.v1.processor.entity.configuration.AppVersionEntityKey;
public interface AppVersionRepository extends JpaRepository<AppVersionEntity, AppVersionEntityKey> {
Optional<AppVersionEntity> findTopByOrderByDateDesc();
}

View File

@ -15,7 +15,7 @@ public interface FileAttributeConfigRepository extends JpaRepository<FileAttribu
List<FileAttributeConfigEntity> findByDossierTemplateId(String dossierTemplateId);
@Modifying(clearAutomatically = true)
@Modifying
@Query("update FileAttributeConfigEntity e set e.primaryAttribute = false where e.dossierTemplate.id = :dossierTemplateId")
void updateAllPrimaryAttributeValuesToFalse(@Param("dossierTemplateId") String dossierTemplateId);
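For context on the annotation change above: @Modifying(clearAutomatically = true) clears the persistence context after the bulk update, so entities loaded earlier in the transaction do not keep a stale primaryAttribute flag; the plain @Modifying variant leaves that to the caller. A hypothetical caller-side sketch (not code from this repository) of what that means in practice:

@Transactional
public void makePrimary(FileAttributeConfigEntity newPrimary) {
    fileAttributeConfigRepository.updateAllPrimaryAttributeValuesToFalse(newPrimary.getDossierTemplate().getId());
    // Without clearAutomatically = true, entities already loaded in this persistence context
    // still carry the old primaryAttribute value; re-read or update them explicitly.
    newPrimary.setPrimaryAttribute(true);
    fileAttributeConfigRepository.save(newPrimary);
}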

View File

@ -12,8 +12,8 @@ import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import com.iqser.red.service.persistence.management.v1.processor.entity.dossier.FileEntity;
import com.iqser.red.service.persistence.management.v1.processor.entity.download.DownloadStatusEntity;
import com.iqser.red.service.persistence.management.v1.processor.entity.projection.DossierStatsFileProjection;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.projection.DossierChangeProjection;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.projection.FileChangeProjection;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.projection.FilePageCountsProjection;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.projection.FileProcessingStatusProjection;
@ -142,7 +142,8 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Modifying(clearAutomatically = true)
@Query("update FileEntity f set f.processingErrorCounter = :processingErrorCounter "
+ "where f.dossierId in (select fe.dossierId from FileEntity fe inner join DossierEntity d on d.id = fe.dossierId where d.dossierTemplateId = :dossierTemplateId) and f.processingStatus = 'ERROR'")
void updateErrorCounter(@Param("dossierTemplateId") String dossierTemplateId, @Param("processingErrorCounter") int processingErrorCounter);
void updateErrorCounter(@Param("dossierTemplateId") String dossierTemplateId,
@Param("processingErrorCounter") int processingErrorCounter);
@Modifying
@ -400,8 +401,8 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
@Transactional
@Modifying(clearAutomatically = true)
@Query(value = "update FileEntity f set f.layoutParserVersion = :version, f.lastLayoutProcessed = :offsetDateTime where f.id = :fileId")
void updateLayoutProcessedTime(@Param("fileId") String fileId, @Param("version") String version, @Param("offsetDateTime") OffsetDateTime offsetDateTime);
@Query(value = "update FileEntity f set f.lastLayoutProcessed = :offsetDateTime where f.id = :fileId")
void updateLayoutProcessedTime(@Param("fileId") String fileId, @Param("offsetDateTime") OffsetDateTime offsetDateTime);
@Query("select f.filename from FileEntity f where f.id = :fileId")
@ -466,16 +467,6 @@ public interface FileRepository extends JpaRepository<FileEntity, String> {
List<String> findAllByDossierId(@Param("dossierId") String dossierId, @Param("includeDeleted") boolean includeDeleted);
@Modifying
@Query("UPDATE FileEntity f SET f.lastDownload = :lastDownload WHERE f.id IN :fileIds AND f.workflowStatus = 'APPROVED'")
void updateLastDownloadForApprovedFiles(@Param("fileIds") List<String> fileIds, @Param("lastDownload") DownloadStatusEntity lastDownload);
@Modifying
@Query("UPDATE FileEntity f SET f.lastDownload = :lastDownload WHERE f.id = :fileId")
void updateLastDownloadForFile(@Param("fileId") String fileId, @Param("lastDownload") DownloadStatusEntity lastDownload);
@Query("SELECT f FROM FileEntity f WHERE f.id in :fileIds AND f.dossierId = :dossierId")
List<FileEntity> findAllDossierIdAndIds(@Param("dossierId") String dossierId, @Param("fileIds") Set<String> fileIds);

View File

@ -0,0 +1,44 @@
package com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository;
import com.iqser.red.service.persistence.management.v1.processor.entity.migration.SaasMigrationStatusEntity;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.SaasMigrationStatus;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
public interface SaasMigrationStatusRepository extends JpaRepository<SaasMigrationStatusEntity, String> {
List<SaasMigrationStatusEntity> findAllByStatus(SaasMigrationStatus status);
@Modifying
@Query("update SaasMigrationStatusEntity e set e.status = :status where e.fileId = :fileId")
void updateStatus(@Param("fileId") String fileId, @Param("status") SaasMigrationStatus status);
@Modifying
@Query("update SaasMigrationStatusEntity e set e.status = :status, e.errorCause = :errorCause where e.fileId = :fileId")
void updateErrorStatus(@Param("fileId") String fileId, @Param("status") SaasMigrationStatus status, @Param("errorCause") String errorCause);
@Modifying
@Query("update SaasMigrationStatusEntity e set e.processingErrorCounter = :processingErrorCounter, e.errorCause = :errorCause where e.fileId = :fileId")
void updateErrorCounter(@Param("fileId") String fileId, @Param("processingErrorCounter") Integer processingErrorCounter, @Param("errorCause") String errorCause);
@Query("select count(*) from SaasMigrationStatusEntity e where e.status = :status")
int countByStatus(@Param("status") SaasMigrationStatus status);
@Query("select count(*) from SaasMigrationStatusEntity")
int countAll();
@Query("select count(*) from SaasMigrationStatusEntity e where e.status != 'FINISHED' and e.status != 'ERROR'")
int findAllWhereStatusNotFinishedAndNotError();
}
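The queries above assume roughly the following shape for SaasMigrationStatusEntity. This is a hedged sketch only — the real entity lives in the entity.migration package and may differ; the table name is taken from the removed 151-drop-saas-migration-table changelog:

import jakarta.persistence.Entity;
import jakarta.persistence.EnumType;
import jakarta.persistence.Enumerated;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;

@Entity
@Table(name = "saas_migration_status")
@Builder
@Getter
@NoArgsConstructor
@AllArgsConstructor
public class SaasMigrationStatusEntity {

    @Id
    private String fileId;                   // findById / updateStatus key

    private String dossierId;                // set by createMigrationRequiredStatus

    @Enumerated(EnumType.STRING)
    private SaasMigrationStatus status;      // compared against 'FINISHED' / 'ERROR' in the count query

    private String errorCause;               // written by updateErrorStatus / updateErrorCounter

    private Integer processingErrorCounter;  // written by updateErrorCounter
}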

View File

@ -11,12 +11,14 @@ import org.springframework.stereotype.Service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration;
import com.iqser.red.service.persistence.management.v1.processor.migration.SaasMigrationService;
import com.iqser.red.service.persistence.management.v1.processor.model.websocket.AnalyseStatus;
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusProcessingUpdateService;
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
import com.iqser.red.service.persistence.management.v1.processor.service.ImageSimilarityService;
import com.iqser.red.service.persistence.management.v1.processor.service.websocket.WebsocketService;
import com.iqser.red.service.persistence.management.v1.processor.service.layoutparsing.QueueMessageIdentifierService;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.SaasMigrationStatusPersistenceService;
import com.iqser.red.service.persistence.management.v1.processor.utils.StorageIdUtils;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileErrorInfo;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
@ -38,6 +40,8 @@ public class LayoutParsingFinishedMessageReceiver {
private final FileStatusService fileStatusService;
private final FileStatusProcessingUpdateService fileStatusProcessingUpdateService;
private final ObjectMapper objectMapper;
private final SaasMigrationStatusPersistenceService saasMigrationStatusPersistenceService;
private final SaasMigrationService saasMigrationService;
private final ImageSimilarityService imageSimilarityService;
private final WebsocketService websocketService;
@ -49,18 +53,24 @@ public class LayoutParsingFinishedMessageReceiver {
var dossierId = QueueMessageIdentifierService.parseDossierId(response.identifier());
var fileId = QueueMessageIdentifierService.parseFileId(response.identifier());
log.info("Layout parsing has finished for {}/{} in {}", dossierId, fileId, LayoutParsingQueueNames.LAYOUT_PARSING_RESPONSE_EXCHANGE);
if (saasMigrationStatusPersistenceService.isMigrating(QueueMessageIdentifierService.parseFileId(response.identifier()))) {
saasMigrationService.handleLayoutParsingFinished(QueueMessageIdentifierService.parseDossierId(response.identifier()),
QueueMessageIdentifierService.parseFileId(response.identifier()));
return;
}
var templateId = "";
var storageId = StorageIdUtils.getStorageId(dossierId, fileId, FileType.DOCUMENT_STRUCTURE);
fileStatusService.setStatusAnalyse(QueueMessageIdentifierService.parseDossierId(response.identifier()),
QueueMessageIdentifierService.parseFileId(response.identifier()),
QueueMessageIdentifierService.parsePriority(response.identifier()));
fileStatusService.updateLayoutParserVersionAndProcessedTime(QueueMessageIdentifierService.parseFileId(response.identifier()), response.layoutParserVersion());
fileStatusService.updateLayoutProcessedTime(QueueMessageIdentifierService.parseFileId(response.identifier()));
websocketService.sendAnalysisEvent(dossierId, fileId, AnalyseStatus.LAYOUT_UPDATE, fileStatusService.getStatus(fileId).getNumberOfAnalyses() + 1);
try {
imageSimilarityService.saveImages("", dossierId, fileId);
imageSimilarityService.saveImages(templateId, dossierId, fileId);
} catch (Exception e) {
log.error("Error occured during save images: {} ", e.getMessage(), e);
throw e;
@ -80,6 +90,13 @@ public class LayoutParsingFinishedMessageReceiver {
if (errorCause == null) {
errorCause = "Error occured during layout parsing!";
}
if (saasMigrationStatusPersistenceService.isMigrating(QueueMessageIdentifierService.parseFileId(analyzeRequest.identifier()))) {
saasMigrationService.handleError(QueueMessageIdentifierService.parseDossierId(analyzeRequest.identifier()),
QueueMessageIdentifierService.parseFileId(analyzeRequest.identifier()),
errorCause,
LayoutParsingQueueNames.LAYOUT_PARSING_REQUEST_EXCHANGE);
return;
}
OffsetDateTime timestamp = failedMessage.getMessageProperties().getHeader(MessagingConfiguration.X_ERROR_INFO_TIMESTAMP_HEADER);
timestamp = timestamp != null ? timestamp : OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS);
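The first migration guard added above re-parses the identifier even though dossierId and fileId were already extracted a few lines earlier; it is equivalent to the following short-circuit (sketch only):

if (saasMigrationStatusPersistenceService.isMigrating(fileId)) {
    saasMigrationService.handleLayoutParsingFinished(dossierId, fileId);
    return;
}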

View File

@ -0,0 +1,53 @@
package com.iqser.red.service.persistence.management.v1.processor.service.queue;
import static com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration.MIGRATION_DLQ;
import static com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration.MIGRATION_REQUEST_QUEUE;
import static com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration.MIGRATION_RESPONSE_QUEUE;
import static com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration.X_ERROR_INFO_HEADER;
import org.springframework.amqp.core.Message;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.management.v1.processor.migration.SaasMigrationService;
import com.iqser.red.service.redaction.v1.model.MigrationRequest;
import com.iqser.red.service.redaction.v1.model.MigrationResponse;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
public class RedactionServiceSaasMigrationMessageReceiver {
private final SaasMigrationService saasMigrationService;
private final ObjectMapper objectMapper;
@SneakyThrows
@RabbitListener(queues = MIGRATION_RESPONSE_QUEUE)
public void receive(MigrationResponse response) {
saasMigrationService.handleEntityLogMigrationFinished(response.getDossierId(), response.getFileId());
log.info("Received message {} in {}", response, MIGRATION_RESPONSE_QUEUE);
}
@SneakyThrows
@RabbitListener(queues = MIGRATION_DLQ)
public void handleDLQMessage(Message failedMessage) {
var migrationRequest = objectMapper.readValue(failedMessage.getBody(), MigrationRequest.class);
String errorCause = failedMessage.getMessageProperties().getHeader(X_ERROR_INFO_HEADER);
if (errorCause == null) {
errorCause = "Error occured during entityLog migration!";
}
saasMigrationService.handleError(migrationRequest.getDossierId(), migrationRequest.getFileId(), errorCause, MIGRATION_REQUEST_QUEUE);
}
}
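The receiver above only relies on a dossier id and a file id from the queue payloads. A hedged sketch of the assumed shape — the real classes live in com.iqser.red.service.redaction.v1.model and may carry additional fields:

import lombok.Data;

@Data
class MigrationResponse {
    private String dossierId;
    private String fileId;
}

// MigrationRequest is deserialised from the DLQ body via ObjectMapper and accessed the same way.
@Data
class MigrationRequest {
    private String dossierId;
    private String fileId;
}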

View File

@ -1,16 +0,0 @@
package com.iqser.red.service.persistence.management.v1.processor.utils;
import java.util.function.BiConsumer;
import com.iqser.red.service.persistence.management.v1.processor.entity.dossier.DossierAttributeConfigEntity;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.DossierAttributeConfig;
public class DossierAttributeConfigMapper implements BiConsumer<DossierAttributeConfigEntity, DossierAttributeConfig> {
@Override
public void accept(DossierAttributeConfigEntity dossierAttributeConfigEntity, DossierAttributeConfig dossierAttributeConfig) {
dossierAttributeConfig.setDossierTemplateId(dossierAttributeConfigEntity.getDossierTemplate().getId());
}
}

View File

@ -1,16 +0,0 @@
package com.iqser.red.service.persistence.management.v1.processor.utils;
import java.util.function.BiConsumer;
import com.iqser.red.service.persistence.management.v1.processor.entity.dossier.FileAttributeConfigEntity;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileAttributeConfig;
public class FileAttributeConfigMapper implements BiConsumer<FileAttributeConfigEntity, FileAttributeConfig> {
@Override
public void accept(FileAttributeConfigEntity fileAttributeConfigEntity, FileAttributeConfig fileAttributeConfig) {
fileAttributeConfig.setDossierTemplateId(fileAttributeConfigEntity.getDossierTemplate().getId());
}
}

View File

@ -20,9 +20,6 @@ public class FileModelMapper implements BiConsumer<FileEntity, FileModel> {
.forEach(fa -> fileModel.getFileAttributes().put(fa.getFileAttributeId().getFileAttributeConfigId(), fa.getValue()));
fileModel.setFileErrorInfo(new FileErrorInfo(fileEntity.getErrorCause(), fileEntity.getErrorQueue(), fileEntity.getErrorService(), fileEntity.getErrorTimestamp(), fileEntity.getErrorCode()));
fileModel.setComponentMappingVersions(getComponentMappingVersions(fileEntity));
if (fileEntity.getLastDownload() != null) {
fileModel.setLastDownloadDate(fileEntity.getLastDownload().getCreationDate());
}
}

View File

@ -1,15 +1,10 @@
package com.iqser.red.service.persistence.management.v1.processor.utils;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.StandardCharsets;
import java.nio.charset.UnsupportedCharsetException;
import org.apache.commons.lang3.StringUtils;
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
import lombok.experimental.UtilityClass;
@UtilityClass
@ -35,17 +30,4 @@ public final class StringEncodingUtils {
return result.toString();
}
public static Charset resolveCharset(String encoding) {
try {
return Charset.forName(encoding);
} catch (IllegalCharsetNameException e) {
throw new BadRequestException("Invalid character encoding: " + encoding);
} catch (UnsupportedCharsetException e) {
throw new BadRequestException("Unsupported character encoding: " + encoding);
} catch (IllegalArgumentException e) {
throw new BadRequestException("Encoding can't be null.");
}
}
}
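The removed resolveCharset helper mapped the three failure modes of Charset.forName onto BadRequestException. For reference, the underlying JDK behaviour it wrapped:

import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.UnsupportedCharsetException;

public class CharsetLookupExample {

    public static void main(String[] args) {
        System.out.println(Charset.forName("UTF-8")); // UTF-8
        try {
            Charset.forName("not a charset!"); // name contains illegal characters
        } catch (IllegalCharsetNameException e) {
            System.out.println("illegal charset name");
        }
        try {
            Charset.forName("x-this-charset-does-not-exist"); // well-formed but unknown
        } catch (UnsupportedCharsetException e) {
            System.out.println("unsupported charset");
        }
        // Charset.forName(null) throws IllegalArgumentException, the third case the old helper handled.
    }
}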

View File

@ -242,22 +242,10 @@ databaseChangeLog:
- include:
file: db/changelog/tenant/149-remove-based-on-dict-annotation-id-columns.yaml
- include:
file: db/changelog/tenant/149-add-indexes-across-tables-for-performance.yaml
file: db/changelog/tenant/150-add-indexes-across-tables-for-performance.yaml
- include:
file: db/changelog/tenant/150-add-component-mapping-indexes.yaml
- include:
file: db/changelog/tenant/151-drop-saas-migration-table.changelog.yaml
file: db/changelog/tenant/151-add-component-mapping-indexes.yaml
- include:
file: db/changelog/tenant/152-add-ai-fields-to-entity.yaml
- include:
file: db/changelog/tenant/153-custom-technical-name-change.yaml
- include:
file: db/changelog/tenant/155-add-displayed-and-filterable-to-dossier-attribute-config.yaml
- include:
file: db/changelog/tenant/156-add-last-download-to-file.yaml
- include:
file: db/changelog/tenant/157-add-included-to-csv-export-field.yaml
- include:
file: db/changelog/tenant/158-add-app-version-history-table-and-layout-parser-version-field-to-file.yaml
- include:
file: db/changelog/tenant/159-cleanup-truncated-indices.yaml

View File

@ -2,12 +2,6 @@ databaseChangeLog:
- changeSet:
id: add-indexes-to-file-table
author: timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: file_dossier_id_last_updated_idx
tableName: file
changes:
- createIndex:
columns:
@ -18,16 +12,6 @@ databaseChangeLog:
indexName: file_dossier_id_last_updated_idx
unique: false
tableName: file
- changeSet:
id: add-indexes-to-file-table2
author: timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: file_dossier_id_deleted_hard_deleted_time_idx
tableName: file
changes:
- createIndex:
columns:
- column:

View File

@ -3,7 +3,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_file_last_updated
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_file_last_updated
@ -20,7 +19,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_file_deleted_hard_deleted_time
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_file_deleted_hard_deleted_time
@ -39,7 +37,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_file_dossier_id_deleted_hard_deleted_time
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_file_dossier_id_deleted_hard_deleted_time
@ -60,7 +57,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_dossier_last_updated
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_dossier_last_updated
@ -77,7 +73,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_dossier_soft_deleted_time_hard_deleted_time
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_dossier_soft_deleted_time_hard_deleted_time
@ -96,7 +91,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_dossier_id_soft_deleted_time_hard_deleted_time
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_dossier_id_soft_deleted_time_hard_deleted_time
@ -117,7 +111,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_dossier_soft_deleted_time_hard_deleted_time_archived_time
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_dossier_soft_deleted_time_hard_deleted_time_archived_time
@ -138,7 +131,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_dossier_dossier_template_id_soft_deleted_time_hard_deleted_time_archived_time
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_dossier_dossier_template_id_soft_deleted_time_hard_deleted_time_archived_time
@ -161,7 +153,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_notification_preference_user_id_in_app_notifications_enabled
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_notification_preference_user_id_in_app_notifications_enabled
@ -180,7 +171,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_notification_user_id_creation_date_soft_deleted
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_notification_user_id_creation_date_soft_deleted
@ -201,7 +191,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_entity_dossier_template_id
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_entity_dossier_template_id
@ -218,7 +207,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_entity_dossier_id
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_entity_dossier_id

View File

@ -3,7 +3,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_component_mappings_dossier_template_id
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_component_mappings_dossier_template_id
@ -20,7 +19,6 @@ databaseChangeLog:
id: create_index_if_not_exists_idx_component_mappings_dossier_template_id_name
author: Timo
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_component_mappings_dossier_template_id_name

View File

@ -1,12 +0,0 @@
databaseChangeLog:
- changeSet:
id: drop-saas_migration_status-if-exists
author: dom
comment: drop saas_migration_status if exists
preConditions:
- onFail: MARK_RAN
- tableExists:
tableName: saas_migration_status
changes:
- dropTable:
tableName: saas_migration_status

View File

@ -11,7 +11,6 @@ databaseChangeLog:
id: technical-name-change-index
author: maverick
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_legal_basis_mapping_entity_dossier_template_id

View File

@ -1,16 +0,0 @@
databaseChangeLog:
- changeSet:
id: 155-add-displayed-and-filterable-to-dossier-attribute-config
author: maverick
changes:
- addColumn:
columns:
- column:
name: displayed_in_dossier_list
type: BOOLEAN
defaultValueBoolean: false
- column:
name: filterable
type: BOOLEAN
defaultValueBoolean: false
tableName: dossier_attribute_config

View File

@ -1,28 +0,0 @@
databaseChangeLog:
- changeSet:
id: add-last-download-to-file
author: maverick
changes:
- addColumn:
tableName: file
columns:
- column:
name: last_download
type: VARCHAR(255)
constraints:
nullable: true
- changeSet:
id: add-fk-last-download
author: maverick
changes:
- addForeignKeyConstraint:
baseTableName: file
baseColumnNames: last_download
constraintName: fk_file_last_download
referencedTableName: download_status
referencedColumnNames: storage_id
onDelete: SET NULL
onUpdate: NO ACTION
deferrable: false
initiallyDeferred: false
validate: true

View File

@ -1,12 +0,0 @@
databaseChangeLog:
- changeSet:
id: add-included-to-csv-export-field
author: colariu
changes:
- addColumn:
columns:
- column:
name: include_in_csv_export
type: BOOLEAN
defaultValue: false
tableName: file_attribute_config

View File

@ -1,37 +0,0 @@
databaseChangeLog:
- changeSet:
id: add-app-version-history-table
author: maverick
changes:
- createTable:
tableName: app_version_history
columns:
- column:
name: app_version
type: VARCHAR(128)
constraints:
nullable: false
primaryKey: true
primaryKeyName: app_version_history_pkey
- column:
name: layout_parser_version
type: VARCHAR(128)
constraints:
nullable: false
primaryKey: true
primaryKeyName: app_version_history_pkey
- column:
name: date
type: TIMESTAMP WITHOUT TIME ZONE
constraints:
nullable: false
- changeSet:
id: add-layout-parser-version-to-file
author: maverick
changes:
- addColumn:
tableName: file
columns:
- column:
name: layout_parser_version
type: VARCHAR(128)

View File

@ -1,68 +0,0 @@
databaseChangeLog:
- changeSet:
id: drop_truncated_idx_dossier_dossier_template_id_soft_deleted_time_hard_deleted_time
author: maverick
preConditions:
- onFail: MARK_RAN
- indexExists:
indexName: idx_dossier_dossier_template_id_soft_deleted_time_hard_deleted_
tableName: dossier
changes:
- dropIndex:
indexName: idx_dossier_dossier_template_id_soft_deleted_time_hard_deleted_
tableName: dossier
- changeSet:
id: create_idx_dossiertemplid_softdeltime_harddeltime_archivtime
author: maverick
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_dossiertemplid_softdeltime_harddeltime_archivtime
tableName: dossier
changes:
- createIndex:
tableName: dossier
indexName: idx_dossiertemplid_softdeltime_harddeltime_archivtime
columns:
- column:
name: dossier_template_id
- column:
name: soft_deleted_time
- column:
name: hard_deleted_time
- column:
name: archived_time
- changeSet:
id: drop_truncated_idx_notification_preference_user_id_in_app_notifications_enabled
author: maverick
preConditions:
- onFail: MARK_RAN
- indexExists:
indexName: idx_notification_preference_user_id_in_app_notifications_enable
tableName: notification_preference
changes:
- dropIndex:
indexName: idx_notification_preference_user_id_in_app_notifications_enable
tableName: notification_preference
- changeSet:
id: create_idx_notify_pref_userid_app_notif_enabled
author: maverick
preConditions:
- onFail: MARK_RAN
- not:
indexExists:
indexName: idx_notify_pref_userid_app_notif_enabled
tableName: notification_preference
changes:
- createIndex:
tableName: notification_preference
indexName: idx_notify_pref_userid_app_notif_enabled
columns:
- column:
name: user_id
- column:
name: in_app_notifications_enabled

View File

@ -28,13 +28,13 @@ dependencies {
api("org.apache.logging.log4j:log4j-slf4j-impl:2.20.0")
api("net.logstash.logback:logstash-logback-encoder:7.4")
implementation("ch.qos.logback:logback-classic")
implementation("org.liquibase:liquibase-core:4.29.2") // Needed to be set explicit, otherwise spring dependency management sets it to 4.20.0
testImplementation("org.springframework.amqp:spring-rabbit-test:3.0.2")
testImplementation("org.springframework.security:spring-security-test:6.0.2")
testImplementation("org.testcontainers:postgresql:1.17.1")
testImplementation("org.springframework.boot:spring-boot-starter-test:3.0.4")
testImplementation("com.yannbriancon:spring-hibernate-query-utils:2.0.0")
testImplementation("com.google.protobuf:protobuf-java:4.27.1")
}
description = "persistence-service-server-v1"

View File

@ -135,14 +135,11 @@ public class Application implements ApplicationContextAware {
@Override
public KeyValues getHighCardinalityKeyValues(ServerRequestObservationContext context) {
// Make sure that KeyValues entries are already sorted by name for better performance
return super.getHighCardinalityKeyValues(context)
.and(getValueFromPathVariableOrRequestParam(context, "dossierId"),
getValueFromPathVariableOrRequestParam(context, "dossierTemplateId"),
getValueFromPathVariableOrRequestParam(context, "fileId"))
.and(getValueFromRequestAttribute(context, "dossierId"),
getValueFromRequestAttribute(context, "dossierTemplateId"),
getValueFromRequestAttribute(context, "fileId"));
getValueFromPathVariableOrRequestParam(context, "fileId"));
}
@ -167,13 +164,6 @@ public class Application implements ApplicationContextAware {
.orElse(""));
}
private KeyValue getValueFromRequestAttribute(ServerRequestObservationContext context, String name) {
Object value = context.getCarrier().getAttribute(name);
return KeyValue.of(name, value == null ? "" : value.toString());
}
};
}
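With the request-attribute fallback removed, the observation convention above relies solely on getValueFromPathVariableOrRequestParam, of which only the tail is visible in this diff. A purely hypothetical sketch of what such a lookup can look like — not the project's actual implementation:

import java.util.Map;
import io.micrometer.common.KeyValue;
import org.springframework.http.server.observation.ServerRequestObservationContext;
import org.springframework.web.servlet.HandlerMapping;

class ObservationKeyValueSketch {

    @SuppressWarnings("unchecked")
    static KeyValue getValueFromPathVariableOrRequestParam(ServerRequestObservationContext context, String name) {
        // Path variables resolved by Spring MVC are stored as a request attribute.
        Map<String, String> pathVariables =
                (Map<String, String>) context.getCarrier().getAttribute(HandlerMapping.URI_TEMPLATE_VARIABLES_ATTRIBUTE);
        String value = pathVariables != null ? pathVariables.get(name) : null;
        if (value == null) {
            value = context.getCarrier().getParameter(name); // fall back to a request parameter
        }
        return KeyValue.of(name, value == null ? "" : value);
    }
}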

View File

@ -1,94 +0,0 @@
package com.iqser.red.service.peristence.v1.server;
import java.lang.reflect.Type;
import java.util.Map;
import org.springframework.core.MethodParameter;
import org.springframework.http.HttpInputMessage;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import org.springframework.web.servlet.mvc.method.annotation.RequestBodyAdvice;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import jakarta.servlet.http.HttpServletRequest;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@ControllerAdvice
public class GenericRequestBodyAdvice implements RequestBodyAdvice {
private final ObjectMapper objectMapper = new ObjectMapper();
@Override
public boolean supports(@NonNull MethodParameter methodParameter, @NonNull Type targetType, @NonNull Class<? extends HttpMessageConverter<?>> converterType) {
return true;
}
@Override
public @NonNull HttpInputMessage beforeBodyRead(@NonNull HttpInputMessage inputMessage,
@NonNull MethodParameter parameter,
@NonNull Type targetType,
@NonNull Class<? extends HttpMessageConverter<?>> converterType) {
return inputMessage;
}
@Override
public @NonNull Object afterBodyRead(@NonNull Object body,
@NonNull HttpInputMessage inputMessage,
@NonNull MethodParameter parameter,
@NonNull Type targetType,
@NonNull Class<? extends HttpMessageConverter<?>> converterType) {
ServletRequestAttributes attrs = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
if (attrs == null) {
return body;
}
HttpServletRequest request = attrs.getRequest();
try {
Map<String, Object> bodyMap = objectMapper.convertValue(body, new TypeReference<>() {
});
storeFieldIfPresent(bodyMap, "dossierId", request);
storeFieldIfPresent(bodyMap, "dossierTemplateId", request);
storeFieldIfPresent(bodyMap, "fileId", request);
} catch (Exception ignored) {
}
return body;
}
@Override
public Object handleEmptyBody(Object body,
@NonNull HttpInputMessage inputMessage,
@NonNull MethodParameter parameter,
@NonNull Type targetType,
@NonNull Class<? extends HttpMessageConverter<?>> converterType) {
return body;
}
private void storeFieldIfPresent(Map<String, Object> bodyMap, String fieldName, HttpServletRequest request) {
if (bodyMap.containsKey(fieldName)) {
Object value = bodyMap.get(fieldName);
if (value != null) {
request.setAttribute(fieldName, value.toString());
}
}
}
}

View File

@ -9,9 +9,6 @@ tenant-user-management-service.url: "http://tenant-user-management-service:8080/
logging.pattern.level: "%5p [${spring.application.name},%X{traceId:-},%X{spanId:-}]"
documine:
components:
filesLimit: 150
logging.type: ${LOGGING_TYPE:CONSOLE}
kubernetes.namespace: ${NAMESPACE:default}
@ -142,8 +139,7 @@ fforesight:
ignored-endpoints: [ '/redaction-gateway-v1', '/actuator/health/**',"/redaction-gateway-v1/websocket","/redaction-gateway-v1/websocket/**", '/redaction-gateway-v1/async/download/with-ott/**',
'/internal-api/**', '/redaction-gateway-v1/docs/swagger-ui',
'/redaction-gateway-v1/docs/**','/redaction-gateway-v1/docs',
'/api', '/api/','/api/docs/**','/api/docs','/api/docs/swagger-ui',
'/actuator/prometheus']
'/api', '/api/','/api/docs/**','/api/docs','/api/docs/swagger-ui' ]
enabled: true
springdoc:
base-path: '/api'

View File

@ -1,18 +1,13 @@
package com.iqser.red.service.peristence.v1.server.integration.tests;
import static org.junit.Assert.assertThrows;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.junit.jupiter.api.Test;
@ -23,8 +18,8 @@ import com.iqser.red.service.peristence.v1.server.integration.client.FileClient;
import com.iqser.red.service.peristence.v1.server.integration.service.DossierTemplateTesterAndProvider;
import com.iqser.red.service.peristence.v1.server.integration.service.DossierTesterAndProvider;
import com.iqser.red.service.peristence.v1.server.integration.service.FileTesterAndProvider;
import com.iqser.red.service.peristence.v1.server.integration.service.TypeProvider;
import com.iqser.red.service.peristence.v1.server.integration.utils.AbstractPersistenceServerServiceTest;
import com.iqser.red.service.persistence.management.v1.processor.acl.custom.dossier.DossierACLService;
import com.iqser.red.service.persistence.management.v1.processor.entity.configuration.LegalBasisEntity;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.LegalBasisMappingPersistenceService;
import com.iqser.red.service.persistence.service.v1.api.shared.model.DossierTemplateModel;
@ -38,8 +33,6 @@ import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemp
import com.iqser.red.service.persistence.service.v1.api.shared.model.warning.ApproveResponse;
import com.iqser.red.service.persistence.service.v1.api.shared.model.warning.WarningType;
import feign.FeignException;
public class ApprovalTest extends AbstractPersistenceServerServiceTest {
@Autowired
@ -52,14 +45,13 @@ public class ApprovalTest extends AbstractPersistenceServerServiceTest {
private DossierTesterAndProvider dossierTesterAndProvider;
@Autowired
private FileClient fileClient;
private TypeProvider typeProvider;
@Autowired
private FileClient fileClient;
@SpyBean
private LegalBasisMappingPersistenceService legalBasisMappingPersistenceService;
@SpyBean
private DossierACLService dossierACLService;
@Test
public void testApprovalNoWarnings() {
@ -188,54 +180,4 @@ public class ApprovalTest extends AbstractPersistenceServerServiceTest {
assertTrue(approveResponse.getFileWarnings().isEmpty());
}
@Test
void testApprovalWhenDossierHasNoOwner() {
DossierTemplateModel dossierTemplateModel = dossierTemplateTesterAndProvider.provideTestTemplate();
Dossier dossier = dossierTesterAndProvider.provideTestDossier(dossierTemplateModel);
FileStatus file = fileTesterAndProvider.testAndProvideFile(dossier, "some-file");
fileTesterAndProvider.markFileAsProcessed(dossier.getId(), file.getFileId());
EntityLog entityLog = new EntityLog();
when(entityLogService.getEntityLog(anyString(), anyString(), anyBoolean())).thenReturn(entityLog);
List<com.iqser.red.service.persistence.management.v1.processor.service.users.model.User> allUsers = new ArrayList<>();
allUsers.add(com.iqser.red.service.persistence.management.v1.processor.service.users.model.User.builder()
.userId("manageradmin1@test.com")
.email("manageradmin1@test.com")
.isActive(true)
.roles(Set.of(getAllRoles()))
.build());
allUsers.add(com.iqser.red.service.persistence.management.v1.processor.service.users.model.User.builder()
.userId("manageradmin2@test.com")
.email("manageradmin2@test.com")
.isActive(true)
.roles(Set.of("RED_USER"))
.build());
when(usersClient.getAllUsers(false)).thenReturn(allUsers);
when(usersClient.getAllUsers(true)).thenReturn(allUsers);
doAnswer(invocation -> {
Dossier arg = invocation.getArgument(0);
if (dossier.getId().equals(arg.getId())) {
Dossier emptyDossier = new Dossier();
emptyDossier.setId(arg.getId());
return emptyDossier;
} else {
return invocation.callRealMethod();
}
}).when(dossierACLService).enhanceDossierWithACLData(any(Dossier.class));
FeignException ex = assertThrows(FeignException.Conflict.class, () -> {
fileClient.setStatusApproved(dossier.getId(), file.getFileId(), false);
});
assertTrue(ex.getMessage().contains("Dossier has no owner!"));
}
}

View File

@ -19,7 +19,6 @@ import org.mockito.MockedStatic;
import com.iqser.red.persistence.service.v1.external.api.impl.controller.StatusController;
import com.iqser.red.persistence.service.v2.external.api.impl.controller.ComponentControllerV2;
import com.iqser.red.service.persistence.management.v1.processor.exception.NotAllowedException;
import com.iqser.red.service.persistence.management.v1.processor.service.AccessControlService;
import com.iqser.red.service.persistence.management.v1.processor.service.ComponentLogService;
import com.iqser.red.service.persistence.management.v1.processor.service.CurrentApplicationTypeProvider;
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
@ -60,8 +59,6 @@ public class ComponentControllerV2Test {
private ComponentLog mockComponentLog;
@Mock
private CurrentApplicationTypeProvider currentApplicationTypeProvider;
@Mock
private AccessControlService accessControlService;
@BeforeEach

View File

@ -11,7 +11,6 @@ import java.util.List;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import com.iqser.red.service.peristence.v1.server.integration.client.DossierTemplateClient;
import com.iqser.red.service.peristence.v1.server.integration.client.DossierTemplateExternalClient;
import com.iqser.red.service.peristence.v1.server.integration.service.DossierTemplateTesterAndProvider;
import com.iqser.red.service.peristence.v1.server.integration.utils.AbstractPersistenceServerServiceTest;
@ -20,7 +19,6 @@ import com.iqser.red.service.persistence.service.v1.api.shared.model.DossierTemp
import com.iqser.red.service.persistence.service.v1.api.shared.model.component.ComponentDefinition;
import com.iqser.red.service.persistence.service.v1.api.shared.model.component.ComponentDefinitionAddRequest;
import com.iqser.red.service.persistence.service.v1.api.shared.model.component.ComponentDefinitionUpdateRequest;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.CloneDossierTemplateRequest;
import feign.FeignException;
@ -35,9 +33,6 @@ public class ComponentDefinitionTests extends AbstractPersistenceServerServiceTe
@Autowired
private ComponentDefinitionPersistenceService componentDefinitionPersistenceService;
@Autowired
private DossierTemplateClient dossierTemplateClient;
@Test
public void testCreateComponentDefinition() {
@ -155,6 +150,7 @@ public class ComponentDefinitionTests extends AbstractPersistenceServerServiceTe
assertTrue(unknownDossierError.getMessage().contains("DossierTemplate with Id 123 not found."));
}
@Test
public void testAddComponentDefinitionAfterSoftDeletion() {
@ -269,7 +265,8 @@ public class ComponentDefinitionTests extends AbstractPersistenceServerServiceTe
assertEquals(newOrder.get(2).getId(), firstComponentId);
assertEquals(newOrder.get(2).getRank(), 3);
var unknownDossierError = assertThrows(FeignException.class, () -> dossierTemplateExternalClient.reorderComponents("123", List.of(secondComponentId, thirdComponentId, firstComponentId)));
var unknownDossierError = assertThrows(FeignException.class,
() -> dossierTemplateExternalClient.reorderComponents("123", List.of(secondComponentId, thirdComponentId, firstComponentId)));
assertTrue(unknownDossierError.getMessage().contains("DossierTemplate with Id 123 not found."));
}
@ -299,50 +296,4 @@ public class ComponentDefinitionTests extends AbstractPersistenceServerServiceTe
assertEquals(newOrder.get(2).getRank(), 3);
}
@Test
public void testSoftDeleteComponentDefinitionAndReOrder() { //RED-10628
var dossierTemplate = dossierTemplateTesterAndProvider.provideTestTemplate();
var componentDefinitionAddRequest1 = ComponentDefinitionAddRequest.builder().technicalName("Component 1").displayName("Component 1").description("Description").build();
var componentDefinitionAddRequest2 = ComponentDefinitionAddRequest.builder().technicalName("Component 2").displayName("Component 2").description("Description").build();
var componentDefinitionAddRequest3 = ComponentDefinitionAddRequest.builder().technicalName("Component 3").displayName("Component 3").description("Description").build();
var componentDefinitionAddRequest4 = ComponentDefinitionAddRequest.builder().technicalName("Component 4").displayName("Component 4").description("Description").build();
var componentDefinitionAddRequest5 = ComponentDefinitionAddRequest.builder().technicalName("Component 5").displayName("Component 5").description("Description").build();
var componentDefinitionAddRequest6 = ComponentDefinitionAddRequest.builder().technicalName("Component 6").displayName("Component 6").description("Description").build();
var componentDefinitionAddRequest7 = ComponentDefinitionAddRequest.builder().technicalName("Component 7").displayName("Component 7").description("Description").build();
var response = dossierTemplateExternalClient.createComponents(dossierTemplate.getId(),
List.of(componentDefinitionAddRequest1, componentDefinitionAddRequest2, componentDefinitionAddRequest3,
componentDefinitionAddRequest4, componentDefinitionAddRequest5, componentDefinitionAddRequest6, componentDefinitionAddRequest7));
assertEquals(response.size(), 7);
// delete first component
dossierTemplateExternalClient.deleteComponents(dossierTemplate.getId(), List.of(response.get(0).getId()));
var softDeletedComponent = dossierTemplateExternalClient.getComponent(dossierTemplate.getId(), response.get(0).getId());
assertNotNull(softDeletedComponent.getSoftDeleteTime());
var response1 = dossierTemplateExternalClient.getComponents(dossierTemplate.getId(), false);
assertEquals(response1.size(), 6);
String firstComponentId = response1.get(0).getId();
String secondComponentId = response1.get(1).getId();
String thirdComponentId = response1.get(2).getId();
var newOrder = dossierTemplateExternalClient.reorderComponents(dossierTemplate.getId(), List.of(secondComponentId));
assertEquals(newOrder.size(), 6);
assertEquals(newOrder.get(0).getId(), secondComponentId);
assertEquals(newOrder.get(0).getRank(), 1);
assertEquals(newOrder.get(1).getId(), firstComponentId);
assertEquals(newOrder.get(1).getRank(), 2);
assertEquals(newOrder.get(2).getId(), thirdComponentId);
assertEquals(newOrder.get(2).getRank(), 3);
// clone dossier template
CloneDossierTemplateRequest cdtr = CloneDossierTemplateRequest.builder().name("Clone of " + dossierTemplate.getName()).cloningUserId("user").build();
var clonedDT = dossierTemplateClient.cloneDossierTemplate(dossierTemplate.getId(), cdtr);
assertEquals(clonedDT.getName(), "Clone of " + dossierTemplate.getName());
var response2 = dossierTemplateExternalClient.getComponents(clonedDT.getId(), false);
assertEquals(response2.size(), 6);
}
}

View File

@ -417,7 +417,6 @@ public class ComponentLogStorageToMongoDBTest extends AbstractPersistenceServerS
}
@Test
@SneakyThrows
@Disabled

Some files were not shown because too many files have changed in this diff.