Compare commits
14 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a441909408 | ||
|
|
a7bdbb5495 | ||
|
|
3c0b9d36d4 | ||
|
|
9ff3035730 | ||
|
|
c933793721 | ||
|
|
4746d50627 | ||
|
|
fdfdba550e | ||
|
|
09d53b7743 | ||
|
|
d691164af6 | ||
|
|
43f9db59d4 | ||
|
|
7669d27e7b | ||
|
|
d7d46d5429 | ||
|
|
205f9c678e | ||
|
|
cf5e235fc9 |
@ -1,111 +0,0 @@
|
|||||||
package com.iqser.red.persistence.service.v1.external.api.impl.controller;
|
|
||||||
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.entity.migration.SaasMigrationStatusEntity;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.migration.SaasMigrationService;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.SaasMigrationStatusPersistenceService;
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.external.resource.MigrationStatusResource;
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.SaasMigrationStatus;
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.saas.migration.MigrationStatusResponse;
|
|
||||||
|
|
||||||
import lombok.AccessLevel;
|
|
||||||
import lombok.RequiredArgsConstructor;
|
|
||||||
import lombok.experimental.FieldDefaults;
|
|
||||||
|
|
||||||
import org.springframework.http.ResponseEntity;
|
|
||||||
import org.springframework.web.bind.annotation.RestController;
|
|
||||||
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.stream.Collectors;
|
|
||||||
|
|
||||||
import static com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.SaasMigrationStatus.*;
|
|
||||||
|
|
||||||
@RestController
|
|
||||||
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
|
|
||||||
@RequiredArgsConstructor
|
|
||||||
public class MigrationStatusController implements MigrationStatusResource {
|
|
||||||
|
|
||||||
SaasMigrationService saasMigrationService;
|
|
||||||
|
|
||||||
SaasMigrationStatusPersistenceService saasMigrationStatusPersistenceService;
|
|
||||||
|
|
||||||
FileStatusService fileStatusService;
|
|
||||||
|
|
||||||
|
|
||||||
public MigrationStatusResponse migrationStatus() {
|
|
||||||
|
|
||||||
int numberOfFilesToMigrate = saasMigrationStatusPersistenceService.countAll();
|
|
||||||
|
|
||||||
Map<SaasMigrationStatus, Integer> filesInStatus = new HashMap<>();
|
|
||||||
filesInStatus.put(MIGRATION_REQUIRED, saasMigrationStatusPersistenceService.countByStatus(MIGRATION_REQUIRED));
|
|
||||||
filesInStatus.put(DOCUMENT_FILES_MIGRATED, saasMigrationStatusPersistenceService.countByStatus(DOCUMENT_FILES_MIGRATED));
|
|
||||||
filesInStatus.put(REDACTION_LOGS_MIGRATED, saasMigrationStatusPersistenceService.countByStatus(REDACTION_LOGS_MIGRATED));
|
|
||||||
filesInStatus.put(ANNOTATION_IDS_MIGRATED, saasMigrationStatusPersistenceService.countByStatus(ANNOTATION_IDS_MIGRATED));
|
|
||||||
filesInStatus.put(FINISHED, saasMigrationStatusPersistenceService.countByStatus(FINISHED));
|
|
||||||
filesInStatus.put(ERROR, saasMigrationStatusPersistenceService.countByStatus(ERROR));
|
|
||||||
|
|
||||||
var filesInErrorState = saasMigrationStatusPersistenceService.findAllByStatus(ERROR);
|
|
||||||
|
|
||||||
var errorCauses = filesInErrorState.stream()
|
|
||||||
.collect(Collectors.toMap(errorFile -> errorFile.getDossierId() + "/" + errorFile.getFileId(), SaasMigrationStatusEntity::getErrorCause));
|
|
||||||
|
|
||||||
return MigrationStatusResponse.builder().numberOfFilesToMigrate(numberOfFilesToMigrate).filesInStatus(filesInStatus).errorCauses(errorCauses).build();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public ResponseEntity<?> startMigrationForFile(String dossierId, String fileId) {
|
|
||||||
|
|
||||||
if (!fileStatusService.fileExists(fileId)) {
|
|
||||||
throw new NotFoundException(String.format("File with id %s does not exist", fileId));
|
|
||||||
}
|
|
||||||
|
|
||||||
saasMigrationService.startMigrationForFile(dossierId, fileId);
|
|
||||||
|
|
||||||
return ResponseEntity.ok().build();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public ResponseEntity<?> revertMigrationForFile(String dossierId, String fileId) {
|
|
||||||
|
|
||||||
if (!fileStatusService.fileExists(fileId)) {
|
|
||||||
throw new NotFoundException(String.format("File with id %s does not exist", fileId));
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!saasMigrationStatusPersistenceService.findById(fileId).getStatus().equals(FINISHED)) {
|
|
||||||
throw new BadRequestException(String.format("File with id %s is not migrated yet, can't revert.", fileId));
|
|
||||||
}
|
|
||||||
|
|
||||||
saasMigrationService.revertMigrationForFile(dossierId, fileId);
|
|
||||||
|
|
||||||
return ResponseEntity.ok().build();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public ResponseEntity<?> requeueErrorFiles() {
|
|
||||||
|
|
||||||
MigrationStatusResponse migrationStatus = migrationStatus();
|
|
||||||
if (!migrationIsFinished(migrationStatus)) {
|
|
||||||
throw new BadRequestException("There are still files processing, please wait until migration has finished to retry!");
|
|
||||||
}
|
|
||||||
|
|
||||||
saasMigrationService.requeueErrorFiles();
|
|
||||||
|
|
||||||
return ResponseEntity.ok().build();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
private static boolean migrationIsFinished(MigrationStatusResponse migrationStatus) {
|
|
||||||
|
|
||||||
return migrationStatus.getFilesInStatus().entrySet()
|
|
||||||
.stream()
|
|
||||||
.filter(e -> e.getValue() > 0)
|
|
||||||
.allMatch(e -> e.getKey().equals(FINISHED) || e.getKey().equals(ERROR));
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
@ -118,11 +118,12 @@ public class ReanalysisController implements ReanalysisResource {
|
|||||||
@PreAuthorize("hasAuthority('" + REANALYZE_FILE + "')")
|
@PreAuthorize("hasAuthority('" + REANALYZE_FILE + "')")
|
||||||
public void ocrFile(@PathVariable(DOSSIER_ID) String dossierId,
|
public void ocrFile(@PathVariable(DOSSIER_ID) String dossierId,
|
||||||
@PathVariable(FILE_ID) String fileId,
|
@PathVariable(FILE_ID) String fileId,
|
||||||
@RequestParam(value = FORCE_PARAM, required = false, defaultValue = FALSE) boolean force) {
|
@RequestParam(value = FORCE_PARAM, required = false, defaultValue = FALSE) boolean force,
|
||||||
|
@RequestParam(value = ALL_PAGES, required = false, defaultValue = FALSE) boolean allPages) {
|
||||||
|
|
||||||
accessControlService.checkDossierExistenceAndAccessPermissionsToDossier(dossierId);
|
accessControlService.checkDossierExistenceAndAccessPermissionsToDossier(dossierId);
|
||||||
validateOCR(dossierId, fileId);
|
validateOCR(dossierId, fileId);
|
||||||
reanalysisService.ocrFile(dossierId, fileId, force);
|
reanalysisService.ocrFile(dossierId, fileId, force, allPages);
|
||||||
auditPersistenceService.audit(AuditRequest.builder()
|
auditPersistenceService.audit(AuditRequest.builder()
|
||||||
.userId(KeycloakSecurity.getUserId())
|
.userId(KeycloakSecurity.getUserId())
|
||||||
.objectId(dossierId)
|
.objectId(dossierId)
|
||||||
@ -140,7 +141,7 @@ public class ReanalysisController implements ReanalysisResource {
|
|||||||
|
|
||||||
accessControlService.checkDossierExistenceAndAccessPermissionsToDossier(dossierId);
|
accessControlService.checkDossierExistenceAndAccessPermissionsToDossier(dossierId);
|
||||||
fileIds.forEach(fileId -> validateOCR(dossierId, fileId));
|
fileIds.forEach(fileId -> validateOCR(dossierId, fileId));
|
||||||
reanalysisService.ocrFiles(dossierId, fileIds);
|
reanalysisService.ocrFiles(dossierId, fileIds, false);
|
||||||
auditPersistenceService.audit(AuditRequest.builder()
|
auditPersistenceService.audit(AuditRequest.builder()
|
||||||
.userId(KeycloakSecurity.getUserId())
|
.userId(KeycloakSecurity.getUserId())
|
||||||
.objectId(dossierId)
|
.objectId(dossierId)
|
||||||
|
|||||||
@ -28,6 +28,7 @@ import org.springframework.web.bind.annotation.RestController;
|
|||||||
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.acl.custom.dossier.DossierACLService;
|
import com.iqser.red.service.persistence.management.v1.processor.acl.custom.dossier.DossierACLService;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
|
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
|
||||||
|
import com.iqser.red.service.persistence.management.v1.processor.exception.ConflictException;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.exception.NotAllowedException;
|
import com.iqser.red.service.persistence.management.v1.processor.exception.NotAllowedException;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.roles.ApplicationRoles;
|
import com.iqser.red.service.persistence.management.v1.processor.roles.ApplicationRoles;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.AccessControlService;
|
import com.iqser.red.service.persistence.management.v1.processor.service.AccessControlService;
|
||||||
@ -101,8 +102,13 @@ public class StatusController implements StatusResource {
|
|||||||
var accessibleDossierIds = filterByPermissionsService.onlyViewableDossierIds(new ArrayList<>(filesByDossier.getValue().keySet()));
|
var accessibleDossierIds = filterByPermissionsService.onlyViewableDossierIds(new ArrayList<>(filesByDossier.getValue().keySet()));
|
||||||
var response = new HashMap<String, List<FileStatus>>();
|
var response = new HashMap<String, List<FileStatus>>();
|
||||||
for (var dossierId : accessibleDossierIds) {
|
for (var dossierId : accessibleDossierIds) {
|
||||||
var allFoundFiles = fileStatusManagementService.findAllDossierIdAndIds(dossierId, filesByDossier.getValue().get(dossierId));
|
var allFoundFiles = fileStatusManagementService.findAllDossierIdAndIds(dossierId,
|
||||||
response.put(dossierId, allFoundFiles.stream().map(FileStatusMapper::toFileStatus).collect(Collectors.toList()));
|
filesByDossier.getValue()
|
||||||
|
.get(dossierId));
|
||||||
|
response.put(dossierId,
|
||||||
|
allFoundFiles.stream()
|
||||||
|
.map(FileStatusMapper::toFileStatus)
|
||||||
|
.collect(Collectors.toList()));
|
||||||
}
|
}
|
||||||
|
|
||||||
return new JSONPrimitive<>(response);
|
return new JSONPrimitive<>(response);
|
||||||
@ -351,6 +357,10 @@ public class StatusController implements StatusResource {
|
|||||||
.build());
|
.build());
|
||||||
|
|
||||||
var dossier = dossierACLService.enhanceDossierWithACLData(dossierManagementService.getDossierById(dossierId, false, false));
|
var dossier = dossierACLService.enhanceDossierWithACLData(dossierManagementService.getDossierById(dossierId, false, false));
|
||||||
|
|
||||||
|
if (dossier.getOwnerId() == null) {
|
||||||
|
throw new ConflictException("Dossier has no owner!");
|
||||||
|
}
|
||||||
if (!dossier.getOwnerId().equals(KeycloakSecurity.getUserId())) {
|
if (!dossier.getOwnerId().equals(KeycloakSecurity.getUserId())) {
|
||||||
|
|
||||||
var fileStatus = fileStatusManagementService.getFileStatus(fileId);
|
var fileStatus = fileStatusManagementService.getFileStatus(fileId);
|
||||||
|
|||||||
@ -23,12 +23,11 @@ import org.springframework.web.bind.annotation.RequestPart;
|
|||||||
import org.springframework.web.bind.annotation.RestController;
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
import org.springframework.web.multipart.MultipartFile;
|
import org.springframework.web.multipart.MultipartFile;
|
||||||
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.FileFormatValidationService;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.exception.NotAllowedException;
|
|
||||||
import com.knecon.fforesight.keycloakcommons.security.KeycloakSecurity;
|
|
||||||
import com.iqser.red.service.pdftron.redaction.v1.api.model.ByteContentDocument;
|
import com.iqser.red.service.pdftron.redaction.v1.api.model.ByteContentDocument;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
|
import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
|
||||||
|
import com.iqser.red.service.persistence.management.v1.processor.exception.NotAllowedException;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.AccessControlService;
|
import com.iqser.red.service.persistence.management.v1.processor.service.AccessControlService;
|
||||||
|
import com.iqser.red.service.persistence.management.v1.processor.service.FileFormatValidationService;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.ReanalysisService;
|
import com.iqser.red.service.persistence.management.v1.processor.service.ReanalysisService;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.UploadService;
|
import com.iqser.red.service.persistence.management.v1.processor.service.UploadService;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.AuditPersistenceService;
|
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.AuditPersistenceService;
|
||||||
@ -37,6 +36,7 @@ import com.iqser.red.service.persistence.service.v1.api.external.resource.Upload
|
|||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.AuditCategory;
|
import com.iqser.red.service.persistence.service.v1.api.shared.model.AuditCategory;
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.FileUploadResult;
|
import com.iqser.red.service.persistence.service.v1.api.shared.model.FileUploadResult;
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.audit.AuditRequest;
|
import com.iqser.red.service.persistence.service.v1.api.shared.model.audit.AuditRequest;
|
||||||
|
import com.knecon.fforesight.keycloakcommons.security.KeycloakSecurity;
|
||||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||||
|
|
||||||
import feign.FeignException;
|
import feign.FeignException;
|
||||||
@ -53,9 +53,9 @@ import lombok.extern.slf4j.Slf4j;
|
|||||||
@SuppressWarnings("PMD")
|
@SuppressWarnings("PMD")
|
||||||
public class UploadController implements UploadResource {
|
public class UploadController implements UploadResource {
|
||||||
|
|
||||||
private static final int THRESHOLD_ENTRIES = 10000;
|
private static final int THRESHOLD_ENTRIES = 10000; // Maximum number of files allowed
|
||||||
private static final int THRESHOLD_SIZE = 1000000000; // 1 GB
|
private static final int THRESHOLD_SIZE = 1000000000; // 1 GB total unzipped data
|
||||||
private static final double THRESHOLD_RATIO = 10;
|
private static final double THRESHOLD_RATIO = 10; // Max allowed compression ratio
|
||||||
|
|
||||||
private final UploadService uploadService;
|
private final UploadService uploadService;
|
||||||
private final ReanalysisService reanalysisService;
|
private final ReanalysisService reanalysisService;
|
||||||
@ -72,31 +72,25 @@ public class UploadController implements UploadResource {
|
|||||||
@Parameter(name = DISABLE_AUTOMATIC_ANALYSIS_PARAM, description = "Disables automatic redaction for the uploaded file, imports only imported redactions") @RequestParam(value = DISABLE_AUTOMATIC_ANALYSIS_PARAM, required = false, defaultValue = "false") boolean disableAutomaticAnalysis) {
|
@Parameter(name = DISABLE_AUTOMATIC_ANALYSIS_PARAM, description = "Disables automatic redaction for the uploaded file, imports only imported redactions") @RequestParam(value = DISABLE_AUTOMATIC_ANALYSIS_PARAM, required = false, defaultValue = "false") boolean disableAutomaticAnalysis) {
|
||||||
|
|
||||||
accessControlService.checkAccessPermissionsToDossier(dossierId);
|
accessControlService.checkAccessPermissionsToDossier(dossierId);
|
||||||
if (file.getOriginalFilename() == null) {
|
|
||||||
|
String originalFilename = file.getOriginalFilename();
|
||||||
|
if (originalFilename == null) {
|
||||||
throw new BadRequestException("Could not upload file, no filename provided.");
|
throw new BadRequestException("Could not upload file, no filename provided.");
|
||||||
}
|
}
|
||||||
|
|
||||||
var extension = getExtension(file.getOriginalFilename());
|
String extension = getExtension(originalFilename);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
switch (extension) {
|
return switch (extension) {
|
||||||
case "zip":
|
case "zip" -> handleZip(dossierId, file.getBytes(), keepManualRedactions, disableAutomaticAnalysis);
|
||||||
return handleZip(dossierId, file.getBytes(), keepManualRedactions, disableAutomaticAnalysis);
|
case "csv" -> uploadService.importCsv(dossierId, file.getBytes());
|
||||||
case "csv":
|
default -> {
|
||||||
return uploadService.importCsv(dossierId, file.getBytes());
|
validateExtensionOrThrow(extension);
|
||||||
default:
|
yield uploadService.processSingleFile(dossierId, originalFilename, file.getBytes(), keepManualRedactions, disableAutomaticAnalysis);
|
||||||
if (!fileFormatValidationService.getAllFileFormats().contains(extension)) {
|
}
|
||||||
throw new BadRequestException("Invalid file uploaded");
|
};
|
||||||
}
|
|
||||||
if (!fileFormatValidationService.getValidFileFormatsForTenant(TenantContext.getTenantId()).contains(extension)) {
|
|
||||||
throw new NotAllowedException("Insufficient permissions");
|
|
||||||
}
|
|
||||||
return uploadService.processSingleFile(dossierId, file.getOriginalFilename(), file.getBytes(), keepManualRedactions, disableAutomaticAnalysis);
|
|
||||||
}
|
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
throw new BadRequestException(e.getMessage(), e);
|
throw new BadRequestException("Failed to process file: " + e.getMessage(), e);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -111,7 +105,6 @@ public class UploadController implements UploadResource {
|
|||||||
accessControlService.verifyUserIsReviewerOrApprover(dossierId, fileId);
|
accessControlService.verifyUserIsReviewerOrApprover(dossierId, fileId);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
|
||||||
reanalysisService.importRedactions(ByteContentDocument.builder().dossierId(dossierId).fileId(fileId).document(file.getBytes()).pages(pageInclusionRequest).build());
|
reanalysisService.importRedactions(ByteContentDocument.builder().dossierId(dossierId).fileId(fileId).document(file.getBytes()).pages(pageInclusionRequest).build());
|
||||||
|
|
||||||
auditPersistenceService.audit(AuditRequest.builder()
|
auditPersistenceService.audit(AuditRequest.builder()
|
||||||
@ -122,84 +115,116 @@ public class UploadController implements UploadResource {
|
|||||||
.details(Map.of("dossierId", dossierId))
|
.details(Map.of("dossierId", dossierId))
|
||||||
.build());
|
.build());
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
throw new BadRequestException(e.getMessage(), e);
|
throw new BadRequestException("Failed to import redactions: " + e.getMessage(), e);
|
||||||
} catch (FeignException e) {
|
} catch (FeignException e) {
|
||||||
throw processFeignException(e);
|
throw processFeignException(e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
private String getExtension(String fileName) {
|
private void validateExtensionOrThrow(String extension) {
|
||||||
|
|
||||||
return fileName.substring(fileName.lastIndexOf(".") + 1).toLowerCase(Locale.ROOT);
|
if (!fileFormatValidationService.getAllFileFormats().contains(extension)) {
|
||||||
|
throw new BadRequestException("Invalid file uploaded (unrecognized extension).");
|
||||||
|
}
|
||||||
|
if (!fileFormatValidationService.getValidFileFormatsForTenant(TenantContext.getTenantId()).contains(extension)) {
|
||||||
|
throw new NotAllowedException("Insufficient permissions for this file type.");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 1. Write the uploaded content to a temp ZIP file
|
||||||
|
* 2. Check the number of entries and reject if too big or if symlinks found
|
||||||
|
* 3. Unzip and process each file, while checking size and ratio.
|
||||||
|
*/
|
||||||
private FileUploadResult handleZip(String dossierId, byte[] fileContent, boolean keepManualRedactions, boolean disableAutomaticAnalysis) throws IOException {
|
private FileUploadResult handleZip(String dossierId, byte[] fileContent, boolean keepManualRedactions, boolean disableAutomaticAnalysis) throws IOException {
|
||||||
|
|
||||||
File tempFile = FileUtils.createTempFile(UUID.randomUUID().toString(), ".zip");
|
File tempZip = FileUtils.createTempFile(UUID.randomUUID().toString(), ".zip");
|
||||||
try (var fileOutputStream = new FileOutputStream(tempFile)) {
|
try (FileOutputStream fos = new FileOutputStream(tempZip)) {
|
||||||
IOUtils.write(fileContent, fileOutputStream);
|
IOUtils.write(fileContent, fos);
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
validateZipEntries(tempZip);
|
||||||
checkForSymlinks(tempFile);
|
|
||||||
|
|
||||||
var zipData = unzip(tempFile, dossierId, keepManualRedactions, disableAutomaticAnalysis);
|
try {
|
||||||
|
ZipData zipData = processZipContents(tempZip, dossierId, keepManualRedactions, disableAutomaticAnalysis);
|
||||||
|
|
||||||
if (zipData.csvBytes != null) {
|
if (zipData.csvBytes != null) {
|
||||||
try {
|
try {
|
||||||
var importResult = uploadService.importCsv(dossierId, zipData.csvBytes);
|
FileUploadResult csvResult = uploadService.importCsv(dossierId, zipData.csvBytes);
|
||||||
zipData.fileUploadResult.getProcessedAttributes().addAll(importResult.getProcessedAttributes());
|
zipData.fileUploadResult.getProcessedAttributes().addAll(csvResult.getProcessedAttributes());
|
||||||
zipData.fileUploadResult.getProcessedFileIds().addAll(importResult.getProcessedFileIds());
|
zipData.fileUploadResult.getProcessedFileIds().addAll(csvResult.getProcessedFileIds());
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
log.debug("CSV file inside ZIP failed", e);
|
log.debug("CSV file inside ZIP failed to import", e);
|
||||||
// TODO return un-processed files to client
|
|
||||||
}
|
}
|
||||||
} else if (zipData.fileUploadResult.getFileIds().isEmpty()) {
|
} else if (zipData.fileUploadResult.getFileIds().isEmpty()) {
|
||||||
if (zipData.containedUnpermittedFiles) {
|
if (zipData.containedUnpermittedFiles) {
|
||||||
throw new NotAllowedException("Zip file contains unpermitted files");
|
throw new NotAllowedException("Zip file contains unpermitted files.");
|
||||||
} else {
|
} else {
|
||||||
throw new BadRequestException("Only unsupported files in zip file");
|
throw new BadRequestException("Only unsupported files in the ZIP.");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return zipData.fileUploadResult;
|
return zipData.fileUploadResult;
|
||||||
|
|
||||||
} finally {
|
} finally {
|
||||||
boolean isDeleted = tempFile.delete();
|
|
||||||
if (!isDeleted) {
|
if (!tempZip.delete()) {
|
||||||
log.warn("tempFile could not be deleted");
|
log.warn("Could not delete temporary ZIP file: {}", tempZip);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
private void checkForSymlinks(File tempFile) throws IOException {
|
private void validateZipEntries(File tempZip) throws IOException {
|
||||||
|
|
||||||
|
try (FileInputStream fis = new FileInputStream(tempZip); ZipFile zipFile = new ZipFile(fis.getChannel())) {
|
||||||
|
|
||||||
|
int count = 0;
|
||||||
|
var entries = zipFile.getEntries();
|
||||||
|
while (entries.hasMoreElements()) {
|
||||||
|
ZipArchiveEntry ze = entries.nextElement();
|
||||||
|
|
||||||
try (var fis = new FileInputStream(tempFile); var zipFile = new ZipFile(fis.getChannel())) {
|
|
||||||
for (var entryEnum = zipFile.getEntries(); entryEnum.hasMoreElements(); ) {
|
|
||||||
var ze = entryEnum.nextElement();
|
|
||||||
if (ze.isUnixSymlink()) {
|
if (ze.isUnixSymlink()) {
|
||||||
throw new BadRequestException("ZIP-files with symlinks are not allowed");
|
throw new BadRequestException("ZIP-files with symlinks are not allowed.");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!ze.isDirectory() && !ze.getName().startsWith(".")) {
|
||||||
|
count++;
|
||||||
|
if (count > THRESHOLD_ENTRIES) {
|
||||||
|
throw new BadRequestException("ZIP-Bomb detected: too many entries.");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
private ZipData unzip(File tempFile, String dossierId, boolean keepManualRedactions, boolean disableAutomaticAnalysis) throws IOException {
|
private ZipData processZipContents(File tempZip, String dossierId, boolean keepManualRedactions, boolean disableAutomaticAnalysis) throws IOException {
|
||||||
|
|
||||||
var zipData = new ZipData();
|
ZipData zipData = new ZipData();
|
||||||
|
|
||||||
try (var fis = new FileInputStream(tempFile); var zipFile = new ZipFile(fis.getChannel())) {
|
try (FileInputStream fis = new FileInputStream(tempZip); ZipFile zipFile = new ZipFile(fis.getChannel())) {
|
||||||
|
|
||||||
for (var entryEnum = zipFile.getEntries(); entryEnum.hasMoreElements(); ) {
|
var entries = zipFile.getEntries();
|
||||||
var ze = entryEnum.nextElement();
|
while (entries.hasMoreElements()) {
|
||||||
zipData.totalEntryArchive++;
|
ZipArchiveEntry entry = entries.nextElement();
|
||||||
|
|
||||||
if (!ze.isDirectory()) {
|
if (entry.isDirectory() || entry.getName().startsWith(".")) {
|
||||||
processFileZipEntry(ze, zipFile, dossierId, keepManualRedactions, zipData, disableAutomaticAnalysis);
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
byte[] entryBytes = readEntryWithRatioCheck(entry, zipFile);
|
||||||
|
zipData.totalSizeArchive += entryBytes.length;
|
||||||
|
if (zipData.totalSizeArchive > THRESHOLD_SIZE) {
|
||||||
|
throw new BadRequestException("ZIP-Bomb detected (exceeds total size limit).");
|
||||||
|
}
|
||||||
|
|
||||||
|
String extension = getExtension(entry.getName());
|
||||||
|
if ("csv".equalsIgnoreCase(extension)) {
|
||||||
|
zipData.csvBytes = entryBytes;
|
||||||
|
} else {
|
||||||
|
handleRegularFile(dossierId, entryBytes, extension, extractFileName(entry.getName()), zipData, keepManualRedactions, disableAutomaticAnalysis);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -207,73 +232,70 @@ public class UploadController implements UploadResource {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
private void processFileZipEntry(ZipArchiveEntry ze, ZipFile zipFile, String dossierId, boolean keepManualRedactions, ZipData zipData, boolean disableAutomaticAnalysis) throws IOException {
|
private byte[] readEntryWithRatioCheck(ZipArchiveEntry entry, ZipFile zipFile) throws IOException {
|
||||||
|
|
||||||
var extension = getExtension(ze.getName());
|
long compressedSize = entry.getCompressedSize() > 0 ? entry.getCompressedSize() : 1;
|
||||||
|
try (var is = zipFile.getInputStream(entry); var bos = new ByteArrayOutputStream()) {
|
||||||
|
|
||||||
final String fileName;
|
byte[] buffer = new byte[4096];
|
||||||
if (ze.getName().lastIndexOf("/") >= 0) {
|
int bytesRead;
|
||||||
fileName = ze.getName().substring(ze.getName().lastIndexOf("/") + 1);
|
int totalUncompressed = 0;
|
||||||
} else {
|
|
||||||
fileName = ze.getName();
|
|
||||||
}
|
|
||||||
|
|
||||||
if (fileName.startsWith(".")) {
|
while ((bytesRead = is.read(buffer)) != -1) {
|
||||||
return;
|
bos.write(buffer, 0, bytesRead);
|
||||||
}
|
totalUncompressed += bytesRead;
|
||||||
|
|
||||||
var entryAsBytes = readCurrentZipEntry(ze, zipFile);
|
double ratio = (double) totalUncompressed / compressedSize;
|
||||||
zipData.totalSizeArchive += entryAsBytes.length;
|
if (ratio > THRESHOLD_RATIO) {
|
||||||
|
throw new BadRequestException("ZIP-Bomb detected (compression ratio too high).");
|
||||||
// 1. the uncompressed data size is too much for the application resource capacity
|
}
|
||||||
// 2. too many entries in the archive can lead to inode exhaustion of the file-system
|
|
||||||
if (zipData.totalSizeArchive > THRESHOLD_SIZE || zipData.totalEntryArchive > THRESHOLD_ENTRIES) {
|
|
||||||
throw new BadRequestException("ZIP-Bomb detected.");
|
|
||||||
}
|
|
||||||
|
|
||||||
if ("csv".equals(extension)) {
|
|
||||||
zipData.csvBytes = entryAsBytes;
|
|
||||||
} else if (fileFormatValidationService.getAllFileFormats().contains(extension)) {
|
|
||||||
|
|
||||||
if (!fileFormatValidationService.getValidFileFormatsForTenant(TenantContext.getTenantId()).contains(extension)) {
|
|
||||||
zipData.containedUnpermittedFiles = true;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
zipData.containedUnpermittedFiles = false;
|
|
||||||
|
|
||||||
try {
|
|
||||||
var result = uploadService.processSingleFile(dossierId, fileName, entryAsBytes, keepManualRedactions, disableAutomaticAnalysis);
|
|
||||||
zipData.fileUploadResult.getFileIds().addAll(result.getFileIds());
|
|
||||||
} catch (Exception e) {
|
|
||||||
log.debug("PDF File inside ZIP failed", e);
|
|
||||||
// TODO return un-processed files to client
|
|
||||||
}
|
}
|
||||||
|
return bos.toByteArray();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
private byte[] readCurrentZipEntry(ZipArchiveEntry ze, ZipFile zipFile) throws IOException {
|
private void handleRegularFile(String dossierId,
|
||||||
|
byte[] fileBytes,
|
||||||
|
String extension,
|
||||||
|
String fileName,
|
||||||
|
ZipData zipData,
|
||||||
|
boolean keepManualRedactions,
|
||||||
|
boolean disableAutomaticAnalysis) {
|
||||||
|
|
||||||
var bos = new ByteArrayOutputStream();
|
if (!fileFormatValidationService.getAllFileFormats().contains(extension)) {
|
||||||
|
zipData.containedUnpermittedFiles = false;
|
||||||
try (var entryStream = zipFile.getInputStream(ze)) {
|
return;
|
||||||
var buffer = new byte[2048];
|
|
||||||
var nBytes = 0;
|
|
||||||
int totalSizeEntry = 0;
|
|
||||||
|
|
||||||
while ((nBytes = entryStream.read(buffer)) > 0) {
|
|
||||||
bos.write(buffer, 0, nBytes);
|
|
||||||
totalSizeEntry += nBytes;
|
|
||||||
|
|
||||||
double compressionRatio = (float) totalSizeEntry / ze.getCompressedSize();
|
|
||||||
if (compressionRatio > THRESHOLD_RATIO) {
|
|
||||||
// ratio between compressed and uncompressed data is highly suspicious, looks like a Zip Bomb Attack
|
|
||||||
throw new BadRequestException("ZIP-Bomb detected.");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return bos.toByteArray();
|
if (!fileFormatValidationService.getValidFileFormatsForTenant(TenantContext.getTenantId()).contains(extension)) {
|
||||||
|
zipData.containedUnpermittedFiles = true;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
FileUploadResult result = uploadService.processSingleFile(dossierId, fileName, fileBytes, keepManualRedactions, disableAutomaticAnalysis);
|
||||||
|
zipData.fileUploadResult.getFileIds().addAll(result.getFileIds());
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.debug("Failed to process file '{}' in ZIP: {}", fileName, e.getMessage(), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
private String extractFileName(String path) {
|
||||||
|
|
||||||
|
int idx = path.lastIndexOf('/');
|
||||||
|
return (idx >= 0) ? path.substring(idx + 1) : path;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
private String getExtension(String fileName) {
|
||||||
|
|
||||||
|
int idx = fileName.lastIndexOf('.');
|
||||||
|
if (idx < 0) {
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
return fileName.substring(idx + 1).toLowerCase(Locale.ROOT);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -282,7 +304,6 @@ public class UploadController implements UploadResource {
|
|||||||
|
|
||||||
byte[] csvBytes;
|
byte[] csvBytes;
|
||||||
int totalSizeArchive;
|
int totalSizeArchive;
|
||||||
int totalEntryArchive;
|
|
||||||
FileUploadResult fileUploadResult = new FileUploadResult();
|
FileUploadResult fileUploadResult = new FileUploadResult();
|
||||||
boolean containedUnpermittedFiles;
|
boolean containedUnpermittedFiles;
|
||||||
|
|
||||||
|
|||||||
@ -1,56 +0,0 @@
|
|||||||
package com.iqser.red.service.persistence.service.v1.api.external.resource;
|
|
||||||
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.saas.migration.MigrationStatusResponse;
|
|
||||||
|
|
||||||
import io.swagger.v3.oas.annotations.Operation;
|
|
||||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
|
||||||
import io.swagger.v3.oas.annotations.responses.ApiResponses;
|
|
||||||
|
|
||||||
import org.springframework.http.MediaType;
|
|
||||||
import org.springframework.http.ResponseEntity;
|
|
||||||
import org.springframework.web.bind.annotation.PostMapping;
|
|
||||||
import org.springframework.web.bind.annotation.RequestParam;
|
|
||||||
import org.springframework.web.bind.annotation.ResponseBody;
|
|
||||||
|
|
||||||
public interface MigrationStatusResource {
|
|
||||||
|
|
||||||
String MIGRATION_STATUS_REST_PATH = ExternalApi.BASE_PATH + "/migration-status";
|
|
||||||
String START_MIGRATION_REST_PATH = ExternalApi.BASE_PATH + "/start_migration";
|
|
||||||
String REVERT_MIGRATION_REST_PATH = ExternalApi.BASE_PATH + "/revert_migration";
|
|
||||||
String RETRY_MIGRATION_REST_PATH = ExternalApi.BASE_PATH + "/retry_migration";
|
|
||||||
|
|
||||||
String FILE_ID = "fileId";
|
|
||||||
String FILE_ID_PATH_VARIABLE = "/{" + FILE_ID + "}";
|
|
||||||
|
|
||||||
String DOSSIER_ID = "dossierId";
|
|
||||||
String DOSSIER_ID_PATH_VARIABLE = "/{" + DOSSIER_ID + "}";
|
|
||||||
|
|
||||||
|
|
||||||
@ResponseBody
|
|
||||||
@PostMapping(value = MIGRATION_STATUS_REST_PATH, produces = MediaType.APPLICATION_JSON_VALUE)
|
|
||||||
@Operation(summary = "Show the status of the migration", description = "None")
|
|
||||||
@ApiResponses(value = {@ApiResponse(responseCode = "200", description = "Success.")})
|
|
||||||
MigrationStatusResponse migrationStatus();
|
|
||||||
|
|
||||||
|
|
||||||
@ResponseBody
|
|
||||||
@PostMapping(value = START_MIGRATION_REST_PATH + FILE_ID_PATH_VARIABLE + DOSSIER_ID_PATH_VARIABLE)
|
|
||||||
@Operation(summary = "Start SAAS migration for specific file", description = "None")
|
|
||||||
@ApiResponses(value = {@ApiResponse(responseCode = "200", description = "Success.")})
|
|
||||||
ResponseEntity<?> startMigrationForFile(@RequestParam(value = DOSSIER_ID) String dossierId, @RequestParam(value = FILE_ID) String fileId);
|
|
||||||
|
|
||||||
|
|
||||||
@ResponseBody
|
|
||||||
@PostMapping(value = REVERT_MIGRATION_REST_PATH + FILE_ID_PATH_VARIABLE + DOSSIER_ID_PATH_VARIABLE)
|
|
||||||
@Operation(summary = "Start SAAS migration for specific file", description = "None")
|
|
||||||
@ApiResponses(value = {@ApiResponse(responseCode = "200", description = "Success.")})
|
|
||||||
ResponseEntity<?> revertMigrationForFile(@RequestParam(value = DOSSIER_ID) String dossierId, @RequestParam(value = FILE_ID) String fileId);
|
|
||||||
|
|
||||||
|
|
||||||
@ResponseBody
|
|
||||||
@PostMapping(value = RETRY_MIGRATION_REST_PATH)
|
|
||||||
@Operation(summary = "Restart SAAS migration for all files in error state", description = "None")
|
|
||||||
@ApiResponses(value = {@ApiResponse(responseCode = "200", description = "Success.")})
|
|
||||||
ResponseEntity<?> requeueErrorFiles();
|
|
||||||
|
|
||||||
}
|
|
||||||
@ -38,6 +38,7 @@ public interface ReanalysisResource {
|
|||||||
|
|
||||||
String EXCLUDED_STATUS_PARAM = "excluded";
|
String EXCLUDED_STATUS_PARAM = "excluded";
|
||||||
String FORCE_PARAM = "force";
|
String FORCE_PARAM = "force";
|
||||||
|
String ALL_PAGES = "allPages";
|
||||||
|
|
||||||
|
|
||||||
@PostMapping(value = REANALYSIS_REST_PATH + DOSSIER_ID_PATH_VARIABLE)
|
@PostMapping(value = REANALYSIS_REST_PATH + DOSSIER_ID_PATH_VARIABLE)
|
||||||
@ -73,7 +74,8 @@ public interface ReanalysisResource {
|
|||||||
@ApiResponses(value = {@ApiResponse(responseCode = "204", description = "OK"), @ApiResponse(responseCode = "409", description = "Conflict"), @ApiResponse(responseCode = "404", description = "Not found"), @ApiResponse(responseCode = "403", description = "Forbidden"), @ApiResponse(responseCode = "400", description = "Cannot OCR approved file")})
|
@ApiResponses(value = {@ApiResponse(responseCode = "204", description = "OK"), @ApiResponse(responseCode = "409", description = "Conflict"), @ApiResponse(responseCode = "404", description = "Not found"), @ApiResponse(responseCode = "403", description = "Forbidden"), @ApiResponse(responseCode = "400", description = "Cannot OCR approved file")})
|
||||||
void ocrFile(@PathVariable(DOSSIER_ID) String dossierId,
|
void ocrFile(@PathVariable(DOSSIER_ID) String dossierId,
|
||||||
@PathVariable(FILE_ID) String fileId,
|
@PathVariable(FILE_ID) String fileId,
|
||||||
@RequestParam(value = FORCE_PARAM, required = false, defaultValue = FALSE) boolean force);
|
@RequestParam(value = FORCE_PARAM, required = false, defaultValue = FALSE) boolean force,
|
||||||
|
@RequestParam(value = ALL_PAGES, required = false, defaultValue = FALSE) boolean allPages);
|
||||||
|
|
||||||
|
|
||||||
@Operation(summary = "Ocr and reanalyze multiple files for a dossier", description = "None")
|
@Operation(summary = "Ocr and reanalyze multiple files for a dossier", description = "None")
|
||||||
|
|||||||
@ -57,7 +57,7 @@ public class AdminInterfaceController {
|
|||||||
|
|
||||||
fileStatusService.validateFileIsNotDeletedAndNotApproved(fileId);
|
fileStatusService.validateFileIsNotDeletedAndNotApproved(fileId);
|
||||||
|
|
||||||
fileStatusService.setStatusOcrQueued(dossierId, fileId);
|
fileStatusService.setStatusOcrQueued(dossierId, fileId, false);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -91,7 +91,7 @@ public class AdminInterfaceController {
|
|||||||
|
|
||||||
if (!dryRun) {
|
if (!dryRun) {
|
||||||
fileStatusService.validateFileIsNotDeletedAndNotApproved(file.getId());
|
fileStatusService.validateFileIsNotDeletedAndNotApproved(file.getId());
|
||||||
fileStatusService.setStatusOcrQueued(file.getDossierId(), file.getId());
|
fileStatusService.setStatusOcrQueued(file.getDossierId(), file.getId(), false);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,35 +0,0 @@
|
|||||||
package com.iqser.red.service.persistence.management.v1.processor.entity.migration;
|
|
||||||
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.SaasMigrationStatus;
|
|
||||||
|
|
||||||
import jakarta.persistence.*;
|
|
||||||
import lombok.AllArgsConstructor;
|
|
||||||
import lombok.Builder;
|
|
||||||
import lombok.Data;
|
|
||||||
import lombok.NoArgsConstructor;
|
|
||||||
|
|
||||||
@Data
|
|
||||||
@Builder
|
|
||||||
@AllArgsConstructor
|
|
||||||
@NoArgsConstructor
|
|
||||||
@Entity
|
|
||||||
@Table(name = "saas_migration_status")
|
|
||||||
public class SaasMigrationStatusEntity {
|
|
||||||
|
|
||||||
@Id
|
|
||||||
private String fileId;
|
|
||||||
|
|
||||||
@Column
|
|
||||||
private String dossierId;
|
|
||||||
|
|
||||||
@Column
|
|
||||||
@Enumerated(EnumType.STRING)
|
|
||||||
private SaasMigrationStatus status;
|
|
||||||
|
|
||||||
@Column
|
|
||||||
private Integer processingErrorCounter;
|
|
||||||
|
|
||||||
@Column
|
|
||||||
private String errorCause;
|
|
||||||
|
|
||||||
}
|
|
||||||
@ -1,170 +0,0 @@
|
|||||||
package com.iqser.red.service.persistence.management.v1.processor.migration;
|
|
||||||
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.*;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository.CommentRepository;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository.FileRepository;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository.annotationentity.*;
|
|
||||||
import com.knecon.fforesight.databasetenantcommons.providers.utils.MagicConverter;
|
|
||||||
|
|
||||||
import jakarta.transaction.Transactional;
|
|
||||||
import lombok.AccessLevel;
|
|
||||||
import lombok.RequiredArgsConstructor;
|
|
||||||
import lombok.experimental.FieldDefaults;
|
|
||||||
import lombok.extern.slf4j.Slf4j;
|
|
||||||
|
|
||||||
import org.springframework.stereotype.Service;
|
|
||||||
|
|
||||||
@Slf4j
|
|
||||||
@Service
|
|
||||||
@Transactional
|
|
||||||
@RequiredArgsConstructor
|
|
||||||
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
|
|
||||||
public class SaasAnnotationIdMigrationService {
|
|
||||||
|
|
||||||
ManualRedactionRepository manualRedactionRepository;
|
|
||||||
RemoveRedactionRepository removeRedactionRepository;
|
|
||||||
ForceRedactionRepository forceRedactionRepository;
|
|
||||||
ResizeRedactionRepository resizeRedactionRepository;
|
|
||||||
RecategorizationRepository recategorizationRepository;
|
|
||||||
LegalBasisChangeRepository legalBasisChangeRepository;
|
|
||||||
CommentRepository commentRepository;
|
|
||||||
FileRepository fileRepository;
|
|
||||||
|
|
||||||
|
|
||||||
public int updateManualAddRedaction(AnnotationEntityId oldAnnotationEntityId, AnnotationEntityId newAnnotationEntityId) {
|
|
||||||
|
|
||||||
if (oldAnnotationEntityId.equals(newAnnotationEntityId)) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
var oldEntry = manualRedactionRepository.findById(oldAnnotationEntityId);
|
|
||||||
if (oldEntry.isPresent()) {
|
|
||||||
|
|
||||||
var newEntry = MagicConverter.convert(oldEntry.get(), ManualRedactionEntryEntity.class);
|
|
||||||
newEntry.setPositions(MagicConverter.convert(oldEntry.get().getPositions(), RectangleEntity.class));
|
|
||||||
newEntry.setFileStatus(fileRepository.findById(oldAnnotationEntityId.getFileId())
|
|
||||||
.get());
|
|
||||||
newEntry.setId(newAnnotationEntityId);
|
|
||||||
|
|
||||||
manualRedactionRepository.deleteById(oldAnnotationEntityId);
|
|
||||||
manualRedactionRepository.save(newEntry);
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public int updateRemoveRedaction(AnnotationEntityId oldAnnotationEntityId, AnnotationEntityId newAnnotationEntityId) {
|
|
||||||
|
|
||||||
if (oldAnnotationEntityId.equals(newAnnotationEntityId)) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
var oldEntry = removeRedactionRepository.findById(oldAnnotationEntityId);
|
|
||||||
if (oldEntry.isPresent()) {
|
|
||||||
|
|
||||||
var newEntry = MagicConverter.convert(oldEntry.get(), IdRemovalEntity.class);
|
|
||||||
newEntry.setFileStatus(fileRepository.findById(oldAnnotationEntityId.getFileId())
|
|
||||||
.get());
|
|
||||||
newEntry.setId(newAnnotationEntityId);
|
|
||||||
|
|
||||||
removeRedactionRepository.deleteById(oldAnnotationEntityId);
|
|
||||||
removeRedactionRepository.save(newEntry);
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public int updateForceRedaction(AnnotationEntityId oldAnnotationEntityId, AnnotationEntityId newAnnotationEntityId) {
|
|
||||||
|
|
||||||
if (oldAnnotationEntityId.equals(newAnnotationEntityId)) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
var oldEntry = forceRedactionRepository.findById(oldAnnotationEntityId);
|
|
||||||
if (oldEntry.isPresent()) {
|
|
||||||
|
|
||||||
var newEntry = MagicConverter.convert(oldEntry.get(), ManualForceRedactionEntity.class);
|
|
||||||
newEntry.setFileStatus(fileRepository.findById(oldAnnotationEntityId.getFileId())
|
|
||||||
.get());
|
|
||||||
newEntry.setId(newAnnotationEntityId);
|
|
||||||
|
|
||||||
forceRedactionRepository.deleteById(oldAnnotationEntityId);
|
|
||||||
forceRedactionRepository.save(newEntry);
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public int updateResizeRedaction(AnnotationEntityId oldAnnotationEntityId, AnnotationEntityId newAnnotationEntityId) {
|
|
||||||
|
|
||||||
if (oldAnnotationEntityId.equals(newAnnotationEntityId)) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
var oldEntry = resizeRedactionRepository.findById(oldAnnotationEntityId);
|
|
||||||
if (oldEntry.isPresent()) {
|
|
||||||
|
|
||||||
var newEntry = MagicConverter.convert(oldEntry.get(), ManualResizeRedactionEntity.class);
|
|
||||||
newEntry.setId(newAnnotationEntityId);
|
|
||||||
newEntry.setPositions(MagicConverter.convert(oldEntry.get().getPositions(), RectangleEntity.class));
|
|
||||||
newEntry.setFileStatus(fileRepository.findById(oldAnnotationEntityId.getFileId())
|
|
||||||
.get());
|
|
||||||
|
|
||||||
resizeRedactionRepository.deleteById(oldAnnotationEntityId);
|
|
||||||
resizeRedactionRepository.save(newEntry);
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public int updateRecategorizationRedaction(AnnotationEntityId oldAnnotationEntityId, AnnotationEntityId newAnnotationEntityId) {
|
|
||||||
|
|
||||||
if (oldAnnotationEntityId.equals(newAnnotationEntityId)) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
var oldEntry = recategorizationRepository.findById(oldAnnotationEntityId);
|
|
||||||
if (oldEntry.isPresent()) {
|
|
||||||
|
|
||||||
var newEntry = MagicConverter.convert(oldEntry.get(), ManualRecategorizationEntity.class);
|
|
||||||
newEntry.setId(newAnnotationEntityId);
|
|
||||||
newEntry.setFileStatus(fileRepository.findById(oldAnnotationEntityId.getFileId())
|
|
||||||
.get());
|
|
||||||
|
|
||||||
recategorizationRepository.deleteById(oldAnnotationEntityId);
|
|
||||||
recategorizationRepository.save(newEntry);
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public int updateLegalBasisChangeRedaction(AnnotationEntityId oldAnnotationEntityId, AnnotationEntityId newAnnotationEntityId) {
|
|
||||||
|
|
||||||
if (oldAnnotationEntityId.equals(newAnnotationEntityId)) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
var oldEntry = legalBasisChangeRepository.findById(oldAnnotationEntityId);
|
|
||||||
if (oldEntry.isPresent()) {
|
|
||||||
|
|
||||||
var newEntry = MagicConverter.convert(oldEntry.get(), ManualLegalBasisChangeEntity.class);
|
|
||||||
newEntry.setId(newAnnotationEntityId);
|
|
||||||
newEntry.setFileStatus(fileRepository.findById(oldAnnotationEntityId.getFileId())
|
|
||||||
.get());
|
|
||||||
|
|
||||||
legalBasisChangeRepository.deleteById(oldAnnotationEntityId);
|
|
||||||
legalBasisChangeRepository.save(newEntry);
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public int updateCommentIds(String fileId, String key, String value) {
|
|
||||||
|
|
||||||
if (key.equals(value)) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
return commentRepository.saasMigrationUpdateAnnotationIds(fileId, key, value);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
@ -1,83 +0,0 @@
|
|||||||
package com.iqser.red.service.persistence.management.v1.processor.migration;
|
|
||||||
|
|
||||||
import java.nio.charset.StandardCharsets;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashSet;
|
|
||||||
|
|
||||||
import org.springframework.stereotype.Service;
|
|
||||||
|
|
||||||
import com.google.common.hash.HashFunction;
|
|
||||||
import com.google.common.hash.Hashing;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.AnnotationEntityId;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.ManualRedactionEntryEntity;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.model.ManualChangesQueryOptions;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.annotations.AddRedactionPersistenceService;
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.type.DictionaryEntryType;
|
|
||||||
|
|
||||||
import lombok.RequiredArgsConstructor;
|
|
||||||
|
|
||||||
@Service
|
|
||||||
@RequiredArgsConstructor
|
|
||||||
public class SaasMigrationManualChangesUpdateService {
|
|
||||||
|
|
||||||
private final AddRedactionPersistenceService addRedactionPersistenceService;
|
|
||||||
|
|
||||||
private final HashFunction hashFunction = Hashing.murmur3_128();
|
|
||||||
|
|
||||||
|
|
||||||
public void convertUnprocessedAddToDictionariesToLocalChanges(String fileId) {
|
|
||||||
|
|
||||||
var unprocessedManualAdds = addRedactionPersistenceService.findEntriesByFileIdAndOptions(fileId, ManualChangesQueryOptions.unprocessedOnly());
|
|
||||||
for (var unprocessedManualAdd : unprocessedManualAdds) {
|
|
||||||
|
|
||||||
if (!unprocessedManualAdd.getDictionaryEntryType().equals(DictionaryEntryType.ENTRY)) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (unprocessedManualAdd.isAddToDictionary() || unprocessedManualAdd.isAddToAllDossiers()) {
|
|
||||||
// copy pending dict change to a new one with a different id. Can't reuse the same one, as it's the primary key of the table.
|
|
||||||
// It has no functionality, its only there, such that the UI can show a pending change.
|
|
||||||
ManualRedactionEntryEntity pendingDictAdd = new ManualRedactionEntryEntity(buildSecondaryId(unprocessedManualAdd.getId(), fileId),
|
|
||||||
unprocessedManualAdd.getUser(),
|
|
||||||
unprocessedManualAdd.getTypeId(),
|
|
||||||
unprocessedManualAdd.getValue(),
|
|
||||||
unprocessedManualAdd.getReason(),
|
|
||||||
unprocessedManualAdd.getLegalBasis(),
|
|
||||||
unprocessedManualAdd.getSection(),
|
|
||||||
unprocessedManualAdd.isRectangle(),
|
|
||||||
unprocessedManualAdd.isAddToDictionary(),
|
|
||||||
unprocessedManualAdd.isAddToAllDossiers(),
|
|
||||||
unprocessedManualAdd.isAddToDossierDictionary(),
|
|
||||||
DictionaryEntryType.ENTRY,
|
|
||||||
unprocessedManualAdd.getRequestDate(),
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
new ArrayList<>(unprocessedManualAdd.getPositions()),
|
|
||||||
unprocessedManualAdd.getFileStatus(),
|
|
||||||
unprocessedManualAdd.getTextBefore(),
|
|
||||||
unprocessedManualAdd.getTextAfter(),
|
|
||||||
unprocessedManualAdd.getSourceId(),
|
|
||||||
new HashSet<>(unprocessedManualAdd.getTypeIdsOfModifiedDictionaries()));
|
|
||||||
|
|
||||||
addRedactionPersistenceService.update(pendingDictAdd);
|
|
||||||
|
|
||||||
// change existing dict add to unprocessed manual add. ID must match with prior entry, such that other unprocessed manual changes may be applied to it.
|
|
||||||
unprocessedManualAdd.setAddToDictionary(false);
|
|
||||||
unprocessedManualAdd.setAddToAllDossiers(false);
|
|
||||||
unprocessedManualAdd.setLegalBasis("");
|
|
||||||
unprocessedManualAdd.setTypeIdsOfModifiedDictionaries(Collections.emptySet());
|
|
||||||
unprocessedManualAdd.setDictionaryEntryType(null);
|
|
||||||
|
|
||||||
addRedactionPersistenceService.update(unprocessedManualAdd);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
private AnnotationEntityId buildSecondaryId(AnnotationEntityId annotationEntityId, String fileId) {
|
|
||||||
|
|
||||||
return new AnnotationEntityId(hashFunction.hashString(annotationEntityId.getAnnotationId(), StandardCharsets.UTF_8).toString(), fileId);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
@ -1,396 +0,0 @@
|
|||||||
package com.iqser.red.service.persistence.management.v1.processor.migration;
|
|
||||||
|
|
||||||
import static com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration.MIGRATION_REQUEST_QUEUE;
|
|
||||||
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.Set;
|
|
||||||
import java.util.concurrent.atomic.AtomicInteger;
|
|
||||||
|
|
||||||
import org.springframework.amqp.rabbit.core.RabbitTemplate;
|
|
||||||
import org.springframework.stereotype.Service;
|
|
||||||
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.AnnotationEntityId;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.exception.InternalServerErrorException;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.model.ManualChangesQueryOptions;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.CommentService;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.DossierService;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.IndexingService;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.job.AutomaticAnalysisJob;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.layoutparsing.LayoutParsingRequestFactory;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.manualredactions.ManualRedactionProviderService;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.manualredactions.ManualRedactionService;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.FileStatusPersistenceService;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.SaasMigrationStatusPersistenceService;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.settings.FileManagementServiceSettings;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.utils.StorageIdUtils;
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.migration.MigratedIds;
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.entitymapped.ManualRedactionEntry;
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.SaasMigrationStatus;
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
|
|
||||||
import com.iqser.red.service.redaction.v1.model.MigrationRequest;
|
|
||||||
import com.iqser.red.storage.commons.exception.StorageException;
|
|
||||||
import com.iqser.red.storage.commons.exception.StorageObjectDoesNotExist;
|
|
||||||
import com.iqser.red.storage.commons.service.StorageService;
|
|
||||||
import com.knecon.fforesight.databasetenantcommons.providers.TenantSyncService;
|
|
||||||
import com.knecon.fforesight.service.layoutparser.internal.api.queue.LayoutParsingQueueNames;
|
|
||||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
|
||||||
import com.knecon.fforesight.tenantcommons.TenantProvider;
|
|
||||||
import com.knecon.fforesight.tenantcommons.model.TenantSyncEvent;
|
|
||||||
import com.knecon.fforesight.tenantcommons.model.UpdateDetailsRequest;
|
|
||||||
|
|
||||||
import lombok.AccessLevel;
|
|
||||||
import lombok.RequiredArgsConstructor;
|
|
||||||
import lombok.experimental.FieldDefaults;
|
|
||||||
import lombok.extern.slf4j.Slf4j;
|
|
||||||
|
|
||||||
@Slf4j
|
|
||||||
@Service
|
|
||||||
@RequiredArgsConstructor
|
|
||||||
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
|
|
||||||
public class SaasMigrationService implements TenantSyncService {
|
|
||||||
|
|
||||||
AutomaticAnalysisJob automaticAnalysisJob;
|
|
||||||
FileStatusPersistenceService fileStatusPersistenceService;
|
|
||||||
SaasMigrationStatusPersistenceService saasMigrationStatusPersistenceService;
|
|
||||||
DossierService dossierService;
|
|
||||||
ManualRedactionProviderService manualRedactionProviderService;
|
|
||||||
TenantProvider tenantProvider;
|
|
||||||
IndexingService indexingService;
|
|
||||||
LayoutParsingRequestFactory layoutParsingRequestFactory;
|
|
||||||
RabbitTemplate rabbitTemplate;
|
|
||||||
FileManagementServiceSettings settings;
|
|
||||||
StorageService storageService;
|
|
||||||
SaasAnnotationIdMigrationService saasAnnotationIdMigrationService;
|
|
||||||
UncompressedFilesMigrationService uncompressedFilesMigrationService;
|
|
||||||
ManualRedactionService manualRedactionService;
|
|
||||||
CommentService commentService;
|
|
||||||
RankDeDuplicationService rankDeDuplicationService;
|
|
||||||
SaasMigrationManualChangesUpdateService saasMigrationManualChangesUpdateService;
|
|
||||||
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public synchronized void syncTenant(TenantSyncEvent tenantSyncEvent) {
|
|
||||||
|
|
||||||
startMigrationForTenant(tenantSyncEvent.getTenantId());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Persistence-Service needs to be scaled to 1.
|
|
||||||
|
|
||||||
|
|
||||||
public void startMigrationForTenant(String tenantId) {
|
|
||||||
|
|
||||||
// TODO migrate rules.
|
|
||||||
automaticAnalysisJob.stopForTenant(tenantId);
|
|
||||||
|
|
||||||
log.info("Starting uncompressed files migration ...");
|
|
||||||
uncompressedFilesMigrationService.migrateUncompressedFiles(tenantId);
|
|
||||||
log.info("Finished uncompressed files migration ...");
|
|
||||||
|
|
||||||
rankDeDuplicationService.deduplicate();
|
|
||||||
int numberOfFiles = 0;
|
|
||||||
|
|
||||||
var files = saasMigrationStatusPersistenceService.findAll();
|
|
||||||
|
|
||||||
for (var file : files) {
|
|
||||||
|
|
||||||
var dossier = dossierService.getDossierById(file.getDossierId());
|
|
||||||
|
|
||||||
if (dossier.getHardDeletedTime() != null) {
|
|
||||||
if (fileStatusPersistenceService.getStatus(file.getFileId()).getHardDeletedTime() != null) {
|
|
||||||
saasMigrationStatusPersistenceService.updateStatus(file.getFileId(), SaasMigrationStatus.FINISHED);
|
|
||||||
continue;
|
|
||||||
} else {
|
|
||||||
fileStatusPersistenceService.hardDelete(file.getFileId(), dossier.getHardDeletedTime());
|
|
||||||
saasMigrationStatusPersistenceService.updateStatus(file.getFileId(), SaasMigrationStatus.FINISHED);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (fileStatusPersistenceService.getStatus(file.getFileId()).getHardDeletedTime() != null) {
|
|
||||||
saasMigrationStatusPersistenceService.updateStatus(file.getFileId(), SaasMigrationStatus.FINISHED);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!file.getStatus().equals(SaasMigrationStatus.MIGRATION_REQUIRED)) {
|
|
||||||
log.info("Skipping {} for tenant {} since migration status is {}", file.getFileId(), TenantContext.getTenantId(), file.getStatus());
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// delete NER_ENTITIES since offsets depend on old document structure.
|
|
||||||
storageService.deleteObject(TenantContext.getTenantId(), StorageIdUtils.getStorageId(file.getDossierId(), file.getFileId(), FileType.NER_ENTITIES));
|
|
||||||
|
|
||||||
var layoutParsingRequest = layoutParsingRequestFactory.build(dossier.getDossierTemplate().getId(), file.getDossierId(), file.getFileId(), false);
|
|
||||||
|
|
||||||
rabbitTemplate.convertAndSend(LayoutParsingQueueNames.LAYOUT_PARSING_REQUEST_EXCHANGE, TenantContext.getTenantId(), layoutParsingRequest);
|
|
||||||
|
|
||||||
numberOfFiles++;
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
log.info("Added {} documents for tenant {} to Layout-Parsing queue for saas migration", numberOfFiles, TenantContext.getTenantId());
|
|
||||||
if (numberOfFiles == 0) {
|
|
||||||
finalizeMigration();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public void startMigrationForFile(String dossierId, String fileId) {
|
|
||||||
|
|
||||||
var dossier = dossierService.getDossierById(dossierId);
|
|
||||||
|
|
||||||
if (dossier.getHardDeletedTime() != null) {
|
|
||||||
if (fileStatusPersistenceService.getStatus(fileId).getHardDeletedTime() != null) {
|
|
||||||
saasMigrationStatusPersistenceService.updateStatus(fileId, SaasMigrationStatus.FINISHED);
|
|
||||||
return;
|
|
||||||
} else {
|
|
||||||
fileStatusPersistenceService.hardDelete(fileId, dossier.getHardDeletedTime());
|
|
||||||
saasMigrationStatusPersistenceService.updateStatus(fileId, SaasMigrationStatus.FINISHED);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (fileStatusPersistenceService.getStatus(fileId).getHardDeletedTime() != null) {
|
|
||||||
saasMigrationStatusPersistenceService.updateStatus(fileId, SaasMigrationStatus.FINISHED);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
log.info("Starting Migration for dossierId {} and fileId {}", dossierId, fileId);
|
|
||||||
saasMigrationStatusPersistenceService.createMigrationRequiredStatus(dossierId, fileId);
|
|
||||||
var layoutParsingRequest = layoutParsingRequestFactory.build(dossier.getDossierTemplate().getId(), dossierId, fileId, false);
|
|
||||||
rabbitTemplate.convertAndSend(LayoutParsingQueueNames.LAYOUT_PARSING_REQUEST_EXCHANGE, TenantContext.getTenantId(), layoutParsingRequest);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public void handleLayoutParsingFinished(String dossierId, String fileId) {
|
|
||||||
|
|
||||||
if (!layoutParsingFilesExist(dossierId, fileId)) {
|
|
||||||
saasMigrationStatusPersistenceService.updateErrorStatus(fileId, "Layout parsing files not written!");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
log.info("Layout Parsing finished for saas migration for tenant {} dossier {} and file {}", TenantContext.getTenantId(), dossierId, fileId);
|
|
||||||
saasMigrationStatusPersistenceService.updateStatus(fileId, SaasMigrationStatus.DOCUMENT_FILES_MIGRATED);
|
|
||||||
|
|
||||||
if (fileStatusPersistenceService.getStatus(fileId).getWorkflowStatus().equals(WorkflowStatus.APPROVED)) {
|
|
||||||
saasMigrationManualChangesUpdateService.convertUnprocessedAddToDictionariesToLocalChanges(fileId);
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
indexingService.reindex(dossierId, Set.of(fileId), false);
|
|
||||||
|
|
||||||
String dossierTemplateId = dossierService.getDossierById(dossierId).getDossierTemplateId();
|
|
||||||
|
|
||||||
rabbitTemplate.convertAndSend(MIGRATION_REQUEST_QUEUE,
|
|
||||||
MigrationRequest.builder()
|
|
||||||
.dossierTemplateId(dossierTemplateId)
|
|
||||||
.dossierId(dossierId)
|
|
||||||
.fileId(fileId)
|
|
||||||
.fileIsApproved(fileStatusPersistenceService.getStatus(fileId).getWorkflowStatus().equals(WorkflowStatus.APPROVED))
|
|
||||||
.manualRedactions(manualRedactionProviderService.getManualRedactions(fileId, ManualChangesQueryOptions.allWithoutDeleted()))
|
|
||||||
.entitiesWithComments(commentService.getCommentCounts(fileId).keySet())
|
|
||||||
.build());
|
|
||||||
} catch (Exception e) {
|
|
||||||
log.error("Queuing of entityLog migration failed with {}", e.getMessage());
|
|
||||||
saasMigrationStatusPersistenceService.updateErrorStatus(fileId, String.format("Queuing of entityLog migration failed with %s", e.getMessage()));
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
private boolean layoutParsingFilesExist(String dossierId, String fileId) {
|
|
||||||
|
|
||||||
return storageService.objectExists(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.DOCUMENT_STRUCTURE)) //
|
|
||||||
&& storageService.objectExists(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.DOCUMENT_TEXT)) //
|
|
||||||
&& storageService.objectExists(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.DOCUMENT_PAGES)) //
|
|
||||||
&& storageService.objectExists(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.DOCUMENT_POSITION));
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public void handleEntityLogMigrationFinished(String dossierId, String fileId) {
|
|
||||||
|
|
||||||
if (!entityLogMigrationFilesExist(dossierId, fileId)) {
|
|
||||||
saasMigrationStatusPersistenceService.updateErrorStatus(fileId, "Migration Files not written!");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
saasMigrationStatusPersistenceService.updateStatus(fileId, SaasMigrationStatus.REDACTION_LOGS_MIGRATED);
|
|
||||||
|
|
||||||
log.info("EntityLog migration finished for saas migration for tenant {} dossier {} and file {}", TenantContext.getTenantId(), dossierId, fileId);
|
|
||||||
migrateAnnotationIdsAndAddManualAddRedactionsAndDeleteSectionGrid(dossierId, fileId);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
private boolean entityLogMigrationFilesExist(String dossierId, String fileId) {
|
|
||||||
|
|
||||||
return storageService.objectExists(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.ENTITY_LOG)) && storageService.objectExists(
|
|
||||||
TenantContext.getTenantId(),
|
|
||||||
StorageIdUtils.getStorageId(dossierId, fileId, FileType.MIGRATED_IDS));
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public void handleError(String dossierId, String fileId, String errorCause, String retryExchange) {
|
|
||||||
|
|
||||||
var migrationEntry = saasMigrationStatusPersistenceService.findById(fileId);
|
|
||||||
Integer numErrors = migrationEntry.getProcessingErrorCounter();
|
|
||||||
if (numErrors != null && numErrors <= settings.getMaxErrorRetries()) {
|
|
||||||
saasMigrationStatusPersistenceService.updateErrorCounter(fileId, numErrors + 1, errorCause);
|
|
||||||
rabbitTemplate.convertAndSend(retryExchange, TenantContext.getTenantId(), MigrationRequest.builder().dossierId(dossierId).fileId(fileId).build());
|
|
||||||
log.error("Retrying error during saas migration for tenant {} dossier {} and file {}, cause {}", TenantContext.getTenantId(), dossierId, fileId, errorCause);
|
|
||||||
} else {
|
|
||||||
saasMigrationStatusPersistenceService.updateErrorStatus(fileId, errorCause);
|
|
||||||
log.error("Error during saas migration for tenant {} dossier {} and file {}, cause {}", TenantContext.getTenantId(), dossierId, fileId, errorCause);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public void requeueErrorFiles() {
|
|
||||||
|
|
||||||
automaticAnalysisJob.stopForTenant(TenantContext.getTenantId());
|
|
||||||
saasMigrationStatusPersistenceService.findAllByStatus(SaasMigrationStatus.ERROR)
|
|
||||||
.forEach(migrationStatus -> startMigrationForFile(migrationStatus.getDossierId(), migrationStatus.getFileId()));
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * Second migration phase for a file: rewrites annotation ids to their new
 * values, soft-deletes obsolete force redactions, adds the replacement manual
 * redaction entries, removes the legacy section-grid/NER artifacts and finally
 * marks the file's migration as FINISHED.
 *
 * <p>The step order matters: ids must be rewritten before redaction entries
 * referencing them are touched, and the status is only advanced once every
 * step succeeded.
 *
 * @param dossierId dossier the file belongs to
 * @param fileId    file to migrate
 */
private void migrateAnnotationIdsAndAddManualAddRedactionsAndDeleteSectionGrid(String dossierId, String fileId) {

    // Mapping and entry lists were produced by the entity-log migration step
    // and stored as MIGRATED_IDS; getMigratedIds throws NotFoundException if missing.
    MigratedIds migratedIds = getMigratedIds(dossierId, fileId);
    Map<String, String> oldToNewMapping = migratedIds.buildOldToNewMapping();
    updateAnnotationIds(dossierId, fileId, oldToNewMapping);
    List<String> forceRedactionIdsToDelete = migratedIds.getForceRedactionIdsToDelete();
    softDeleteForceRedactions(fileId, forceRedactionIdsToDelete);
    log.info("Soft-deleted force redactions.");
    List<ManualRedactionEntry> manualRedactionEntriesToAdd = migratedIds.getManualRedactionEntriesToAdd();
    int count = addManualRedactionEntries(manualRedactionEntriesToAdd);
    log.info("Added {} additional manual entries.", count);
    deleteSectionGridAndNerEntitiesFiles(dossierId, fileId);
    saasMigrationStatusPersistenceService.updateStatus(fileId, SaasMigrationStatus.FINISHED);

    log.info("AnnotationIds migration finished for saas migration for tenant {} dossier {} and file {}", TenantContext.getTenantId(), dossierId, fileId);
    finalizeMigration(); // AutomaticAnalysisJob should be re-enabled by re-starting the persistence service pod after a rule change
}
|
|
||||||
|
|
||||||
|
|
||||||
private void deleteSectionGridAndNerEntitiesFiles(String dossierId, String fileId) {
|
|
||||||
|
|
||||||
try {
|
|
||||||
storageService.deleteObject(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.SECTION_GRID));
|
|
||||||
} catch (StorageObjectDoesNotExist e) {
|
|
||||||
log.info("No sectiongrid found for {}, {}, ignoring....", dossierId, fileId);
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
storageService.deleteObject(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.NER_ENTITIES));
|
|
||||||
} catch (StorageObjectDoesNotExist e) {
|
|
||||||
log.info("No ner entities file found for {}, {}, ignoring....", dossierId, fileId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * Soft-deletes the given force redactions so they can be restored later if
 * the migration is reverted (see {@code undeleteForceRedactions}).
 *
 * @param fileId                    file the redactions belong to
 * @param forceRedactionIdsToDelete ids of the force redactions to soft-delete
 */
private void softDeleteForceRedactions(String fileId, List<String> forceRedactionIdsToDelete) {

    manualRedactionService.softDeleteForceRedactions(fileId, forceRedactionIdsToDelete);
}
|
|
||||||
|
|
||||||
|
|
||||||
private int addManualRedactionEntries(List<ManualRedactionEntry> manualRedactionEntriesToAdd) {
|
|
||||||
|
|
||||||
manualRedactionEntriesToAdd.forEach(add -> {
|
|
||||||
if (add.getSection() != null && add.getSection().length() > 254) {
|
|
||||||
add.setSection(add.getSection().substring(0, 254));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
return manualRedactionService.addManualRedactionEntries(manualRedactionEntriesToAdd, true);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * Reverts the annotation-id migration for a single file: restores the old
 * annotation ids, removes the manual redaction entries the migration added,
 * un-deletes the force redactions it soft-deleted and resets the migration
 * status to MIGRATION_REQUIRED so the file can be migrated again.
 *
 * @param dossierId dossier the file belongs to
 * @param fileId    file whose migration is reverted
 */
public void revertMigrationForFile(String dossierId, String fileId) {

    log.info("Reverting Migration for dossierId {} and fileId {}", dossierId, fileId);
    MigratedIds migratedIds = getMigratedIds(dossierId, fileId);
    // Apply the id mapping in the reverse direction (new -> old).
    Map<String, String> newToOldMapping = migratedIds.buildNewToOldMapping();
    updateAnnotationIds(dossierId, fileId, newToOldMapping);
    deleteManualRedactionEntries(migratedIds.getManualRedactionEntriesToAdd());
    undeleteForceRedactions(fileId, migratedIds.getForceRedactionIdsToDelete());
    saasMigrationStatusPersistenceService.createMigrationRequiredStatus(dossierId, fileId);
}
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * Restores force redactions that were soft-deleted by the migration
 * (the counterpart of {@code softDeleteForceRedactions}).
 *
 * @param fileId                    file the redactions belong to
 * @param forceRedactionIdsToDelete ids of the force redactions to restore
 */
private void undeleteForceRedactions(String fileId, List<String> forceRedactionIdsToDelete) {

    manualRedactionService.undeleteForceRedactions(fileId, forceRedactionIdsToDelete);

}
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * Removes the manual redaction entries that the migration previously added;
 * used when reverting a file's migration.
 *
 * @param manualRedactionEntriesToAdd the entries the migration added, to be deleted again
 */
private void deleteManualRedactionEntries(List<ManualRedactionEntry> manualRedactionEntriesToAdd) {

    manualRedactionService.deleteManualRedactionEntries(manualRedactionEntriesToAdd);
}
|
|
||||||
|
|
||||||
|
|
||||||
private void updateAnnotationIds(String dossierId, String fileId, Map<String, String> idMapping) {
|
|
||||||
|
|
||||||
try {
|
|
||||||
updateAnnotationIds(fileId, idMapping);
|
|
||||||
} catch (Exception e) {
|
|
||||||
String message = String.format("Error during annotation id migration for tenant %s dossier %s and file %s, cause %s",
|
|
||||||
TenantContext.getTenantId(),
|
|
||||||
dossierId,
|
|
||||||
fileId,
|
|
||||||
e.getMessage());
|
|
||||||
saasMigrationStatusPersistenceService.updateErrorStatus(fileId, message);
|
|
||||||
log.error(message);
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
private void finalizeMigration() {
|
|
||||||
|
|
||||||
if (saasMigrationStatusPersistenceService.countByStatus(SaasMigrationStatus.FINISHED) == saasMigrationStatusPersistenceService.countAll()) {
|
|
||||||
// automaticAnalysisJob.startForTenant(TenantContext.getTenantId()); // AutomaticAnalysisJob should be re-enabled by re-starting the persistence service pod after a rule change
|
|
||||||
tenantProvider.updateDetails(TenantContext.getTenantId(), UpdateDetailsRequest.builder().key("persistence-service-ready").value(true).build());
|
|
||||||
log.info("Saas migration finished for tenantId {}, re-enabled scheduler", TenantContext.getTenantId());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public void updateAnnotationIds(String fileId, Map<String, String> idMapping) {
|
|
||||||
|
|
||||||
AtomicInteger numUpdates = new AtomicInteger(0);
|
|
||||||
AtomicInteger numCommentUpdates = new AtomicInteger(0);
|
|
||||||
idMapping.forEach((key, value) -> {
|
|
||||||
AnnotationEntityId oldAnnotationEntityId = buildAnnotationId(fileId, key);
|
|
||||||
AnnotationEntityId newAnnotationEntityId = buildAnnotationId(fileId, value);
|
|
||||||
numUpdates.getAndAdd(saasAnnotationIdMigrationService.updateManualAddRedaction(oldAnnotationEntityId, newAnnotationEntityId));
|
|
||||||
numUpdates.getAndAdd(saasAnnotationIdMigrationService.updateRemoveRedaction(oldAnnotationEntityId, newAnnotationEntityId));
|
|
||||||
numUpdates.getAndAdd(saasAnnotationIdMigrationService.updateForceRedaction(oldAnnotationEntityId, newAnnotationEntityId));
|
|
||||||
numUpdates.getAndAdd(saasAnnotationIdMigrationService.updateResizeRedaction(oldAnnotationEntityId, newAnnotationEntityId));
|
|
||||||
numUpdates.getAndAdd(saasAnnotationIdMigrationService.updateRecategorizationRedaction(oldAnnotationEntityId, newAnnotationEntityId));
|
|
||||||
numUpdates.getAndAdd(saasAnnotationIdMigrationService.updateLegalBasisChangeRedaction(oldAnnotationEntityId, newAnnotationEntityId));
|
|
||||||
numCommentUpdates.getAndAdd(saasAnnotationIdMigrationService.updateCommentIds(fileId, key, value));
|
|
||||||
});
|
|
||||||
log.info("Migrated {} annotationIds and {} comments for file {}", numUpdates.get(), numCommentUpdates, fileId);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * Builds the composite entity id used by the annotation tables
 * (annotation id plus owning file id — note the argument order is swapped
 * relative to this method's parameters).
 *
 * @param fileId       file the annotation belongs to
 * @param annotationId the annotation's own id
 * @return the composite id for repository lookups/updates
 */
private AnnotationEntityId buildAnnotationId(String fileId, String annotationId) {

    return new AnnotationEntityId(annotationId, fileId);
}
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * Loads the MIGRATED_IDS artifact written by the entity-log migration step
 * from storage and deserializes it.
 *
 * @param dossierId dossier the file belongs to
 * @param fileId    file whose migrated-ids artifact is loaded
 * @return the deserialized {@code MigratedIds}
 * @throws NotFoundException            if the artifact does not exist in storage
 * @throws InternalServerErrorException for any other storage failure
 */
private MigratedIds getMigratedIds(String dossierId, String fileId) {

    try {
        return storageService.readJSONObject(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.MIGRATED_IDS), MigratedIds.class);
    } catch (StorageObjectDoesNotExist e) {
        throw new NotFoundException(String.format("MigratedIds does not exist for Dossier ID \"%s\" and File ID \"%s\"!", dossierId, fileId));
    } catch (StorageException e) {
        // NOTE(review): only the message is propagated here; the original cause
        // and stack trace are dropped — consider a cause-preserving constructor
        // if InternalServerErrorException offers one.
        throw new InternalServerErrorException(e.getMessage());
    }
}
|
|
||||||
|
|
||||||
}
|
|
||||||
@ -122,7 +122,7 @@ public class FileStatusProcessingUpdateService {
|
|||||||
} else {
|
} else {
|
||||||
fileStatusService.setStatusOcrProcessing(fileId,
|
fileStatusService.setStatusOcrProcessing(fileId,
|
||||||
fileEntity.getProcessingStatus().equals(ProcessingStatus.OCR_PROCESSING) ? fileEntity.getProcessingErrorCounter() + 1 : 0);
|
fileEntity.getProcessingStatus().equals(ProcessingStatus.OCR_PROCESSING) ? fileEntity.getProcessingErrorCounter() + 1 : 0);
|
||||||
fileStatusService.addToOcrQueue(dossierId, fileId, 2);
|
fileStatusService.addToOcrQueue(dossierId, fileId, 2, false);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -316,7 +316,7 @@ public class FileStatusService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
log.info("Add file: {} from dossier {} to OCR queue", fileId, dossierId);
|
log.info("Add file: {} from dossier {} to OCR queue", fileId, dossierId);
|
||||||
setStatusOcrQueued(dossierId, fileId);
|
setStatusOcrQueued(dossierId, fileId, false);
|
||||||
sendReadOnlyAnalysisEvent(dossierId, fileId, fileEntity);
|
sendReadOnlyAnalysisEvent(dossierId, fileId, fileEntity);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@ -352,7 +352,6 @@ public class FileStatusService {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
boolean forceAnalysis = false;
|
boolean forceAnalysis = false;
|
||||||
if (settings.isLlmNerServiceEnabled()) {
|
if (settings.isLlmNerServiceEnabled()) {
|
||||||
boolean objectExists = fileManagementStorageService.objectExists(dossierId, fileId, FileType.LLM_NER_ENTITIES);
|
boolean objectExists = fileManagementStorageService.objectExists(dossierId, fileId, FileType.LLM_NER_ENTITIES);
|
||||||
@ -386,7 +385,7 @@ public class FileStatusService {
|
|||||||
|
|
||||||
boolean reanalyse = fileModel.isReanalysisRequired() || analysisType.equals(AnalysisType.MANUAL_REDACTION_REANALYZE);
|
boolean reanalyse = fileModel.isReanalysisRequired() || analysisType.equals(AnalysisType.MANUAL_REDACTION_REANALYZE);
|
||||||
MessageType messageType = calculateMessageType(reanalyse, fileModel.getProcessingStatus(), fileModel);
|
MessageType messageType = calculateMessageType(reanalyse, fileModel.getProcessingStatus(), fileModel);
|
||||||
if(analysisType == AnalysisType.FORCE_ANALYSE || forceAnalysis) {
|
if (analysisType == AnalysisType.FORCE_ANALYSE || forceAnalysis) {
|
||||||
messageType = MessageType.ANALYSE;
|
messageType = MessageType.ANALYSE;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -567,7 +566,7 @@ public class FileStatusService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
public void setStatusOcrQueued(String dossierId, String fileId) {
|
public void setStatusOcrQueued(String dossierId, String fileId, boolean allPages) {
|
||||||
|
|
||||||
FileEntity fileStatus = fileStatusPersistenceService.getStatus(fileId);
|
FileEntity fileStatus = fileStatusPersistenceService.getStatus(fileId);
|
||||||
|
|
||||||
@ -579,7 +578,7 @@ public class FileStatusService {
|
|||||||
updateOCRStartTime(fileId);
|
updateOCRStartTime(fileId);
|
||||||
fileStatusPersistenceService.updateProcessingStatus(fileId, ProcessingStatus.OCR_PROCESSING_QUEUED);
|
fileStatusPersistenceService.updateProcessingStatus(fileId, ProcessingStatus.OCR_PROCESSING_QUEUED);
|
||||||
websocketService.sendAnalysisEvent(dossierId, fileId, AnalyseStatus.OCR_PROCESSING, fileStatus.getNumberOfAnalyses() + 1);
|
websocketService.sendAnalysisEvent(dossierId, fileId, AnalyseStatus.OCR_PROCESSING, fileStatus.getNumberOfAnalyses() + 1);
|
||||||
addToOcrQueue(dossierId, fileId, 2);
|
addToOcrQueue(dossierId, fileId, 2, allPages);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -760,13 +759,16 @@ public class FileStatusService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
public void addToOcrQueue(String dossierId, String fileId, int priority) {
|
public void addToOcrQueue(String dossierId, String fileId, int priority, boolean allPages) {
|
||||||
|
|
||||||
var removeWatermark = dossierTemplatePersistenceService.getDossierTemplate(dossierPersistenceService.getDossierTemplateId(dossierId)).isRemoveWatermark();
|
var removeWatermark = dossierTemplatePersistenceService.getDossierTemplate(dossierPersistenceService.getDossierTemplateId(dossierId)).isRemoveWatermark();
|
||||||
Set<AzureOcrFeature> features = new HashSet<>();
|
Set<AzureOcrFeature> features = new HashSet<>();
|
||||||
if (removeWatermark) {
|
if (removeWatermark) {
|
||||||
features.add(AzureOcrFeature.REMOVE_WATERMARKS);
|
features.add(AzureOcrFeature.REMOVE_WATERMARKS);
|
||||||
}
|
}
|
||||||
|
if (allPages) {
|
||||||
|
features.add(AzureOcrFeature.ALL_PAGES);
|
||||||
|
}
|
||||||
if (currentApplicationTypeProvider.isDocuMine()) {
|
if (currentApplicationTypeProvider.isDocuMine()) {
|
||||||
features.add(AzureOcrFeature.ROTATION_CORRECTION);
|
features.add(AzureOcrFeature.ROTATION_CORRECTION);
|
||||||
features.add(AzureOcrFeature.FONT_STYLE_DETECTION);
|
features.add(AzureOcrFeature.FONT_STYLE_DETECTION);
|
||||||
@ -820,7 +822,7 @@ public class FileStatusService {
|
|||||||
|
|
||||||
fileStatusPersistenceService.updateWorkflowStatus(fileId, newWorkflowStatus, false);
|
fileStatusPersistenceService.updateWorkflowStatus(fileId, newWorkflowStatus, false);
|
||||||
|
|
||||||
if(oldWorkflowStatus == WorkflowStatus.APPROVED && newWorkflowStatus != WorkflowStatus.APPROVED) {
|
if (oldWorkflowStatus == WorkflowStatus.APPROVED && newWorkflowStatus != WorkflowStatus.APPROVED) {
|
||||||
fileStatusPersistenceService.clearLastDownload(fileId);
|
fileStatusPersistenceService.clearLastDownload(fileId);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -977,7 +979,7 @@ public class FileStatusService {
|
|||||||
|
|
||||||
if (runOcr) {
|
if (runOcr) {
|
||||||
fileStatusPersistenceService.resetOcrStartAndEndDate(fileId);
|
fileStatusPersistenceService.resetOcrStartAndEndDate(fileId);
|
||||||
setStatusOcrQueued(dossierId, fileId);
|
setStatusOcrQueued(dossierId, fileId, false);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1064,6 +1066,7 @@ public class FileStatusService {
|
|||||||
addToAnalysisQueue(dossierId, fileId, priority, Sets.newHashSet(), AnalysisType.DEFAULT);
|
addToAnalysisQueue(dossierId, fileId, priority, Sets.newHashSet(), AnalysisType.DEFAULT);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public void setStatusForceAnalyse(String dossierId, String fileId, boolean priority) {
|
public void setStatusForceAnalyse(String dossierId, String fileId, boolean priority) {
|
||||||
|
|
||||||
|
|||||||
@ -10,7 +10,6 @@ import com.iqser.red.service.persistence.management.v1.processor.exception.Confl
|
|||||||
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
|
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.DossierPersistenceService;
|
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.DossierPersistenceService;
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.ReanalysisSettings;
|
import com.iqser.red.service.persistence.service.v1.api.shared.model.ReanalysisSettings;
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.imported.ImportedRedactions;
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.DeleteImportedRedactionsRequest;
|
import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.DeleteImportedRedactionsRequest;
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
|
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
|
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
|
||||||
@ -178,11 +177,11 @@ public class ReanalysisService {
|
|||||||
relevantFiles.stream()
|
relevantFiles.stream()
|
||||||
.filter(fileStatus -> fileStatus.getOcrStartTime() == null)
|
.filter(fileStatus -> fileStatus.getOcrStartTime() == null)
|
||||||
.filter(fileStatus -> fileStatus.getProcessingStatus().equals(ProcessingStatus.PROCESSED))
|
.filter(fileStatus -> fileStatus.getProcessingStatus().equals(ProcessingStatus.PROCESSED))
|
||||||
.forEach(fileStatus -> fileStatusService.setStatusOcrQueued(dossierId, fileStatus.getId()));
|
.forEach(fileStatus -> fileStatusService.setStatusOcrQueued(dossierId, fileStatus.getId(), false));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
public void ocrFile(String dossierId, String fileId, boolean force) {
|
public void ocrFile(String dossierId, String fileId, boolean force, boolean allPages) {
|
||||||
|
|
||||||
dossierPersistenceService.getAndValidateDossier(dossierId);
|
dossierPersistenceService.getAndValidateDossier(dossierId);
|
||||||
FileModel dossierFile = fileStatusService.getStatus(fileId);
|
FileModel dossierFile = fileStatusService.getStatus(fileId);
|
||||||
@ -202,18 +201,18 @@ public class ReanalysisService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (force) {
|
if (force) {
|
||||||
fileStatusService.setStatusOcrQueued(dossierId, fileId);
|
fileStatusService.setStatusOcrQueued(dossierId, fileId, allPages);
|
||||||
} else {
|
} else {
|
||||||
if (dossierFile.getOcrStartTime() != null) {
|
if (dossierFile.getOcrStartTime() != null) {
|
||||||
throw new ConflictException("File already has been OCR processed");
|
throw new ConflictException("File already has been OCR processed");
|
||||||
}
|
}
|
||||||
|
|
||||||
ocrFiles(dossierId, Sets.newHashSet(fileId));
|
ocrFiles(dossierId, Sets.newHashSet(fileId), allPages);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
public void ocrFiles(String dossierId, Set<String> fileIds) {
|
public void ocrFiles(String dossierId, Set<String> fileIds, boolean allPages) {
|
||||||
|
|
||||||
var relevantFiles = getRelevantFiles(dossierId, fileIds);
|
var relevantFiles = getRelevantFiles(dossierId, fileIds);
|
||||||
|
|
||||||
@ -225,7 +224,7 @@ public class ReanalysisService {
|
|||||||
|
|
||||||
relevantFiles.stream()
|
relevantFiles.stream()
|
||||||
.filter(fileStatus -> fileStatus.getOcrStartTime() == null)
|
.filter(fileStatus -> fileStatus.getOcrStartTime() == null)
|
||||||
.forEach(fileStatus -> fileStatusService.setStatusOcrQueued(dossierId, fileStatus.getId()));
|
.forEach(fileStatus -> fileStatusService.setStatusOcrQueued(dossierId, fileStatus.getId(), allPages));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -14,7 +14,6 @@ import org.springframework.stereotype.Service;
|
|||||||
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration;
|
import com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
|
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.SaasMigrationStatusPersistenceService;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.settings.FileManagementServiceSettings;
|
import com.iqser.red.service.persistence.management.v1.processor.settings.FileManagementServiceSettings;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.utils.TenantUtils;
|
import com.iqser.red.service.persistence.management.v1.processor.utils.TenantUtils;
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
|
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
|
||||||
@ -38,7 +37,6 @@ public class AutomaticAnalysisJob implements Job {
|
|||||||
private final FileStatusService fileStatusService;
|
private final FileStatusService fileStatusService;
|
||||||
private final TenantProvider tenantProvider;
|
private final TenantProvider tenantProvider;
|
||||||
private final ObservationRegistry observationRegistry;
|
private final ObservationRegistry observationRegistry;
|
||||||
private final SaasMigrationStatusPersistenceService saasMigrationStatusPersistenceService;
|
|
||||||
|
|
||||||
@Setter
|
@Setter
|
||||||
private boolean schedulingStopped;
|
private boolean schedulingStopped;
|
||||||
@ -69,11 +67,6 @@ public class AutomaticAnalysisJob implements Job {
|
|||||||
|
|
||||||
TenantContext.setTenantId(tenant.getTenantId());
|
TenantContext.setTenantId(tenant.getTenantId());
|
||||||
|
|
||||||
if (!saasMigrationStatusPersistenceService.migrationFinishedForTenant()) {
|
|
||||||
log.info("[Tenant:{}] Skipping scheduling as there are files that require migration.", tenant.getTenantId());
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
String queueName = MessagingConfiguration.REDACTION_REQUEST_QUEUE_PREFIX + "_" + tenant.getTenantId();
|
String queueName = MessagingConfiguration.REDACTION_REQUEST_QUEUE_PREFIX + "_" + tenant.getTenantId();
|
||||||
var redactionQueueInfo = amqpAdmin.getQueueInfo(queueName);
|
var redactionQueueInfo = amqpAdmin.getQueueInfo(queueName);
|
||||||
if (redactionQueueInfo != null) {
|
if (redactionQueueInfo != null) {
|
||||||
|
|||||||
@ -1,96 +0,0 @@
|
|||||||
package com.iqser.red.service.persistence.management.v1.processor.service.persistence;
|
|
||||||
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.entity.annotations.AnnotationEntityId;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.entity.migration.SaasMigrationStatusEntity;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository.SaasMigrationStatusRepository;
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.SaasMigrationStatus;
|
|
||||||
|
|
||||||
import jakarta.transaction.Transactional;
|
|
||||||
import lombok.RequiredArgsConstructor;
|
|
||||||
|
|
||||||
import org.springframework.stereotype.Service;
|
|
||||||
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Service
|
|
||||||
@RequiredArgsConstructor
|
|
||||||
public class SaasMigrationStatusPersistenceService {
|
|
||||||
|
|
||||||
private final SaasMigrationStatusRepository saasMigrationStatusRepository;
|
|
||||||
|
|
||||||
|
|
||||||
public List<SaasMigrationStatusEntity> findAllByStatus(SaasMigrationStatus status) {
|
|
||||||
|
|
||||||
return saasMigrationStatusRepository.findAllByStatus(status);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public SaasMigrationStatusEntity findById(String fileId) {
|
|
||||||
|
|
||||||
var migrationStatusOptional = saasMigrationStatusRepository.findById(fileId);
|
|
||||||
if (migrationStatusOptional.isPresent()) {
|
|
||||||
return migrationStatusOptional.get();
|
|
||||||
}
|
|
||||||
throw new NotFoundException("No migration entry found for fileId" + fileId);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public boolean isMigrating(String fileId) {
|
|
||||||
|
|
||||||
var migrationStatusOptional = saasMigrationStatusRepository.findById(fileId);
|
|
||||||
return migrationStatusOptional.isPresent() && migrationStatusOptional.get().getStatus() != SaasMigrationStatus.FINISHED;
|
|
||||||
}
|
|
||||||
|
|
||||||
public boolean migrationFinishedForTenant() {
|
|
||||||
|
|
||||||
return saasMigrationStatusRepository.findAllWhereStatusNotFinishedAndNotError() == 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@Transactional
|
|
||||||
public void createMigrationRequiredStatus(String dossierId, String fileId) {
|
|
||||||
|
|
||||||
saasMigrationStatusRepository.save(SaasMigrationStatusEntity.builder().fileId(fileId).dossierId(dossierId).status(SaasMigrationStatus.MIGRATION_REQUIRED).build());
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@Transactional
|
|
||||||
public void updateStatus(String fileId, SaasMigrationStatus status) {
|
|
||||||
|
|
||||||
saasMigrationStatusRepository.updateStatus(fileId, status);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@Transactional
|
|
||||||
public void updateErrorStatus(String fileId, String errorCause) {
|
|
||||||
|
|
||||||
saasMigrationStatusRepository.updateErrorStatus(fileId, SaasMigrationStatus.ERROR, errorCause);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@Transactional
|
|
||||||
public void updateErrorCounter(String fileId, Integer processingErrorCounter, String errorCause) {
|
|
||||||
|
|
||||||
saasMigrationStatusRepository.updateErrorCounter(fileId, processingErrorCounter, errorCause);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public int countByStatus(SaasMigrationStatus status) {
|
|
||||||
|
|
||||||
return saasMigrationStatusRepository.countByStatus(status);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public int countAll() {
|
|
||||||
|
|
||||||
return saasMigrationStatusRepository.countAll();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public List<SaasMigrationStatusEntity> findAll() {
|
|
||||||
|
|
||||||
return saasMigrationStatusRepository.findAll();
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
@ -1,44 +0,0 @@
|
|||||||
package com.iqser.red.service.persistence.management.v1.processor.service.persistence.repository;
|
|
||||||
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.entity.migration.SaasMigrationStatusEntity;
|
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.SaasMigrationStatus;
|
|
||||||
|
|
||||||
import org.springframework.data.jpa.repository.JpaRepository;
|
|
||||||
import org.springframework.data.jpa.repository.Modifying;
|
|
||||||
import org.springframework.data.jpa.repository.Query;
|
|
||||||
import org.springframework.data.repository.query.Param;
|
|
||||||
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
public interface SaasMigrationStatusRepository extends JpaRepository<SaasMigrationStatusEntity, String> {
|
|
||||||
|
|
||||||
List<SaasMigrationStatusEntity> findAllByStatus(SaasMigrationStatus status);
|
|
||||||
|
|
||||||
|
|
||||||
@Modifying
|
|
||||||
@Query("update SaasMigrationStatusEntity e set e.status = :status where e.fileId = :fileId")
|
|
||||||
void updateStatus(@Param("fileId") String fileId, @Param("status") SaasMigrationStatus status);
|
|
||||||
|
|
||||||
|
|
||||||
@Modifying
|
|
||||||
@Query("update SaasMigrationStatusEntity e set e.status = :status, e.errorCause = :errorCause where e.fileId = :fileId")
|
|
||||||
void updateErrorStatus(@Param("fileId") String fileId, @Param("status") SaasMigrationStatus status, @Param("errorCause") String errorCause);
|
|
||||||
|
|
||||||
|
|
||||||
@Modifying
|
|
||||||
@Query("update SaasMigrationStatusEntity e set e.processingErrorCounter = :processingErrorCounter, e.errorCause = :errorCause where e.fileId = :fileId")
|
|
||||||
void updateErrorCounter(@Param("fileId") String fileId, @Param("processingErrorCounter") Integer processingErrorCounter, @Param("errorCause") String errorCause);
|
|
||||||
|
|
||||||
|
|
||||||
@Query("select count(*) from SaasMigrationStatusEntity e where e.status = :status")
|
|
||||||
int countByStatus(@Param("status") SaasMigrationStatus status);
|
|
||||||
|
|
||||||
|
|
||||||
@Query("select count(*) from SaasMigrationStatusEntity")
|
|
||||||
int countAll();
|
|
||||||
|
|
||||||
|
|
||||||
@Query("select count(*) from SaasMigrationStatusEntity e where e.status != 'FINISHED' and e.status != 'ERROR'")
|
|
||||||
int findAllWhereStatusNotFinishedAndNotError();
|
|
||||||
|
|
||||||
}
|
|
||||||
@ -11,14 +11,12 @@ import org.springframework.stereotype.Service;
|
|||||||
|
|
||||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration;
|
import com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.migration.SaasMigrationService;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.model.websocket.AnalyseStatus;
|
import com.iqser.red.service.persistence.management.v1.processor.model.websocket.AnalyseStatus;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusProcessingUpdateService;
|
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusProcessingUpdateService;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
|
import com.iqser.red.service.persistence.management.v1.processor.service.FileStatusService;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.ImageSimilarityService;
|
import com.iqser.red.service.persistence.management.v1.processor.service.ImageSimilarityService;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.websocket.WebsocketService;
|
import com.iqser.red.service.persistence.management.v1.processor.service.websocket.WebsocketService;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.layoutparsing.QueueMessageIdentifierService;
|
import com.iqser.red.service.persistence.management.v1.processor.service.layoutparsing.QueueMessageIdentifierService;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.SaasMigrationStatusPersistenceService;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.utils.StorageIdUtils;
|
import com.iqser.red.service.persistence.management.v1.processor.utils.StorageIdUtils;
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileErrorInfo;
|
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileErrorInfo;
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
|
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
|
||||||
@ -40,8 +38,6 @@ public class LayoutParsingFinishedMessageReceiver {
|
|||||||
private final FileStatusService fileStatusService;
|
private final FileStatusService fileStatusService;
|
||||||
private final FileStatusProcessingUpdateService fileStatusProcessingUpdateService;
|
private final FileStatusProcessingUpdateService fileStatusProcessingUpdateService;
|
||||||
private final ObjectMapper objectMapper;
|
private final ObjectMapper objectMapper;
|
||||||
private final SaasMigrationStatusPersistenceService saasMigrationStatusPersistenceService;
|
|
||||||
private final SaasMigrationService saasMigrationService;
|
|
||||||
private final ImageSimilarityService imageSimilarityService;
|
private final ImageSimilarityService imageSimilarityService;
|
||||||
private final WebsocketService websocketService;
|
private final WebsocketService websocketService;
|
||||||
|
|
||||||
@ -53,11 +49,7 @@ public class LayoutParsingFinishedMessageReceiver {
|
|||||||
var dossierId = QueueMessageIdentifierService.parseDossierId(response.identifier());
|
var dossierId = QueueMessageIdentifierService.parseDossierId(response.identifier());
|
||||||
var fileId = QueueMessageIdentifierService.parseFileId(response.identifier());
|
var fileId = QueueMessageIdentifierService.parseFileId(response.identifier());
|
||||||
log.info("Layout parsing has finished for {}/{} in {}", dossierId, fileId, LayoutParsingQueueNames.LAYOUT_PARSING_RESPONSE_EXCHANGE);
|
log.info("Layout parsing has finished for {}/{} in {}", dossierId, fileId, LayoutParsingQueueNames.LAYOUT_PARSING_RESPONSE_EXCHANGE);
|
||||||
if (saasMigrationStatusPersistenceService.isMigrating(QueueMessageIdentifierService.parseFileId(response.identifier()))) {
|
|
||||||
saasMigrationService.handleLayoutParsingFinished(QueueMessageIdentifierService.parseDossierId(response.identifier()),
|
|
||||||
QueueMessageIdentifierService.parseFileId(response.identifier()));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
fileStatusService.setStatusAnalyse(QueueMessageIdentifierService.parseDossierId(response.identifier()),
|
fileStatusService.setStatusAnalyse(QueueMessageIdentifierService.parseDossierId(response.identifier()),
|
||||||
QueueMessageIdentifierService.parseFileId(response.identifier()),
|
QueueMessageIdentifierService.parseFileId(response.identifier()),
|
||||||
@ -88,13 +80,6 @@ public class LayoutParsingFinishedMessageReceiver {
|
|||||||
if (errorCause == null) {
|
if (errorCause == null) {
|
||||||
errorCause = "Error occured during layout parsing!";
|
errorCause = "Error occured during layout parsing!";
|
||||||
}
|
}
|
||||||
if (saasMigrationStatusPersistenceService.isMigrating(QueueMessageIdentifierService.parseFileId(analyzeRequest.identifier()))) {
|
|
||||||
saasMigrationService.handleError(QueueMessageIdentifierService.parseDossierId(analyzeRequest.identifier()),
|
|
||||||
QueueMessageIdentifierService.parseFileId(analyzeRequest.identifier()),
|
|
||||||
errorCause,
|
|
||||||
LayoutParsingQueueNames.LAYOUT_PARSING_REQUEST_EXCHANGE);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
OffsetDateTime timestamp = failedMessage.getMessageProperties().getHeader(MessagingConfiguration.X_ERROR_INFO_TIMESTAMP_HEADER);
|
OffsetDateTime timestamp = failedMessage.getMessageProperties().getHeader(MessagingConfiguration.X_ERROR_INFO_TIMESTAMP_HEADER);
|
||||||
timestamp = timestamp != null ? timestamp : OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS);
|
timestamp = timestamp != null ? timestamp : OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS);
|
||||||
|
|||||||
@ -1,53 +0,0 @@
|
|||||||
package com.iqser.red.service.persistence.management.v1.processor.service.queue;
|
|
||||||
|
|
||||||
import static com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration.MIGRATION_DLQ;
|
|
||||||
import static com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration.MIGRATION_REQUEST_QUEUE;
|
|
||||||
import static com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration.MIGRATION_RESPONSE_QUEUE;
|
|
||||||
import static com.iqser.red.service.persistence.management.v1.processor.configuration.MessagingConfiguration.X_ERROR_INFO_HEADER;
|
|
||||||
|
|
||||||
import org.springframework.amqp.core.Message;
|
|
||||||
import org.springframework.amqp.rabbit.annotation.RabbitListener;
|
|
||||||
import org.springframework.stereotype.Service;
|
|
||||||
|
|
||||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.migration.SaasMigrationService;
|
|
||||||
import com.iqser.red.service.redaction.v1.model.MigrationRequest;
|
|
||||||
import com.iqser.red.service.redaction.v1.model.MigrationResponse;
|
|
||||||
|
|
||||||
import lombok.RequiredArgsConstructor;
|
|
||||||
import lombok.SneakyThrows;
|
|
||||||
import lombok.extern.slf4j.Slf4j;
|
|
||||||
|
|
||||||
@Slf4j
|
|
||||||
@Service
|
|
||||||
@RequiredArgsConstructor
|
|
||||||
public class RedactionServiceSaasMigrationMessageReceiver {
|
|
||||||
|
|
||||||
private final SaasMigrationService saasMigrationService;
|
|
||||||
private final ObjectMapper objectMapper;
|
|
||||||
|
|
||||||
|
|
||||||
@SneakyThrows
|
|
||||||
@RabbitListener(queues = MIGRATION_RESPONSE_QUEUE)
|
|
||||||
public void receive(MigrationResponse response) {
|
|
||||||
|
|
||||||
saasMigrationService.handleEntityLogMigrationFinished(response.getDossierId(), response.getFileId());
|
|
||||||
|
|
||||||
log.info("Received message {} in {}", response, MIGRATION_RESPONSE_QUEUE);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@SneakyThrows
|
|
||||||
@RabbitListener(queues = MIGRATION_DLQ)
|
|
||||||
public void handleDLQMessage(Message failedMessage) {
|
|
||||||
|
|
||||||
var migrationRequest = objectMapper.readValue(failedMessage.getBody(), MigrationRequest.class);
|
|
||||||
String errorCause = failedMessage.getMessageProperties().getHeader(X_ERROR_INFO_HEADER);
|
|
||||||
if (errorCause == null) {
|
|
||||||
errorCause = "Error occured during entityLog migration!";
|
|
||||||
}
|
|
||||||
saasMigrationService.handleError(migrationRequest.getDossierId(), migrationRequest.getFileId(), errorCause, MIGRATION_REQUEST_QUEUE);
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
@ -242,9 +242,11 @@ databaseChangeLog:
|
|||||||
- include:
|
- include:
|
||||||
file: db/changelog/tenant/149-remove-based-on-dict-annotation-id-columns.yaml
|
file: db/changelog/tenant/149-remove-based-on-dict-annotation-id-columns.yaml
|
||||||
- include:
|
- include:
|
||||||
file: db/changelog/tenant/150-add-indexes-across-tables-for-performance.yaml
|
file: db/changelog/tenant/149-add-indexes-across-tables-for-performance.yaml
|
||||||
- include:
|
- include:
|
||||||
file: db/changelog/tenant/151-add-component-mapping-indexes.yaml
|
file: db/changelog/tenant/150-add-component-mapping-indexes.yaml
|
||||||
|
- include:
|
||||||
|
file: db/changelog/tenant/151-drop-saas-migration-table.changelog.yaml
|
||||||
- include:
|
- include:
|
||||||
file: db/changelog/tenant/152-add-ai-fields-to-entity.yaml
|
file: db/changelog/tenant/152-add-ai-fields-to-entity.yaml
|
||||||
- include:
|
- include:
|
||||||
@ -257,3 +259,5 @@ databaseChangeLog:
|
|||||||
file: db/changelog/tenant/157-add-included-to-csv-export-field.yaml
|
file: db/changelog/tenant/157-add-included-to-csv-export-field.yaml
|
||||||
- include:
|
- include:
|
||||||
file: db/changelog/tenant/158-add-app-version-history-table-and-layout-parser-version-field-to-file.yaml
|
file: db/changelog/tenant/158-add-app-version-history-table-and-layout-parser-version-field-to-file.yaml
|
||||||
|
- include:
|
||||||
|
file: db/changelog/tenant/159-cleanup-truncated-indices.yaml
|
||||||
|
|||||||
@ -0,0 +1,12 @@
|
|||||||
|
databaseChangeLog:
|
||||||
|
- changeSet:
|
||||||
|
id: drop-saas_migration_status-if-exists
|
||||||
|
author: dom
|
||||||
|
comment: drop saas_migration_status if exists
|
||||||
|
preConditions:
|
||||||
|
- onFail: MARK_RAN
|
||||||
|
- tableExists:
|
||||||
|
tableName: saas_migration_status
|
||||||
|
changes:
|
||||||
|
- dropTable:
|
||||||
|
tableName: saas_migration_status
|
||||||
@ -0,0 +1,68 @@
|
|||||||
|
databaseChangeLog:
|
||||||
|
- changeSet:
|
||||||
|
id: drop_truncated_idx_dossier_dossier_template_id_soft_deleted_time_hard_deleted_time
|
||||||
|
author: maverick
|
||||||
|
preConditions:
|
||||||
|
- onFail: MARK_RAN
|
||||||
|
- indexExists:
|
||||||
|
indexName: idx_dossier_dossier_template_id_soft_deleted_time_hard_deleted_
|
||||||
|
tableName: dossier
|
||||||
|
changes:
|
||||||
|
- dropIndex:
|
||||||
|
indexName: idx_dossier_dossier_template_id_soft_deleted_time_hard_deleted_
|
||||||
|
tableName: dossier
|
||||||
|
|
||||||
|
- changeSet:
|
||||||
|
id: create_idx_dossiertemplid_softdeltime_harddeltime_archivtime
|
||||||
|
author: maverick
|
||||||
|
preConditions:
|
||||||
|
- onFail: MARK_RAN
|
||||||
|
- not:
|
||||||
|
indexExists:
|
||||||
|
indexName: idx_dossiertemplid_softdeltime_harddeltime_archivtime
|
||||||
|
tableName: dossier
|
||||||
|
changes:
|
||||||
|
- createIndex:
|
||||||
|
tableName: dossier
|
||||||
|
indexName: idx_dossiertemplid_softdeltime_harddeltime_archivtime
|
||||||
|
columns:
|
||||||
|
- column:
|
||||||
|
name: dossier_template_id
|
||||||
|
- column:
|
||||||
|
name: soft_deleted_time
|
||||||
|
- column:
|
||||||
|
name: hard_deleted_time
|
||||||
|
- column:
|
||||||
|
name: archived_time
|
||||||
|
|
||||||
|
- changeSet:
|
||||||
|
id: drop_truncated_idx_notification_preference_user_id_in_app_notifications_enabled
|
||||||
|
author: maverick
|
||||||
|
preConditions:
|
||||||
|
- onFail: MARK_RAN
|
||||||
|
- indexExists:
|
||||||
|
indexName: idx_notification_preference_user_id_in_app_notifications_enable
|
||||||
|
tableName: notification_preference
|
||||||
|
changes:
|
||||||
|
- dropIndex:
|
||||||
|
indexName: idx_notification_preference_user_id_in_app_notifications_enable
|
||||||
|
tableName: notification_preference
|
||||||
|
|
||||||
|
- changeSet:
|
||||||
|
id: create_idx_notify_pref_userid_app_notif_enabled
|
||||||
|
author: maverick
|
||||||
|
preConditions:
|
||||||
|
- onFail: MARK_RAN
|
||||||
|
- not:
|
||||||
|
indexExists:
|
||||||
|
indexName: idx_notify_pref_userid_app_notif_enabled
|
||||||
|
tableName: notification_preference
|
||||||
|
changes:
|
||||||
|
- createIndex:
|
||||||
|
tableName: notification_preference
|
||||||
|
indexName: idx_notify_pref_userid_app_notif_enabled
|
||||||
|
columns:
|
||||||
|
- column:
|
||||||
|
name: user_id
|
||||||
|
- column:
|
||||||
|
name: in_app_notifications_enabled
|
||||||
@ -142,7 +142,8 @@ fforesight:
|
|||||||
ignored-endpoints: [ '/redaction-gateway-v1', '/actuator/health/**',"/redaction-gateway-v1/websocket","/redaction-gateway-v1/websocket/**", '/redaction-gateway-v1/async/download/with-ott/**',
|
ignored-endpoints: [ '/redaction-gateway-v1', '/actuator/health/**',"/redaction-gateway-v1/websocket","/redaction-gateway-v1/websocket/**", '/redaction-gateway-v1/async/download/with-ott/**',
|
||||||
'/internal-api/**', '/redaction-gateway-v1/docs/swagger-ui',
|
'/internal-api/**', '/redaction-gateway-v1/docs/swagger-ui',
|
||||||
'/redaction-gateway-v1/docs/**','/redaction-gateway-v1/docs',
|
'/redaction-gateway-v1/docs/**','/redaction-gateway-v1/docs',
|
||||||
'/api', '/api/','/api/docs/**','/api/docs','/api/docs/swagger-ui' ]
|
'/api', '/api/','/api/docs/**','/api/docs','/api/docs/swagger-ui',
|
||||||
|
'/actuator/prometheus']
|
||||||
enabled: true
|
enabled: true
|
||||||
springdoc:
|
springdoc:
|
||||||
base-path: '/api'
|
base-path: '/api'
|
||||||
|
|||||||
@ -1,13 +1,18 @@
|
|||||||
package com.iqser.red.service.peristence.v1.server.integration.tests;
|
package com.iqser.red.service.peristence.v1.server.integration.tests;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertThrows;
|
||||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||||
|
import static org.mockito.ArgumentMatchers.any;
|
||||||
import static org.mockito.ArgumentMatchers.anyBoolean;
|
import static org.mockito.ArgumentMatchers.anyBoolean;
|
||||||
import static org.mockito.ArgumentMatchers.anyString;
|
import static org.mockito.ArgumentMatchers.anyString;
|
||||||
|
import static org.mockito.Mockito.doAnswer;
|
||||||
import static org.mockito.Mockito.when;
|
import static org.mockito.Mockito.when;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Set;
|
||||||
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
import org.apache.commons.lang3.StringUtils;
|
||||||
import org.junit.jupiter.api.Test;
|
import org.junit.jupiter.api.Test;
|
||||||
@ -18,8 +23,8 @@ import com.iqser.red.service.peristence.v1.server.integration.client.FileClient;
|
|||||||
import com.iqser.red.service.peristence.v1.server.integration.service.DossierTemplateTesterAndProvider;
|
import com.iqser.red.service.peristence.v1.server.integration.service.DossierTemplateTesterAndProvider;
|
||||||
import com.iqser.red.service.peristence.v1.server.integration.service.DossierTesterAndProvider;
|
import com.iqser.red.service.peristence.v1.server.integration.service.DossierTesterAndProvider;
|
||||||
import com.iqser.red.service.peristence.v1.server.integration.service.FileTesterAndProvider;
|
import com.iqser.red.service.peristence.v1.server.integration.service.FileTesterAndProvider;
|
||||||
import com.iqser.red.service.peristence.v1.server.integration.service.TypeProvider;
|
|
||||||
import com.iqser.red.service.peristence.v1.server.integration.utils.AbstractPersistenceServerServiceTest;
|
import com.iqser.red.service.peristence.v1.server.integration.utils.AbstractPersistenceServerServiceTest;
|
||||||
|
import com.iqser.red.service.persistence.management.v1.processor.acl.custom.dossier.DossierACLService;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.entity.configuration.LegalBasisEntity;
|
import com.iqser.red.service.persistence.management.v1.processor.entity.configuration.LegalBasisEntity;
|
||||||
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.LegalBasisMappingPersistenceService;
|
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.LegalBasisMappingPersistenceService;
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.DossierTemplateModel;
|
import com.iqser.red.service.persistence.service.v1.api.shared.model.DossierTemplateModel;
|
||||||
@ -33,6 +38,8 @@ import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemp
|
|||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.warning.ApproveResponse;
|
import com.iqser.red.service.persistence.service.v1.api.shared.model.warning.ApproveResponse;
|
||||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.warning.WarningType;
|
import com.iqser.red.service.persistence.service.v1.api.shared.model.warning.WarningType;
|
||||||
|
|
||||||
|
import feign.FeignException;
|
||||||
|
|
||||||
public class ApprovalTest extends AbstractPersistenceServerServiceTest {
|
public class ApprovalTest extends AbstractPersistenceServerServiceTest {
|
||||||
|
|
||||||
@Autowired
|
@Autowired
|
||||||
@ -44,15 +51,15 @@ public class ApprovalTest extends AbstractPersistenceServerServiceTest {
|
|||||||
@Autowired
|
@Autowired
|
||||||
private DossierTesterAndProvider dossierTesterAndProvider;
|
private DossierTesterAndProvider dossierTesterAndProvider;
|
||||||
|
|
||||||
@Autowired
|
|
||||||
private TypeProvider typeProvider;
|
|
||||||
|
|
||||||
@Autowired
|
@Autowired
|
||||||
private FileClient fileClient;
|
private FileClient fileClient;
|
||||||
|
|
||||||
@SpyBean
|
@SpyBean
|
||||||
private LegalBasisMappingPersistenceService legalBasisMappingPersistenceService;
|
private LegalBasisMappingPersistenceService legalBasisMappingPersistenceService;
|
||||||
|
|
||||||
|
@SpyBean
|
||||||
|
private DossierACLService dossierACLService;
|
||||||
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testApprovalNoWarnings() {
|
public void testApprovalNoWarnings() {
|
||||||
@ -181,4 +188,54 @@ public class ApprovalTest extends AbstractPersistenceServerServiceTest {
|
|||||||
assertTrue(approveResponse.getFileWarnings().isEmpty());
|
assertTrue(approveResponse.getFileWarnings().isEmpty());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void testApprovalWhenDossierHasNoOwner() {
|
||||||
|
|
||||||
|
DossierTemplateModel dossierTemplateModel = dossierTemplateTesterAndProvider.provideTestTemplate();
|
||||||
|
|
||||||
|
Dossier dossier = dossierTesterAndProvider.provideTestDossier(dossierTemplateModel);
|
||||||
|
|
||||||
|
FileStatus file = fileTesterAndProvider.testAndProvideFile(dossier, "some-file");
|
||||||
|
fileTesterAndProvider.markFileAsProcessed(dossier.getId(), file.getFileId());
|
||||||
|
|
||||||
|
EntityLog entityLog = new EntityLog();
|
||||||
|
|
||||||
|
when(entityLogService.getEntityLog(anyString(), anyString(), anyBoolean())).thenReturn(entityLog);
|
||||||
|
|
||||||
|
List<com.iqser.red.service.persistence.management.v1.processor.service.users.model.User> allUsers = new ArrayList<>();
|
||||||
|
allUsers.add(com.iqser.red.service.persistence.management.v1.processor.service.users.model.User.builder()
|
||||||
|
.userId("manageradmin1@test.com")
|
||||||
|
.email("manageradmin1@test.com")
|
||||||
|
.isActive(true)
|
||||||
|
.roles(Set.of(getAllRoles()))
|
||||||
|
.build());
|
||||||
|
allUsers.add(com.iqser.red.service.persistence.management.v1.processor.service.users.model.User.builder()
|
||||||
|
.userId("manageradmin2@test.com")
|
||||||
|
.email("manageradmin2@test.com")
|
||||||
|
.isActive(true)
|
||||||
|
.roles(Set.of("RED_USER"))
|
||||||
|
.build());
|
||||||
|
|
||||||
|
when(usersClient.getAllUsers(false)).thenReturn(allUsers);
|
||||||
|
when(usersClient.getAllUsers(true)).thenReturn(allUsers);
|
||||||
|
|
||||||
|
doAnswer(invocation -> {
|
||||||
|
Dossier arg = invocation.getArgument(0);
|
||||||
|
if (dossier.getId().equals(arg.getId())) {
|
||||||
|
Dossier emptyDossier = new Dossier();
|
||||||
|
emptyDossier.setId(arg.getId());
|
||||||
|
return emptyDossier;
|
||||||
|
} else {
|
||||||
|
return invocation.callRealMethod();
|
||||||
|
}
|
||||||
|
}).when(dossierACLService).enhanceDossierWithACLData(any(Dossier.class));
|
||||||
|
|
||||||
|
FeignException ex = assertThrows(FeignException.Conflict.class, () -> {
|
||||||
|
fileClient.setStatusApproved(dossier.getId(), file.getFileId(), false);
|
||||||
|
});
|
||||||
|
|
||||||
|
assertTrue(ex.getMessage().contains("Dossier has no owner!"));
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
@ -56,7 +56,7 @@ public class ReanalysisTest extends AbstractPersistenceServerServiceTest {
|
|||||||
assertThat(loadedFile.getProcessingStatus()).isEqualTo(ProcessingStatus.OCR_PROCESSING_QUEUED);
|
assertThat(loadedFile.getProcessingStatus()).isEqualTo(ProcessingStatus.OCR_PROCESSING_QUEUED);
|
||||||
resetProcessingStatus(file);
|
resetProcessingStatus(file);
|
||||||
|
|
||||||
reanalysisClient.ocrFile(dossier.getId(), file.getId(), true);
|
reanalysisClient.ocrFile(dossier.getId(), file.getId(), true, false);
|
||||||
loadedFile = fileClient.getFileStatus(dossier.getId(), file.getId());
|
loadedFile = fileClient.getFileStatus(dossier.getId(), file.getId());
|
||||||
assertThat(loadedFile.getProcessingStatus()).isEqualTo(ProcessingStatus.OCR_PROCESSING_QUEUED);
|
assertThat(loadedFile.getProcessingStatus()).isEqualTo(ProcessingStatus.OCR_PROCESSING_QUEUED);
|
||||||
resetProcessingStatus(file);
|
resetProcessingStatus(file);
|
||||||
|
|||||||
@ -0,0 +1,79 @@
|
|||||||
|
package com.iqser.red.service.peristence.v1.server.integration.tests;
|
||||||
|
|
||||||
|
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||||
|
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||||
|
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||||
|
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||||
|
|
||||||
|
import org.apache.commons.io.IOUtils;
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.core.io.ClassPathResource;
|
||||||
|
import org.springframework.mock.web.MockMultipartFile;
|
||||||
|
import org.springframework.test.context.TestPropertySource;
|
||||||
|
|
||||||
|
import com.iqser.red.service.peristence.v1.server.integration.client.FileClient;
|
||||||
|
import com.iqser.red.service.peristence.v1.server.integration.client.UploadClient;
|
||||||
|
import com.iqser.red.service.peristence.v1.server.integration.service.DossierTesterAndProvider;
|
||||||
|
import com.iqser.red.service.peristence.v1.server.integration.utils.AbstractPersistenceServerServiceTest;
|
||||||
|
|
||||||
|
import feign.FeignException;
|
||||||
|
import lombok.SneakyThrows;
|
||||||
|
|
||||||
|
@TestPropertySource(properties = {
|
||||||
|
"spring.servlet.multipart.max-file-size=50MB",
|
||||||
|
"spring.servlet.multipart.max-request-size=50MB"
|
||||||
|
})
|
||||||
|
public class ZipFileUploadTest extends AbstractPersistenceServerServiceTest {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private DossierTesterAndProvider dossierTesterAndProvider;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private UploadClient uploadClient;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private FileClient fileClient;
|
||||||
|
|
||||||
|
@SneakyThrows
|
||||||
|
@Test
|
||||||
|
void testZipUploadWithEntryCountCheck() {
|
||||||
|
|
||||||
|
var dossier = dossierTesterAndProvider.provideTestDossier();
|
||||||
|
|
||||||
|
var smallZipResource = new ClassPathResource("files/zip/ArchiveWithManyFiles.zip");
|
||||||
|
var smallZip = new MockMultipartFile(
|
||||||
|
"ArchiveWithManyFiles.zip",
|
||||||
|
"ArchiveWithManyFiles.zip",
|
||||||
|
"application/zip",
|
||||||
|
IOUtils.toByteArray(smallZipResource.getInputStream())
|
||||||
|
);
|
||||||
|
|
||||||
|
var uploadResult = uploadClient.upload(smallZip, dossier.getId(), false, false);
|
||||||
|
|
||||||
|
assertNotNull(uploadResult, "Upload result for small zip should not be null.");
|
||||||
|
assertEquals(9993, uploadResult.getFileIds().size());
|
||||||
|
|
||||||
|
var largeZipResource = new ClassPathResource("files/zip/ArchiveWithTooManyFiles.zip");
|
||||||
|
var largeZip = new MockMultipartFile(
|
||||||
|
"ArchiveWithTooManyFiles.zip",
|
||||||
|
"ArchiveWithTooManyFiles.zip",
|
||||||
|
"application/zip",
|
||||||
|
IOUtils.toByteArray(largeZipResource.getInputStream())
|
||||||
|
);
|
||||||
|
|
||||||
|
FeignException ex = assertThrows(
|
||||||
|
FeignException.class,
|
||||||
|
() -> uploadClient.upload(largeZip, dossier.getId(), false, false),
|
||||||
|
"Uploading a zip with more than 10000 entries should throw a FeignException."
|
||||||
|
);
|
||||||
|
|
||||||
|
assertEquals(400, ex.status(), "Expected HTTP 400 (Bad Request) for a ZIP bomb scenario");
|
||||||
|
assertTrue(ex.getMessage().contains("ZIP-Bomb detected"),
|
||||||
|
"Exception message should contain 'ZIP-Bomb detected' or similar.");
|
||||||
|
|
||||||
|
var filesInDossier = fileClient.getDossierStatus(dossier.getId());
|
||||||
|
assertEquals(9993, filesInDossier.size());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
@ -261,7 +261,7 @@ public abstract class AbstractPersistenceServerServiceTest {
|
|||||||
@MockBean
|
@MockBean
|
||||||
protected TenantsClient tenantsClient;
|
protected TenantsClient tenantsClient;
|
||||||
@MockBean
|
@MockBean
|
||||||
private UsersClient usersClient;
|
protected UsersClient usersClient;
|
||||||
@Autowired
|
@Autowired
|
||||||
protected EncryptionDecryptionService encryptionDecryptionService;
|
protected EncryptionDecryptionService encryptionDecryptionService;
|
||||||
@Autowired
|
@Autowired
|
||||||
@ -286,7 +286,7 @@ public abstract class AbstractPersistenceServerServiceTest {
|
|||||||
private CurrentApplicationTypeProvider currentApplicationTypeProvider;
|
private CurrentApplicationTypeProvider currentApplicationTypeProvider;
|
||||||
|
|
||||||
|
|
||||||
private static String[] getAllRoles() {
|
protected static String[] getAllRoles() {
|
||||||
|
|
||||||
var allRoles = ApplicationRoles.ROLE_DATA.entrySet()
|
var allRoles = ApplicationRoles.ROLE_DATA.entrySet()
|
||||||
.stream()
|
.stream()
|
||||||
|
|||||||
Binary file not shown.
Binary file not shown.
Loading…
x
Reference in New Issue
Block a user