Compare commits

...

10 Commits

Author SHA1 Message Date
Maverick Studer
a441909408 Merge branch 'RE-10691-fp' into 'master'
RED-10691: 500 when approving file in a dossier where dossier owner lost manager role

Closes RE-10691

See merge request redactmanager/persistence-service!930
2025-02-04 09:37:04 +01:00
maverickstuder
a7bdbb5495 RED-10691: 500 when approving file in a dossier where dossier owner lost manager role 2025-01-31 13:35:31 +01:00
Dominique Eifländer
3c0b9d36d4 Merge branch 'OPS-284' into 'master'
OPS-284: add prometheus endpoint

Closes OPS-284

See merge request redactmanager/persistence-service!928
2025-01-24 11:01:07 +01:00
Christoph Schabert
9ff3035730 OPS-284: add prometheus endpoint 2025-01-23 13:34:26 +01:00
Maverick Studer
c933793721 Merge branch 'RED-10731' into 'master'
RED-10731: Zip Bombing (~10.000 files in one zip within file limit) should be detected and aborted

Closes RED-10731

See merge request redactmanager/persistence-service!926
2025-01-21 12:21:11 +01:00
maverickstuder
4746d50627 RED-10731: Zip Bombing (~10.000 files in one zip within file limit) should be detected and aborted 2025-01-21 11:53:57 +01:00
Dominique Eifländer
fdfdba550e Merge branch 'RED-10715-masterA' into 'master'
RED-10715: Remove Saas migration fix

Closes RED-10715

See merge request redactmanager/persistence-service!925
2025-01-17 10:01:29 +01:00
Dominique Eifländer
09d53b7743 RED-10715: Remove Saas migration fix 2025-01-17 09:35:07 +01:00
Maverick Studer
d691164af6 Merge branch 'RED-10728' into 'master'
RED-10728: Endpoint to execute full OCR on specific file

Closes RED-10728

See merge request redactmanager/persistence-service!922
2025-01-16 15:54:23 +01:00
maverickstuder
43f9db59d4 RED-10728: Endpoint to execute full OCR on specific file 2025-01-16 14:38:17 +01:00
16 changed files with 322 additions and 147 deletions

View File

@@ -118,11 +118,12 @@ public class ReanalysisController implements ReanalysisResource {
     @PreAuthorize("hasAuthority('" + REANALYZE_FILE + "')")
     public void ocrFile(@PathVariable(DOSSIER_ID) String dossierId,
                         @PathVariable(FILE_ID) String fileId,
-                        @RequestParam(value = FORCE_PARAM, required = false, defaultValue = FALSE) boolean force) {
+                        @RequestParam(value = FORCE_PARAM, required = false, defaultValue = FALSE) boolean force,
+                        @RequestParam(value = ALL_PAGES, required = false, defaultValue = FALSE) boolean allPages) {
         accessControlService.checkDossierExistenceAndAccessPermissionsToDossier(dossierId);
         validateOCR(dossierId, fileId);
-        reanalysisService.ocrFile(dossierId, fileId, force);
+        reanalysisService.ocrFile(dossierId, fileId, force, allPages);
         auditPersistenceService.audit(AuditRequest.builder()
             .userId(KeycloakSecurity.getUserId())
             .objectId(dossierId)
@@ -140,7 +141,7 @@ public class ReanalysisController implements ReanalysisResource {
         accessControlService.checkDossierExistenceAndAccessPermissionsToDossier(dossierId);
         fileIds.forEach(fileId -> validateOCR(dossierId, fileId));
-        reanalysisService.ocrFiles(dossierId, fileIds);
+        reanalysisService.ocrFiles(dossierId, fileIds, false);
         auditPersistenceService.audit(AuditRequest.builder()
             .userId(KeycloakSecurity.getUserId())
             .objectId(dossierId)
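For callers, the only visible change is the extra allPages flag on the single-file OCR endpoint. A minimal sketch of a client call using the Feign client (mirroring the updated ReanalysisTest further down; variable names are illustrative):

    // Queue OCR for one file; 'force' re-runs OCR even if it already completed,
    // 'allPages' maps to the AzureOcrFeature.ALL_PAGES flag set in FileStatusService.addToOcrQueue.
    reanalysisClient.ocrFile(dossier.getId(), file.getId(), /* force */ true, /* allPages */ false);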

View File

@@ -28,6 +28,7 @@ import org.springframework.web.bind.annotation.RestController;
 import com.iqser.red.service.persistence.management.v1.processor.acl.custom.dossier.DossierACLService;
 import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
+import com.iqser.red.service.persistence.management.v1.processor.exception.ConflictException;
 import com.iqser.red.service.persistence.management.v1.processor.exception.NotAllowedException;
 import com.iqser.red.service.persistence.management.v1.processor.roles.ApplicationRoles;
 import com.iqser.red.service.persistence.management.v1.processor.service.AccessControlService;
@@ -101,8 +102,13 @@ public class StatusController implements StatusResource {
     var accessibleDossierIds = filterByPermissionsService.onlyViewableDossierIds(new ArrayList<>(filesByDossier.getValue().keySet()));
     var response = new HashMap<String, List<FileStatus>>();
     for (var dossierId : accessibleDossierIds) {
-        var allFoundFiles = fileStatusManagementService.findAllDossierIdAndIds(dossierId, filesByDossier.getValue().get(dossierId));
-        response.put(dossierId, allFoundFiles.stream().map(FileStatusMapper::toFileStatus).collect(Collectors.toList()));
+        var allFoundFiles = fileStatusManagementService.findAllDossierIdAndIds(dossierId,
+                filesByDossier.getValue()
+                        .get(dossierId));
+        response.put(dossierId,
+                allFoundFiles.stream()
+                        .map(FileStatusMapper::toFileStatus)
+                        .collect(Collectors.toList()));
     }
     return new JSONPrimitive<>(response);
@@ -351,6 +357,10 @@ public class StatusController implements StatusResource {
         .build());
     var dossier = dossierACLService.enhanceDossierWithACLData(dossierManagementService.getDossierById(dossierId, false, false));
+    if (dossier.getOwnerId() == null) {
+        throw new ConflictException("Dossier has no owner!");
+    }
     if (!dossier.getOwnerId().equals(KeycloakSecurity.getUserId())) {
         var fileStatus = fileStatusManagementService.getFileStatus(fileId);

View File

@@ -23,12 +23,11 @@ import org.springframework.web.bind.annotation.RequestPart;
 import org.springframework.web.bind.annotation.RestController;
 import org.springframework.web.multipart.MultipartFile;
-import com.iqser.red.service.persistence.management.v1.processor.service.FileFormatValidationService;
-import com.iqser.red.service.persistence.management.v1.processor.exception.NotAllowedException;
-import com.knecon.fforesight.keycloakcommons.security.KeycloakSecurity;
 import com.iqser.red.service.pdftron.redaction.v1.api.model.ByteContentDocument;
 import com.iqser.red.service.persistence.management.v1.processor.exception.BadRequestException;
+import com.iqser.red.service.persistence.management.v1.processor.exception.NotAllowedException;
 import com.iqser.red.service.persistence.management.v1.processor.service.AccessControlService;
+import com.iqser.red.service.persistence.management.v1.processor.service.FileFormatValidationService;
 import com.iqser.red.service.persistence.management.v1.processor.service.ReanalysisService;
 import com.iqser.red.service.persistence.management.v1.processor.service.UploadService;
 import com.iqser.red.service.persistence.management.v1.processor.service.persistence.AuditPersistenceService;
@@ -37,6 +36,7 @@ import com.iqser.red.service.persistence.service.v1.api.external.resource.Upload
 import com.iqser.red.service.persistence.service.v1.api.shared.model.AuditCategory;
 import com.iqser.red.service.persistence.service.v1.api.shared.model.FileUploadResult;
 import com.iqser.red.service.persistence.service.v1.api.shared.model.audit.AuditRequest;
+import com.knecon.fforesight.keycloakcommons.security.KeycloakSecurity;
 import com.knecon.fforesight.tenantcommons.TenantContext;
 import feign.FeignException;
@@ -53,9 +53,9 @@ import lombok.extern.slf4j.Slf4j;
 @SuppressWarnings("PMD")
 public class UploadController implements UploadResource {
-    private static final int THRESHOLD_ENTRIES = 10000;
-    private static final int THRESHOLD_SIZE = 1000000000; // 1 GB
-    private static final double THRESHOLD_RATIO = 10;
+    private static final int THRESHOLD_ENTRIES = 10000; // Maximum number of files allowed
+    private static final int THRESHOLD_SIZE = 1000000000; // 1 GB total unzipped data
+    private static final double THRESHOLD_RATIO = 10; // Max allowed compression ratio
     private final UploadService uploadService;
     private final ReanalysisService reanalysisService;
@@ -72,31 +72,25 @@ public class UploadController implements UploadResource {
     @Parameter(name = DISABLE_AUTOMATIC_ANALYSIS_PARAM, description = "Disables automatic redaction for the uploaded file, imports only imported redactions") @RequestParam(value = DISABLE_AUTOMATIC_ANALYSIS_PARAM, required = false, defaultValue = "false") boolean disableAutomaticAnalysis) {
     accessControlService.checkAccessPermissionsToDossier(dossierId);
-    if (file.getOriginalFilename() == null) {
+    String originalFilename = file.getOriginalFilename();
+    if (originalFilename == null) {
         throw new BadRequestException("Could not upload file, no filename provided.");
     }
-    var extension = getExtension(file.getOriginalFilename());
+    String extension = getExtension(originalFilename);
     try {
-        switch (extension) {
-            case "zip":
-                return handleZip(dossierId, file.getBytes(), keepManualRedactions, disableAutomaticAnalysis);
-            case "csv":
-                return uploadService.importCsv(dossierId, file.getBytes());
-            default:
-                if (!fileFormatValidationService.getAllFileFormats().contains(extension)) {
-                    throw new BadRequestException("Invalid file uploaded");
-                }
-                if (!fileFormatValidationService.getValidFileFormatsForTenant(TenantContext.getTenantId()).contains(extension)) {
-                    throw new NotAllowedException("Insufficient permissions");
-                }
-                return uploadService.processSingleFile(dossierId, file.getOriginalFilename(), file.getBytes(), keepManualRedactions, disableAutomaticAnalysis);
-        }
+        return switch (extension) {
+            case "zip" -> handleZip(dossierId, file.getBytes(), keepManualRedactions, disableAutomaticAnalysis);
+            case "csv" -> uploadService.importCsv(dossierId, file.getBytes());
+            default -> {
+                validateExtensionOrThrow(extension);
+                yield uploadService.processSingleFile(dossierId, originalFilename, file.getBytes(), keepManualRedactions, disableAutomaticAnalysis);
+            }
+        };
     } catch (IOException e) {
-        throw new BadRequestException(e.getMessage(), e);
+        throw new BadRequestException("Failed to process file: " + e.getMessage(), e);
     }
 }
@@ -111,7 +105,6 @@
     accessControlService.verifyUserIsReviewerOrApprover(dossierId, fileId);
     try {
         reanalysisService.importRedactions(ByteContentDocument.builder().dossierId(dossierId).fileId(fileId).document(file.getBytes()).pages(pageInclusionRequest).build());
         auditPersistenceService.audit(AuditRequest.builder()
@@ -122,84 +115,116 @@
             .details(Map.of("dossierId", dossierId))
             .build());
     } catch (IOException e) {
-        throw new BadRequestException(e.getMessage(), e);
+        throw new BadRequestException("Failed to import redactions: " + e.getMessage(), e);
     } catch (FeignException e) {
         throw processFeignException(e);
     }
 }
-private String getExtension(String fileName) {
-    return fileName.substring(fileName.lastIndexOf(".") + 1).toLowerCase(Locale.ROOT);
+private void validateExtensionOrThrow(String extension) {
+    if (!fileFormatValidationService.getAllFileFormats().contains(extension)) {
+        throw new BadRequestException("Invalid file uploaded (unrecognized extension).");
+    }
+    if (!fileFormatValidationService.getValidFileFormatsForTenant(TenantContext.getTenantId()).contains(extension)) {
+        throw new NotAllowedException("Insufficient permissions for this file type.");
+    }
 }
+/**
+ * 1. Write the uploaded content to a temp ZIP file
+ * 2. Check the number of entries and reject if too big or if symlinks found
+ * 3. Unzip and process each file, while checking size and ratio.
+ */
 private FileUploadResult handleZip(String dossierId, byte[] fileContent, boolean keepManualRedactions, boolean disableAutomaticAnalysis) throws IOException {
-    File tempFile = FileUtils.createTempFile(UUID.randomUUID().toString(), ".zip");
-    try (var fileOutputStream = new FileOutputStream(tempFile)) {
-        IOUtils.write(fileContent, fileOutputStream);
+    File tempZip = FileUtils.createTempFile(UUID.randomUUID().toString(), ".zip");
+    try (FileOutputStream fos = new FileOutputStream(tempZip)) {
+        IOUtils.write(fileContent, fos);
     }
-    try {
-        checkForSymlinks(tempFile);
-        var zipData = unzip(tempFile, dossierId, keepManualRedactions, disableAutomaticAnalysis);
+    validateZipEntries(tempZip);
+    try {
+        ZipData zipData = processZipContents(tempZip, dossierId, keepManualRedactions, disableAutomaticAnalysis);
         if (zipData.csvBytes != null) {
             try {
-                var importResult = uploadService.importCsv(dossierId, zipData.csvBytes);
-                zipData.fileUploadResult.getProcessedAttributes().addAll(importResult.getProcessedAttributes());
-                zipData.fileUploadResult.getProcessedFileIds().addAll(importResult.getProcessedFileIds());
+                FileUploadResult csvResult = uploadService.importCsv(dossierId, zipData.csvBytes);
+                zipData.fileUploadResult.getProcessedAttributes().addAll(csvResult.getProcessedAttributes());
+                zipData.fileUploadResult.getProcessedFileIds().addAll(csvResult.getProcessedFileIds());
             } catch (Exception e) {
-                log.debug("CSV file inside ZIP failed", e);
-                // TODO return un-processed files to client
+                log.debug("CSV file inside ZIP failed to import", e);
             }
         } else if (zipData.fileUploadResult.getFileIds().isEmpty()) {
            if (zipData.containedUnpermittedFiles) {
-                throw new NotAllowedException("Zip file contains unpermitted files");
+                throw new NotAllowedException("Zip file contains unpermitted files.");
            } else {
-                throw new BadRequestException("Only unsupported files in zip file");
+                throw new BadRequestException("Only unsupported files in the ZIP.");
            }
         }
         return zipData.fileUploadResult;
     } finally {
-        boolean isDeleted = tempFile.delete();
-        if (!isDeleted) {
-            log.warn("tempFile could not be deleted");
+        if (!tempZip.delete()) {
+            log.warn("Could not delete temporary ZIP file: {}", tempZip);
         }
     }
 }
-private void checkForSymlinks(File tempFile) throws IOException {
-    try (var fis = new FileInputStream(tempFile); var zipFile = new ZipFile(fis.getChannel())) {
-        for (var entryEnum = zipFile.getEntries(); entryEnum.hasMoreElements(); ) {
-            var ze = entryEnum.nextElement();
+private void validateZipEntries(File tempZip) throws IOException {
+    try (FileInputStream fis = new FileInputStream(tempZip); ZipFile zipFile = new ZipFile(fis.getChannel())) {
+        int count = 0;
+        var entries = zipFile.getEntries();
+        while (entries.hasMoreElements()) {
+            ZipArchiveEntry ze = entries.nextElement();
             if (ze.isUnixSymlink()) {
-                throw new BadRequestException("ZIP-files with symlinks are not allowed");
+                throw new BadRequestException("ZIP-files with symlinks are not allowed.");
+            }
+            if (!ze.isDirectory() && !ze.getName().startsWith(".")) {
+                count++;
+                if (count > THRESHOLD_ENTRIES) {
+                    throw new BadRequestException("ZIP-Bomb detected: too many entries.");
+                }
             }
         }
     }
 }
-private ZipData unzip(File tempFile, String dossierId, boolean keepManualRedactions, boolean disableAutomaticAnalysis) throws IOException {
-    var zipData = new ZipData();
-    try (var fis = new FileInputStream(tempFile); var zipFile = new ZipFile(fis.getChannel())) {
-        for (var entryEnum = zipFile.getEntries(); entryEnum.hasMoreElements(); ) {
-            var ze = entryEnum.nextElement();
-            zipData.totalEntryArchive++;
-            if (!ze.isDirectory()) {
-                processFileZipEntry(ze, zipFile, dossierId, keepManualRedactions, zipData, disableAutomaticAnalysis);
+private ZipData processZipContents(File tempZip, String dossierId, boolean keepManualRedactions, boolean disableAutomaticAnalysis) throws IOException {
+    ZipData zipData = new ZipData();
+    try (FileInputStream fis = new FileInputStream(tempZip); ZipFile zipFile = new ZipFile(fis.getChannel())) {
+        var entries = zipFile.getEntries();
+        while (entries.hasMoreElements()) {
+            ZipArchiveEntry entry = entries.nextElement();
+            if (entry.isDirectory() || entry.getName().startsWith(".")) {
+                continue;
+            }
+            byte[] entryBytes = readEntryWithRatioCheck(entry, zipFile);
+            zipData.totalSizeArchive += entryBytes.length;
+            if (zipData.totalSizeArchive > THRESHOLD_SIZE) {
+                throw new BadRequestException("ZIP-Bomb detected (exceeds total size limit).");
+            }
+            String extension = getExtension(entry.getName());
+            if ("csv".equalsIgnoreCase(extension)) {
+                zipData.csvBytes = entryBytes;
+            } else {
+                handleRegularFile(dossierId, entryBytes, extension, extractFileName(entry.getName()), zipData, keepManualRedactions, disableAutomaticAnalysis);
             }
         }
     }
@@ -207,73 +232,70 @@
 }
-private void processFileZipEntry(ZipArchiveEntry ze, ZipFile zipFile, String dossierId, boolean keepManualRedactions, ZipData zipData, boolean disableAutomaticAnalysis) throws IOException {
-    var extension = getExtension(ze.getName());
-    final String fileName;
-    if (ze.getName().lastIndexOf("/") >= 0) {
-        fileName = ze.getName().substring(ze.getName().lastIndexOf("/") + 1);
-    } else {
-        fileName = ze.getName();
-    }
-    if (fileName.startsWith(".")) {
-        return;
-    }
-    var entryAsBytes = readCurrentZipEntry(ze, zipFile);
-    zipData.totalSizeArchive += entryAsBytes.length;
-    // 1. the uncompressed data size is too much for the application resource capacity
-    // 2. too many entries in the archive can lead to inode exhaustion of the file-system
-    if (zipData.totalSizeArchive > THRESHOLD_SIZE || zipData.totalEntryArchive > THRESHOLD_ENTRIES) {
-        throw new BadRequestException("ZIP-Bomb detected.");
-    }
-    if ("csv".equals(extension)) {
-        zipData.csvBytes = entryAsBytes;
-    } else if (fileFormatValidationService.getAllFileFormats().contains(extension)) {
-        if (!fileFormatValidationService.getValidFileFormatsForTenant(TenantContext.getTenantId()).contains(extension)) {
-            zipData.containedUnpermittedFiles = true;
-            return;
-        }
-        zipData.containedUnpermittedFiles = false;
-        try {
-            var result = uploadService.processSingleFile(dossierId, fileName, entryAsBytes, keepManualRedactions, disableAutomaticAnalysis);
-            zipData.fileUploadResult.getFileIds().addAll(result.getFileIds());
-        } catch (Exception e) {
-            log.debug("PDF File inside ZIP failed", e);
-            // TODO return un-processed files to client
-        }
-    }
-}
-private byte[] readCurrentZipEntry(ZipArchiveEntry ze, ZipFile zipFile) throws IOException {
-    var bos = new ByteArrayOutputStream();
-    try (var entryStream = zipFile.getInputStream(ze)) {
-        var buffer = new byte[2048];
-        var nBytes = 0;
-        int totalSizeEntry = 0;
-        while ((nBytes = entryStream.read(buffer)) > 0) {
-            bos.write(buffer, 0, nBytes);
-            totalSizeEntry += nBytes;
-            double compressionRatio = (float) totalSizeEntry / ze.getCompressedSize();
-            if (compressionRatio > THRESHOLD_RATIO) {
-                // ratio between compressed and uncompressed data is highly suspicious, looks like a Zip Bomb Attack
-                throw new BadRequestException("ZIP-Bomb detected.");
-            }
-        }
-    }
-    return bos.toByteArray();
+private byte[] readEntryWithRatioCheck(ZipArchiveEntry entry, ZipFile zipFile) throws IOException {
+    long compressedSize = entry.getCompressedSize() > 0 ? entry.getCompressedSize() : 1;
+    try (var is = zipFile.getInputStream(entry); var bos = new ByteArrayOutputStream()) {
+        byte[] buffer = new byte[4096];
+        int bytesRead;
+        int totalUncompressed = 0;
+        while ((bytesRead = is.read(buffer)) != -1) {
+            bos.write(buffer, 0, bytesRead);
+            totalUncompressed += bytesRead;
+            double ratio = (double) totalUncompressed / compressedSize;
+            if (ratio > THRESHOLD_RATIO) {
+                throw new BadRequestException("ZIP-Bomb detected (compression ratio too high).");
+            }
+        }
+        return bos.toByteArray();
+    }
+}
+private void handleRegularFile(String dossierId,
+                               byte[] fileBytes,
+                               String extension,
+                               String fileName,
+                               ZipData zipData,
+                               boolean keepManualRedactions,
+                               boolean disableAutomaticAnalysis) {
+    if (!fileFormatValidationService.getAllFileFormats().contains(extension)) {
+        zipData.containedUnpermittedFiles = false;
+        return;
+    }
+    if (!fileFormatValidationService.getValidFileFormatsForTenant(TenantContext.getTenantId()).contains(extension)) {
+        zipData.containedUnpermittedFiles = true;
+        return;
+    }
+    try {
+        FileUploadResult result = uploadService.processSingleFile(dossierId, fileName, fileBytes, keepManualRedactions, disableAutomaticAnalysis);
+        zipData.fileUploadResult.getFileIds().addAll(result.getFileIds());
+    } catch (Exception e) {
+        log.debug("Failed to process file '{}' in ZIP: {}", fileName, e.getMessage(), e);
+    }
+}
+private String extractFileName(String path) {
+    int idx = path.lastIndexOf('/');
+    return (idx >= 0) ? path.substring(idx + 1) : path;
+}
+private String getExtension(String fileName) {
+    int idx = fileName.lastIndexOf('.');
+    if (idx < 0) {
+        return "";
+    }
+    return fileName.substring(idx + 1).toLowerCase(Locale.ROOT);
 }
@@ -282,7 +304,6 @@
     byte[] csvBytes;
     int totalSizeArchive;
-    int totalEntryArchive;
     FileUploadResult fileUploadResult = new FileUploadResult();
     boolean containedUnpermittedFiles;
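For intuition about the THRESHOLD_RATIO check above, the following standalone sketch (not part of this merge request; plain JDK java.util.zip only, names are illustrative) shows how strongly a trivially compressible entry exceeds a ratio of 10. It compares the uncompressed payload against the whole archive size rather than the per-entry compressed size used in readEntryWithRatioCheck, which is close enough for a demonstration:

    import java.io.ByteArrayOutputStream;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipOutputStream;

    public class RatioDemo {

        public static void main(String[] args) throws Exception {
            byte[] payload = new byte[1_000_000]; // 1 MB of zeros compresses to a few KB
            ByteArrayOutputStream zipBytes = new ByteArrayOutputStream();
            try (ZipOutputStream zos = new ZipOutputStream(zipBytes)) {
                zos.putNextEntry(new ZipEntry("zeros.bin"));
                zos.write(payload);
                zos.closeEntry();
            }
            double ratio = (double) payload.length / zipBytes.size();
            System.out.printf("uncompressed/compressed ratio ~ %.0f (threshold: 10)%n", ratio);
        }
    }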

View File

@@ -38,6 +38,7 @@ public interface ReanalysisResource {
     String EXCLUDED_STATUS_PARAM = "excluded";
     String FORCE_PARAM = "force";
+    String ALL_PAGES = "allPages";
     @PostMapping(value = REANALYSIS_REST_PATH + DOSSIER_ID_PATH_VARIABLE)
@@ -73,7 +74,8 @@
     @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "OK"), @ApiResponse(responseCode = "409", description = "Conflict"), @ApiResponse(responseCode = "404", description = "Not found"), @ApiResponse(responseCode = "403", description = "Forbidden"), @ApiResponse(responseCode = "400", description = "Cannot OCR approved file")})
     void ocrFile(@PathVariable(DOSSIER_ID) String dossierId,
                  @PathVariable(FILE_ID) String fileId,
-                 @RequestParam(value = FORCE_PARAM, required = false, defaultValue = FALSE) boolean force);
+                 @RequestParam(value = FORCE_PARAM, required = false, defaultValue = FALSE) boolean force,
+                 @RequestParam(value = ALL_PAGES, required = false, defaultValue = FALSE) boolean allPages);
     @Operation(summary = "Ocr and reanalyze multiple files for a dossier", description = "None")

View File

@@ -57,7 +57,7 @@ public class AdminInterfaceController {
     fileStatusService.validateFileIsNotDeletedAndNotApproved(fileId);
-    fileStatusService.setStatusOcrQueued(dossierId, fileId);
+    fileStatusService.setStatusOcrQueued(dossierId, fileId, false);
 }
@@ -91,7 +91,7 @@
     if (!dryRun) {
         fileStatusService.validateFileIsNotDeletedAndNotApproved(file.getId());
-        fileStatusService.setStatusOcrQueued(file.getDossierId(), file.getId());
+        fileStatusService.setStatusOcrQueued(file.getDossierId(), file.getId(), false);
     }
 }

View File

@@ -122,7 +122,7 @@ public class FileStatusProcessingUpdateService {
     } else {
         fileStatusService.setStatusOcrProcessing(fileId,
             fileEntity.getProcessingStatus().equals(ProcessingStatus.OCR_PROCESSING) ? fileEntity.getProcessingErrorCounter() + 1 : 0);
-        fileStatusService.addToOcrQueue(dossierId, fileId, 2);
+        fileStatusService.addToOcrQueue(dossierId, fileId, 2, false);
     }
 }

View File

@@ -316,7 +316,7 @@
     }
     log.info("Add file: {} from dossier {} to OCR queue", fileId, dossierId);
-    setStatusOcrQueued(dossierId, fileId);
+    setStatusOcrQueued(dossierId, fileId, false);
     sendReadOnlyAnalysisEvent(dossierId, fileId, fileEntity);
     return;
 }
@@ -352,7 +352,6 @@
     return;
 }
 boolean forceAnalysis = false;
 if (settings.isLlmNerServiceEnabled()) {
     boolean objectExists = fileManagementStorageService.objectExists(dossierId, fileId, FileType.LLM_NER_ENTITIES);
@@ -386,7 +385,7 @@
 boolean reanalyse = fileModel.isReanalysisRequired() || analysisType.equals(AnalysisType.MANUAL_REDACTION_REANALYZE);
 MessageType messageType = calculateMessageType(reanalyse, fileModel.getProcessingStatus(), fileModel);
-if(analysisType == AnalysisType.FORCE_ANALYSE || forceAnalysis) {
+if (analysisType == AnalysisType.FORCE_ANALYSE || forceAnalysis) {
     messageType = MessageType.ANALYSE;
 }
@@ -567,7 +566,7 @@
 }
-public void setStatusOcrQueued(String dossierId, String fileId) {
+public void setStatusOcrQueued(String dossierId, String fileId, boolean allPages) {
     FileEntity fileStatus = fileStatusPersistenceService.getStatus(fileId);
@@ -579,7 +578,7 @@
     updateOCRStartTime(fileId);
     fileStatusPersistenceService.updateProcessingStatus(fileId, ProcessingStatus.OCR_PROCESSING_QUEUED);
     websocketService.sendAnalysisEvent(dossierId, fileId, AnalyseStatus.OCR_PROCESSING, fileStatus.getNumberOfAnalyses() + 1);
-    addToOcrQueue(dossierId, fileId, 2);
+    addToOcrQueue(dossierId, fileId, 2, allPages);
 }
@@ -760,13 +759,16 @@
 }
-public void addToOcrQueue(String dossierId, String fileId, int priority) {
+public void addToOcrQueue(String dossierId, String fileId, int priority, boolean allPages) {
     var removeWatermark = dossierTemplatePersistenceService.getDossierTemplate(dossierPersistenceService.getDossierTemplateId(dossierId)).isRemoveWatermark();
     Set<AzureOcrFeature> features = new HashSet<>();
     if (removeWatermark) {
         features.add(AzureOcrFeature.REMOVE_WATERMARKS);
     }
+    if (allPages) {
+        features.add(AzureOcrFeature.ALL_PAGES);
+    }
     if (currentApplicationTypeProvider.isDocuMine()) {
         features.add(AzureOcrFeature.ROTATION_CORRECTION);
         features.add(AzureOcrFeature.FONT_STYLE_DETECTION);
@@ -820,7 +822,7 @@
     fileStatusPersistenceService.updateWorkflowStatus(fileId, newWorkflowStatus, false);
-    if(oldWorkflowStatus == WorkflowStatus.APPROVED && newWorkflowStatus != WorkflowStatus.APPROVED) {
+    if (oldWorkflowStatus == WorkflowStatus.APPROVED && newWorkflowStatus != WorkflowStatus.APPROVED) {
         fileStatusPersistenceService.clearLastDownload(fileId);
     }
 }
@@ -977,7 +979,7 @@
     if (runOcr) {
         fileStatusPersistenceService.resetOcrStartAndEndDate(fileId);
-        setStatusOcrQueued(dossierId, fileId);
+        setStatusOcrQueued(dossierId, fileId, false);
         return;
     }
@@ -1064,6 +1066,7 @@
     addToAnalysisQueue(dossierId, fileId, priority, Sets.newHashSet(), AnalysisType.DEFAULT);
 }
 @Transactional
 public void setStatusForceAnalyse(String dossierId, String fileId, boolean priority) {

View File

@@ -10,7 +10,6 @@ import com.iqser.red.service.persistence.management.v1.processor.exception.Confl
 import com.iqser.red.service.persistence.management.v1.processor.exception.NotFoundException;
 import com.iqser.red.service.persistence.management.v1.processor.service.persistence.DossierPersistenceService;
 import com.iqser.red.service.persistence.service.v1.api.shared.model.ReanalysisSettings;
-import com.iqser.red.service.persistence.service.v1.api.shared.model.analysislog.entitylog.imported.ImportedRedactions;
 import com.iqser.red.service.persistence.service.v1.api.shared.model.annotations.DeleteImportedRedactionsRequest;
 import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
 import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
@@ -178,11 +177,11 @@
     relevantFiles.stream()
         .filter(fileStatus -> fileStatus.getOcrStartTime() == null)
         .filter(fileStatus -> fileStatus.getProcessingStatus().equals(ProcessingStatus.PROCESSED))
-        .forEach(fileStatus -> fileStatusService.setStatusOcrQueued(dossierId, fileStatus.getId()));
+        .forEach(fileStatus -> fileStatusService.setStatusOcrQueued(dossierId, fileStatus.getId(), false));
 }
-public void ocrFile(String dossierId, String fileId, boolean force) {
+public void ocrFile(String dossierId, String fileId, boolean force, boolean allPages) {
     dossierPersistenceService.getAndValidateDossier(dossierId);
     FileModel dossierFile = fileStatusService.getStatus(fileId);
@@ -202,18 +201,18 @@
     }
     if (force) {
-        fileStatusService.setStatusOcrQueued(dossierId, fileId);
+        fileStatusService.setStatusOcrQueued(dossierId, fileId, allPages);
     } else {
         if (dossierFile.getOcrStartTime() != null) {
             throw new ConflictException("File already has been OCR processed");
         }
-        ocrFiles(dossierId, Sets.newHashSet(fileId));
+        ocrFiles(dossierId, Sets.newHashSet(fileId), allPages);
     }
 }
-public void ocrFiles(String dossierId, Set<String> fileIds) {
+public void ocrFiles(String dossierId, Set<String> fileIds, boolean allPages) {
     var relevantFiles = getRelevantFiles(dossierId, fileIds);
@@ -225,7 +224,7 @@
     relevantFiles.stream()
         .filter(fileStatus -> fileStatus.getOcrStartTime() == null)
-        .forEach(fileStatus -> fileStatusService.setStatusOcrQueued(dossierId, fileStatus.getId()));
+        .forEach(fileStatus -> fileStatusService.setStatusOcrQueued(dossierId, fileStatus.getId(), allPages));
 }

View File

@@ -245,6 +245,8 @@ databaseChangeLog:
       file: db/changelog/tenant/149-add-indexes-across-tables-for-performance.yaml
   - include:
       file: db/changelog/tenant/150-add-component-mapping-indexes.yaml
+  - include:
+      file: db/changelog/tenant/151-drop-saas-migration-table.changelog.yaml
   - include:
       file: db/changelog/tenant/152-add-ai-fields-to-entity.yaml
   - include:

View File

@@ -142,7 +142,8 @@ fforesight:
   ignored-endpoints: [ '/redaction-gateway-v1', '/actuator/health/**',"/redaction-gateway-v1/websocket","/redaction-gateway-v1/websocket/**", '/redaction-gateway-v1/async/download/with-ott/**',
                        '/internal-api/**', '/redaction-gateway-v1/docs/swagger-ui',
                        '/redaction-gateway-v1/docs/**','/redaction-gateway-v1/docs',
-                       '/api', '/api/','/api/docs/**','/api/docs','/api/docs/swagger-ui' ]
+                       '/api', '/api/','/api/docs/**','/api/docs','/api/docs/swagger-ui',
+                       '/actuator/prometheus']
   enabled: true
 springdoc:
   base-path: '/api'

View File

@@ -1,13 +1,18 @@
 package com.iqser.red.service.peristence.v1.server.integration.tests;
+import static org.junit.Assert.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyBoolean;
 import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.when;
+import java.util.ArrayList;
 import java.util.List;
+import java.util.Set;
 import org.apache.commons.lang3.StringUtils;
 import org.junit.jupiter.api.Test;
@@ -18,8 +23,8 @@ import com.iqser.red.service.peristence.v1.server.integration.client.FileClient;
 import com.iqser.red.service.peristence.v1.server.integration.service.DossierTemplateTesterAndProvider;
 import com.iqser.red.service.peristence.v1.server.integration.service.DossierTesterAndProvider;
 import com.iqser.red.service.peristence.v1.server.integration.service.FileTesterAndProvider;
-import com.iqser.red.service.peristence.v1.server.integration.service.TypeProvider;
 import com.iqser.red.service.peristence.v1.server.integration.utils.AbstractPersistenceServerServiceTest;
+import com.iqser.red.service.persistence.management.v1.processor.acl.custom.dossier.DossierACLService;
 import com.iqser.red.service.persistence.management.v1.processor.entity.configuration.LegalBasisEntity;
 import com.iqser.red.service.persistence.management.v1.processor.service.persistence.LegalBasisMappingPersistenceService;
 import com.iqser.red.service.persistence.service.v1.api.shared.model.DossierTemplateModel;
@@ -33,6 +38,8 @@ import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemp
 import com.iqser.red.service.persistence.service.v1.api.shared.model.warning.ApproveResponse;
 import com.iqser.red.service.persistence.service.v1.api.shared.model.warning.WarningType;
+import feign.FeignException;
 public class ApprovalTest extends AbstractPersistenceServerServiceTest {
     @Autowired
@@ -44,15 +51,15 @@
     @Autowired
     private DossierTesterAndProvider dossierTesterAndProvider;
-    @Autowired
-    private TypeProvider typeProvider;
     @Autowired
     private FileClient fileClient;
     @SpyBean
     private LegalBasisMappingPersistenceService legalBasisMappingPersistenceService;
+    @SpyBean
+    private DossierACLService dossierACLService;
     @Test
     public void testApprovalNoWarnings() {
@@ -181,4 +188,54 @@
         assertTrue(approveResponse.getFileWarnings().isEmpty());
     }
+    @Test
+    void testApprovalWhenDossierHasNoOwner() {
+        DossierTemplateModel dossierTemplateModel = dossierTemplateTesterAndProvider.provideTestTemplate();
+        Dossier dossier = dossierTesterAndProvider.provideTestDossier(dossierTemplateModel);
+        FileStatus file = fileTesterAndProvider.testAndProvideFile(dossier, "some-file");
+        fileTesterAndProvider.markFileAsProcessed(dossier.getId(), file.getFileId());
+        EntityLog entityLog = new EntityLog();
+        when(entityLogService.getEntityLog(anyString(), anyString(), anyBoolean())).thenReturn(entityLog);
+        List<com.iqser.red.service.persistence.management.v1.processor.service.users.model.User> allUsers = new ArrayList<>();
+        allUsers.add(com.iqser.red.service.persistence.management.v1.processor.service.users.model.User.builder()
+            .userId("manageradmin1@test.com")
+            .email("manageradmin1@test.com")
+            .isActive(true)
+            .roles(Set.of(getAllRoles()))
+            .build());
+        allUsers.add(com.iqser.red.service.persistence.management.v1.processor.service.users.model.User.builder()
+            .userId("manageradmin2@test.com")
+            .email("manageradmin2@test.com")
+            .isActive(true)
+            .roles(Set.of("RED_USER"))
+            .build());
+        when(usersClient.getAllUsers(false)).thenReturn(allUsers);
+        when(usersClient.getAllUsers(true)).thenReturn(allUsers);
+        doAnswer(invocation -> {
+            Dossier arg = invocation.getArgument(0);
+            if (dossier.getId().equals(arg.getId())) {
+                Dossier emptyDossier = new Dossier();
+                emptyDossier.setId(arg.getId());
+                return emptyDossier;
+            } else {
+                return invocation.callRealMethod();
+            }
+        }).when(dossierACLService).enhanceDossierWithACLData(any(Dossier.class));
+        FeignException ex = assertThrows(FeignException.Conflict.class, () -> {
+            fileClient.setStatusApproved(dossier.getId(), file.getFileId(), false);
+        });
+        assertTrue(ex.getMessage().contains("Dossier has no owner!"));
+    }
 }

View File

@@ -56,7 +56,7 @@ public class ReanalysisTest extends AbstractPersistenceServerServiceTest {
     assertThat(loadedFile.getProcessingStatus()).isEqualTo(ProcessingStatus.OCR_PROCESSING_QUEUED);
     resetProcessingStatus(file);
-    reanalysisClient.ocrFile(dossier.getId(), file.getId(), true);
+    reanalysisClient.ocrFile(dossier.getId(), file.getId(), true, false);
     loadedFile = fileClient.getFileStatus(dossier.getId(), file.getId());
     assertThat(loadedFile.getProcessingStatus()).isEqualTo(ProcessingStatus.OCR_PROCESSING_QUEUED);
     resetProcessingStatus(file);

View File

@@ -0,0 +1,79 @@
+package com.iqser.red.service.peristence.v1.server.integration.tests;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.apache.commons.io.IOUtils;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.mock.web.MockMultipartFile;
+import org.springframework.test.context.TestPropertySource;
+import com.iqser.red.service.peristence.v1.server.integration.client.FileClient;
+import com.iqser.red.service.peristence.v1.server.integration.client.UploadClient;
+import com.iqser.red.service.peristence.v1.server.integration.service.DossierTesterAndProvider;
+import com.iqser.red.service.peristence.v1.server.integration.utils.AbstractPersistenceServerServiceTest;
+import feign.FeignException;
+import lombok.SneakyThrows;
+@TestPropertySource(properties = {
+        "spring.servlet.multipart.max-file-size=50MB",
+        "spring.servlet.multipart.max-request-size=50MB"
+})
+public class ZipFileUploadTest extends AbstractPersistenceServerServiceTest {
+    @Autowired
+    private DossierTesterAndProvider dossierTesterAndProvider;
+    @Autowired
+    private UploadClient uploadClient;
+    @Autowired
+    private FileClient fileClient;
+    @SneakyThrows
+    @Test
+    void testZipUploadWithEntryCountCheck() {
+        var dossier = dossierTesterAndProvider.provideTestDossier();
+        var smallZipResource = new ClassPathResource("files/zip/ArchiveWithManyFiles.zip");
+        var smallZip = new MockMultipartFile(
+                "ArchiveWithManyFiles.zip",
+                "ArchiveWithManyFiles.zip",
+                "application/zip",
+                IOUtils.toByteArray(smallZipResource.getInputStream())
+        );
+        var uploadResult = uploadClient.upload(smallZip, dossier.getId(), false, false);
+        assertNotNull(uploadResult, "Upload result for small zip should not be null.");
+        assertEquals(9993, uploadResult.getFileIds().size());
+        var largeZipResource = new ClassPathResource("files/zip/ArchiveWithTooManyFiles.zip");
+        var largeZip = new MockMultipartFile(
+                "ArchiveWithTooManyFiles.zip",
+                "ArchiveWithTooManyFiles.zip",
+                "application/zip",
+                IOUtils.toByteArray(largeZipResource.getInputStream())
+        );
+        FeignException ex = assertThrows(
+                FeignException.class,
+                () -> uploadClient.upload(largeZip, dossier.getId(), false, false),
+                "Uploading a zip with more than 10000 entries should throw a FeignException."
+        );
+        assertEquals(400, ex.status(), "Expected HTTP 400 (Bad Request) for a ZIP bomb scenario");
+        assertTrue(ex.getMessage().contains("ZIP-Bomb detected"),
+                "Exception message should contain 'ZIP-Bomb detected' or similar.");
+        var filesInDossier = fileClient.getDossierStatus(dossier.getId());
+        assertEquals(9993, filesInDossier.size());
+    }
+}
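The two ZIP fixtures referenced above are not shown in this diff. A throwaway generator along these lines could produce comparable archives (a sketch only; file names, entry counts, and entry contents are assumptions — the real fixtures presumably contain files of a format the test tenant is allowed to upload):

    import java.io.FileOutputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipOutputStream;

    public class GenerateZipFixtures {

        public static void main(String[] args) throws Exception {
            writeZip("ArchiveWithManyFiles.zip", 9993);      // stays under the 10000-entry threshold
            writeZip("ArchiveWithTooManyFiles.zip", 10001);  // trips the entry-count check
        }

        private static void writeZip(String name, int entryCount) throws Exception {
            try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(name))) {
                for (int i = 0; i < entryCount; i++) {
                    zos.putNextEntry(new ZipEntry("file-" + i + ".txt"));
                    zos.write(("placeholder content " + i).getBytes(StandardCharsets.UTF_8));
                    zos.closeEntry();
                }
            }
        }
    }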

View File

@@ -261,7 +261,7 @@ public abstract class AbstractPersistenceServerServiceTest {
     @MockBean
     protected TenantsClient tenantsClient;
     @MockBean
-    private UsersClient usersClient;
+    protected UsersClient usersClient;
     @Autowired
     protected EncryptionDecryptionService encryptionDecryptionService;
     @Autowired
@@ -286,7 +286,7 @@
     private CurrentApplicationTypeProvider currentApplicationTypeProvider;
-    private static String[] getAllRoles() {
+    protected static String[] getAllRoles() {
         var allRoles = ApplicationRoles.ROLE_DATA.entrySet()
             .stream()